code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
# Trivial smoke-test script: emit a fixed greeting.
message = "Hello from Python"
print(message)
|
cemc/safeexec
|
tests/hello.py
|
Python
|
agpl-3.0
| 27
|
"""Execute Ansible sanity tests."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import abc
import glob
import json
import os
import re
import collections
from .. import types as t
from ..util import (
ApplicationError,
SubprocessError,
display,
import_plugins,
load_plugins,
parse_to_list_of_dict,
ABC,
ANSIBLE_TEST_DATA_ROOT,
is_binary_file,
read_lines_without_comments,
get_available_python_versions,
find_python,
is_subdir,
paths_to_dirs,
get_ansible_version,
)
from ..util_common import (
run_command,
handle_layout_messages,
)
from ..ansible_util import (
ansible_environment,
check_pyyaml,
)
from ..target import (
walk_internal_targets,
walk_sanity_targets,
TestTarget,
)
from ..executor import (
get_changes_filter,
AllTargetsSkipped,
Delegate,
install_command_requirements,
SUPPORTED_PYTHON_VERSIONS,
)
from ..config import (
SanityConfig,
)
from ..test import (
TestSuccess,
TestFailure,
TestSkipped,
TestMessage,
calculate_best_confidence,
)
from ..data import (
data_context,
)
COMMAND = 'sanity'
SANITY_ROOT = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'sanity')
def command_sanity(args):
    """Run the sanity command: select tests, resolve targets per test, run each test and report results.

    :type args: SanityConfig
    """
    handle_layout_messages(data_context().content.sanity_messages)
    changes = get_changes_filter(args)
    require = args.require + changes
    targets = SanityTargets.create(args.include, args.exclude, require)
    if not targets.include:
        raise AllTargetsSkipped()
    if args.delegate:
        raise Delegate(require=changes, exclude=args.exclude)
    tests = sanity_get_tests()
    if args.test:
        # explicit test selection overrides the enabled/disabled filtering below
        tests = [target for target in tests if target.name in args.test]
    else:
        disabled = [target.name for target in tests if not target.enabled and not args.allow_disabled]
        tests = [target for target in tests if target.enabled or args.allow_disabled]
        if disabled:
            display.warning('Skipping tests disabled by default without --allow-disabled: %s' % ', '.join(sorted(disabled)))
    if args.skip_test:
        tests = [target for target in tests if target.name not in args.skip_test]
    total = 0
    failed = []
    # track which Python versions have already had requirements installed, so it happens at most once per version
    requirements_installed = set()  # type: t.Set[str]
    for test in tests:
        if args.list_tests:
            display.info(test.name)
            continue
        available_versions = sorted(get_available_python_versions(SUPPORTED_PYTHON_VERSIONS).keys())
        if args.python:
            # specific version selected
            versions = (args.python,)
        elif isinstance(test, SanityMultipleVersion):
            # try all supported versions for multi-version tests when a specific version has not been selected
            versions = test.supported_python_versions
        elif not test.supported_python_versions or args.python_version in test.supported_python_versions:
            # the test works with any version or the version we're already running
            versions = (args.python_version,)
        else:
            # available versions supported by the test
            versions = tuple(sorted(set(available_versions) & set(test.supported_python_versions)))
            # use the lowest available version supported by the test or the current version as a fallback (which will be skipped)
            versions = versions[:1] or (args.python_version,)
        for version in versions:
            if isinstance(test, SanityMultipleVersion):
                # multi-version tests record the version on skip results so each version is reported separately
                skip_version = version
            else:
                skip_version = None
            options = ''
            if test.supported_python_versions and version not in test.supported_python_versions:
                display.warning("Skipping sanity test '%s' on unsupported Python %s." % (test.name, version))
                result = SanitySkipped(test.name, skip_version)
            elif not args.python and version not in available_versions:
                display.warning("Skipping sanity test '%s' on Python %s due to missing interpreter." % (test.name, version))
                result = SanitySkipped(test.name, skip_version)
            else:
                check_pyyaml(args, version)
                if test.supported_python_versions:
                    display.info("Running sanity test '%s' with Python %s" % (test.name, version))
                else:
                    display.info("Running sanity test '%s'" % test.name)
                # load the ignore/skip processor appropriate for the concrete test type
                if isinstance(test, SanityCodeSmellTest):
                    settings = test.load_processor(args)
                elif isinstance(test, SanityMultipleVersion):
                    settings = test.load_processor(args, version)
                elif isinstance(test, SanitySingleVersion):
                    settings = test.load_processor(args)
                elif isinstance(test, SanityVersionNeutral):
                    settings = test.load_processor(args)
                else:
                    raise Exception('Unsupported test type: %s' % type(test))
                all_targets = targets.targets
                if test.all_targets:
                    usable_targets = targets.targets
                elif test.no_targets:
                    usable_targets = tuple()
                else:
                    usable_targets = targets.include
                # apply per-test target filtering (symlinks, directories) then skip entries
                all_targets = SanityTargets.filter_and_inject_targets(test, all_targets)
                usable_targets = SanityTargets.filter_and_inject_targets(test, usable_targets)
                usable_targets = sorted(test.filter_targets(list(usable_targets)))
                usable_targets = settings.filter_skipped_targets(usable_targets)
                sanity_targets = SanityTargets(tuple(all_targets), tuple(usable_targets))
                if usable_targets or test.no_targets:
                    if version not in requirements_installed:
                        requirements_installed.add(version)
                        install_command_requirements(args, version)
                    if isinstance(test, SanityCodeSmellTest):
                        result = test.test(args, sanity_targets, version)
                    elif isinstance(test, SanityMultipleVersion):
                        result = test.test(args, sanity_targets, version)
                        # record the version so the failure summary tells the user how to re-run it
                        options = ' --python %s' % version
                    elif isinstance(test, SanitySingleVersion):
                        result = test.test(args, sanity_targets, version)
                    elif isinstance(test, SanityVersionNeutral):
                        result = test.test(args, sanity_targets)
                    else:
                        raise Exception('Unsupported test type: %s' % type(test))
                else:
                    result = SanitySkipped(test.name, skip_version)
            result.write(args)
            total += 1
            if isinstance(result, SanityFailure):
                failed.append(result.test + options)
    if failed:
        message = 'The %d sanity test(s) listed below (out of %d) failed. See error output above for details.\n%s' % (
            len(failed), total, '\n'.join(failed))
        if args.failure_ok:
            display.error(message)
        else:
            raise ApplicationError(message)
def collect_code_smell_tests():  # type: () -> t.Tuple[SanityFunc, ...]
    """Discover executable code-smell sanity scripts and wrap each in a SanityCodeSmellTest."""
    script_paths = list(glob.glob(os.path.join(SANITY_ROOT, 'code-smell', '*.py')))

    if data_context().content.is_ansible:
        # Ansible itself ships extra code-smell tests; honor its optional skip list.
        smell_root = os.path.join(data_context().content.root, 'test', 'sanity', 'code-smell')
        skipped = read_lines_without_comments(os.path.join(smell_root, 'skip.txt'), remove_blank_lines=True, optional=True)
        for candidate in glob.glob(os.path.join(smell_root, '*.py')):
            if os.path.basename(candidate) not in skipped:
                script_paths.append(candidate)

    # Only regular files with the executable bit set are usable as tests.
    runnable = sorted(path for path in script_paths if os.access(path, os.X_OK) and os.path.isfile(path))
    return tuple(SanityCodeSmellTest(path) for path in runnable)
def sanity_get_tests():
    """Return the registered sanity tests (populated by sanity_init).

    :rtype: tuple[SanityFunc]
    """
    return SANITY_TESTS
class SanityIgnoreParser:
    """Parser for the consolidated sanity test ignore file."""
    NO_CODE = '_'  # placeholder error code for tests which do not use error codes

    def __init__(self, args):  # type: (SanityConfig) -> None
        """Load and validate the ignore file, recording ignore/skip entries and any errors found."""
        if data_context().content.collection:
            # collections use a versioned ignore file matching the Ansible version under test
            ansible_version = '%s.%s' % tuple(get_ansible_version().split('.')[:2])
            ansible_label = 'Ansible %s' % ansible_version
            file_name = 'ignore-%s.txt' % ansible_version
        else:
            ansible_label = 'Ansible'
            file_name = 'ignore.txt'
        self.args = args
        self.relative_path = os.path.join(data_context().content.sanity_path, file_name)
        self.path = os.path.join(data_context().content.root, self.relative_path)
        # ignores: test name -> path -> error code -> line number
        self.ignores = collections.defaultdict(lambda: collections.defaultdict(dict))  # type: t.Dict[str, t.Dict[str, t.Dict[str, int]]]
        # skips: test name -> path -> line number
        self.skips = collections.defaultdict(lambda: collections.defaultdict(int))  # type: t.Dict[str, t.Dict[str, int]]
        # parse errors: (line number, column, message)
        self.parse_errors = []  # type: t.List[t.Tuple[int, int, str]]
        # file not found errors: (line number, path)
        self.file_not_found_errors = []  # type: t.List[t.Tuple[int, str]]
        lines = read_lines_without_comments(self.path, optional=True)
        targets = SanityTargets.get_targets()
        paths = set(target.path for target in targets)
        tests_by_name = {}  # type: t.Dict[str, SanityTest]
        versioned_test_names = set()  # type: t.Set[str]
        unversioned_test_names = {}  # type: t.Dict[str, str]
        directories = paths_to_dirs(list(paths))
        paths_by_test = {}  # type: t.Dict[str, t.Set[str]]
        display.info('Read %d sanity test ignore line(s) for %s from: %s' % (len(lines), ansible_label, self.relative_path), verbosity=1)
        for test in sanity_get_tests():
            test_targets = SanityTargets.filter_and_inject_targets(test, targets)
            paths_by_test[test.name] = set(target.path for target in test.filter_targets(test_targets))
            if isinstance(test, SanityMultipleVersion):
                # multi-version tests are referenced in the ignore file as "name-python_version"
                versioned_test_names.add(test.name)
                tests_by_name.update(dict(('%s-%s' % (test.name, python_version), test) for python_version in test.supported_python_versions))
            else:
                # remember versioned spellings of unversioned tests so they can be reported as parse errors
                unversioned_test_names.update(dict(('%s-%s' % (test.name, python_version), test.name) for python_version in SUPPORTED_PYTHON_VERSIONS))
                tests_by_name[test.name] = test
        for line_no, line in enumerate(lines, start=1):
            if not line:
                self.parse_errors.append((line_no, 1, "Line cannot be empty or contain only a comment"))
                continue
            parts = line.split(' ')
            path = parts[0]
            codes = parts[1:]
            if not path:
                self.parse_errors.append((line_no, 1, "Line cannot start with a space"))
                continue
            if path.endswith(os.path.sep):
                # a trailing separator marks a directory entry
                if path not in directories:
                    self.file_not_found_errors.append((line_no, path))
                    continue
            else:
                if path not in paths:
                    self.file_not_found_errors.append((line_no, path))
                    continue
            if not codes:
                self.parse_errors.append((line_no, len(path), "Error code required after path"))
                continue
            code = codes[0]
            if not code:
                self.parse_errors.append((line_no, len(path) + 1, "Error code after path cannot be empty"))
                continue
            if len(codes) > 1:
                self.parse_errors.append((line_no, len(path) + len(code) + 2, "Error code cannot contain spaces"))
                continue
            # a "!" suffix carries a command (such as "!skip"); a ":" suffix carries an error code
            parts = code.split('!')
            code = parts[0]
            commands = parts[1:]
            parts = code.split(':')
            test_name = parts[0]
            error_codes = parts[1:]
            test = tests_by_name.get(test_name)
            if not test:
                unversioned_name = unversioned_test_names.get(test_name)
                if unversioned_name:
                    self.parse_errors.append((line_no, len(path) + len(unversioned_name) + 2, "Sanity test '%s' cannot use a Python version like '%s'" % (
                        unversioned_name, test_name)))
                elif test_name in versioned_test_names:
                    self.parse_errors.append((line_no, len(path) + len(test_name) + 1, "Sanity test '%s' requires a Python version like '%s-%s'" % (
                        test_name, test_name, args.python_version)))
                else:
                    self.parse_errors.append((line_no, len(path) + 2, "Sanity test '%s' does not exist" % test_name))
                continue
            if path.endswith(os.path.sep) and not test.include_directories:
                self.parse_errors.append((line_no, 1, "Sanity test '%s' does not support directory paths" % test_name))
                continue
            if path not in paths_by_test[test.name] and not test.no_targets:
                self.parse_errors.append((line_no, 1, "Sanity test '%s' does not test path '%s'" % (test_name, path)))
                continue
            if commands and error_codes:
                self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Error code cannot contain both '!' and ':' characters"))
                continue
            if commands:
                command = commands[0]
                if len(commands) > 1:
                    self.parse_errors.append((line_no, len(path) + len(test_name) + len(command) + 3, "Error code cannot contain multiple '!' characters"))
                    continue
                if command == 'skip':
                    if not test.can_skip:
                        self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Sanity test '%s' cannot be skipped" % test_name))
                        continue
                    existing_line_no = self.skips.get(test_name, {}).get(path)
                    if existing_line_no:
                        self.parse_errors.append((line_no, 1, "Duplicate '%s' skip for path '%s' first found on line %d" % (test_name, path, existing_line_no)))
                        continue
                    self.skips[test_name][path] = line_no
                    continue
                # "skip" is the only recognized command
                self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Command '!%s' not recognized" % command))
                continue
            if not test.can_ignore:
                self.parse_errors.append((line_no, len(path) + 1, "Sanity test '%s' cannot be ignored" % test_name))
                continue
            if test.error_code:
                if not error_codes:
                    self.parse_errors.append((line_no, len(path) + len(test_name) + 1, "Sanity test '%s' requires an error code" % test_name))
                    continue
                error_code = error_codes[0]
                if len(error_codes) > 1:
                    self.parse_errors.append((line_no, len(path) + len(test_name) + len(error_code) + 3, "Error code cannot contain multiple ':' characters"))
                    continue
            else:
                if error_codes:
                    self.parse_errors.append((line_no, len(path) + len(test_name) + 2, "Sanity test '%s' does not support error codes" % test_name))
                    continue
                error_code = self.NO_CODE
            existing = self.ignores.get(test_name, {}).get(path, {}).get(error_code)
            if existing:
                if test.error_code:
                    self.parse_errors.append((line_no, 1, "Duplicate '%s' ignore for error code '%s' for path '%s' first found on line %d" % (
                        test_name, error_code, path, existing)))
                else:
                    self.parse_errors.append((line_no, 1, "Duplicate '%s' ignore for path '%s' first found on line %d" % (
                        test_name, path, existing)))
                continue
            self.ignores[test_name][path][error_code] = line_no

    @staticmethod
    def load(args):  # type: (SanityConfig) -> SanityIgnoreParser
        """Return the current SanityIgnore instance, initializing it if needed."""
        try:
            return SanityIgnoreParser.instance
        except AttributeError:
            pass
        # cache the parsed result on the class so the file is parsed at most once per process
        SanityIgnoreParser.instance = SanityIgnoreParser(args)
        return SanityIgnoreParser.instance
class SanityIgnoreProcessor:
    """Processor for sanity test ignores for a single run of one sanity test."""
    def __init__(self,
                 args,  # type: SanityConfig
                 test,  # type: SanityTest
                 python_version,  # type: t.Optional[str]
                 ):  # type: (...) -> None
        name = test.name
        code = test.error_code
        if python_version:
            # multi-version tests store ignore/skip entries under "name-python_version"
            full_name = '%s-%s' % (name, python_version)
        else:
            full_name = name
        self.args = args
        self.test = test
        self.code = code
        self.parser = SanityIgnoreParser.load(args)
        self.ignore_entries = self.parser.ignores.get(full_name, {})
        self.skip_entries = self.parser.skips.get(full_name, {})
        # line numbers of ignore entries which matched a reported message; used to flag unused entries later
        self.used_line_numbers = set()  # type: t.Set[int]

    def filter_skipped_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
        """Return the given targets, with any skipped paths filtered out."""
        return sorted(target for target in targets if target.path not in self.skip_entries)

    def process_errors(self, errors, paths):  # type: (t.List[SanityMessage], t.List[str]) -> t.List[SanityMessage]
        """Return the given errors filtered for ignores and with any settings related errors included."""
        errors = self.filter_messages(errors)
        errors.extend(self.get_errors(paths))
        errors = sorted(set(errors))
        return errors

    def filter_messages(self, messages):  # type: (t.List[SanityMessage]) -> t.List[SanityMessage]
        """Return a filtered list of the given messages using the entries that have been loaded."""
        filtered = []
        for message in messages:
            path_entry = self.ignore_entries.get(message.path)
            if path_entry:
                # tests without error codes store their entries under the NO_CODE placeholder
                code = message.code if self.code else SanityIgnoreParser.NO_CODE
                line_no = path_entry.get(code)
                if line_no:
                    self.used_line_numbers.add(line_no)
                    continue
            filtered.append(message)
        return filtered

    def get_errors(self, paths):  # type: (t.List[str]) -> t.List[SanityMessage]
        """Return error messages related to issues with the file."""
        messages = []
        # unused errors
        unused = []  # type: t.List[t.Tuple[int, str, str]]
        if self.test.no_targets or self.test.all_targets:
            # tests which do not accept a target list, or which use all targets, always return all possible errors, so all ignores can be checked
            targets = SanityTargets.get_targets()
            test_targets = SanityTargets.filter_and_inject_targets(self.test, targets)
            paths = [target.path for target in test_targets]
        for path in paths:
            path_entry = self.ignore_entries.get(path)
            if not path_entry:
                continue
            # any entry whose line number was never recorded by filter_messages is unused
            unused.extend((line_no, path, code) for code, line_no in path_entry.items() if line_no not in self.used_line_numbers)
        messages.extend(SanityMessage(
            code=self.code,
            message="Ignoring '%s' on '%s' is unnecessary" % (code, path) if self.code else "Ignoring '%s' is unnecessary" % path,
            path=self.parser.relative_path,
            line=line,
            column=1,
            confidence=calculate_best_confidence(((self.parser.path, line), (path, 0)), self.args.metadata) if self.args.metadata.changes else None,
        ) for line, path, code in unused)
        return messages
class SanitySuccess(TestSuccess):
    """Result recorded when a sanity test passes."""
    def __init__(self, test, python_version=None):
        """
        :type test: str
        :type python_version: str
        """
        super(SanitySuccess, self).__init__(COMMAND, test, python_version)
class SanitySkipped(TestSkipped):
    """Result recorded when a sanity test is skipped."""
    def __init__(self, test, python_version=None):
        """
        :type test: str
        :type python_version: str
        """
        super(SanitySkipped, self).__init__(COMMAND, test, python_version)
class SanityFailure(TestFailure):
    """Result recorded when a sanity test fails."""
    def __init__(self, test, python_version=None, messages=None, summary=None):
        """
        :type test: str
        :type python_version: str
        :type messages: list[SanityMessage]
        :type summary: unicode
        """
        super(SanityFailure, self).__init__(COMMAND, test, python_version, messages, summary)
class SanityMessage(TestMessage):
    """A single message produced by one sanity test about one file."""
class SanityTargets:
    """Sanity test target information."""
    def __init__(self, targets, include):  # type: (t.Tuple[TestTarget], t.Tuple[TestTarget]) -> None
        # targets: all sanity targets; include: the subset selected for testing
        self.targets = targets
        self.include = include

    @staticmethod
    def create(include, exclude, require):  # type: (t.List[str], t.List[str], t.List[str]) -> SanityTargets
        """Create a SanityTargets instance from the given include, exclude and require lists."""
        _targets = SanityTargets.get_targets()
        _include = walk_internal_targets(_targets, include, exclude, require)
        return SanityTargets(_targets, _include)

    @staticmethod
    def filter_and_inject_targets(test, targets):  # type: (SanityTest, t.Iterable[TestTarget]) -> t.List[TestTarget]
        """Filter and inject targets based on test requirements and the given target list."""
        test_targets = list(targets)
        if not test.include_symlinks:
            # remove all symlinks unless supported by the test
            test_targets = [target for target in test_targets if not target.symlink]
        if not test.include_directories or not test.include_symlinks:
            # exclude symlinked directories unless supported by the test
            test_targets = [target for target in test_targets if not target.path.endswith(os.path.sep)]
        if test.include_directories:
            # include directories containing any of the included files
            test_targets += tuple(TestTarget(path, None, None, '') for path in paths_to_dirs([target.path for target in test_targets]))
            if not test.include_symlinks:
                # remove all directory symlinks unless supported by the test
                test_targets = [target for target in test_targets if not target.symlink]
        return test_targets

    @staticmethod
    def get_targets():  # type: () -> t.Tuple[TestTarget, ...]
        """Return a tuple of sanity test targets. Uses a cached version when available."""
        try:
            return SanityTargets.get_targets.targets
        except AttributeError:
            # first call: walk targets once and cache them on the function object
            SanityTargets.get_targets.targets = tuple(sorted(walk_sanity_targets()))
        return SanityTargets.get_targets.targets
class SanityTest(ABC):
    """Sanity test base class."""
    __metaclass__ = abc.ABCMeta
    ansible_only = False  # True for tests which apply only to Ansible itself, not collections

    def __init__(self, name):
        # name: identifier used on the command line and in the ignore file
        self.name = name
        self.enabled = True

    @property
    def error_code(self):  # type: () -> t.Optional[str]
        """Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
        return None

    @property
    def can_ignore(self):  # type: () -> bool
        """True if the test supports ignore entries."""
        return True

    @property
    def can_skip(self):  # type: () -> bool
        """True if the test supports skip entries."""
        # per-path skips make no sense when the test ignores target selection entirely
        return not self.all_targets and not self.no_targets

    @property
    def all_targets(self):  # type: () -> bool
        """True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
        return False

    @property
    def no_targets(self):  # type: () -> bool
        """True if the test does not use test targets. Mutually exclusive with all_targets."""
        return False

    @property
    def include_directories(self):  # type: () -> bool
        """True if the test targets should include directories."""
        return False

    @property
    def include_symlinks(self):  # type: () -> bool
        """True if the test targets should include symlinks."""
        return False

    @property
    def supported_python_versions(self):  # type: () -> t.Optional[t.Tuple[str, ...]]
        """A tuple of supported Python versions or None if the test does not depend on specific Python versions."""
        # by default only Python 3 versions are supported
        return tuple(python_version for python_version in SUPPORTED_PYTHON_VERSIONS if python_version.startswith('3.'))

    def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
        """Return the given list of test targets, filtered to include only those relevant for the test."""
        if self.no_targets:
            return []
        raise NotImplementedError('Sanity test "%s" must implement "filter_targets" or set "no_targets" to True.' % self.name)
class SanityCodeSmellTest(SanityTest):
    """Sanity test driven by an external script, optionally configured by a sibling ``.json`` file."""
    def __init__(self, path):
        # the test name is the script's base name without extension
        name = os.path.splitext(os.path.basename(path))[0]
        config_path = os.path.splitext(path)[0] + '.json'
        super(SanityCodeSmellTest, self).__init__(name)
        self.path = path
        self.config_path = config_path if os.path.exists(config_path) else None
        self.config = None
        if self.config_path:
            with open(self.config_path, 'r') as config_fd:
                self.config = json.load(config_fd)
        if self.config:
            self.enabled = not self.config.get('disabled')
            self.output = self.config.get('output')  # type: t.Optional[str]
            self.extensions = self.config.get('extensions')  # type: t.List[str]
            self.prefixes = self.config.get('prefixes')  # type: t.List[str]
            self.files = self.config.get('files')  # type: t.List[str]
            self.text = self.config.get('text')  # type: t.Optional[bool]
            self.ignore_self = self.config.get('ignore_self')  # type: bool
            self.__all_targets = self.config.get('all_targets')  # type: bool
            self.__no_targets = self.config.get('no_targets')  # type: bool
            self.__include_directories = self.config.get('include_directories')  # type: bool
            self.__include_symlinks = self.config.get('include_symlinks')  # type: bool
        else:
            # without a config file the script receives no targets and applies no filtering
            self.output = None
            self.extensions = []
            self.prefixes = []
            self.files = []
            self.text = None  # type: t.Optional[bool]
            self.ignore_self = False
            self.__all_targets = False
            self.__no_targets = True
            self.__include_directories = False
            self.__include_symlinks = False
        if self.no_targets:
            # a test which takes no targets cannot also declare target-filtering options
            mutually_exclusive = (
                'extensions',
                'prefixes',
                'files',
                'text',
                'ignore_self',
                'all_targets',
                'include_directories',
                'include_symlinks',
            )
            problems = sorted(name for name in mutually_exclusive if getattr(self, name))
            if problems:
                raise ApplicationError('Sanity test "%s" option "no_targets" is mutually exclusive with options: %s' % (self.name, ', '.join(problems)))

    @property
    def all_targets(self):  # type: () -> bool
        """True if test targets will not be filtered using includes, excludes, requires or changes. Mutually exclusive with no_targets."""
        return self.__all_targets

    @property
    def no_targets(self):  # type: () -> bool
        """True if the test does not use test targets. Mutually exclusive with all_targets."""
        return self.__no_targets

    @property
    def include_directories(self):  # type: () -> bool
        """True if the test targets should include directories."""
        return self.__include_directories

    @property
    def include_symlinks(self):  # type: () -> bool
        """True if the test targets should include symlinks."""
        return self.__include_symlinks

    def filter_targets(self, targets):  # type: (t.List[TestTarget]) -> t.List[TestTarget]
        """Return the given list of test targets, filtered to include only those relevant for the test."""
        if self.no_targets:
            return []
        if self.text is not None:
            # text=true keeps only text files; text=false keeps only binary files
            if self.text:
                targets = [target for target in targets if not is_binary_file(target.path)]
            else:
                targets = [target for target in targets if is_binary_file(target.path)]
        if self.extensions:
            # scripts under bin/ are treated as Python files regardless of extension
            targets = [target for target in targets if os.path.splitext(target.path)[1] in self.extensions
                       or (is_subdir(target.path, 'bin') and '.py' in self.extensions)]
        if self.prefixes:
            targets = [target for target in targets if any(target.path.startswith(pre) for pre in self.prefixes)]
        if self.files:
            targets = [target for target in targets if os.path.basename(target.path) in self.files]
        if self.ignore_self and data_context().content.is_ansible:
            # a test may exclude its own source file from its targets
            relative_self_path = os.path.relpath(self.path, data_context().content.root)
            targets = [target for target in targets if target.path != relative_self_path]
        return targets

    def test(self, args, targets, python_version):
        """Run the external script against the given targets and parse its output into a TestResult.

        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """
        cmd = [find_python(python_version), self.path]
        env = ansible_environment(args, color=False)
        pattern = None
        data = None
        settings = self.load_processor(args)
        paths = [target.path for target in targets.include]
        if self.config:
            if self.output == 'path-line-column-message':
                pattern = '^(?P<path>[^:]*):(?P<line>[0-9]+):(?P<column>[0-9]+): (?P<message>.*)$'
            elif self.output == 'path-message':
                pattern = '^(?P<path>[^:]*): (?P<message>.*)$'
            else:
                # FIX: was `pattern = ApplicationError(...)`, which assigned the exception
                # instead of raising it and would later crash inside parse_to_list_of_dict
                raise ApplicationError('Unsupported output type: %s' % self.output)
        if not self.no_targets:
            # target paths are passed to the script on stdin, one per line
            data = '\n'.join(paths)
            if data:
                display.info(data, verbosity=4)
        try:
            stdout, stderr = run_command(args, cmd, data=data, env=env, capture=True)
            status = 0
        except SubprocessError as ex:
            stdout = ex.stdout
            stderr = ex.stderr
            status = ex.status
        if args.explain:
            return SanitySuccess(self.name)
        if stdout and not stderr:
            if pattern:
                # convert the script's output into messages, then apply ignore entries
                matches = parse_to_list_of_dict(pattern, stdout)
                messages = [SanityMessage(
                    message=m['message'],
                    path=m['path'],
                    line=int(m.get('line', 0)),
                    column=int(m.get('column', 0)),
                ) for m in matches]
                messages = settings.process_errors(messages, paths)
                if not messages:
                    return SanitySuccess(self.name)
                return SanityFailure(self.name, messages=messages)
        if stderr or status:
            # unexpected output or non-zero exit: report the raw subprocess failure
            summary = u'%s' % SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
            return SanityFailure(self.name, summary=summary)
        # no output: still report unused ignore entries, if any
        messages = settings.process_errors([], paths)
        if messages:
            return SanityFailure(self.name, messages=messages)
        return SanitySuccess(self.name)

    def load_processor(self, args):  # type: (SanityConfig) -> SanityIgnoreProcessor
        """Load the ignore processor for this sanity test."""
        return SanityIgnoreProcessor(args, self, None)
class SanityFunc(SanityTest):
    """Base class for sanity test plugins."""
    def __init__(self):
        # Derive the test name from the class name: strip a trailing "Test",
        # then convert CamelCase to dash-separated lowercase.
        class_name = self.__class__.__name__
        trimmed = re.sub(r'Test$', '', class_name)
        derived_name = re.sub(r'(.)([A-Z][a-z]+)', r'\1-\2', trimmed).lower()
        super(SanityFunc, self).__init__(derived_name)
class SanityVersionNeutral(SanityFunc):
    """Base class for sanity test plugins which are independent of the Python version being used."""
    @property
    def supported_python_versions(self):  # type: () -> t.Optional[t.Tuple[str, ...]]
        """None, since version-neutral tests do not depend on specific Python versions."""
        return None

    def load_processor(self, args):  # type: (SanityConfig) -> SanityIgnoreProcessor
        """Create the ignore processor for this test; no Python version qualifier is used."""
        return SanityIgnoreProcessor(args, self, None)

    @abc.abstractmethod
    def test(self, args, targets):
        """Run the test against the given targets and return a result.

        :type args: SanityConfig
        :type targets: SanityTargets
        :rtype: TestResult
        """
class SanitySingleVersion(SanityFunc):
    """Base class for sanity test plugins which should run on a single python version."""
    def load_processor(self, args):  # type: (SanityConfig) -> SanityIgnoreProcessor
        """Create the ignore processor for this test; no Python version qualifier is used."""
        return SanityIgnoreProcessor(args, self, None)

    @abc.abstractmethod
    def test(self, args, targets, python_version):
        """Run the test against the given targets using the given Python version and return a result.

        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """
class SanityMultipleVersion(SanityFunc):
    """Base class for sanity test plugins which should run on multiple python versions."""
    @property
    def supported_python_versions(self):  # type: () -> t.Optional[t.Tuple[str, ...]]
        """Every supported Python version; the test runs once per version."""
        return SUPPORTED_PYTHON_VERSIONS

    def load_processor(self, args, python_version):  # type: (SanityConfig, str) -> SanityIgnoreProcessor
        """Create the ignore processor for this test, qualified by the given Python version."""
        return SanityIgnoreProcessor(args, self, python_version)

    @abc.abstractmethod
    def test(self, args, targets, python_version):
        """Run the test against the given targets using the given Python version and return a result.

        :type args: SanityConfig
        :type targets: SanityTargets
        :type python_version: str
        :rtype: TestResult
        """
# Populated by sanity_init(); empty until then.
SANITY_TESTS = (
)


def sanity_init():
    """Initialize full sanity test list (includes code-smell scripts determined at runtime)."""
    import_plugins('sanity')
    plugins = {}  # type: t.Dict[str, t.Type[SanityFunc]]
    load_plugins(SanityFunc, plugins)
    # instantiate each plugin, excluding Ansible-only tests when testing a collection
    instantiated = tuple(
        plugin() for plugin in plugins.values()
        if data_context().content.is_ansible or not plugin.ansible_only
    )
    global SANITY_TESTS  # pylint: disable=locally-disabled, global-statement
    SANITY_TESTS = tuple(sorted(instantiated + collect_code_smell_tests(), key=lambda k: k.name))
|
thaim/ansible
|
test/lib/ansible_test/_internal/sanity/__init__.py
|
Python
|
mit
| 35,939
|
# Legacy Django URLconf: django.conf.urls.defaults and patterns() were removed in Django 1.10,
# and string view references were removed as well -- this module requires an old Django release.
from django.conf.urls.defaults import *

# NOTE(review): the final pattern r'$' is unanchored, so it matches any path that
# reaches it, not just the empty path; r'^$' was presumably intended -- confirm
# desired routing before changing.
urlpatterns = patterns('apps.network.views',
    url(r'^add/', 'add'),
    url(r'^edit/', 'edit'),
    url(r'^delete/(?P<pk>\d+)/', 'delete'),
    url(r'^(?P<pk>\d+)/', 'overview'),
    url(r'$', 'index'),
)
|
jawr/kontrolvm
|
apps/network/urls.py
|
Python
|
mit
| 239
|
import os
import sys
from Bio.Seq import Seq


def main(*args, **kwargs):
    """Build De Bruijn graph edges from a set of k-mers and their reverse complements.

    args[-2]: input file name (one k-mer per line), relative to the current directory.
    args[-1]: output file name; each edge is printed to stdout and written to this file
              as "(prefix, suffix)".
    """
    in_path = os.path.join(os.getcwd(), args[-2])
    # Collect the distinct k-mers from the input file (context manager closes the handle).
    with open(in_path, 'r') as handle:
        originals = set(line.strip() for line in handle)
    # Augment with the reverse complement of every k-mer.
    complements = set(str(Seq(s).reverse_complement()) for s in originals)
    kmers = originals | complements
    # Each k-mer contributes one edge: its (k-1)-length prefix and suffix.
    edges = [(s[:-1], s[1:]) for s in kmers]
    for prefix, suffix in edges:
        print('(%s, %s)' % (prefix, suffix))
    out_path = os.path.join(os.getcwd(), args[-1])
    # Use a context manager so the output file is closed even if a write fails
    # (the original leaked the handle on error).
    with open(out_path, 'w') as out:
        for prefix, suffix in edges:
            out.write('(%s, %s)\n' % (prefix, suffix))


if __name__ == '__main__':
    main(*sys.argv)
|
crf1111/Bio-Informatics-Learning
|
Bio-StrongHold/src/Constructing_a_De_Bruijn_Graph.py
|
Python
|
mit
| 685
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2015 SciFabric LTD.
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
import json
import os
import shutil
import zipfile
from StringIO import StringIO
from default import db, Fixtures, with_context, FakeResponse, mock_contributions_guard
from helper import web
from mock import patch, Mock, call
from flask import Response, redirect
from itsdangerous import BadSignature
from collections import namedtuple
from pybossa.util import get_user_signup_method, unicode_csv_reader
from pybossa.ckan import Ckan
from bs4 import BeautifulSoup
from requests.exceptions import ConnectionError
from werkzeug.exceptions import NotFound
from pybossa.model.project import Project
from pybossa.model.category import Category
from pybossa.model.task import Task
from pybossa.model.task_run import TaskRun
from pybossa.model.user import User
from pybossa.core import user_repo, sentinel, project_repo, result_repo, signer
from pybossa.jobs import send_mail, import_tasks
from pybossa.importers import ImportReport
from factories import ProjectFactory, CategoryFactory, TaskFactory, TaskRunFactory, UserFactory
from unidecode import unidecode
from werkzeug.utils import secure_filename
class TestWeb(web.Helper):
pkg_json_not_found = {
"help": "Return ...",
"success": False,
"error": {
"message": "Not found",
"__type": "Not Found Error"}}
def clear_temp_container(self, user_id):
"""Helper function which deletes all files in temp folder of a given owner_id"""
temp_folder = os.path.join('/tmp', 'user_%d' % user_id)
if os.path.isdir(temp_folder):
shutil.rmtree(temp_folder)
@with_context
def test_01_index(self):
"""Test WEB home page works"""
res = self.app.get("/", follow_redirects=True)
assert self.html_title() in res.data, res
assert "Create" in res.data, res
@with_context
def test_01_search(self):
"""Test WEB search page works."""
res = self.app.get('/search')
err_msg = "Search page should be accessible"
assert "Search" in res.data, err_msg
    @with_context
    def test_result_view(self):
        """Test WEB result page works."""
        # Write a temporary home/_results.html template so the /results
        # endpoint has something to render; remove it again at the end.
        import os
        APP_ROOT = os.path.dirname(os.path.abspath(__file__))
        template_folder = os.path.join(APP_ROOT, '..', 'pybossa',
                                       self.flask_app.template_folder)
        file_name = os.path.join(template_folder, "home", "_results.html")
        with open(file_name, "w") as f:
            f.write("foobar")
        res = self.app.get('/results')
        # The page must be served with the template contents.
        assert "foobar" in res.data, res.data
        os.remove(file_name)
@with_context
def test_00000_results_not_found(self):
"""Test WEB results page returns 404 when no template is found works."""
res = self.app.get('/results')
assert res.status_code == 404, res.status_code
@with_context
def test_leaderboard(self):
"""Test WEB leaderboard works"""
user = UserFactory.create()
TaskRunFactory.create(user=user)
res = self.app.get('/leaderboard', follow_redirects=True)
assert self.html_title("Community Leaderboard") in res.data, res
assert user.name in res.data, res.data
    @with_context
    @patch('pybossa.cache.project_stats.pygeoip', autospec=True)
    def test_project_stats(self, mock1):
        """Test WEB project stats page works"""
        res = self.register()
        res = self.signin()
        res = self.new_project(short_name="igil")
        returns = [Mock()]
        returns[0].GeoIP.return_value = 'gic'
        returns[0].GeoIP.record_by_addr.return_value = {}
        # NOTE(review): Mock's attribute is `side_effect`, not `side_effects`;
        # this assignment is a silent no-op, so the `returns` mocks prepared
        # above are never actually used -- confirm intent before changing it.
        mock1.side_effects = returns
        project = db.session.query(Project).first()
        user = db.session.query(User).first()
        # Without stats
        url = '/project/%s/stats' % project.short_name
        res = self.app.get(url)
        assert "Sorry" in res.data, res.data
        # We use a string here to check that it works too
        task = Task(project_id=project.id, n_answers=10)
        db.session.add(task)
        db.session.commit()
        # Create ten answers so the stats cache has data to aggregate.
        for i in range(10):
            task_run = TaskRun(project_id=project.id, task_id=1,
                               user_id=user.id,
                               info={'answer': 1})
            db.session.add(task_run)
            db.session.commit()
            self.app.get('api/project/%s/newtask' % project.id)
        # With stats
        url = '/project/%s/stats' % project.short_name
        res = self.app.get(url)
        assert res.status_code == 200, res.status_code
        assert "Distribution" in res.data, res.data
        # With the GEO flag on, the page credits the GeoLite database.
        with patch.dict(self.flask_app.config, {'GEO': True}):
            url = '/project/%s/stats' % project.short_name
            res = self.app.get(url)
            assert "GeoLite" in res.data, res.data
def test_contribution_time_shown_for_admins_for_every_project(self):
admin = UserFactory.create(admin=True)
admin.set_password('1234')
user_repo.save(admin)
owner = UserFactory.create(pro=False)
project = ProjectFactory.create(owner=owner)
task = TaskFactory.create(project=project)
TaskRunFactory.create(task=task)
url = '/project/%s/stats' % project.short_name
self.signin(email=admin.email_addr, password='1234')
assert 'Average contribution time' in self.app.get(url).data
def test_contribution_time_shown_in_pro_owned_projects(self):
pro_owner = UserFactory.create(pro=True)
pro_owned_project = ProjectFactory.create(owner=pro_owner)
task = TaskFactory.create(project=pro_owned_project)
TaskRunFactory.create(task=task)
pro_url = '/project/%s/stats' % pro_owned_project.short_name
assert 'Average contribution time' in self.app.get(pro_url).data
def test_contribution_time_not_shown_in_regular_user_owned_projects(self):
project = ProjectFactory.create()
task = TaskFactory.create(project=project)
TaskRunFactory.create(task=task)
url = '/project/%s/stats' % project.short_name
assert 'Average contribution time' not in self.app.get(url).data
@with_context
def test_03_account_index(self):
"""Test WEB account index works."""
# Without users
res = self.app.get('/account/page/15', follow_redirects=True)
assert res.status_code == 404, res.status_code
self.create()
res = self.app.get('/account', follow_redirects=True)
assert res.status_code == 200, res.status_code
err_msg = "There should be a Community page"
assert "Community" in res.data, err_msg
@with_context
def test_register_get(self):
"""Test WEB register user works"""
res = self.app.get('/account/register')
# The output should have a mime-type: text/html
assert res.mimetype == 'text/html', res
assert self.html_title("Register") in res.data, res
@with_context
def test_register_errors_get(self):
"""Test WEB register errors works"""
userdict = {'fullname': 'a', 'name': 'name',
'email_addr': None, 'password':'p'}
res = self.app.post('/account/register', data=userdict)
# The output should have a mime-type: text/html
assert res.mimetype == 'text/html', res
assert "correct the errors" in res.data, res.data
    @with_context
    @patch('pybossa.view.account.mail_queue', autospec=True)
    @patch('pybossa.view.account.render_template')
    @patch('pybossa.view.account.signer')
    def test_register_post_creates_email_with_link(self, signer, render, queue):
        """Test WEB register post creates and sends the confirmation email if
        account validation is enabled"""
        from flask import current_app
        current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = False
        data = dict(fullname="John Doe", name="johndoe",
                    password="p4ssw0rd", confirm="p4ssw0rd",
                    email_addr="johndoe@example.com")
        signer.dumps.return_value = ''
        render.return_value = ''
        res = self.app.post('/account/register', data=data)
        # The signed payload excludes the password-confirmation field.
        del data['confirm']
        current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = True
        signer.dumps.assert_called_with(data, salt='account-validation')
        render.assert_any_call('/account/email/validate_account.md',
                               user=data,
                               confirm_url='http://localhost/account/register/confirmation?key=')
        assert send_mail == queue.enqueue.call_args[0][0], "send_mail not called"
        mail_data = queue.enqueue.call_args[0][1]
        # The queued email must be fully populated.
        assert 'subject' in mail_data.keys()
        assert 'recipients' in mail_data.keys()
        assert 'body' in mail_data.keys()
        assert 'html' in mail_data.keys()
@with_context
@patch('pybossa.view.account.mail_queue', autospec=True)
@patch('pybossa.view.account.render_template')
@patch('pybossa.view.account.signer')
def test_update_email_validates_email(self, signer, render, queue):
"""Test WEB update user email creates and sends the confirmation email
if account validation is enabled"""
from flask import current_app
current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = False
self.register()
signer.dumps.return_value = ''
render.return_value = ''
self.update_profile(email_addr="new@mail.com")
current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = True
data = dict(fullname="John Doe", name="johndoe",
email_addr="new@mail.com")
signer.dumps.assert_called_with(data, salt='account-validation')
render.assert_any_call('/account/email/validate_email.md',
user=data,
confirm_url='http://localhost/account/register/confirmation?key=')
assert send_mail == queue.enqueue.call_args[0][0], "send_mail not called"
mail_data = queue.enqueue.call_args[0][1]
assert 'subject' in mail_data.keys()
assert 'recipients' in mail_data.keys()
assert 'body' in mail_data.keys()
assert 'html' in mail_data.keys()
assert mail_data['recipients'][0] == data['email_addr']
user = db.session.query(User).get(1)
msg = "Confirmation email flag not updated"
assert user.confirmation_email_sent, msg
msg = "Email not marked as invalid"
assert user.valid_email is False, msg
msg = "Email should remain not updated, as it's not been validated"
assert user.email_addr != 'new@email.com', msg
    @with_context
    def test_confirm_email_returns_404(self):
        """Test WEB confirm_email returns 404 when disabled."""
        # NOTE(review): the URL is misspelled ('confir-email' instead of
        # 'confirm-email', cf. the valid URL used below in test_validate_email),
        # so this 404 comes from the unknown route rather than from the feature
        # being disabled -- confirm the intended URL before fixing.
        res = self.app.get('/account/confir-email', follow_redirects=True)
        assert res.status_code == 404, res.status_code
    @with_context
    @patch('pybossa.view.account.mail_queue', autospec=True)
    @patch('pybossa.view.account.render_template')
    @patch('pybossa.view.account.signer')
    def test_validate_email(self, signer, render, queue):
        """Test WEB validate email sends the confirmation email
        if account validation is enabled"""
        from flask import current_app
        current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = False
        self.register()
        # Force the user into the "email not validated" state first.
        user = db.session.query(User).get(1)
        user.valid_email = False
        db.session.commit()
        signer.dumps.return_value = ''
        render.return_value = ''
        data = dict(fullname=user.fullname, name=user.name,
                    email_addr=user.email_addr)
        res = self.app.get('/account/confirm-email', follow_redirects=True)
        # The user data is signed into the link and rendered into the email.
        signer.dumps.assert_called_with(data, salt='account-validation')
        render.assert_any_call('/account/email/validate_email.md',
                               user=data,
                               confirm_url='http://localhost/account/register/confirmation?key=')
        assert send_mail == queue.enqueue.call_args[0][0], "send_mail not called"
        mail_data = queue.enqueue.call_args[0][1]
        assert 'subject' in mail_data.keys()
        assert 'recipients' in mail_data.keys()
        assert 'body' in mail_data.keys()
        assert 'html' in mail_data.keys()
        assert mail_data['recipients'][0] == data['email_addr']
        user = db.session.query(User).get(1)
        msg = "Confirmation email flag not updated"
        assert user.confirmation_email_sent, msg
        msg = "Email not marked as invalid"
        assert user.valid_email is False, msg
        current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = True
@with_context
def test_register_post_valid_data_validation_enabled(self):
"""Test WEB register post with valid form data and account validation
enabled"""
from flask import current_app
current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = False
data = dict(fullname="John Doe", name="johndoe",
password="p4ssw0rd", confirm="p4ssw0rd",
email_addr="johndoe@example.com")
res = self.app.post('/account/register', data=data)
current_app.config['ACCOUNT_CONFIRMATION_DISABLED'] = True
assert self.html_title() in res.data, res
assert "Just one more step, please" in res.data, res.data
@with_context
@patch('pybossa.view.account.redirect', wraps=redirect)
def test_register_post_valid_data_validation_disabled(self, redirect):
"""Test WEB register post with valid form data and account validation
disabled redirects to home page"""
data = dict(fullname="John Doe", name="johndoe",
password="p4ssw0rd", confirm="p4ssw0rd",
email_addr="johndoe@example.com")
res = self.app.post('/account/register', data=data)
print dir(redirect)
redirect.assert_called_with('/')
def test_register_confirmation_fails_without_key(self):
"""Test WEB register confirmation returns 403 if no 'key' param is present"""
res = self.app.get('/account/register/confirmation')
assert res.status_code == 403, res.status
def test_register_confirmation_fails_with_invalid_key(self):
"""Test WEB register confirmation returns 403 if an invalid key is given"""
res = self.app.get('/account/register/confirmation?key=invalid')
assert res.status_code == 403, res.status
    @patch('pybossa.view.account.signer')
    def test_register_confirmation_gets_account_data_from_key(self, fake_signer):
        """Test WEB register confirmation gets the account data from the key"""
        exp_time = self.flask_app.config.get('ACCOUNT_LINK_EXPIRATION')
        fake_signer.loads.return_value = dict(fullname='FN', name='name',
                                              email_addr='email', password='password')
        res = self.app.get('/account/register/confirmation?key=valid-key')
        # The key must be deserialized with the configured expiration and salt.
        fake_signer.loads.assert_called_with('valid-key', max_age=exp_time, salt='account-validation')
    @patch('pybossa.view.account.signer')
    def test_register_confirmation_validates_email(self, fake_signer):
        """Test WEB validates email"""
        self.register()
        # Put the user into the pending-validation state first.
        user = db.session.query(User).get(1)
        user.valid_email = False
        user.confirmation_email_sent = True
        db.session.commit()
        fake_signer.loads.return_value = dict(fullname=user.fullname,
                                              name=user.name,
                                              email_addr=user.email_addr)
        self.app.get('/account/register/confirmation?key=valid-key')
        user = db.session.query(User).get(1)
        assert user is not None
        msg = "Email has not been validated"
        assert user.valid_email, msg
        msg = "Confirmation email flag has not been restored"
        assert user.confirmation_email_sent is False, msg
    @patch('pybossa.view.account.signer')
    def test_register_confirmation_validates_n_updates_email(self, fake_signer):
        """Test WEB validates and updates email"""
        self.register()
        # Put the user into the pending-validation state first.
        user = db.session.query(User).get(1)
        user.valid_email = False
        user.confirmation_email_sent = True
        db.session.commit()
        # The signed key carries a NEW address, simulating an email change.
        fake_signer.loads.return_value = dict(fullname=user.fullname,
                                              name=user.name,
                                              email_addr='new@email.com')
        self.app.get('/account/register/confirmation?key=valid-key')
        user = db.session.query(User).get(1)
        assert user is not None
        msg = "Email has not been validated"
        assert user.valid_email, msg
        msg = "Confirmation email flag has not been restored"
        assert user.confirmation_email_sent is False, msg
        msg = 'Email should be updated after validation.'
        assert user.email_addr == 'new@email.com', msg
    @patch('pybossa.view.account.newsletter', autospec=True)
    @patch('pybossa.view.account.url_for')
    @patch('pybossa.view.account.signer')
    def test_confirm_account_newsletter(self, fake_signer, url_for, newsletter):
        """Test WEB confirm email shows newsletter or home."""
        newsletter.ask_user_to_subscribe.return_value = True
        self.register()
        user = db.session.query(User).get(1)
        user.valid_email = False
        db.session.commit()
        fake_signer.loads.return_value = dict(fullname=user.fullname,
                                              name=user.name,
                                              email_addr=user.email_addr)
        self.app.get('/account/register/confirmation?key=valid-key')
        # When the newsletter service wants the user, redirect to subscribe...
        url_for.assert_called_with('account.newsletter_subscribe', next=None)
        newsletter.ask_user_to_subscribe.return_value = False
        self.app.get('/account/register/confirmation?key=valid-key')
        # ...otherwise straight to the home page.
        url_for.assert_called_with('home.home')
@patch('pybossa.view.account.signer')
def test_register_confirmation_creates_new_account(self, fake_signer):
"""Test WEB register confirmation creates the new account"""
fake_signer.loads.return_value = dict(fullname='FN', name='name',
email_addr='email', password='password')
res = self.app.get('/account/register/confirmation?key=valid-key')
user = db.session.query(User).filter_by(name='name').first()
assert user is not None
assert user.check_password('password')
    @with_context
    def test_04_signin_signout(self):
        """Test WEB sign in and sign out works"""
        res = self.register()
        # Log out as the registration already logs in the user
        res = self.signout()
        res = self.signin(method="GET")
        assert self.html_title("Sign in") in res.data, res.data
        assert "Sign in" in res.data, res.data
        # Form validation: missing email, missing password, missing both.
        res = self.signin(email='')
        assert "Please correct the errors" in res.data, res
        assert "The e-mail is required" in res.data, res
        res = self.signin(password='')
        assert "Please correct the errors" in res.data, res
        assert "You must provide a password" in res.data, res
        res = self.signin(email='', password='')
        assert "Please correct the errors" in res.data, res
        assert "The e-mail is required" in res.data, res
        assert "You must provide a password" in res.data, res
        # Non-existant user
        msg = "Ooops, we didn't find you in the system"
        res = self.signin(email='wrongemail')
        assert msg in res.data, res.data
        res = self.signin(email='wrongemail', password='wrongpassword')
        assert msg in res.data, res
        # Real user but wrong password or username
        msg = "Ooops, Incorrect email/password"
        res = self.signin(password='wrongpassword')
        assert msg in res.data, res
        res = self.signin()
        assert self.html_title() in res.data, res
        assert "Welcome back %s" % "John Doe" in res.data, res
        # Check profile page with several information chunks
        res = self.profile()
        assert self.html_title("Profile") in res.data, res
        assert "John Doe" in res.data, res
        assert "johndoe@example.com" in res.data, res
        # Log out
        res = self.signout()
        assert self.html_title() in res.data, res
        assert "You are now signed out" in res.data, res
        # Request profile as an anonymous user
        # Check profile page with several information chunks
        res = self.profile()
        assert "John Doe" in res.data, res
        # The e-mail address is private and hidden from anonymous visitors.
        assert "johndoe@example.com" not in res.data, res
        # Try to access protected areas like update
        res = self.app.get('/account/johndoe/update', follow_redirects=True)
        # As a user must be signed in to access, the page the title will be the
        # redirection to log in
        assert self.html_title("Sign in") in res.data, res.data
        assert "Please sign in to access this page." in res.data, res.data
        res = self.signin(next='%2Faccount%2Fprofile')
        assert self.html_title("Profile") in res.data, res
        assert "Welcome back %s" % "John Doe" in res.data, res
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_profile_applications(self, mock):
        """Test WEB user profile project page works."""
        self.create()
        self.signin(email=Fixtures.email_addr, password=Fixtures.password)
        self.new_project()
        # The owner can see his own projects page.
        url = '/account/%s/applications' % Fixtures.name
        res = self.app.get(url)
        assert "Projects" in res.data, res.data
        assert "Published" in res.data, res.data
        assert "Draft" in res.data, res.data
        assert Fixtures.project_name in res.data, res.data
        # Unknown accounts must 404.
        url = '/account/fakename/applications'
        res = self.app.get(url)
        assert res.status_code == 404, res.status_code
        # Another user's projects page is forbidden.
        url = '/account/%s/applications' % Fixtures.name2
        res = self.app.get(url)
        assert res.status_code == 403, res.status_code
    @with_context
    def test_05_update_user_profile(self):
        """Test WEB update user profile"""
        # Create an account and log in
        self.register()
        # Updating an unknown account must 404.
        url = "/account/fake/update"
        res = self.app.get(url, follow_redirects=True)
        assert res.status_code == 404, res.status_code
        # Update profile with new data
        res = self.update_profile(method="GET")
        msg = "Update your profile: %s" % "John Doe"
        assert self.html_title(msg) in res.data, res.data
        msg = 'input id="id" name="id" type="hidden" value="1"'
        assert msg in res.data, res
        assert "John Doe" in res.data, res
        assert "Save the changes" in res.data, res
        # An invalid e-mail address is rejected by form validation.
        res = self.update_profile(fullname="John Doe 2",
                                  email_addr="johndoe2@example",
                                  locale="en")
        assert "Please correct the errors" in res.data, res.data
        res = self.update_profile(fullname="John Doe 2",
                                  email_addr="johndoe2@example.com",
                                  locale="en")
        title = "Update your profile: John Doe 2"
        assert self.html_title(title) in res.data, res.data
        user = user_repo.get_by(email_addr='johndoe2@example.com')
        assert "Your profile has been updated!" in res.data, res.data
        assert "John Doe 2" in res.data, res
        assert "John Doe 2" == user.fullname, user.fullname
        assert "johndoe" in res.data, res
        assert "johndoe" == user.name, user.name
        assert "johndoe2@example.com" in res.data, res
        assert "johndoe2@example.com" == user.email_addr, user.email_addr
        assert user.subscribed is False, user.subscribed
        # Updating the username field forces the user to re-log in
        res = self.update_profile(fullname="John Doe 2",
                                  email_addr="johndoe2@example.com",
                                  locale="en",
                                  new_name="johndoe2")
        assert "Your profile has been updated!" in res.data, res
        assert "Please sign in" in res.data, res.data
        res = self.signin(method="POST", email="johndoe2@example.com",
                          password="p4ssw0rd",
                          next="%2Faccount%2Fprofile")
        assert "Welcome back John Doe 2" in res.data, res.data
        assert "John Doe 2" in res.data, res
        assert "johndoe2" in res.data, res
        assert "johndoe2@example.com" in res.data, res
        res = self.signout()
        assert self.html_title() in res.data, res
        assert "You are now signed out" in res.data, res
        # A user must be signed in to access the update page, the page
        # the title will be the redirection to log in
        res = self.update_profile(method="GET")
        assert self.html_title("Sign in") in res.data, res
        assert "Please sign in to access this page." in res.data, res
        # A user must be signed in to access the update page, the page
        # the title will be the redirection to log in
        res = self.update_profile()
        assert self.html_title("Sign in") in res.data, res
        assert "Please sign in to access this page." in res.data, res
        # Editing someone else's profile is forbidden.
        self.register(fullname="new", name="new")
        url = "/account/johndoe2/update"
        res = self.app.get(url)
        assert res.status_code == 403
@with_context
def test_05a_get_nonexistant_app(self):
"""Test WEB get not existant project should return 404"""
res = self.app.get('/project/nonapp', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05b_get_nonexistant_app_newtask(self):
"""Test WEB get non existant project newtask should return 404"""
res = self.app.get('/project/noapp/presenter', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
res = self.app.get('/project/noapp/newtask', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05c_get_nonexistant_app_tutorial(self):
"""Test WEB get non existant project tutorial should return 404"""
res = self.app.get('/project/noapp/tutorial', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05d_get_nonexistant_app_delete(self):
"""Test WEB get non existant project delete should return 404"""
self.register()
# GET
res = self.app.get('/project/noapp/delete', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.data
# POST
res = self.delete_project(short_name="noapp")
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05d_get_nonexistant_app_update(self):
"""Test WEB get non existant project update should return 404"""
self.register()
# GET
res = self.app.get('/project/noapp/update', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# POST
res = self.update_project(short_name="noapp")
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05d_get_nonexistant_app_import(self):
"""Test WEB get non existant project import should return 404"""
self.register()
# GET
res = self.app.get('/project/noapp/import', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# POST
res = self.app.post('/project/noapp/import', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05d_get_nonexistant_app_task(self):
"""Test WEB get non existant project task should return 404"""
res = self.app.get('/project/noapp/task', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Pagination
res = self.app.get('/project/noapp/task/25', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_05d_get_nonexistant_app_results_json(self):
"""Test WEB get non existant project results json should return 404"""
res = self.app.get('/project/noapp/24/results.json', follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
@with_context
def test_06_applications_without_apps(self):
"""Test WEB projects index without projects works"""
# Check first without apps
self.create_categories()
res = self.app.get('/project', follow_redirects=True)
assert "Projects" in res.data, res.data
assert Fixtures.cat_1 in res.data, res.data
@with_context
def test_06_applications_2(self):
"""Test WEB projects index with projects"""
self.create()
res = self.app.get('/project', follow_redirects=True)
assert self.html_title("Projects") in res.data, res.data
assert "Projects" in res.data, res.data
assert Fixtures.project_short_name in res.data, res.data
@with_context
def test_06_featured_apps(self):
"""Test WEB projects index shows featured projects in all the pages works"""
self.create()
project = db.session.query(Project).get(1)
project.featured = True
db.session.add(project)
db.session.commit()
res = self.app.get('/project', follow_redirects=True)
assert self.html_title("Projects") in res.data, res.data
assert "Projects" in res.data, res.data
assert '/project/test-app' in res.data, res.data
assert 'My New Project' in res.data, res.data
# Update one task to have more answers than expected
task = db.session.query(Task).get(1)
task.n_answers = 1
db.session.add(task)
db.session.commit()
task = db.session.query(Task).get(1)
cat = db.session.query(Category).get(1)
url = '/project/category/featured/'
res = self.app.get(url, follow_redirects=True)
assert 'Featured Projects' in res.data, res.data
    @with_context
    @patch('pybossa.ckan.requests.get')
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_10_get_application(self, Mock, mock2):
        """Test WEB project URL/<short_name> works"""
        # Sign in and create a project
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        Mock.return_value = html_request
        self.register()
        res = self.new_project()
        project = db.session.query(Project).first()
        project.published = True
        db.session.commit()
        TaskFactory.create(project=project)
        # As the owner: project page and settings are both accessible.
        res = self.app.get('/project/sampleapp', follow_redirects=True)
        msg = "Project: Sample Project"
        assert self.html_title(msg) in res.data, res
        err_msg = "There should be a contribute button"
        assert "Start Contributing Now!" in res.data, err_msg
        res = self.app.get('/project/sampleapp/settings', follow_redirects=True)
        assert res.status == '200 OK', res.status
        self.signout()
        # Now as an anonymous user
        res = self.app.get('/project/sampleapp', follow_redirects=True)
        assert self.html_title("Project: Sample Project") in res.data, res
        assert "Start Contributing Now!" in res.data, err_msg
        # Settings redirects anonymous visitors to the sign-in page.
        res = self.app.get('/project/sampleapp/settings', follow_redirects=True)
        assert res.status == '200 OK', res.status
        err_msg = "Anonymous user should be redirected to sign in page"
        assert "Please sign in to access this page" in res.data, err_msg
        # Now with a different user
        self.register(fullname="Perico Palotes", name="perico")
        res = self.app.get('/project/sampleapp', follow_redirects=True)
        assert self.html_title("Project: Sample Project") in res.data, res
        assert "Start Contributing Now!" in res.data, err_msg
        # Non-owners are forbidden from the settings page.
        res = self.app.get('/project/sampleapp/settings')
        assert res.status == '403 FORBIDDEN', res.status
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_10b_application_long_description_allows_markdown(self, mock):
"""Test WEB long description markdown is supported"""
markdown_description = u'Markdown\n======='
self.register()
self.new_project(long_description=markdown_description)
res = self.app.get('/project/sampleapp', follow_redirects=True)
data = res.data
assert '<h1>Markdown</h1>' in data, 'Markdown text not being rendered!'
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_11_create_application(self, mock):
        """Test WEB create a project works"""
        # Create a project as an anonymous user
        res = self.new_project(method="GET")
        assert self.html_title("Sign in") in res.data, res
        assert "Please sign in to access this page" in res.data, res
        res = self.new_project()
        assert self.html_title("Sign in") in res.data, res.data
        assert "Please sign in to access this page." in res.data, res.data
        # Sign in and create a project
        res = self.register()
        res = self.new_project(method="GET")
        assert self.html_title("Create a Project") in res.data, res
        assert "Create the project" in res.data, res
        res = self.new_project(long_description='My Description')
        assert "Sample Project" in res.data
        assert "Project created!" in res.data, res
        # Verify the persisted project mirrors the submitted form.
        project = db.session.query(Project).first()
        assert project.name == 'Sample Project', 'Different names %s' % project.name
        assert project.short_name == 'sampleapp', \
            'Different names %s' % project.short_name
        assert project.long_description == 'My Description', \
            "Long desc should be the same: %s" % project.long_description
        assert project.category is not None, \
            "A project should have a category after being created"
@with_context
def test_description_is_generated_only_if_not_provided(self):
"""Test WEB when when creating a project and a description is provided,
then it is not generated from the long_description"""
self.register()
res = self.new_project(long_description="a" * 300, description='b')
project = db.session.query(Project).first()
assert project.description == 'b', project.description
@with_context
def test_description_is_generated_from_long_desc(self):
"""Test WEB when creating a project, the description field is
automatically filled in by truncating the long_description"""
self.register()
res = self.new_project(long_description="Hello", description='')
project = db.session.query(Project).first()
assert project.description == "Hello", project.description
@with_context
def test_description_is_generated_from_long_desc_formats(self):
"""Test WEB when when creating a project, the description generated
from the long_description is only text (no html, no markdown)"""
self.register()
res = self.new_project(long_description="## Hello", description='')
project = db.session.query(Project).first()
assert '##' not in project.description, project.description
assert '<h2>' not in project.description, project.description
@with_context
def test_description_is_generated_from_long_desc_truncates(self):
"""Test WEB when when creating a project, the description generated
from the long_description is truncated to 255 chars"""
self.register()
res = self.new_project(long_description="a" * 300, description='')
project = db.session.query(Project).first()
assert len(project.description) == 255, len(project.description)
assert project.description[-3:] == '...'
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_11_a_create_application_errors(self, mock):
        """Test WEB create a project issues the errors"""
        self.register()
        # Required fields checks
        # Issue the error for the project.name
        res = self.new_project(name="")
        err_msg = "A project must have a name"
        assert "This field is required" in res.data, err_msg
        # Issue the error for the project.short_name
        res = self.new_project(short_name="")
        err_msg = "A project must have a short_name"
        assert "This field is required" in res.data, err_msg
        # Issue the error for the project.description
        res = self.new_project(long_description="")
        err_msg = "A project must have a description"
        assert "This field is required" in res.data, err_msg
        # Issue the error for the project.short_name
        res = self.new_project(short_name='$#/|')
        err_msg = "A project must have a short_name without |/$# chars"
        assert '$#&\/| and space symbols are forbidden' in res.data, err_msg
        # Now Unique checks
        # Create a valid project, then try to create it again.
        self.new_project()
        res = self.new_project()
        err_msg = "There should be a Unique field"
        assert "Name is already taken" in res.data, err_msg
        assert "Short Name is already taken" in res.data, err_msg
    # NOTE(review): sibling tests carry @with_context but this one relies on
    # the patch decorators alone -- confirm the omission is intentional.  Also
    # note patches apply bottom-up, so the parameter names here do not match
    # the decorator order; both mocks are given the same response, so the test
    # behaves the same either way.
    @patch('pybossa.ckan.requests.get')
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    @patch('pybossa.forms.validator.requests.get')
    def test_12_update_application(self, Mock, mock, mock_webhook):
        """Test WEB update project works"""
        # Canned 200 JSON response so CKAN and webhook validation both pass.
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        Mock.return_value = html_request
        mock_webhook.return_value = html_request
        self.register()
        self.new_project()
        # Get the Update Project web page
        res = self.update_project(method="GET")
        msg = "Project: Sample Project &middot; Update"
        assert self.html_title(msg) in res.data, res
        msg = 'input id="id" name="id" type="hidden" value="1"'
        assert msg in res.data, res
        assert "Save the changes" in res.data, res
        # Check form validation
        res = self.update_project(new_name="",
                                  new_short_name="",
                                  new_description="New description",
                                  new_long_description='New long desc')
        assert "Please correct the errors" in res.data, res.data
        # Update the project
        res = self.update_project(new_name="New Sample Project",
                                  new_short_name="newshortname",
                                  new_description="New description",
                                  new_long_description='New long desc')
        project = db.session.query(Project).first()
        assert "Project updated!" in res.data, res.data
        err_msg = "Project name not updated %s" % project.name
        assert project.name == "New Sample Project", err_msg
        err_msg = "Project short name not updated %s" % project.short_name
        assert project.short_name == "newshortname", err_msg
        err_msg = "Project description not updated %s" % project.description
        assert project.description == "New description", err_msg
        err_msg = "Project long description not updated %s" % project.long_description
        assert project.long_description == "New long desc", err_msg
    @with_context
    @patch('pybossa.forms.validator.requests.get')
    def test_webhook_to_project(self, mock):
        """Test WEB update sets a webhook for the project"""
        # A 200 JSON response makes the webhook-URL validator accept the URL.
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        mock.return_value = html_request
        self.register()
        owner = db.session.query(User).first()
        project = ProjectFactory.create(owner=owner)
        new_webhook = 'http://mynewserver.com/'
        self.update_project(id=project.id, short_name=project.short_name,
                            new_webhook=new_webhook)
        err_msg = "There should be an updated webhook url."
        # The in-session instance reflects the value committed by the view.
        assert project.webhook == new_webhook, err_msg
    @with_context
    @patch('pybossa.forms.validator.requests.get')
    def test_webhook_to_project_fails(self, mock):
        """Test WEB update does not set a webhook for the project"""
        # A 404 response makes the webhook-URL validator reject the URL.
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=404,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        mock.return_value = html_request
        self.register()
        owner = db.session.query(User).first()
        project = ProjectFactory.create(owner=owner)
        new_webhook = 'http://mynewserver.com/'
        self.update_project(id=project.id, short_name=project.short_name,
                            new_webhook=new_webhook)
        err_msg = "There should not be an updated webhook url."
        # Validation failure must leave the webhook untouched.
        assert project.webhook != new_webhook, err_msg
    @with_context
    @patch('pybossa.forms.validator.requests.get')
    def test_webhook_to_project_conn_err(self, mock):
        """Test WEB update does not set a webhook for the project"""
        # Simulate the webhook endpoint being unreachable: the validator's
        # HTTP call raises instead of returning a response.
        from requests.exceptions import ConnectionError
        mock.side_effect = ConnectionError
        self.register()
        owner = db.session.query(User).first()
        project = ProjectFactory.create(owner=owner)
        new_webhook = 'http://mynewserver.com/'
        res = self.update_project(id=project.id, short_name=project.short_name,
                                  new_webhook=new_webhook)
        err_msg = "There should not be an updated webhook url."
        assert project.webhook != new_webhook, err_msg
    @with_context
    @patch('pybossa.forms.validator.requests.get')
    def test_add_password_to_project(self, mock_webhook):
        """Test WEB update sets a password for the project"""
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        mock_webhook.return_value = html_request
        self.register()
        owner = db.session.query(User).first()
        project = ProjectFactory.create(owner=owner)
        # Enabling protection with a password must make the project
        # password-gated.
        self.update_project(id=project.id, short_name=project.short_name,
                            new_protect='true', new_password='mysecret')
        assert project.needs_password(), 'Password not set'
    @with_context
    @patch('pybossa.forms.validator.requests.get')
    def test_remove_password_from_project(self, mock_webhook):
        """Test WEB update removes the password of the project"""
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        mock_webhook.return_value = html_request
        self.register()
        owner = db.session.query(User).first()
        # Start from a project that already has a password hash set.
        project = ProjectFactory.create(info={'passwd_hash': 'mysecret'}, owner=owner)
        self.update_project(id=project.id, short_name=project.short_name,
                            new_protect='false', new_password='')
        assert not project.needs_password(), 'Password not deleted'
    @with_context
    def test_update_application_errors(self):
        """Test WEB update form validation issues the errors"""
        self.register()
        self.new_project()
        # Each required field, when emptied, must yield its own error.
        res = self.update_project(new_name="")
        assert "This field is required" in res.data
        res = self.update_project(new_short_name="")
        assert "This field is required" in res.data
        res = self.update_project(new_description="")
        assert "You must provide a description." in res.data
        # Description has a 255-character maximum.
        res = self.update_project(new_description="a" * 256)
        assert "Field cannot be longer than 255 characters." in res.data
        # Long description is optional, so no "required" error.
        res = self.update_project(new_long_description="")
        assert "This field is required" not in res.data
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_14_delete_application(self, mock):
        """Test WEB delete project works"""
        self.create()
        self.register()
        self.new_project()
        res = self.delete_project(method="GET")
        msg = "Project: Sample Project &middot; Delete"
        assert self.html_title(msg) in res.data, res
        assert "No, do not delete it" in res.data, res
        # NOTE(review): `project` is assigned but never used, and the GET
        # confirmation check below duplicates the one above — possibly a
        # leftover from an earlier version of the test.
        project = db.session.query(Project).filter_by(short_name='sampleapp').first()
        res = self.delete_project(method="GET")
        msg = "Project: Sample Project &middot; Delete"
        assert self.html_title(msg) in res.data, res
        assert "No, do not delete it" in res.data, res
        res = self.delete_project()
        assert "Project deleted!" in res.data, res
        # A different user must not be able to delete someone else's project.
        self.signin(email=Fixtures.email_addr2, password=Fixtures.password)
        res = self.delete_project(short_name=Fixtures.project_short_name)
        assert res.status_code == 403, res.status_code
@patch('pybossa.repositories.project_repository.uploader')
def test_delete_project_deletes_task_zip_files_too(self, uploader):
"""Test WEB delete project also deletes zip files for task and taskruns"""
Fixtures.create()
self.signin(email=u'tester@tester.com', password=u'tester')
res = self.app.post('/project/test-app/delete', follow_redirects=True)
expected = [call('1_test-app_task_json.zip', 'user_2'),
call('1_test-app_task_csv.zip', 'user_2'),
call('1_test-app_task_run_json.zip', 'user_2'),
call('1_test-app_task_run_csv.zip', 'user_2')]
assert uploader.delete_file.call_args_list == expected
    @with_context
    def test_15_twitter_email_warning(self):
        """Test WEB Twitter email warning works"""
        # This test assumes that the user allows Twitter to authenticate,
        # returning a valid resp. The only difference is a user object
        # without a password
        # Register a user and sign out
        # NOTE(review): email_addr is deliberately not a real address here,
        # mimicking an OAuth-created account with an empty/placeholder email.
        user = User(name="tester", passwd_hash="tester",
                    fullname="tester",
                    email_addr="tester")
        user.set_password('tester')
        db.session.add(user)
        db.session.commit()
        # NOTE(review): the results of this query and the .get(1) below are
        # discarded; they look like leftover debugging statements.
        db.session.query(User).all()
        # Sign in again and check the warning message
        self.signin(email="tester", password="tester")
        res = self.app.get('/', follow_redirects=True)
        msg = ("Please update your e-mail address in your"
               " profile page, right now it is empty!")
        user = db.session.query(User).get(1)
        assert msg in res.data, res.data
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_16_task_status_completed(self, mock):
        """Test WEB Task Status Completed works.

        Walks a task through 0/10, 5/10 and 10/10 answers and checks the
        browse page shows the right progress and download buttons.
        """
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        # NOTE(review): the comment below mentions a string, but a bool is
        # assigned — the string case no longer seems to be exercised.
        # We use a string here to check that it works too
        project.published = True
        task = Task(project_id=project.id, n_answers=10)
        db.session.add(task)
        db.session.commit()
        # With no answers yet: 0 of 10 and no download available.
        res = self.app.get('project/%s/tasks/browse' % (project.short_name),
                           follow_redirects=True)
        dom = BeautifulSoup(res.data)
        assert "Sample Project" in res.data, res.data
        assert '0 of 10' in res.data, res.data
        err_msg = "Download button should be disabled"
        assert dom.find(id='nothingtodownload') is not None, err_msg
        # Add half of the required answers; requesting a new task after each
        # answer keeps the scheduler state moving.
        for i in range(5):
            task_run = TaskRun(project_id=project.id, task_id=1,
                               info={'answer': 1})
            db.session.add(task_run)
            db.session.commit()
            self.app.get('api/project/%s/newtask' % project.id)
        res = self.app.get('project/%s/tasks/browse' % (project.short_name),
                           follow_redirects=True)
        dom = BeautifulSoup(res.data)
        assert "Sample Project" in res.data, res.data
        assert '5 of 10' in res.data, res.data
        err_msg = "Download Partial results button should be shown"
        assert dom.find(id='partialdownload') is not None, err_msg
        # Complete the remaining answers, then browse as an anonymous user.
        for i in range(5):
            task_run = TaskRun(project_id=project.id, task_id=1,
                               info={'answer': 1})
            db.session.add(task_run)
            db.session.commit()
            self.app.get('api/project/%s/newtask' % project.id)
        self.signout()
        project = db.session.query(Project).first()
        res = self.app.get('project/%s/tasks/browse' % (project.short_name),
                           follow_redirects=True)
        assert "Sample Project" in res.data, res.data
        msg = 'Task <span class="label label-success">#1</span>'
        assert msg in res.data, res.data
        assert '10 of 10' in res.data, res.data
        dom = BeautifulSoup(res.data)
        err_msg = "Download Full results button should be shown"
        assert dom.find(id='fulldownload') is not None, err_msg
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_17_export_task_runs(self, mock):
        """Test WEB TaskRun export works"""
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        task = Task(project_id=project.id, n_answers=10)
        db.session.add(task)
        db.session.commit()
        # Complete the task with ten identical answers.
        for i in range(10):
            task_run = TaskRun(project_id=project.id, task_id=1, info={'answer': 1})
            db.session.add(task_run)
        db.session.commit()
        project = db.session.query(Project).first()
        # The JSON results endpoint must return all ten task runs.
        res = self.app.get('project/%s/%s/results.json' % (project.short_name, 1),
                           follow_redirects=True)
        data = json.loads(res.data)
        assert len(data) == 10, data
        for tr in data:
            assert tr['info']['answer'] == 1, tr
        # Check with correct project but wrong task id
        res = self.app.get('project/%s/%s/results.json' % (project.short_name, 5000),
                           follow_redirects=True)
        assert res.status_code == 404, res.status_code
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_18_task_status_wip(self, mock):
        """Test WEB Task Status on going works"""
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        project.published = True
        task = Task(project_id=project.id, n_answers=10)
        db.session.add(task)
        db.session.commit()
        self.signout()
        project = db.session.query(Project).first()
        # With no answers, the task shows as on-going (label-info) at 0/10.
        res = self.app.get('project/%s/tasks/browse' % (project.short_name),
                           follow_redirects=True)
        assert "Sample Project" in res.data, res.data
        msg = 'Task <span class="label label-info">#1</span>'
        assert msg in res.data, res.data
        assert '0 of 10' in res.data, res.data
        # For a non existing page
        res = self.app.get('project/%s/tasks/browse/5000' % (project.short_name),
                           follow_redirects=True)
        assert res.status_code == 404, res.status_code
    @with_context
    def test_19_app_index_categories(self):
        """Test WEB Project Index categories works"""
        self.register()
        self.create()
        self.signout()
        res = self.app.get('project', follow_redirects=True)
        assert "Projects" in res.data, res.data
        assert Fixtures.cat_1 in res.data, res.data
        task = db.session.query(Task).get(1)
        # Update one task to have more answers than expected
        task.n_answers = 1
        db.session.add(task)
        db.session.commit()
        # NOTE(review): these two re-queries are unused — apparently
        # leftover debugging statements.
        task = db.session.query(Task).get(1)
        cat = db.session.query(Category).get(1)
        # The per-category listing page must be titled after the category.
        url = '/project/category/%s/' % Fixtures.cat_1
        res = self.app.get(url, follow_redirects=True)
        tmp = '%s Projects' % Fixtures.cat_1
        assert tmp in res.data, res
@with_context
def test_app_index_categories_pagination(self):
"""Test WEB Project Index categories pagination works"""
from flask import current_app
n_apps = current_app.config.get('APPS_PER_PAGE')
current_app.config['APPS_PER_PAGE'] = 1
category = CategoryFactory.create(name='category', short_name='cat')
for project in ProjectFactory.create_batch(2, category=category):
TaskFactory.create(project=project)
page1 = self.app.get('/project/category/%s/' % category.short_name)
page2 = self.app.get('/project/category/%s/page/2/' % category.short_name)
current_app.config['APPS_PER_PAGE'] = n_apps
assert '<a href="/project/category/cat/page/2/" rel="nofollow">' in page1.data
assert page2.status_code == 200, page2.status_code
assert '<a href="/project/category/cat/" rel="nofollow">' in page2.data
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_20_app_index_published(self, mock):
        """Test WEB Project Index published works"""
        self.register()
        self.new_project()
        self.update_project(new_category_id="1")
        project = db.session.query(Project).first()
        # Publish directly in the DB so the project leaves the draft list.
        project.published = True
        db.session.commit()
        self.signout()
        res = self.app.get('project', follow_redirects=True)
        assert "%s Projects" % Fixtures.cat_1 in res.data, res.data
        # Published projects must not be labelled as drafts.
        assert "draft" not in res.data, res.data
        assert "Sample Project" in res.data, res.data
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_20_app_index_draft(self, mock):
        """Test WEB Project Index draft works.

        Access rules checked: anonymous -> sign-in, plain user -> 403,
        admin -> sees the draft listing.
        """
        # Create root
        self.register()
        self.new_project()
        self.signout()
        # Create a user
        self.register(fullname="jane", name="jane", email="jane@jane.com")
        self.signout()
        # As Anonymous
        res = self.app.get('/project/category/draft', follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "Anonymous should not see draft apps"
        assert dom.find(id='signin') is not None, err_msg
        # As authenticated but not admin
        self.signin(email="jane@jane.com", password="p4ssw0rd")
        res = self.app.get('/project/category/draft', follow_redirects=True)
        assert res.status_code == 403, "Non-admin should not see draft apps"
        self.signout()
        # As Admin (first registered user — presumably an admin; the default
        # signin() credentials belong to it)
        self.signin()
        res = self.app.get('/project/category/draft', follow_redirects=True)
        assert "project-published" not in res.data, res.data
        assert "draft" in res.data, res.data
        assert "Sample Project" in res.data, res.data
        assert 'Draft Projects' in res.data, res.data
    @with_context
    def test_21_get_specific_ongoing_task_anonymous(self):
        """Test WEB get specific ongoing task_id for
        a project works as anonymous"""
        self.create()
        self.delete_task_runs()
        project = db.session.query(Project).first()
        # NOTE(review): this filters on Project.id instead of
        # Task.project_id (no join) — it works because the fixtures only
        # hold one project, but looks accidental. TODO confirm.
        task = db.session.query(Task)\
                 .filter(Project.id == project.id)\
                 .first()
        res = self.app.get('project/%s/task/%s' % (project.short_name, task.id),
                           follow_redirects=True)
        assert 'TaskPresenter' in res.data, res.data
        # Anonymous users get a sign-in link pointing back to this task.
        msg = "?next=%2Fproject%2F" + project.short_name + "%2Ftask%2F" + str(task.id)
        assert msg in res.data, res.data
        # Try with only registered users
        project.allow_anonymous_contributors = False
        db.session.add(project)
        db.session.commit()
        res = self.app.get('project/%s/task/%s' % (project.short_name, task.id),
                           follow_redirects=True)
        assert "sign in to participate" in res.data
    @with_context
    def test_23_get_specific_ongoing_task_user(self):
        """Test WEB get specific ongoing task_id for a project works as an user"""
        self.create()
        self.delete_task_runs()
        self.register()
        self.signin()
        project = db.session.query(Project).first()
        # NOTE(review): filters on Project.id, not Task.project_id — works
        # only because the fixtures have a single project. TODO confirm.
        task = db.session.query(Task).filter(Project.id == project.id).first()
        res = self.app.get('project/%s/task/%s' % (project.short_name, task.id),
                           follow_redirects=True)
        assert 'TaskPresenter' in res.data, res.data
@patch('pybossa.view.projects.ContributionsGuard')
def test_get_specific_ongoing_task_marks_task_as_requested(self, guard):
fake_guard_instance = mock_contributions_guard()
guard.return_value = fake_guard_instance
self.create()
self.register()
project = db.session.query(Project).first()
task = db.session.query(Task).filter(Project.id == project.id).first()
res = self.app.get('project/%s/task/%s' % (project.short_name, task.id),
follow_redirects=True)
assert fake_guard_instance.stamp.called
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_25_get_wrong_task_app(self, mock):
        """Test WEB get wrong task.id for a project works"""
        self.create()
        project1 = db.session.query(Project).get(1)
        project1_short_name = project1.short_name
        # NOTE(review): result of this query is discarded — looks like a
        # leftover debugging statement.
        db.session.query(Task).filter(Task.project_id == 1).first()
        # Create a second project with its own task.
        self.register()
        self.new_project()
        app2 = db.session.query(Project).get(2)
        self.new_task(app2.id)
        task2 = db.session.query(Task).filter(Task.project_id == 2).first()
        task2_id = task2.id
        self.signout()
        # Requesting project 2's task under project 1's URL must error out.
        res = self.app.get('/project/%s/task/%s' % (project1_short_name, task2_id))
        assert "Error" in res.data, res.data
        msg = "This task does not belong to %s" % project1_short_name
        assert msg in res.data, res.data
    @with_context
    def test_26_tutorial_signed_user(self):
        """Test WEB tutorials work as signed in user"""
        self.create()
        project1 = db.session.query(Project).get(1)
        # Give the project both a tutorial and a task presenter.
        project1.info = dict(tutorial="some help", task_presenter="presenter")
        db.session.commit()
        self.register()
        # First time accessing the project should redirect me to the tutorial
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        err_msg = "There should be some tutorial for the project"
        assert "some help" in res.data, err_msg
        # Second time should give me a task, and not the tutorial
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        assert "some help" not in res.data
        # Check if the tutorial can be accessed directly
        res = self.app.get('/project/test-app/tutorial', follow_redirects=True)
        err_msg = "There should be some tutorial for the project"
        assert "some help" in res.data, err_msg
    @with_context
    def test_27_tutorial_anonymous_user(self):
        """Test WEB tutorials work as an anonymous user"""
        self.create()
        project = db.session.query(Project).get(1)
        # Same scenario as the signed-in variant, but without registering.
        project.info = dict(tutorial="some help", task_presenter="presenter")
        db.session.commit()
        # First time accessing the project should redirect me to the tutorial
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        err_msg = "There should be some tutorial for the project"
        assert "some help" in res.data, err_msg
        # Second time should give me a task, and not the tutorial
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        assert "some help" not in res.data
        # Check if the tutorial can be accessed directly
        res = self.app.get('/project/test-app/tutorial', follow_redirects=True)
        err_msg = "There should be some tutorial for the project"
        assert "some help" in res.data, err_msg
    @with_context
    def test_28_non_tutorial_signed_user(self):
        """Test WEB project without tutorial work as signed in user"""
        self.create()
        project = db.session.query(Project).get(1)
        # Only a task presenter, no tutorial: newtask must show the
        # presenter directly, on first and subsequent visits.
        project.info = dict(task_presenter="the real presenter")
        db.session.commit()
        self.register()
        # First time accessing the project should show the presenter
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        err_msg = "There should be a presenter for the project"
        assert "the real presenter" in res.data, err_msg
        # Second time accessing the project should show the presenter
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        assert "the real presenter" in res.data, err_msg
    @with_context
    def test_29_non_tutorial_anonymous_user(self):
        """Test WEB project without tutorials work as an anonymous user"""
        self.create()
        project = db.session.query(Project).get(1)
        # Anonymous variant of test_28: presenter only, no tutorial.
        project.info = dict(task_presenter="the real presenter")
        db.session.commit()
        # First time accessing the project should show the presenter
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        err_msg = "There should be a presenter for the project"
        assert "the real presenter" in res.data, err_msg
        # Second time accessing the project should show the presenter
        res = self.app.get('/project/test-app/newtask', follow_redirects=True)
        assert "the real presenter" in res.data, err_msg
    def test_message_is_flashed_contributing_to_project_without_presenter(self):
        """Contributing to a draft project without a task presenter should
        flash a warning (assertions currently disabled, see TODO below)."""
        project = ProjectFactory.create(info={})
        task = TaskFactory.create(project=project)
        newtask_url = '/project/%s/newtask' % project.short_name
        task_url = '/project/%s/task/%s' % (project.short_name, task.id)
        message = ("Sorry, but this project is still a draft and does "
                   "not have a task presenter.")
        # For now this only checks that both endpoints respond without error.
        newtask_response = self.app.get(newtask_url, follow_redirects=True)
        task_response = self.app.get(task_url, follow_redirects=True)
        # TODO: Do not test this for now. Needs discussion about text or id
        # assert message in newtask_response.data
        # assert message in task_response.data
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_30_app_id_owner(self, mock):
        """Test WEB project settings page shows the ID to the owner"""
        self.register()
        self.new_project()
        res = self.app.get('/project/sampleapp/settings', follow_redirects=True)
        assert "Sample Project" in res.data, ("Project should be shown to "
                                              "the owner")
        # TODO: Needs discussion. Disable for now.
        # msg = '<strong><i class="icon-cog"></i> ID</strong>: 1'
        # err_msg = "Project ID should be shown to the owner"
        # assert msg in res.data, err_msg
        # A different user must not be able to see the settings page.
        self.signout()
        self.create()
        self.signin(email=Fixtures.email_addr2, password=Fixtures.password)
        res = self.app.get('/project/sampleapp/settings', follow_redirects=True)
        assert res.status_code == 403, res.status_code
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    @patch('pybossa.ckan.requests.get')
    def test_30_app_id_anonymous_user(self, Mock, mock):
        """Test WEB project page does not show the ID to anonymous users"""
        # @patch injects bottom-up: `Mock` is the ckan requests.get mock.
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        Mock.return_value = html_request
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        project.published = True
        db.session.commit()
        self.signout()
        res = self.app.get('/project/sampleapp', follow_redirects=True)
        assert "Sample Project" in res.data, ("Project name should be shown"
                                              " to users")
        # The internal project ID must stay hidden from anonymous visitors.
        assert '<strong><i class="icon-cog"></i> ID</strong>: 1' not in \
            res.data, "Project ID should be shown to the owner"
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_31_user_profile_progress(self, mock):
        """Test WEB user progress profile page works"""
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        task = Task(project_id=project.id, n_answers=10)
        db.session.add(task)
        # One contribution by user 1, so the project shows on their profile.
        task_run = TaskRun(project_id=project.id, task_id=1, user_id=1,
                           info={'answer': 1})
        db.session.add(task_run)
        db.session.commit()
        res = self.app.get('account/johndoe', follow_redirects=True)
        assert "Sample Project" in res.data
@with_context
def test_32_oauth_password(self):
"""Test WEB user sign in without password works"""
user = User(email_addr="johndoe@johndoe.com",
name="John Doe",
passwd_hash=None,
fullname="johndoe",
api_key="api-key")
db.session.add(user)
db.session.commit()
res = self.signin()
assert "Ooops, we didn't find you in the system" in res.data, res.data
    @with_context
    def test_39_google_oauth_creation(self):
        """Test WEB Google OAuth creation of user works"""
        # Canned token response and user profile as Google would return them.
        fake_response = {
            u'access_token': u'access_token',
            u'token_type': u'Bearer',
            u'expires_in': 3600,
            u'id_token': u'token'}
        fake_user = {
            u'family_name': u'Doe', u'name': u'John Doe',
            u'picture': u'https://goo.gl/img.jpg',
            u'locale': u'en',
            u'gender': u'male',
            u'email': u'john@gmail.com',
            u'birthday': u'0000-01-15',
            u'link': u'https://plus.google.com/id',
            u'given_name': u'John',
            u'id': u'111111111111111111111',
            u'verified_email': True}
        from pybossa.view import google
        response_user = google.manage_user(fake_response['access_token'],
                                           fake_user)
        # manage_user must have persisted a new account with this email.
        user = db.session.query(User).get(1)
        assert user.email_addr == response_user.email_addr, response_user
    @with_context
    def test_40_google_oauth_creation(self):
        """Test WEB Google OAuth detects same user name/email works"""
        fake_response = {
            u'access_token': u'access_token',
            u'token_type': u'Bearer',
            u'expires_in': 3600,
            u'id_token': u'token'}
        fake_user = {
            u'family_name': u'Doe', u'name': u'John Doe',
            u'picture': u'https://goo.gl/img.jpg',
            u'locale': u'en',
            u'gender': u'male',
            u'email': u'john@gmail.com',
            u'birthday': u'0000-01-15',
            u'link': u'https://plus.google.com/id',
            u'given_name': u'John',
            u'id': u'111111111111111111111',
            u'verified_email': True}
        # Pre-register an account so the OAuth profile collides with it.
        self.register()
        self.signout()
        from pybossa.view import google
        response_user = google.manage_user(fake_response['access_token'],
                                           fake_user)
        # A colliding name/email must not produce a new user.
        assert response_user is None, response_user
    @with_context
    def test_39_facebook_oauth_creation(self):
        """Test WEB Facebook OAuth creation of user works"""
        # Canned token response and user profile as Facebook would return.
        fake_response = {
            u'access_token': u'access_token',
            u'token_type': u'Bearer',
            u'expires_in': 3600,
            u'id_token': u'token'}
        fake_user = {
            u'username': u'teleyinex',
            u'first_name': u'John',
            u'last_name': u'Doe',
            u'verified': True,
            u'name': u'John Doe',
            u'locale': u'en_US',
            u'gender': u'male',
            u'email': u'johndoe@example.com',
            u'quotes': u'"quote',
            u'link': u'http://www.facebook.com/johndoe',
            u'timezone': 1,
            u'updated_time': u'2011-11-11T12:33:52+0000',
            u'id': u'11111'}
        from pybossa.view import facebook
        response_user = facebook.manage_user(fake_response['access_token'],
                                             fake_user)
        # manage_user must have persisted a new account with this email.
        user = db.session.query(User).get(1)
        assert user.email_addr == response_user.email_addr, response_user
    @with_context
    def test_40_facebook_oauth_creation(self):
        """Test WEB Facebook OAuth detects same user name/email works"""
        fake_response = {
            u'access_token': u'access_token',
            u'token_type': u'Bearer',
            u'expires_in': 3600,
            u'id_token': u'token'}
        fake_user = {
            u'username': u'teleyinex',
            u'first_name': u'John',
            u'last_name': u'Doe',
            u'verified': True,
            u'name': u'John Doe',
            u'locale': u'en_US',
            u'gender': u'male',
            u'email': u'johndoe@example.com',
            u'quotes': u'"quote',
            u'link': u'http://www.facebook.com/johndoe',
            u'timezone': 1,
            u'updated_time': u'2011-11-11T12:33:52+0000',
            u'id': u'11111'}
        # Pre-register an account so the OAuth profile collides with it.
        self.register()
        self.signout()
        from pybossa.view import facebook
        response_user = facebook.manage_user(fake_response['access_token'],
                                             fake_user)
        # A colliding name/email must not produce a new user.
        assert response_user is None, response_user
    @with_context
    def test_39_twitter_oauth_creation(self):
        """Test WEB Twitter OAuth creation of user works"""
        # Twitter (OAuth1) tokens come as a token/secret pair.
        fake_response = {
            u'access_token': {u'oauth_token': u'oauth_token',
                              u'oauth_token_secret': u'oauth_token_secret'},
            u'token_type': u'Bearer',
            u'expires_in': 3600,
            u'id_token': u'token'}
        fake_user = {u'screen_name': u'johndoe',
                     u'user_id': u'11111'}
        from pybossa.view import twitter
        response_user = twitter.manage_user(fake_response['access_token'],
                                            fake_user)
        user = db.session.query(User).get(1)
        assert user.email_addr == response_user.email_addr, response_user
        # Password sign-in for a Twitter-created account must point the user
        # back to Twitter sign-in.
        res = self.signin(email=user.email_addr, password='wrong')
        msg = "It seems like you signed up with your Twitter account"
        assert msg in res.data, msg
    @with_context
    def test_40_twitter_oauth_creation(self):
        """Test WEB Twitter OAuth detects same user name/email works"""
        fake_response = {
            u'access_token': {u'oauth_token': u'oauth_token',
                              u'oauth_token_secret': u'oauth_token_secret'},
            u'token_type': u'Bearer',
            u'expires_in': 3600,
            u'id_token': u'token'}
        fake_user = {u'screen_name': u'johndoe',
                     u'user_id': u'11111'}
        # Pre-register an account so the OAuth profile collides with it.
        self.register()
        self.signout()
        from pybossa.view import twitter
        response_user = twitter.manage_user(fake_response['access_token'],
                                            fake_user)
        # A colliding name/email must not produce a new user.
        assert response_user is None, response_user
    @with_context
    def test_41_password_change(self):
        """Test WEB password changing"""
        password = "mehpassword"
        self.register(password=password)
        # Valid current password: the change succeeds.
        # NOTE(review): "succesfully" reproduces the app's own message typo;
        # keep them in sync if the view text is ever fixed.
        res = self.app.post('/account/johndoe/update',
                            data={'current_password': password,
                                  'new_password': "p4ssw0rd",
                                  'confirm': "p4ssw0rd",
                                  'btn': 'Password'},
                            follow_redirects=True)
        assert "Yay, you changed your password succesfully!" in res.data, res.data
        password = "p4ssw0rd"
        self.signin(password=password)
        # Wrong current password: the change is rejected.
        res = self.app.post('/account/johndoe/update',
                            data={'current_password': "wrongpassword",
                                  'new_password': "p4ssw0rd",
                                  'confirm': "p4ssw0rd",
                                  'btn': 'Password'},
                            follow_redirects=True)
        msg = "Your current password doesn't match the one in our records"
        assert msg in res.data
        # Empty form: generic validation error.
        res = self.app.post('/account/johndoe/update',
                            data={'current_password': '',
                                  'new_password': '',
                                  'confirm': '',
                                  'btn': 'Password'},
                            follow_redirects=True)
        msg = "Please correct the errors"
        assert msg in res.data
    @with_context
    def test_42_password_link(self):
        """Test WEB visibility of password change link"""
        self.register()
        res = self.app.get('/account/johndoe/update')
        assert "Change your Password" in res.data
        # Once the account is linked to Twitter, the password form is hidden.
        user = User.query.get(1)
        user.twitter_user_id = 1234
        db.session.add(user)
        db.session.commit()
        res = self.app.get('/account/johndoe/update')
        assert "Change your Password" not in res.data, res.data
@with_context
def test_43_terms_of_use_and_data(self):
"""Test WEB terms of use is working"""
res = self.app.get('account/signin', follow_redirects=True)
assert "/help/terms-of-use" in res.data, res.data
assert "http://opendatacommons.org/licenses/by/" in res.data, res.data
res = self.app.get('account/register', follow_redirects=True)
assert "http://okfn.org/terms-of-use/" in res.data, res.data
assert "http://opendatacommons.org/licenses/by/" in res.data, res.data
    @with_context
    @patch('pybossa.view.account.signer.loads')
    def test_44_password_reset_key_errors(self, Mock):
        """Test WEB password reset key errors are caught"""
        self.register()
        user = User.query.get(1)
        userdict = {'user': user.name, 'password': user.passwd_hash}
        fakeuserdict = {'user': user.name, 'password': 'wronghash'}
        fakeuserdict_err = {'user': user.name, 'passwd': 'some'}
        fakeuserdict_form = {'user': user.name, 'passwd': 'p4ssw0rD'}
        key = signer.dumps(userdict, salt='password-reset')
        # Each element is consumed by one signer.loads call, in request
        # order: two signature errors, a valid payload, a stale hash, two
        # valid payloads (form GET/POST), then a malformed payload.
        returns = [BadSignature('Fake Error'), BadSignature('Fake Error'), userdict,
                   fakeuserdict, userdict, userdict, fakeuserdict_err]
        def side_effects(*args, **kwargs):
            # Pop the next scripted result; raise it if it is an exception.
            result = returns.pop(0)
            if isinstance(result, BadSignature):
                raise result
            return result
        Mock.side_effect = side_effects
        # Request with no key
        res = self.app.get('/account/reset-password', follow_redirects=True)
        assert 403 == res.status_code
        # Request with invalid key
        res = self.app.get('/account/reset-password?key=foo', follow_redirects=True)
        assert 403 == res.status_code
        # Request with key exception
        res = self.app.get('/account/reset-password?key=%s' % (key), follow_redirects=True)
        assert 403 == res.status_code
        res = self.app.get('/account/reset-password?key=%s' % (key), follow_redirects=True)
        assert 200 == res.status_code
        res = self.app.get('/account/reset-password?key=%s' % (key), follow_redirects=True)
        assert 403 == res.status_code
        # Check validation
        res = self.app.post('/account/reset-password?key=%s' % (key),
                            data={'new_password': '',
                                  'confirm': '#4a4'},
                            follow_redirects=True)
        assert "Please correct the errors" in res.data, res.data
        res = self.app.post('/account/reset-password?key=%s' % (key),
                            data={'new_password': 'p4ssw0rD',
                                  'confirm': 'p4ssw0rD'},
                            follow_redirects=True)
        assert "You reset your password successfully!" in res.data
        # Request without password
        res = self.app.get('/account/reset-password?key=%s' % (key), follow_redirects=True)
        assert 403 == res.status_code
    @with_context
    @patch('pybossa.view.account.mail_queue', autospec=True)
    @patch('pybossa.view.account.signer')
    def test_45_password_reset_link(self, signer, queue):
        """Test WEB password reset email form.

        Covers: unknown email, regular account (reset link), and accounts
        linked to Twitter/Google/Facebook (provider-specific guidance).
        """
        # Unknown address: no mail, an explanatory message instead.
        res = self.app.post('/account/forgot-password',
                            data={'email_addr': "johndoe@example.com"},
                            follow_redirects=True)
        assert ("We don't have this email in our records. You may have"
                " signed up with a different email or used Twitter, "
                "Facebook, or Google to sign-in") in res.data
        # One regular account plus one per OAuth provider.
        self.register()
        self.register(name='janedoe')
        self.register(name='google')
        self.register(name='facebook')
        user = User.query.get(1)
        jane = User.query.get(2)
        jane.twitter_user_id = 10
        google = User.query.get(3)
        google.google_user_id = 103
        facebook = User.query.get(4)
        facebook.facebook_user_id = 104
        db.session.add_all([jane, google, facebook])
        db.session.commit()
        # Regular account: a signed recovery link is mailed.
        data = {'password': user.passwd_hash, 'user': user.name}
        self.app.post('/account/forgot-password',
                      data={'email_addr': user.email_addr},
                      follow_redirects=True)
        signer.dumps.assert_called_with(data, salt='password-reset')
        enqueue_call = queue.enqueue.call_args_list[0]
        assert send_mail == enqueue_call[0][0], "send_mail not called"
        assert 'Click here to recover your account' in enqueue_call[0][1]['body']
        assert 'To recover your password' in enqueue_call[0][1]['html']
        # Twitter-linked account: the mail points to Twitter sign-in.
        data = {'password': jane.passwd_hash, 'user': jane.name}
        self.app.post('/account/forgot-password',
                      data={'email_addr': 'janedoe@example.com'},
                      follow_redirects=True)
        enqueue_call = queue.enqueue.call_args_list[1]
        assert send_mail == enqueue_call[0][0], "send_mail not called"
        assert 'your Twitter account to ' in enqueue_call[0][1]['body']
        assert 'your Twitter account to ' in enqueue_call[0][1]['html']
        # Google-linked account.
        data = {'password': google.passwd_hash, 'user': google.name}
        self.app.post('/account/forgot-password',
                      data={'email_addr': 'google@example.com'},
                      follow_redirects=True)
        enqueue_call = queue.enqueue.call_args_list[2]
        assert send_mail == enqueue_call[0][0], "send_mail not called"
        assert 'your Google account to ' in enqueue_call[0][1]['body']
        assert 'your Google account to ' in enqueue_call[0][1]['html']
        # Facebook-linked account.
        data = {'password': facebook.passwd_hash, 'user': facebook.name}
        self.app.post('/account/forgot-password',
                      data={'email_addr': 'facebook@example.com'},
                      follow_redirects=True)
        enqueue_call = queue.enqueue.call_args_list[3]
        assert send_mail == enqueue_call[0][0], "send_mail not called"
        assert 'your Facebook account to ' in enqueue_call[0][1]['body']
        assert 'your Facebook account to ' in enqueue_call[0][1]['html']
        # Test with not valid form
        res = self.app.post('/account/forgot-password',
                            data={'email_addr': ''},
                            follow_redirects=True)
        msg = "Something went wrong, please correct the errors"
        assert msg in res.data, res.data
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_46_tasks_exists(self, mock):
"""Test WEB tasks page works."""
self.register()
self.new_project()
res = self.app.get('/project/sampleapp/tasks/', follow_redirects=True)
assert "Edit the task presenter" in res.data, \
"Task Presenter Editor should be an option"
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_47_task_presenter_editor_loads(self, mock):
"""Test WEB task presenter editor loads"""
self.register()
self.new_project()
res = self.app.get('/project/sampleapp/tasks/taskpresentereditor',
follow_redirects=True)
err_msg = "Task Presenter options not found"
assert "Task Presenter Editor" in res.data, err_msg
err_msg = "Basic template not found"
assert "The most basic template" in res.data, err_msg
err_msg = "Image Pattern Recognition not found"
assert "Image Pattern Recognition" in res.data, err_msg
err_msg = "Sound Pattern Recognition not found"
assert "Sound Pattern Recognition" in res.data, err_msg
err_msg = "Video Pattern Recognition not found"
assert "Video Pattern Recognition" in res.data, err_msg
err_msg = "Transcribing documents not found"
assert "Transcribing documents" in res.data, err_msg
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_48_task_presenter_editor_works(self, mock):
"""Test WEB task presenter editor works"""
self.register()
self.new_project()
project = db.session.query(Project).first()
err_msg = "Task Presenter should be empty"
assert not project.info.get('task_presenter'), err_msg
res = self.app.get('/project/sampleapp/tasks/taskpresentereditor?template=basic',
follow_redirects=True)
assert "var editor" in res.data, "CodeMirror Editor not found"
assert "Task Presenter" in res.data, "CodeMirror Editor not found"
assert "Task Presenter Preview" in res.data, "CodeMirror View not found"
res = self.app.post('/project/sampleapp/tasks/taskpresentereditor',
data={'editor': 'Some HTML code!'},
follow_redirects=True)
assert "Sample Project" in res.data, "Does not return to project details"
project = db.session.query(Project).first()
err_msg = "Task Presenter failed to update"
assert project.info['task_presenter'] == 'Some HTML code!', err_msg
# Check it loads the previous posted code:
res = self.app.get('/project/sampleapp/tasks/taskpresentereditor',
follow_redirects=True)
assert "Some HTML code" in res.data, res.data
    @patch('pybossa.ckan.requests.get')
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    @patch('pybossa.forms.validator.requests.get')
    def test_48_update_app_info(self, Mock, mock, mock_webhook):
        """Test WEB project update/edit works keeping previous info values"""
        # Decorators are applied bottom-up, so: Mock patches the form
        # validator's GET, mock patches the file uploader, and mock_webhook
        # patches the CKAN GET.
        html_request = FakeResponse(text=json.dumps(self.pkg_json_not_found),
                                    status_code=200,
                                    headers={'content-type': 'application/json'},
                                    encoding='utf-8')
        Mock.return_value = html_request
        mock_webhook.return_value = html_request
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        err_msg = "Task Presenter should be empty"
        assert not project.info.get('task_presenter'), err_msg
        # Save a task presenter so project.info has an entry to preserve.
        res = self.app.post('/project/sampleapp/tasks/taskpresentereditor',
                            data={'editor': 'Some HTML code!'},
                            follow_redirects=True)
        assert "Sample Project" in res.data, "Does not return to project details"
        project = db.session.query(Project).first()
        # Seed ten extra info keys that the update must not drop.
        for i in range(10):
            key = "key_%s" % i
            project.info[key] = i
        db.session.add(project)
        db.session.commit()
        _info = project.info
        self.update_project()
        project = db.session.query(Project).first()
        # Every pre-existing info key must survive the project edit.
        for key in _info:
            assert key in project.info.keys(), \
                "The key %s is lost and it should be here" % key
        assert project.name == "Sample Project", "The project has not been updated"
        error_msg = "The project description has not been updated"
        assert project.description == "Description", error_msg
        error_msg = "The project long description has not been updated"
        assert project.long_description == "Long desc", error_msg
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_49_announcement_messages(self, mock):
"""Test WEB announcement messages works"""
self.register()
res = self.app.get("/", follow_redirects=True)
error_msg = "There should be a message for the root user"
print res.data
assert "Root Message" in res.data, error_msg
error_msg = "There should be a message for the user"
assert "User Message" in res.data, error_msg
error_msg = "There should not be an owner message"
assert "Owner Message" not in res.data, error_msg
# Now make the user a project owner
self.new_project()
res = self.app.get("/", follow_redirects=True)
error_msg = "There should be a message for the root user"
assert "Root Message" in res.data, error_msg
error_msg = "There should be a message for the user"
assert "User Message" in res.data, error_msg
error_msg = "There should be an owner message"
assert "Owner Message" in res.data, error_msg
self.signout()
# Register another user
self.register(fullname="Jane Doe", name="janedoe",
password="janedoe", email="jane@jane.com")
res = self.app.get("/", follow_redirects=True)
error_msg = "There should not be a message for the root user"
assert "Root Message" not in res.data, error_msg
error_msg = "There should be a message for the user"
assert "User Message" in res.data, error_msg
error_msg = "There should not be an owner message"
assert "Owner Message" not in res.data, error_msg
self.signout()
# Now as an anonymous user
res = self.app.get("/", follow_redirects=True)
error_msg = "There should not be a message for the root user"
assert "Root Message" not in res.data, error_msg
error_msg = "There should not be a message for the user"
assert "User Message" not in res.data, error_msg
error_msg = "There should not be an owner message"
assert "Owner Message" not in res.data, error_msg
    @with_context
    def test_50_export_task_json(self):
        """Test WEB export Tasks to JSON works"""
        Fixtures.create()
        # First test for a non-existant project
        uri = '/project/somethingnotexists/tasks/export'
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now get the tasks in JSON format
        uri = "/project/somethingnotexists/tasks/export?type=task&format=json"
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now with a real project
        uri = '/project/%s/tasks/export' % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        heading = "Export All Tasks and Task Runs"
        assert heading in res.data, "Export page should be available\n %s" % res.data
        # Now test that a 404 is raised when an arg is invalid
        uri = "/project/%s/tasks/export?type=ask&format=json" % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Missing type argument -> 404
        uri = "/project/%s/tasks/export?format=json" % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Missing format argument -> 404
        uri = "/project/%s/tasks/export?type=task" % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # And a 415 is raised if the requested format is not supported or invalid
        uri = "/project/%s/tasks/export?type=task&format=gson" % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '415 UNSUPPORTED MEDIA TYPE', res.status
        # Now get the tasks in JSON format
        self.clear_temp_container(1) # Project ID 1 is assumed here. See project.id below.
        uri = "/project/%s/tasks/export?type=task&format=json" % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        # The export is delivered as a ZIP archive holding one JSON file.
        zip = zipfile.ZipFile(StringIO(res.data))
        # Check only one file in zipfile
        err_msg = "filename count in ZIP is not 1"
        assert len(zip.namelist()) == 1, err_msg
        # Check ZIP filename
        extracted_filename = zip.namelist()[0]
        assert extracted_filename == 'test-app_task.json', zip.namelist()[0]
        exported_tasks = json.loads(zip.read(extracted_filename))
        project = db.session.query(Project)\
            .filter_by(short_name=Fixtures.project_short_name)\
            .first()
        err_msg = "The number of exported tasks is different from Project Tasks"
        assert len(exported_tasks) == len(project.tasks), err_msg
        # Tasks are exported as an attached file
        content_disposition = 'attachment; filename=%d_test-app_task_json.zip' % project.id
        assert res.headers.get('Content-Disposition') == content_disposition, res.headers
def test_export_task_json_support_non_latin1_project_names(self):
project = ProjectFactory.create(name=u'Измени Киев!', short_name=u'Измени Киев!')
self.clear_temp_container(project.owner_id)
res = self.app.get('project/%s/tasks/export?type=task&format=json' % project.short_name,
follow_redirects=True)
filename = secure_filename(unidecode(u'Измени Киев!'))
assert filename in res.headers.get('Content-Disposition'), res.headers
def test_export_taskrun_json_support_non_latin1_project_names(self):
project = ProjectFactory.create(name=u'Измени Киев!', short_name=u'Измени Киев!')
res = self.app.get('project/%s/tasks/export?type=task_run&format=json' % project.short_name,
follow_redirects=True)
filename = secure_filename(unidecode(u'Измени Киев!'))
assert filename in res.headers.get('Content-Disposition'), res.headers
def test_export_task_csv_support_non_latin1_project_names(self):
project = ProjectFactory.create(name=u'Измени Киев!', short_name=u'Измени Киев!')
TaskFactory.create(project=project)
res = self.app.get('/project/%s/tasks/export?type=task&format=csv' % project.short_name,
follow_redirects=True)
filename = secure_filename(unidecode(u'Измени Киев!'))
assert filename in res.headers.get('Content-Disposition'), res.headers
def test_export_taskrun_csv_support_non_latin1_project_names(self):
project = ProjectFactory.create(name=u'Измени Киев!', short_name=u'Измени Киев!')
task = TaskFactory.create(project=project)
TaskRunFactory.create(task=task)
res = self.app.get('/project/%s/tasks/export?type=task_run&format=csv' % project.short_name,
follow_redirects=True)
filename = secure_filename(unidecode(u'Измени Киев!'))
assert filename in res.headers.get('Content-Disposition'), res.headers
    @with_context
    def test_export_taskruns_json(self):
        """Test WEB export Task Runs to JSON works"""
        Fixtures.create()
        # First test for a non-existant project
        uri = '/project/somethingnotexists/tasks/export'
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now get the tasks in JSON format
        uri = "/project/somethingnotexists/tasks/export?type=task&format=json"
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now with a real project
        self.clear_temp_container(1) # Project ID 1 is assumed here. See project.id below.
        uri = '/project/%s/tasks/export' % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        heading = "Export All Tasks and Task Runs"
        assert heading in res.data, "Export page should be available\n %s" % res.data
        # Now get the tasks in JSON format
        uri = "/project/%s/tasks/export?type=task_run&format=json" % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        # The export is delivered as a ZIP archive holding one JSON file.
        zip = zipfile.ZipFile(StringIO(res.data))
        # Check only one file in zipfile
        err_msg = "filename count in ZIP is not 1"
        assert len(zip.namelist()) == 1, err_msg
        # Check ZIP filename
        extracted_filename = zip.namelist()[0]
        assert extracted_filename == 'test-app_task_run.json', zip.namelist()[0]
        exported_task_runs = json.loads(zip.read(extracted_filename))
        project = db.session.query(Project)\
            .filter_by(short_name=Fixtures.project_short_name)\
            .first()
        err_msg = "The number of exported task runs is different from Project Tasks"
        assert len(exported_task_runs) == len(project.task_runs), err_msg
        # Task runs are exported as an attached file
        content_disposition = 'attachment; filename=%d_test-app_task_run_json.zip' % project.id
        assert res.headers.get('Content-Disposition') == content_disposition, res.headers
@with_context
def test_export_task_json_no_tasks_returns_file_with_empty_list(self):
"""Test WEB export Tasks to JSON returns empty list if no tasks in project"""
project = ProjectFactory.create(short_name='no_tasks_here')
uri = "/project/%s/tasks/export?type=task&format=json" % project.short_name
res = self.app.get(uri, follow_redirects=True)
zip = zipfile.ZipFile(StringIO(res.data))
extracted_filename = zip.namelist()[0]
exported_task_runs = json.loads(zip.read(extracted_filename))
assert exported_task_runs == [], exported_task_runs
@with_context
def test_export_task_csv(self):
"""Test WEB export Tasks to CSV works"""
# Fixtures.create()
# First test for a non-existant project
uri = '/project/somethingnotexists/tasks/export'
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now get the tasks in CSV format
uri = "/project/somethingnotexists/tasks/export?type=task&format=csv"
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now get the wrong table name in CSV format
uri = "/project/%s/tasks/export?type=wrong&format=csv" % Fixtures.project_short_name
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now with a real project
project = ProjectFactory.create()
self.clear_temp_container(project.owner_id)
for i in range(0, 5):
task = TaskFactory.create(project=project, info={'question': i})
uri = '/project/%s/tasks/export' % project.short_name
res = self.app.get(uri, follow_redirects=True)
heading = "Export All Tasks and Task Runs"
data = res.data.decode('utf-8')
assert heading in data, "Export page should be available\n %s" % data
# Now get the tasks in CSV format
uri = "/project/%s/tasks/export?type=task&format=csv" % project.short_name
res = self.app.get(uri, follow_redirects=True)
zip = zipfile.ZipFile(StringIO(res.data))
# Check only one file in zipfile
err_msg = "filename count in ZIP is not 1"
assert len(zip.namelist()) == 1, err_msg
# Check ZIP filename
extracted_filename = zip.namelist()[0]
assert extracted_filename == 'project1_task.csv', zip.namelist()[0]
csv_content = StringIO(zip.read(extracted_filename))
csvreader = unicode_csv_reader(csv_content)
project = db.session.query(Project)\
.filter_by(short_name=project.short_name)\
.first()
exported_tasks = []
n = 0
for row in csvreader:
print row
if n != 0:
exported_tasks.append(row)
else:
keys = row
n = n + 1
err_msg = "The number of exported tasks is different from Project Tasks"
assert len(exported_tasks) == len(project.tasks), err_msg
for t in project.tasks:
err_msg = "All the task column names should be included"
for tk in t.dictize().keys():
expected_key = "task__%s" % tk
assert expected_key in keys, err_msg
err_msg = "All the task.info column names should be included"
for tk in t.info.keys():
expected_key = "taskinfo__%s" % tk
assert expected_key in keys, err_msg
for et in exported_tasks:
task_id = et[keys.index('task__id')]
task = db.session.query(Task).get(task_id)
task_dict = task.dictize()
for k in task_dict:
slug = 'task__%s' % k
err_msg = "%s != %s" % (task_dict[k], et[keys.index(slug)])
if k != 'info':
assert unicode(task_dict[k]) == et[keys.index(slug)], err_msg
else:
assert json.dumps(task_dict[k]) == et[keys.index(slug)], err_msg
for k in task_dict['info'].keys():
slug = 'taskinfo__%s' % k
err_msg = "%s != %s" % (task_dict['info'][k], et[keys.index(slug)])
assert unicode(task_dict['info'][k]) == et[keys.index(slug)], err_msg
# Tasks are exported as an attached file
content_disposition = 'attachment; filename=%d_project1_task_csv.zip' % project.id
assert res.headers.get('Content-Disposition') == content_disposition, res.headers
@with_context
def test_export_task_csv_no_tasks_returns_empty_file(self):
"""Test WEB export Tasks to CSV returns empty file if no tasks in project"""
project = ProjectFactory.create(short_name='no_tasks_here')
uri = "/project/%s/tasks/export?type=task&format=csv" % project.short_name
res = self.app.get(uri, follow_redirects=True)
zip = zipfile.ZipFile(StringIO(res.data))
extracted_filename = zip.namelist()[0]
csv_content = StringIO(zip.read(extracted_filename))
csvreader = unicode_csv_reader(csv_content)
is_empty = True
for line in csvreader:
is_empty = False, line
assert is_empty
    @with_context
    def test_53_export_task_runs_csv(self):
        """Test WEB export Task Runs to CSV works"""
        # First test for a non-existant project
        uri = '/project/somethingnotexists/tasks/export'
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now get the tasks in CSV format
        uri = "/project/somethingnotexists/tasks/export?type=tas&format=csv"
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now with a real project
        project = ProjectFactory.create()
        self.clear_temp_container(project.owner_id)
        task = TaskFactory.create(project=project)
        # Two answers for the same task so the export has multiple rows.
        for i in range(2):
            task_run = TaskRunFactory.create(project=project, task=task, info={'answer': i})
        uri = '/project/%s/tasks/export' % project.short_name
        res = self.app.get(uri, follow_redirects=True)
        heading = "Export All Tasks and Task Runs"
        data = res.data.decode('utf-8')
        assert heading in data, "Export page should be available\n %s" % data
        # Now get the tasks in CSV format
        uri = "/project/%s/tasks/export?type=task_run&format=csv" % project.short_name
        res = self.app.get(uri, follow_redirects=True)
        # The export is delivered as a ZIP archive holding one CSV file.
        zip = zipfile.ZipFile(StringIO(res.data))
        # Check only one file in zipfile
        err_msg = "filename count in ZIP is not 1"
        assert len(zip.namelist()) == 1, err_msg
        # Check ZIP filename
        extracted_filename = zip.namelist()[0]
        assert extracted_filename == 'project1_task_run.csv', zip.namelist()[0]
        csv_content = StringIO(zip.read(extracted_filename))
        csvreader = unicode_csv_reader(csv_content)
        project = db.session.query(Project)\
            .filter_by(short_name=project.short_name)\
            .first()
        exported_task_runs = []
        n = 0
        # First CSV row holds the column names; the rest are task runs.
        for row in csvreader:
            if n != 0:
                exported_task_runs.append(row)
            else:
                keys = row
            n = n + 1
        err_msg = "The number of exported task runs is different \
from Project Tasks Runs: %s != %s" % (len(exported_task_runs), len(project.task_runs))
        assert len(exported_task_runs) == len(project.task_runs), err_msg
        # Every task_run column and every task_run.info key must be a header.
        for t in project.tasks[0].task_runs:
            for tk in t.dictize().keys():
                expected_key = "task_run__%s" % tk
                assert expected_key in keys, expected_key
            for tk in t.info.keys():
                expected_key = "task_runinfo__%s" % tk
                assert expected_key in keys, expected_key
        # Each exported row must match the task run stored in the DB.
        for et in exported_task_runs:
            task_run_id = et[keys.index('task_run__id')]
            task_run = db.session.query(TaskRun).get(task_run_id)
            task_run_dict = task_run.dictize()
            for k in task_run_dict:
                slug = 'task_run__%s' % k
                err_msg = "%s != %s" % (task_run_dict[k], et[keys.index(slug)])
                if k != 'info':
                    assert unicode(task_run_dict[k]) == et[keys.index(slug)], err_msg
                else:
                    assert json.dumps(task_run_dict[k]) == et[keys.index(slug)], err_msg
            for k in task_run_dict['info'].keys():
                slug = 'task_runinfo__%s' % k
                err_msg = "%s != %s" % (task_run_dict['info'][k], et[keys.index(slug)])
                assert unicode(task_run_dict['info'][k]) == et[keys.index(slug)], err_msg
        # Task runs are exported as an attached file
        content_disposition = 'attachment; filename=%d_project1_task_run_csv.zip' % project.id
        assert res.headers.get('Content-Disposition') == content_disposition, res.headers
@with_context
@patch('pybossa.view.projects.Ckan', autospec=True)
def test_export_tasks_ckan_exception(self, mock1):
mocks = [Mock()]
from test_ckan import TestCkanModule
fake_ckn = TestCkanModule()
package = fake_ckn.pkg_json_found
package['id'] = 3
mocks[0].package_exists.return_value = (False,
Exception("CKAN: error",
"error", 500))
# mocks[0].package_create.return_value = fake_ckn.pkg_json_found
# mocks[0].resource_create.return_value = dict(result=dict(id=3))
# mocks[0].datastore_create.return_value = 'datastore'
# mocks[0].datastore_upsert.return_value = 'datastore'
mock1.side_effect = mocks
"""Test WEB Export CKAN Tasks works."""
Fixtures.create()
user = db.session.query(User).filter_by(name=Fixtures.name).first()
project = db.session.query(Project).first()
user.ckan_api = 'ckan-api-key'
project.owner_id = user.id
db.session.add(user)
db.session.add(project)
db.session.commit()
self.signin(email=user.email_addr, password=Fixtures.password)
# Now with a real project
uri = '/project/%s/tasks/export' % Fixtures.project_short_name
res = self.app.get(uri, follow_redirects=True)
heading = "Export All Tasks and Task Runs"
assert heading in res.data, "Export page should be available\n %s" % res.data
# Now get the tasks in CKAN format
uri = "/project/%s/tasks/export?type=task&format=ckan" % Fixtures.project_short_name
with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):
# First time exporting the package
res = self.app.get(uri, follow_redirects=True)
msg = 'Error'
err_msg = "An exception should be raised"
assert msg in res.data, err_msg
@with_context
@patch('pybossa.view.projects.Ckan', autospec=True)
def test_export_tasks_ckan_connection_error(self, mock1):
mocks = [Mock()]
from test_ckan import TestCkanModule
fake_ckn = TestCkanModule()
package = fake_ckn.pkg_json_found
package['id'] = 3
mocks[0].package_exists.return_value = (False, ConnectionError)
# mocks[0].package_create.return_value = fake_ckn.pkg_json_found
# mocks[0].resource_create.return_value = dict(result=dict(id=3))
# mocks[0].datastore_create.return_value = 'datastore'
# mocks[0].datastore_upsert.return_value = 'datastore'
mock1.side_effect = mocks
"""Test WEB Export CKAN Tasks works."""
Fixtures.create()
user = db.session.query(User).filter_by(name=Fixtures.name).first()
project = db.session.query(Project).first()
user.ckan_api = 'ckan-api-key'
project.owner_id = user.id
db.session.add(user)
db.session.add(project)
db.session.commit()
self.signin(email=user.email_addr, password=Fixtures.password)
# Now with a real project
uri = '/project/%s/tasks/export' % Fixtures.project_short_name
res = self.app.get(uri, follow_redirects=True)
heading = "Export All Tasks and Task Runs"
assert heading in res.data, "Export page should be available\n %s" % res.data
# Now get the tasks in CKAN format
uri = "/project/%s/tasks/export?type=task&format=ckan" % Fixtures.project_short_name
with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):
# First time exporting the package
res = self.app.get(uri, follow_redirects=True)
msg = 'CKAN server seems to be down'
err_msg = "A connection exception should be raised"
assert msg in res.data, err_msg
@with_context
@patch('pybossa.view.projects.Ckan', autospec=True)
def test_task_export_tasks_ckan_first_time(self, mock1):
"""Test WEB Export CKAN Tasks works without an existing package."""
# Second time exporting the package
mocks = [Mock()]
resource = dict(name='task', id=1)
package = dict(id=3, resources=[resource])
mocks[0].package_exists.return_value = (None, None)
mocks[0].package_create.return_value = package
#mocks[0].datastore_delete.return_value = None
mocks[0].datastore_create.return_value = None
mocks[0].datastore_upsert.return_value = None
mocks[0].resource_create.return_value = dict(result=dict(id=3))
mocks[0].datastore_create.return_value = 'datastore'
mocks[0].datastore_upsert.return_value = 'datastore'
mock1.side_effect = mocks
Fixtures.create()
user = db.session.query(User).filter_by(name=Fixtures.name).first()
project = db.session.query(Project).first()
user.ckan_api = 'ckan-api-key'
project.owner_id = user.id
db.session.add(user)
db.session.add(project)
db.session.commit()
self.signin(email=user.email_addr, password=Fixtures.password)
# First test for a non-existant project
uri = '/project/somethingnotexists/tasks/export'
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now get the tasks in CKAN format
uri = "/project/somethingnotexists/tasks/export?type=task&format=ckan"
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now get the tasks in CKAN format
uri = "/project/somethingnotexists/tasks/export?type=other&format=ckan"
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now with a real project
uri = '/project/%s/tasks/export' % Fixtures.project_short_name
res = self.app.get(uri, follow_redirects=True)
heading = "Export All Tasks and Task Runs"
assert heading in res.data, "Export page should be available\n %s" % res.data
# Now get the tasks in CKAN format
uri = "/project/%s/tasks/export?type=task&format=ckan" % Fixtures.project_short_name
with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):
# First time exporting the package
res = self.app.get(uri, follow_redirects=True)
msg = 'Data exported to http://ckan.com'
err_msg = "Tasks should be exported to CKAN"
assert msg in res.data, err_msg
@with_context
@patch('pybossa.view.projects.Ckan', autospec=True)
def test_task_export_tasks_ckan_second_time(self, mock1):
"""Test WEB Export CKAN Tasks works with an existing package."""
# Second time exporting the package
mocks = [Mock()]
resource = dict(name='task', id=1)
package = dict(id=3, resources=[resource])
mocks[0].package_exists.return_value = (package, None)
mocks[0].package_update.return_value = package
mocks[0].datastore_delete.return_value = None
mocks[0].datastore_create.return_value = None
mocks[0].datastore_upsert.return_value = None
mocks[0].resource_create.return_value = dict(result=dict(id=3))
mocks[0].datastore_create.return_value = 'datastore'
mocks[0].datastore_upsert.return_value = 'datastore'
mock1.side_effect = mocks
Fixtures.create()
user = db.session.query(User).filter_by(name=Fixtures.name).first()
project = db.session.query(Project).first()
user.ckan_api = 'ckan-api-key'
project.owner_id = user.id
db.session.add(user)
db.session.add(project)
db.session.commit()
self.signin(email=user.email_addr, password=Fixtures.password)
# First test for a non-existant project
uri = '/project/somethingnotexists/tasks/export'
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now get the tasks in CKAN format
uri = "/project/somethingnotexists/tasks/export?type=task&format=ckan"
res = self.app.get(uri, follow_redirects=True)
assert res.status == '404 NOT FOUND', res.status
# Now with a real project
uri = '/project/%s/tasks/export' % Fixtures.project_short_name
res = self.app.get(uri, follow_redirects=True)
heading = "Export All Tasks and Task Runs"
assert heading in res.data, "Export page should be available\n %s" % res.data
# Now get the tasks in CKAN format
uri = "/project/%s/tasks/export?type=task&format=ckan" % Fixtures.project_short_name
#res = self.app.get(uri, follow_redirects=True)
with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):
# First time exporting the package
res = self.app.get(uri, follow_redirects=True)
msg = 'Data exported to http://ckan.com'
err_msg = "Tasks should be exported to CKAN"
assert msg in res.data, err_msg
    @with_context
    @patch('pybossa.view.projects.Ckan', autospec=True)
    def test_task_export_tasks_ckan_without_resources(self, mock1):
        """Test WEB Export CKAN Tasks works without resources."""
        mocks = [Mock()]
        # The package exists but carries no resources, so the view has to
        # create the resource and the datastore before upserting the tasks.
        package = dict(id=3, resources=[])
        mocks[0].package_exists.return_value = (package, None)
        mocks[0].package_update.return_value = package
        mocks[0].resource_create.return_value = dict(result=dict(id=3))
        mocks[0].datastore_create.return_value = 'datastore'
        mocks[0].datastore_upsert.return_value = 'datastore'
        mock1.side_effect = mocks
        Fixtures.create()
        user = db.session.query(User).filter_by(name=Fixtures.name).first()
        project = db.session.query(Project).first()
        # The export needs the owner to have a CKAN API key.
        user.ckan_api = 'ckan-api-key'
        project.owner_id = user.id
        db.session.add(user)
        db.session.add(project)
        db.session.commit()
        self.signin(email=user.email_addr, password=Fixtures.password)
        # First test for a non-existant project
        uri = '/project/somethingnotexists/tasks/export'
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now get the tasks in CKAN format
        uri = "/project/somethingnotexists/tasks/export?type=task&format=ckan"
        res = self.app.get(uri, follow_redirects=True)
        assert res.status == '404 NOT FOUND', res.status
        # Now with a real project
        uri = '/project/%s/tasks/export' % Fixtures.project_short_name
        res = self.app.get(uri, follow_redirects=True)
        heading = "Export All Tasks and Task Runs"
        assert heading in res.data, "Export page should be available\n %s" % res.data
        # Now get the tasks in CKAN format
        uri = "/project/%s/tasks/export?type=task&format=ckan" % Fixtures.project_short_name
        with patch.dict(self.flask_app.config, {'CKAN_URL': 'http://ckan.com'}):
            # First time exporting the package
            res = self.app.get(uri, follow_redirects=True)
            msg = 'Data exported to http://ckan.com'
            err_msg = "Tasks should be exported to CKAN"
            assert msg in res.data, err_msg
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_get_import_tasks_no_params_shows_options_and_templates(self, mock):
"""Test WEB import tasks displays the different importers and template
tasks"""
Fixtures.create()
self.register()
self.new_project()
res = self.app.get('/project/sampleapp/tasks/import', follow_redirects=True)
err_msg = "There should be a CSV importer"
assert "type=csv" in res.data, err_msg
err_msg = "There should be a GDocs importer"
assert "type=gdocs" in res.data, err_msg
err_msg = "There should be an Epicollect importer"
assert "type=epicollect" in res.data, err_msg
err_msg = "There should be a Flickr importer"
assert "type=flickr" in res.data, err_msg
err_msg = "There should be a Dropbox importer"
assert "type=dropbox" in res.data, err_msg
err_msg = "There should be a Twitter importer"
assert "type=twitter" in res.data, err_msg
err_msg = "There should be an S3 importer"
assert "type=s3" in res.data, err_msg
err_msg = "There should be an Image template"
assert "template=image" in res.data, err_msg
err_msg = "There should be a Map template"
assert "template=map" in res.data, err_msg
err_msg = "There should be a PDF template"
assert "template=pdf" in res.data, err_msg
err_msg = "There should be a Sound template"
assert "template=sound" in res.data, err_msg
err_msg = "There should be a Video template"
assert "template=video" in res.data, err_msg
self.signout()
self.signin(email=Fixtures.email_addr2, password=Fixtures.password)
res = self.app.get('/project/sampleapp/tasks/import', follow_redirects=True)
assert res.status_code == 403, res.status_code
def test_get_import_tasks_with_specific_variant_argument(self):
"""Test task importer with specific importer variant argument
shows the form for it, for each of the variants"""
self.register()
owner = db.session.query(User).first()
project = ProjectFactory.create(owner=owner)
# CSV
url = "/project/%s/tasks/import?type=csv" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From a CSV file" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# Google Docs
url = "/project/%s/tasks/import?type=gdocs" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From a Google Docs Spreadsheet" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# Epicollect Plus
url = "/project/%s/tasks/import?type=epicollect" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From an EpiCollect Plus project" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# Flickr
url = "/project/%s/tasks/import?type=flickr" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From a Flickr Album" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# Dropbox
url = "/project/%s/tasks/import?type=dropbox" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From your Dropbox account" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# Twitter
url = "/project/%s/tasks/import?type=twitter" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From a Twitter hashtag or account" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# S3
url = "/project/%s/tasks/import?type=s3" % project.short_name
res = self.app.get(url, follow_redirects=True)
data = res.data.decode('utf-8')
assert "From an Amazon S3 bucket" in data
assert 'action="/project/%E2%9C%93project1/tasks/import"' in data
# Invalid
url = "/project/%s/tasks/import?type=invalid" % project.short_name
res = self.app.get(url, follow_redirects=True)
assert res.status_code == 404, res.status_code
@patch('pybossa.core.importer.get_all_importer_names')
def test_get_importer_doesnt_show_unavailable_importers(self, names):
names.return_value = ['csv', 'gdocs', 'epicollect', 's3']
self.register()
owner = db.session.query(User).first()
project = ProjectFactory.create(owner=owner)
url = "/project/%s/tasks/import" % project.short_name
res = self.app.get(url, follow_redirects=True)
assert "type=flickr" not in res.data
assert "type=dropbox" not in res.data
assert "type=twitter" not in res.data
    @patch('pybossa.view.projects.redirect', wraps=redirect)
    @patch('pybossa.importers.csv.requests.get')
    def test_import_tasks_redirects_on_success(self, request, redirect):
        """Test WEB when importing tasks succeeds, user is redirected to tasks main page"""
        # Fake the remote CSV download: one header row plus one data row.
        csv_file = FakeResponse(text='Foo,Bar,Baz\n1,2,3', status_code=200,
                                headers={'content-type': 'text/plain'},
                                encoding='utf-8')
        request.return_value = csv_file
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        url = '/project/%s/tasks/import' % project.short_name
        res = self.app.post(url, data={'csv_url': 'http://myfakecsvurl.com',
                                       'formtype': 'csv', 'form_name': 'csv'},
                            follow_redirects=True)
        assert "1 new task was imported successfully" in res.data
        # After a successful synchronous import the view must redirect to the
        # project's task list page.
        redirect.assert_called_with('/project/%s/tasks/' % project.short_name)
    @patch('pybossa.view.projects.importer.count_tasks_to_import')
    @patch('pybossa.view.projects.importer.create_tasks')
    def test_import_few_tasks_is_done_synchronously(self, create, count):
        """Test WEB importing a small amount of tasks is done synchronously"""
        # A count below the async threshold makes the view import inline, so
        # the import report message shows up in the response immediately.
        count.return_value = 1
        create.return_value = ImportReport(message='1 new task was imported successfully', metadata=None, total=1)
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        url = '/project/%s/tasks/import' % project.short_name
        res = self.app.post(url, data={'csv_url': 'http://myfakecsvurl.com',
                                       'formtype': 'csv', 'form_name': 'csv'},
                            follow_redirects=True)
        assert "1 new task was imported successfully" in res.data
    @patch('pybossa.view.projects.importer_queue', autospec=True)
    @patch('pybossa.view.projects.importer.count_tasks_to_import')
    def test_import_tasks_as_background_job(self, count_tasks, queue):
        """Test WEB importing a big amount of tasks is done in the background"""
        from pybossa.view.projects import MAX_NUM_SYNCHRONOUS_TASKS_IMPORT
        # One task over the synchronous limit forces the background path.
        count_tasks.return_value = MAX_NUM_SYNCHRONOUS_TASKS_IMPORT + 1
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        url = '/project/%s/tasks/import' % project.short_name
        res = self.app.post(url, data={'csv_url': 'http://myfakecsvurl.com',
                                       'formtype': 'csv', 'form_name': 'csv'},
                            follow_redirects=True)
        # Nothing was created synchronously...
        tasks = db.session.query(Task).all()
        assert tasks == [], "Tasks should not be immediately added"
        # ...instead the import job was enqueued with the form data as kwargs.
        data = {'type': 'csv', 'csv_url': 'http://myfakecsvurl.com'}
        queue.enqueue.assert_called_once_with(import_tasks, project.id, **data)
        msg = "You're trying to import a large amount of tasks, so please be patient.\
            You will receive an email when the tasks are ready."
        assert msg in res.data
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
@patch('pybossa.importers.csv.requests.get')
def test_bulk_csv_import_works(self, Mock, mock):
"""Test WEB bulk import works"""
csv_file = FakeResponse(text='Foo,Bar,priority_0\n1,2,3', status_code=200,
headers={'content-type': 'text/plain'},
encoding='utf-8')
Mock.return_value = csv_file
self.register()
self.new_project()
project = db.session.query(Project).first()
url = '/project/%s/tasks/import' % (project.short_name)
res = self.app.post(url, data={'csv_url': 'http://myfakecsvurl.com',
'formtype': 'csv', 'form_name': 'csv'},
follow_redirects=True)
task = db.session.query(Task).first()
assert {u'Bar': u'2', u'Foo': u'1'} == task.info
assert task.priority_0 == 3
assert "1 new task was imported successfully" in res.data
# Check that only new items are imported
empty_file = FakeResponse(text='Foo,Bar,priority_0\n1,2,3\n4,5,6',
status_code=200,
headers={'content-type': 'text/plain'},
encoding='utf-8')
Mock.return_value = empty_file
project = db.session.query(Project).first()
url = '/project/%s/tasks/import' % (project.short_name)
res = self.app.post(url, data={'csv_url': 'http://myfakecsvurl.com',
'formtype': 'csv', 'form_name': 'csv'},
follow_redirects=True)
project = db.session.query(Project).first()
assert len(project.tasks) == 2, "There should be only 2 tasks"
n = 0
csv_tasks = [{u'Foo': u'1', u'Bar': u'2'}, {u'Foo': u'4', u'Bar': u'5'}]
for t in project.tasks:
assert t.info == csv_tasks[n], "The task info should be the same"
n += 1
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
@patch('pybossa.importers.csv.requests.get')
def test_bulk_gdocs_import_works(self, Mock, mock):
"""Test WEB bulk GDocs import works."""
csv_file = FakeResponse(text='Foo,Bar,priority_0\n1,2,3', status_code=200,
headers={'content-type': 'text/plain'},
encoding='utf-8')
Mock.return_value = csv_file
self.register()
self.new_project()
project = db.session.query(Project).first()
url = '/project/%s/tasks/import' % (project.short_name)
res = self.app.post(url, data={'googledocs_url': 'http://drive.google.com',
'formtype': 'gdocs', 'form_name': 'gdocs'},
follow_redirects=True)
task = db.session.query(Task).first()
assert {u'Bar': u'2', u'Foo': u'1'} == task.info
assert task.priority_0 == 3
assert "1 new task was imported successfully" in res.data
# Check that only new items are imported
empty_file = FakeResponse(text='Foo,Bar,priority_0\n1,2,3\n4,5,6',
status_code=200,
headers={'content-type': 'text/plain'},
encoding='utf-8')
Mock.return_value = empty_file
project = db.session.query(Project).first()
url = '/project/%s/tasks/import' % (project.short_name)
res = self.app.post(url, data={'googledocs_url': 'http://drive.google.com',
'formtype': 'gdocs', 'form_name': 'gdocs'},
follow_redirects=True)
project = db.session.query(Project).first()
assert len(project.tasks) == 2, "There should be only 2 tasks"
n = 0
csv_tasks = [{u'Foo': u'1', u'Bar': u'2'}, {u'Foo': u'4', u'Bar': u'5'}]
for t in project.tasks:
assert t.info == csv_tasks[n], "The task info should be the same"
n += 1
# Check that only new items are imported
project = db.session.query(Project).first()
url = '/project/%s/tasks/import' % (project.short_name)
res = self.app.post(url, data={'googledocs_url': 'http://drive.google.com',
'formtype': 'gdocs', 'form_name': 'gdocs'},
follow_redirects=True)
project = db.session.query(Project).first()
assert len(project.tasks) == 2, "There should be only 2 tasks"
n = 0
csv_tasks = [{u'Foo': u'1', u'Bar': u'2'}, {u'Foo': u'4', u'Bar': u'5'}]
for t in project.tasks:
assert t.info == csv_tasks[n], "The task info should be the same"
n += 1
assert "no new records" in res.data, res.data
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
@patch('pybossa.importers.epicollect.requests.get')
def test_bulk_epicollect_import_works(self, Mock, mock):
"""Test WEB bulk Epicollect import works"""
data = [dict(DeviceID=23)]
fake_response = FakeResponse(text=json.dumps(data), status_code=200,
headers={'content-type': 'application/json'},
encoding='utf-8')
Mock.return_value = fake_response
self.register()
self.new_project()
project = db.session.query(Project).first()
res = self.app.post(('/project/%s/tasks/import' % (project.short_name)),
data={'epicollect_project': 'fakeproject',
'epicollect_form': 'fakeform',
'formtype': 'json', 'form_name': 'epicollect'},
follow_redirects=True)
project = db.session.query(Project).first()
err_msg = "Tasks should be imported"
assert "1 new task was imported successfully" in res.data, err_msg
tasks = db.session.query(Task).filter_by(project_id=project.id).all()
err_msg = "The imported task from EpiCollect is wrong"
assert tasks[0].info['DeviceID'] == 23, err_msg
data = [dict(DeviceID=23), dict(DeviceID=24)]
fake_response = FakeResponse(text=json.dumps(data), status_code=200,
headers={'content-type': 'application/json'},
encoding='utf-8')
Mock.return_value = fake_response
res = self.app.post(('/project/%s/tasks/import' % (project.short_name)),
data={'epicollect_project': 'fakeproject',
'epicollect_form': 'fakeform',
'formtype': 'json', 'form_name': 'epicollect'},
follow_redirects=True)
project = db.session.query(Project).first()
assert len(project.tasks) == 2, "There should be only 2 tasks"
n = 0
epi_tasks = [{u'DeviceID': 23}, {u'DeviceID': 24}]
for t in project.tasks:
assert t.info == epi_tasks[n], "The task info should be the same"
n += 1
    @patch('pybossa.importers.flickr.requests.get')
    def test_bulk_flickr_import_works(self, request):
        """Test WEB bulk Flickr import works"""
        # Canned flickr.photosets.getPhotos response with a single photo.
        data = {
            "photoset": {
                "id": "72157633923521788",
                "primary": "8947113500",
                "owner": "32985084@N00",
                "ownername": "Teleyinex",
                "photo": [{"id": "8947115130", "secret": "00e2301a0d",
                           "server": "5441", "farm": 6, "title": "Title",
                           "isprimary": 0, "ispublic": 1, "isfriend": 0,
                           "isfamily": 0}
                          ],
                "page": 1,
                "per_page": "500",
                "perpage": "500",
                "pages": 1,
                "total": 1,
                "title": "Science Hack Day Balloon Mapping Workshop"},
            "stat": "ok"}
        fake_response = FakeResponse(text=json.dumps(data), status_code=200,
                                     headers={'content-type': 'application/json'},
                                     encoding='utf-8')
        request.return_value = fake_response
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        res = self.app.post(('/project/%s/tasks/import' % (project.short_name)),
                            data={'album_id': '1234',
                                  'form_name': 'flickr'},
                            follow_redirects=True)
        project = db.session.query(Project).first()
        err_msg = "Tasks should be imported"
        assert "1 new task was imported successfully" in res.data, err_msg
        tasks = db.session.query(Task).filter_by(project_id=project.id).all()
        # The importer must build the static photo URLs (plain, _m and _b
        # sizes) from farm/server/id/secret and link back to the photo page.
        expected_info = {
            u'url': u'https://farm6.staticflickr.com/5441/8947115130_00e2301a0d.jpg',
            u'url_m': u'https://farm6.staticflickr.com/5441/8947115130_00e2301a0d_m.jpg',
            u'url_b': u'https://farm6.staticflickr.com/5441/8947115130_00e2301a0d_b.jpg',
            u'link': u'https://www.flickr.com/photos/32985084@N00/8947115130',
            u'title': u'Title'}
        assert tasks[0].info == expected_info, tasks[0].info
def test_flickr_importer_page_shows_option_to_log_into_flickr(self):
self.register()
owner = db.session.query(User).first()
project = ProjectFactory.create(owner=owner)
url = "/project/%s/tasks/import?type=flickr" % project.short_name
res = self.app.get(url)
login_url = '/flickr/?next=%2Fproject%2F%25E2%259C%2593project1%2Ftasks%2Fimport%3Ftype%3Dflickr'
assert login_url in res.data
def test_bulk_dropbox_import_works(self):
"""Test WEB bulk Dropbox import works"""
dropbox_file_data = (u'{"bytes":286,'
u'"link":"https://www.dropbox.com/s/l2b77qvlrequ6gl/test.txt?dl=0",'
u'"name":"test.txt",'
u'"icon":"https://www.dropbox.com/static/images/icons64/page_white_text.png"}')
self.register()
self.new_project()
project = db.session.query(Project).first()
res = self.app.post('/project/%s/tasks/import' % project.short_name,
data={'files-0': dropbox_file_data,
'form_name': 'dropbox'},
follow_redirects=True)
project = db.session.query(Project).first()
err_msg = "Tasks should be imported"
tasks = db.session.query(Task).filter_by(project_id=project.id).all()
expected_info = {
u'link_raw': u'https://www.dropbox.com/s/l2b77qvlrequ6gl/test.txt?raw=1',
u'link': u'https://www.dropbox.com/s/l2b77qvlrequ6gl/test.txt?dl=0',
u'filename': u'test.txt'}
assert tasks[0].info == expected_info, tasks[0].info
    @patch('pybossa.importers.twitterapi.Twitter')
    @patch('pybossa.importers.twitterapi.oauth2_dance')
    def test_bulk_twitter_import_works(self, oauth, client):
        """Test WEB bulk Twitter import works"""
        # Canned search/tweets payload with one status matching the hashtag.
        tweet_data = {
            'statuses': [
                {
                    u'created_at': 'created',
                    u'favorite_count': 77,
                    u'coordinates': 'coords',
                    u'id_str': u'1',
                    u'id': 1,
                    u'retweet_count': 44,
                    u'user': {'screen_name': 'fulanito'},
                    u'text': 'this is a tweet #match'
                }
            ]
        }
        # Fully mock the Twitter client so no network access happens.
        client_instance = Mock()
        client_instance.search.tweets.return_value = tweet_data
        client.return_value = client_instance
        self.register()
        self.new_project()
        project = db.session.query(Project).first()
        res = self.app.post('/project/%s/tasks/import' % project.short_name,
                            data={'source': '#match',
                                  'max_tweets': 1,
                                  'form_name': 'twitter'},
                            follow_redirects=True)
        project = db.session.query(Project).first()
        err_msg = "Tasks should be imported"
        tasks = db.session.query(Task).filter_by(project_id=project.id).all()
        # The imported task keeps every tweet field and additionally flattens
        # the author into a top-level 'user_screen_name' key.
        expected_info = {
            u'created_at': 'created',
            u'favorite_count': 77,
            u'coordinates': 'coords',
            u'id_str': u'1',
            u'id': 1,
            u'retweet_count': 44,
            u'user': {'screen_name': 'fulanito'},
            u'user_screen_name': 'fulanito',
            u'text': 'this is a tweet #match'
        }
        assert tasks[0].info == expected_info, tasks[0].info
def test_bulk_s3_import_works(self):
"""Test WEB bulk S3 import works"""
self.register()
self.new_project()
project = db.session.query(Project).first()
res = self.app.post('/project/%s/tasks/import' % project.short_name,
data={'files-0': 'myfile.txt',
'bucket': 'mybucket',
'form_name': 's3'},
follow_redirects=True)
project = db.session.query(Project).first()
err_msg = "Tasks should be imported"
tasks = db.session.query(Task).filter_by(project_id=project.id).all()
expected_info = {
u'url': u'https://mybucket.s3.amazonaws.com/myfile.txt',
u'filename': u'myfile.txt',
u'link': u'https://mybucket.s3.amazonaws.com/myfile.txt'
}
assert tasks[0].info == expected_info, tasks[0].info
@with_context
def test_55_facebook_account_warning(self):
"""Test WEB Facebook OAuth user gets a hint to sign in"""
user = User(fullname='John',
name='john',
email_addr='john@john.com',
info={})
user.info = dict(facebook_token=u'facebook')
msg, method = get_user_signup_method(user)
err_msg = "Should return 'facebook' but returned %s" % method
assert method == 'facebook', err_msg
user.info = dict(google_token=u'google')
msg, method = get_user_signup_method(user)
err_msg = "Should return 'google' but returned %s" % method
assert method == 'google', err_msg
user.info = dict(twitter_token=u'twitter')
msg, method = get_user_signup_method(user)
err_msg = "Should return 'twitter' but returned %s" % method
assert method == 'twitter', err_msg
user.info = {}
msg, method = get_user_signup_method(user)
err_msg = "Should return 'local' but returned %s" % method
assert method == 'local', err_msg
    @with_context
    def test_56_delete_tasks(self):
        """Test WEB delete tasks works"""
        Fixtures.create()
        # Anonymous user: both GET and POST must redirect to the sign-in page.
        res = self.app.get('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Anonymous user should be redirected for authentication"
        assert "Please sign in to access this page" in res.data, err_msg
        err_msg = "Anonymous user should not be allowed to delete tasks"
        res = self.app.post('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Anonymous user should not be allowed to delete tasks"
        assert "Please sign in to access this page" in res.data, err_msg
        # Authenticated user who is not the project owner: 403 on GET and POST.
        self.register()
        res = self.app.get('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Authenticated user but not owner should get 403 FORBIDDEN in GET"
        assert res.status == '403 FORBIDDEN', err_msg
        res = self.app.post('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Authenticated user but not owner should get 403 FORBIDDEN in POST"
        assert res.status == '403 FORBIDDEN', err_msg
        self.signout()
        # Owner: may view the page and POST actually removes every task.
        tasks = db.session.query(Task).filter_by(project_id=1).all()
        res = self.signin(email=u'tester@tester.com', password=u'tester')
        res = self.app.get('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Owner user should get 200 in GET"
        assert res.status == '200 OK', err_msg
        assert len(tasks) > 0, "len(project.tasks) > 0"
        res = self.app.post('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Owner should get 200 in POST"
        assert res.status == '200 OK', err_msg
        tasks = db.session.query(Task).filter_by(project_id=1).all()
        assert len(tasks) == 0, "len(project.tasks) != 0"
        # Admin: also allowed on both GET and POST.
        res = self.signin(email=u'root@root.com', password=u'tester' + 'root')
        res = self.app.get('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Admin user should get 200 in GET"
        assert res.status_code == 200, err_msg
        res = self.app.post('/project/test-app/tasks/delete', follow_redirects=True)
        err_msg = "Admin should get 200 in POST"
        assert res.status_code == 200, err_msg
    @patch('pybossa.repositories.task_repository.uploader')
    def test_delete_tasks_removes_existing_zip_files(self, uploader):
        """Test WEB delete tasks also deletes zip files for task and taskruns"""
        Fixtures.create()
        self.signin(email=u'tester@tester.com', password=u'tester')
        res = self.app.post('/project/test-app/tasks/delete', follow_redirects=True)
        # Deleting the tasks must purge the four pre-generated export files
        # (task/task_run x json/csv) from the owner's upload container.
        expected = [call('1_test-app_task_json.zip', 'user_2'),
                    call('1_test-app_task_csv.zip', 'user_2'),
                    call('1_test-app_task_run_json.zip', 'user_2'),
                    call('1_test-app_task_run_csv.zip', 'user_2')]
        assert uploader.delete_file.call_args_list == expected
    @with_context
    def test_57_reset_api_key(self):
        """Test WEB reset api key works"""
        url = "/account/johndoe/update"
        # Anonymous user: both GET and POST require signing in first.
        res = self.app.get(url, follow_redirects=True)
        err_msg = "Anonymous user should be redirected for authentication"
        assert "Please sign in to access this page" in res.data, err_msg
        res = self.app.post(url, follow_redirects=True)
        assert "Please sign in to access this page" in res.data, err_msg
        # Authenticated user: can view the page and reset their own key.
        self.register()
        user = db.session.query(User).get(1)
        url = "/account/%s/update" % user.name
        # Remember the key so we can verify it actually changes.
        api_key = user.api_key
        res = self.app.get(url, follow_redirects=True)
        err_msg = "Authenticated user should get access to reset api key page"
        assert res.status_code == 200, err_msg
        assert "reset your personal API Key" in res.data, err_msg
        url = "/account/%s/resetapikey" % user.name
        res = self.app.post(url, follow_redirects=True)
        err_msg = "Authenticated user should be able to reset his api key"
        assert res.status_code == 200, err_msg
        user = db.session.query(User).get(1)
        err_msg = "New generated API key should be different from old one"
        assert api_key != user.api_key, err_msg
        self.signout()
        # A different user may not reset someone else's key (403)...
        self.register(fullname="new", name="new")
        res = self.app.post(url)
        assert res.status_code == 403, res.status_code
        # ...and resetting a non-existent account returns a 404.
        url = "/account/fake/resetapikey"
        res = self.app.post(url)
        assert res.status_code == 404, res.status_code
@with_context
@patch('pybossa.cache.site_stats.get_locs', return_value=[{'latitude': 0, 'longitude': 0}])
def test_58_global_stats(self, mock1):
"""Test WEB global stats of the site works"""
Fixtures.create()
url = "/stats"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a Global Statistics page of the project"
assert "General Statistics" in res.data, err_msg
with patch.dict(self.flask_app.config, {'GEO': True}):
res = self.app.get(url, follow_redirects=True)
assert "GeoLite" in res.data, res.data
@with_context
def test_59_help_api(self):
"""Test WEB help api page exists"""
Fixtures.create()
url = "/help/api"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a help api page"
assert "API Help" in res.data, err_msg
@with_context
def test_59_help_license(self):
"""Test WEB help license page exists."""
url = "/help/license"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a help license page"
assert "Licenses" in res.data, err_msg
@with_context
def test_59_about(self):
"""Test WEB help about page exists."""
url = "/about"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be an about page"
assert "About" in res.data, err_msg
@with_context
def test_59_help_tos(self):
"""Test WEB help TOS page exists."""
url = "/help/terms-of-use"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a TOS page"
assert "Terms for use" in res.data, err_msg
@with_context
def test_59_help_policy(self):
"""Test WEB help policy page exists."""
url = "/help/cookies-policy"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a TOS page"
assert "uses cookies" in res.data, err_msg
@with_context
def test_59_help_privacy(self):
"""Test WEB help privacy page exists."""
url = "/help/privacy"
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a privacy policy page"
assert "Privacy" in res.data, err_msg
    @with_context
    def test_69_allow_anonymous_contributors(self):
        """Test WEB allow anonymous contributors works"""
        Fixtures.create()
        project = db.session.query(Project).first()
        url = '/project/%s/newtask' % project.short_name
        # All users are allowed to participate by default
        # As Anonymous user
        res = self.app.get(url, follow_redirects=True)
        err_msg = "The anonymous user should be able to participate"
        assert project.name in res.data, err_msg
        # As registered user
        self.register()
        self.signin()
        res = self.app.get(url, follow_redirects=True)
        err_msg = "The anonymous user should be able to participate"
        assert project.name in res.data, err_msg
        self.signout()
        # Now only allow authenticated users
        project.allow_anonymous_contributors = False
        db.session.add(project)
        db.session.commit()
        # As Anonymous user: the page asks to sign in instead of serving a task.
        res = self.app.get(url, follow_redirects=True)
        err_msg = "User should be redirected to sign in"
        project = db.session.query(Project).first()
        msg = "Oops! You have to sign in to participate in <strong>%s</strong>" % project.name
        assert msg in res.data, err_msg
        # As registered user
        res = self.signin()
        res = self.app.get(url, follow_redirects=True)
        err_msg = "The authenticated user should be able to participate"
        assert project.name in res.data, err_msg
        self.signout()
        # Now only allow authenticated users
        # NOTE(review): this repeats the flag update above (the flag is
        # already False) -- presumably redundant; confirm the intent.
        project.allow_anonymous_contributors = False
        db.session.add(project)
        db.session.commit()
        res = self.app.get(url, follow_redirects=True)
        err_msg = "Only authenticated users can participate"
        assert "You have to sign in" in res.data, err_msg
@with_context
def test_70_public_user_profile(self):
"""Test WEB public user profile works"""
Fixtures.create()
# Should work as an anonymous user
url = '/account/%s/' % Fixtures.name
res = self.app.get(url, follow_redirects=True)
err_msg = "There should be a public profile page for the user"
assert Fixtures.fullname in res.data, err_msg
# Should work as an authenticated user
self.signin()
res = self.app.get(url, follow_redirects=True)
assert Fixtures.fullname in res.data, err_msg
# Should return 404 when a user does not exist
url = '/account/a-fake-name-that-does-not-exist/'
res = self.app.get(url, follow_redirects=True)
err_msg = "It should return a 404"
assert res.status_code == 404, err_msg
    @with_context
    @patch('pybossa.view.projects.uploader.upload_file', return_value=True)
    def test_74_task_settings_page(self, mock):
        """Test WEB TASK SETTINGS page works"""
        # Create root user (the first registered account becomes the admin)
        self.register()
        self.signout()
        # As owner
        self.register(fullname="owner", name="owner")
        res = self.new_project()
        url = "/project/sampleapp/tasks/settings"
        res = self.app.get(url, follow_redirects=True)
        dom = BeautifulSoup(res.data)
        # The settings page must expose the three task-configuration panels.
        divs = ['task_scheduler', 'task_delete', 'task_redundancy']
        for div in divs:
            err_msg = "There should be a %s section" % div
            assert dom.find(id=div) is not None, err_msg
        self.signout()
        # As an authenticated user who is not the owner: forbidden.
        self.register(fullname="juan", name="juan")
        res = self.app.get(url, follow_redirects=True)
        err_msg = "User should not be allowed to access this page"
        assert res.status_code == 403, err_msg
        self.signout()
        # As an anonymous user: redirected to the sign-in page.
        res = self.app.get(url, follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "User should be redirected to sign in"
        assert dom.find(id="signin") is not None, err_msg
        # As root: admins see the same panels as the owner.
        self.signin()
        res = self.app.get(url, follow_redirects=True)
        dom = BeautifulSoup(res.data)
        divs = ['task_scheduler', 'task_delete', 'task_redundancy']
        for div in divs:
            err_msg = "There should be a %s section" % div
            assert dom.find(id=div) is not None, err_msg
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_75_task_settings_scheduler(self, mock):
"""Test WEB TASK SETTINGS scheduler page works"""
# Creat root user
self.register()
self.signout()
# Create owner
self.register(fullname="owner", name="owner")
self.new_project()
url = "/project/sampleapp/tasks/scheduler"
form_id = 'task_scheduler'
self.signout()
# As owner and root
for i in range(0, 1):
if i == 0:
# As owner
self.signin(email="owner@example.com")
sched = 'depth_first'
else:
sched = 'default'
self.signin()
res = self.app.get(url, follow_redirects=True)
dom = BeautifulSoup(res.data)
err_msg = "There should be a %s section" % form_id
assert dom.find(id=form_id) is not None, err_msg
res = self.task_settings_scheduler(short_name="sampleapp",
sched=sched)
err_msg = "Task Scheduler should be updated"
assert "Project Task Scheduler updated" in res.data, err_msg
assert "success" in res.data, err_msg
project = db.session.query(Project).get(1)
assert project.info['sched'] == sched, err_msg
self.signout()
# As an authenticated user
self.register(fullname="juan", name="juan")
res = self.app.get(url, follow_redirects=True)
err_msg = "User should not be allowed to access this page"
assert res.status_code == 403, err_msg
self.signout()
# As an anonymous user
res = self.app.get(url, follow_redirects=True)
dom = BeautifulSoup(res.data)
err_msg = "User should be redirected to sign in"
assert dom.find(id="signin") is not None, err_msg
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_76_task_settings_redundancy(self, mock):
"""Test WEB TASK SETTINGS redundancy page works"""
# Creat root user
self.register()
self.signout()
# Create owner
self.register(fullname="owner", name="owner")
self.new_project()
self.new_task(1)
url = "/project/sampleapp/tasks/redundancy"
form_id = 'task_redundancy'
self.signout()
# As owner and root
for i in range(0, 1):
if i == 0:
# As owner
self.signin(email="owner@example.com")
n_answers = 20
else:
n_answers = 10
self.signin()
res = self.app.get(url, follow_redirects=True)
dom = BeautifulSoup(res.data)
# Correct values
err_msg = "There should be a %s section" % form_id
assert dom.find(id=form_id) is not None, err_msg
res = self.task_settings_redundancy(short_name="sampleapp",
n_answers=n_answers)
db.session.close()
err_msg = "Task Redundancy should be updated"
assert "Redundancy of Tasks updated" in res.data, err_msg
assert "success" in res.data, err_msg
project = db.session.query(Project).get(1)
for t in project.tasks:
assert t.n_answers == n_answers, err_msg
# Wrong values, triggering the validators
res = self.task_settings_redundancy(short_name="sampleapp",
n_answers=0)
err_msg = "Task Redundancy should be a value between 0 and 1000"
assert "error" in res.data, err_msg
assert "success" not in res.data, err_msg
res = self.task_settings_redundancy(short_name="sampleapp",
n_answers=10000000)
err_msg = "Task Redundancy should be a value between 0 and 1000"
assert "error" in res.data, err_msg
assert "success" not in res.data, err_msg
self.signout()
# As an authenticated user
self.register(fullname="juan", name="juan")
res = self.app.get(url, follow_redirects=True)
err_msg = "User should not be allowed to access this page"
assert res.status_code == 403, err_msg
self.signout()
# As an anonymous user
res = self.app.get(url, follow_redirects=True)
dom = BeautifulSoup(res.data)
err_msg = "User should be redirected to sign in"
assert dom.find(id="signin") is not None, err_msg
    @with_context
    def test_task_redundancy_update_updates_task_state(self):
        """Test WEB when updating the redundancy of the tasks in a project, the
        state of the task is updated in consequence"""
        # Create root user
        self.register()
        self.new_project()
        self.new_task(1)
        url = "/project/sampleapp/tasks/redundancy"
        project = db.session.query(Project).get(1)
        # Give every task exactly one answer, so a redundancy of 1 marks
        # them all as completed.
        for t in project.tasks:
            tr = TaskRun(project_id=project.id, task_id=t.id)
            db.session.add(tr)
            db.session.commit()
        err_msg = "Task state should be completed"
        res = self.task_settings_redundancy(short_name="sampleapp",
                                            n_answers=1)
        for t in project.tasks:
            assert t.state == 'completed', err_msg
        # Raising the redundancy above the number of stored answers must
        # reopen the tasks.
        res = self.task_settings_redundancy(short_name="sampleapp",
                                            n_answers=2)
        err_msg = "Task state should be ongoing"
        db.session.add(project)
        db.session.commit()
        for t in project.tasks:
            assert t.state == 'ongoing', t.state
@with_context
@patch('pybossa.view.projects.uploader.upload_file', return_value=True)
def test_77_task_settings_priority(self, mock):
"""Test WEB TASK SETTINGS priority page works"""
# Creat root user
self.register()
self.signout()
# Create owner
self.register(fullname="owner", name="owner")
self.new_project()
self.new_task(1)
url = "/project/sampleapp/tasks/priority"
form_id = 'task_priority'
self.signout()
# As owner and root
project = db.session.query(Project).get(1)
_id = project.tasks[0].id
for i in range(0, 1):
if i == 0:
# As owner
self.signin(email="owner@example.com")
task_ids = str(_id)
priority_0 = 1.0
else:
task_ids = "1"
priority_0 = 0.5
self.signin()
res = self.app.get(url, follow_redirects=True)
dom = BeautifulSoup(res.data)
# Correct values
err_msg = "There should be a %s section" % form_id
assert dom.find(id=form_id) is not None, err_msg
res = self.task_settings_priority(short_name="sampleapp",
task_ids=task_ids,
priority_0=priority_0)
err_msg = "Task Priority should be updated"
assert "error" not in res.data, err_msg
assert "success" in res.data, err_msg
task = db.session.query(Task).get(_id)
assert task.id == int(task_ids), err_msg
assert task.priority_0 == priority_0, err_msg
# Wrong values, triggering the validators
res = self.task_settings_priority(short_name="sampleapp",
priority_0=3,
task_ids="1")
err_msg = "Task Priority should be a value between 0.0 and 1.0"
assert "error" in res.data, err_msg
assert "success" not in res.data, err_msg
res = self.task_settings_priority(short_name="sampleapp",
task_ids="1, 2")
err_msg = "Task Priority task_ids should be a comma separated, no spaces, integers"
assert "error" in res.data, err_msg
assert "success" not in res.data, err_msg
res = self.task_settings_priority(short_name="sampleapp",
task_ids="1,a")
err_msg = "Task Priority task_ids should be a comma separated, no spaces, integers"
assert "error" in res.data, err_msg
assert "success" not in res.data, err_msg
self.signout()
# As an authenticated user
self.register(fullname="juan", name="juan")
res = self.app.get(url, follow_redirects=True)
err_msg = "User should not be allowed to access this page"
assert res.status_code == 403, err_msg
self.signout()
# As an anonymous user
res = self.app.get(url, follow_redirects=True)
dom = BeautifulSoup(res.data)
err_msg = "User should be redirected to sign in"
assert dom.find(id="signin") is not None, err_msg
    @with_context
    def test_78_cookies_warning(self):
        """Test WEB cookies warning is displayed"""
        # As Anonymous: with no consent cookie set, the banner must render.
        res = self.app.get('/', follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "If cookies are not accepted, cookies banner should be shown"
        assert dom.find(id='cookies_warning') is not None, err_msg
        # As user
        # NOTE(review): Fixtures.create() is never called in this test, so
        # these signin calls presumably fail silently -- confirm the banner
        # check really covers the authenticated case.
        self.signin(email=Fixtures.email_addr2, password=Fixtures.password)
        res = self.app.get('/', follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "If cookies are not accepted, cookies banner should be shown"
        assert dom.find(id='cookies_warning') is not None, err_msg
        self.signout()
        # As admin
        self.signin(email=Fixtures.root_addr, password=Fixtures.root_password)
        res = self.app.get('/', follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "If cookies are not accepted, cookies banner should be shown"
        assert dom.find(id='cookies_warning') is not None, err_msg
        self.signout()
    @with_context
    def test_79_cookies_warning2(self):
        """Test WEB cookies warning is hidden"""
        # As Anonymous: pre-set the consent cookie so the banner is hidden.
        self.app.set_cookie("localhost", "cookieconsent_dismissed", "Yes")
        res = self.app.get('/', follow_redirects=True, headers={})
        dom = BeautifulSoup(res.data)
        err_msg = "If cookies are not accepted, cookies banner should be hidden"
        assert dom.find('div', attrs={'class': 'cc_banner-wrapper'}) is None, err_msg
        # As user
        # NOTE(review): Fixtures.create() is never called in this test, so
        # these signin calls presumably fail silently -- confirm coverage.
        self.signin(email=Fixtures.email_addr2, password=Fixtures.password)
        res = self.app.get('/', follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "If cookies are not accepted, cookies banner should be hidden"
        assert dom.find('div', attrs={'class': 'cc_banner-wrapper'}) is None, err_msg
        self.signout()
        # As admin
        self.signin(email=Fixtures.root_addr, password=Fixtures.root_password)
        res = self.app.get('/', follow_redirects=True)
        dom = BeautifulSoup(res.data)
        err_msg = "If cookies are not accepted, cookies banner should be hidden"
        assert dom.find('div', attrs={'class': 'cc_banner-wrapper'}) is None, err_msg
        self.signout()
@with_context
def test_user_with_no_more_tasks_find_volunteers(self):
    """Test WEB when a user has contributed to all available tasks, he is
    asked to find new volunteers for a project, if the project is not
    completed yet (overall progress < 100%)"""
    self.register()
    owner = User.query.first()
    project = ProjectFactory.create(owner=owner)
    task = TaskFactory.create(project=project)
    # The registered user answers the only task; the task itself is not
    # yet complete, so the project still needs volunteers.
    TaskRunFactory.create(task=task, user=owner)
    res = self.app.get('/project/%s/newtask' % project.short_name)
    message = "Sorry, you've contributed to all the tasks for this project, but this project still needs more volunteers, so please spread the word!"
    assert message in res.data
    self.signout()
@with_context
def test_user_with_no_more_tasks_find_volunteers_project_completed(self):
    """Test WEB when a user has contributed to all available tasks, he is
    not asked to find new volunteers for a project, if the project is
    completed (overall progress = 100%)"""
    self.register()
    owner = User.query.first()
    project = ProjectFactory.create(owner=owner)
    # n_answers=1 means a single task run completes the task (and project).
    task = TaskFactory.create(project=project, n_answers=1)
    TaskRunFactory.create(task=task, user=owner)
    res = self.app.get('/project/%s/newtask' % project.short_name)
    assert task.state == 'completed', task.state
    message = "Sorry, you've contributed to all the tasks for this project, but this project still needs more volunteers, so please spread the word!"
    assert message not in res.data
    self.signout()
@with_context
def test_results(self):
    """Test WEB results shows no data as no template and no data."""
    taskrun = TaskRunFactory.create()
    project = project_repo.get(taskrun.project_id)
    # Neither a results template nor result data: page shows "no results".
    res = self.app.get('/project/%s/results' % project.short_name,
                       follow_redirects=True)
    dom = BeautifulSoup(res.data)
    assert dom.find(id="noresult") is not None, res.data
@with_context
def test_results_with_values(self):
    """Test WEB results with values are not shown as no template but data."""
    task = TaskFactory.create(n_answers=1)
    taskrun = TaskRunFactory.create(task=task)
    project = project_repo.get(taskrun.project_id)
    # Attach data to the result, but since the project has no results
    # template the page must still report "no results".
    result = result_repo.get_by(project_id=project.id)
    result.info = dict(foo='bar')
    result_repo.update(result)
    res = self.app.get('/project/%s/results' % project.short_name,
                       follow_redirects=True)
    dom = BeautifulSoup(res.data)
    assert dom.find(id="noresult") is not None, res.data
@with_context
def test_results_with_values_and_template(self):
    """Test WEB results with values and template is shown."""
    task = TaskFactory.create(n_answers=1)
    taskrun = TaskRunFactory.create(task=task)
    project = project_repo.get(taskrun.project_id)
    # Give the project a results template...
    project.info['results'] = "The results"
    project_repo.update(project)
    # ...and the result some data, so the template should be rendered.
    result = result_repo.get_by(project_id=project.id)
    result.info = dict(foo='bar')
    result_repo.update(result)
    res = self.app.get('/project/%s/results' % project.short_name,
                       follow_redirects=True)
    assert "The results" in res.data, res.data
|
geotagx/pybossa
|
test/test_web.py
|
Python
|
agpl-3.0
| 167,235
|
import urllib.request
from html.parser import HTMLParser
def get_refs_list(filename):
    """Parse a references source file into a list of entries.

    Each returned entry is either a two-element list ``[ref_key, dblp_url]``
    (from a "key,url" line) or a one-element list ``[verbatim_bibtex]``
    (from a block delimited by lines starting with '%').

    Blank lines and lines beginning with '#' are ignored.  Malformed
    lines (not exactly two comma-separated fields) are reported on
    stdout and skipped.
    """
    refs_source_list = []
    recording = False   # inside a '%'-delimited verbatim block?
    current_entry = ""  # accumulated text of the current verbatim block
    # 'with' guarantees the file is closed even if parsing raises
    # (the original left the handle open on an exception).
    with open(filename, 'r') as refs_source_file:
        for line_index, line in enumerate(refs_source_file, start=1):
            l = line.strip()
            if l == "" or l[0] == "#":
                # Skip empty lines, or those beginning with #
                continue
            if l[0] == "%":
                # Toggle verbatim recording; the rest of the line is ignored.
                if recording:
                    refs_source_list.append([current_entry])
                    recording = False
                else:
                    current_entry = ""
                    recording = True
            elif recording:
                # Verbatim lines keep their original trailing newline.
                current_entry += line
            else:
                # Check the line has the correct structure, of two comma-separated sections
                parts = l.split(",")
                if len(parts) == 2:
                    refs_source_list.append(parts)
                else:
                    print("ALERT: Incorrectly formatted line!")
                    print("Line number " + str(line_index) + " has " + str(len(parts)) + " sections!")
                    print("This line has been ignored.\n")
    return refs_source_list
# dblp serves each reference's raw BibTeX (.bib) at an address derived from
# the /bibtex/ page URL given in the source file, which lets us avoid doing
# genuine HTML parsing to get the information we want.
def retrieve_bibtex_url(dblp_url):
    """Map a dblp /bibtex/ or /bibtex1/ page URL to its raw .bib URL."""
    # Rewrite the more specific '/bibtex1/' form first, then '/bibtex/'.
    for page_form in ("/bibtex1/", "/bibtex/"):
        dblp_url = dblp_url.replace(page_form, "/bib1/")
    return dblp_url + ".bib"
def retrieve_bibtex(dblp_url):
    """Download and return the BibTeX text for *dblp_url*, UTF-8 decoded.

    The HTTP response is closed deterministically via the context manager
    (the original left the connection to be reclaimed by the GC).
    """
    with urllib.request.urlopen(retrieve_bibtex_url(dblp_url)) as response:
        return response.read().decode('utf-8')
def get_bibtex(ref_key, dblp_url):
    """Download the BibTeX entry for *dblp_url* and substitute its
    citation key with *ref_key*.

    Relies on the citation key sitting between the first '{' and the
    first ',' of the downloaded entry.
    """
    raw_bibtex = retrieve_bibtex(dblp_url).strip()
    key_start = raw_bibtex.find("{") + 1
    key_end = raw_bibtex.find(",")
    return raw_bibtex[:key_start] + ref_key + raw_bibtex[key_end:]
refs_source_list = get_refs_list("example.txt")

# 'with' ensures the output file is flushed and closed even if a
# download fails partway through (the original only closed it on success).
with open("refs.bib", "w") as bib_file:
    for source in refs_source_list:
        if len(source) == 2:
            # key,url pair: download the entry and rewrite its citation key
            bib_file.write(get_bibtex(source[0], source[1]))
        elif len(source) == 1:
            # It was a verbatim entry
            bib_file.write(source[0])
        bib_file.write("\n\n")
|
ConradCB/DBLPTeX
|
bib_generator.py
|
Python
|
unlicense
| 2,318
|
#!/usr/bin/python
#
# Teleportation quantum circuit simulation proof-of-concept
# Copyright (C) 2008 Robert Nowotniak
#
# Based on:
#    [Bras98] Gilles Brassard. Teleportation as a quantum computation. 1998
#
# NOTE(review): this is Python 2 code (print statements) - run under python2.
# States are numpy column vectors; gates are matrices applied by
# left-multiplication and combined across qubits with kron().

from numpy import *
from random import random
from math import *
import sys

# 1/sqrt(2): normalization constant shared by several gates below
s2 = sqrt(2) / 2

# basic quantum gates

# Hadamard gate
H = s2 * array([
    [1, 1],
    [1,-1]])

# Controlled-NOT, first qubit controls the second
CNot = array([
    [1, 0, 0, 0],
    [0, 1, 0, 0],
    [0, 0, 0, 1],
    [0, 0, 1, 0]])

# Controlled-NOT with roles reversed: second qubit controls the first
CNot2 = array([
    [1, 0, 0, 0],
    [0, 0, 0, 1],
    [0, 0, 1, 0],
    [0, 1, 0, 0]])

# Pauli-Z (phase flip)
phasePi = array([
    [1, 0],
    [0, -1]
])

# Pauli-X (bit flip)
Not = array([
    [0, 1],
    [1, 0],
])

# single-qubit identity
I = identity(2)

# exchange the two qubits of a 2-qubit state
Swap = array([
    [1, 0, 0, 0],
    [0, 0, 1, 0],
    [0, 1, 0, 0],
    [0, 0, 0, 1]])

# quantum gates for Brassard teleportation circuit [Bras98]
L = s2 * array([
    [ 1, -1],
    [ 1,  1],
])
R = s2 * array([
    [ 1, 1],
    [-1, 1],
])
S = array([
    [ 1j, 0],
    [ 0, 1],
])
T = array([
    [-1, 0],
    [ 0, -1j],
])

# computational basis states |0> and |1> as column vectors
ket0 = transpose(array([[1, 0]]))
ket1 = transpose(array([[0, 1]]))

# state for teleportation (normalized: |a|^2 + |b|^2 = 4/49 + 45/49 = 1)
psi = array([
    [ 2.0/7 * (cos(pi/2/9) + 1.0j*sin(pi/2/9)) ],
    [ sqrt(45)/7 * (cos(pi/3*2) + 1.0j*sin(pi/3*2)) ],
])
# print abs(psi[1,0])**2 + abs(psi[0,0])**2

# three-qubit register |psi, 0, 0>
qreg = kron(kron(psi, ket0), ket0)
# print qreg

# first half of the circuit: gates applied before the mid-circuit
# measurement of the first two qubits
stage1 = matrix(kron(kron(I, L), I))
stage2 = matrix(kron(I, CNot))
stage3 = matrix(kron(CNot, I))
stage4 = matrix(kron(kron(R, I), I))
circ1 = stage4 * stage3 * stage2 * stage1
output1 = dot(circ1, qreg)
print 'quantum state just before the measurement in the middle:'
print output1
print

# measure-and-resend qubits
# probabilities of observing the first two qubits as 00 / 01 / 10 / 11
p00 = abs(output1[0])**2 + abs(output1[1])**2
p01 = abs(output1[2])**2 + abs(output1[3])**2
p10 = abs(output1[4])**2 + abs(output1[5])**2
p11 = abs(output1[6])**2 + abs(output1[7])**2
# sample an outcome and project out (zero) all inconsistent amplitudes
r = random()
if r < p00:
    bits = '00'
    output1[2:8] = 0
elif r < p00 + p01:
    bits = '01'
    output1[0:2] = 0
    output1[4:8] = 0
elif r < p00 + p01 + p10:
    bits = '10'
    output1[0:4] = 0
    output1[6:8] = 0
else:
    bits = '11'
    output1[0:6] = 0
# normalize the post-measurement state
length = sqrt(sum(array(abs(output1))**2))
output1 = matrix(array(output1) / length)
print 'quantum state after measurement:'
print output1
print
# end of measure-and-resend

# second half of the circuit: correction gates applied after measurement
stage5 = matrix(kron(S, CNot))
stage6a = matrix(kron(I, Swap))
stage6b = matrix(kron(CNot2, I))
stage6c = stage6a
stage6 = dot(dot(stage6a, stage6b), stage6c)
stage7 = matrix(kron(kron(S, I), T))
stage8 = stage6
circ2 = stage8 * stage7 * stage6 * stage5
output = dot(circ2, output1)
print 'Result of teleportation:'
print output
print
# print sum(array(abs(output1))**2)

# expected state: the measured classical bits on the first two qubits,
# with psi teleported onto the third
if bits == '00':
    expected = kron(ket0, ket0)
elif bits == '01':
    expected = kron(ket0, ket1)
elif bits == '10':
    expected = kron(ket1, ket0)
elif bits == '11':
    expected = kron(ket1, ket1)
expected = kron(expected, psi)
print 'Expected final state:'
print expected
print
#print sum(array(abs(expected))**2)
|
rnowotniak/qclib
|
old/teleportation.py
|
Python
|
gpl-3.0
| 2,982
|
#!/usr/bin/env python
"""A script to prepare the source tree for building."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# This script must have no special requirements because it wont be able to
# import any GRR stuff until the protos are built.
import argparse
import os
import subprocess
import sys
# Command-line interface: the only option is --clean, which removes
# previously generated *_pb2.py files instead of (re)compiling protos.
parser = argparse.ArgumentParser()
parser.add_argument(
    "--clean",
    action="store_true",
    default=False,
    help="Clean compiled protos.")
args = parser.parse_args()

# Directory containing this script; protos are searched for below it.
ROOT = os.path.dirname(os.path.abspath(__file__))
def Clean():
  """Clean out compiled protos."""
  # Walk the tree under ROOT and delete every generated protobuf module,
  # both source (_pb2.py) and compiled bytecode (_pb2.pyc).
  for dirpath, _, filenames in os.walk(ROOT):
    for name in filenames:
      if name.endswith(("_pb2.py", "_pb2.pyc")):
        os.unlink(os.path.join(dirpath, name))
def MakeProto():
  """Make sure our protos have been compiled to python libraries."""
  # Start running from one directory above the grr directory which is found by
  # this scripts's location as __file__.
  cwd = os.path.dirname(os.path.abspath(__file__))

  # Find all the .proto files.
  protos_to_compile = []
  for (root, _, files) in os.walk(cwd):
    for filename in files:
      full_filename = os.path.join(root, filename)
      if full_filename.endswith(".proto"):
        proto_stat = os.stat(full_filename)

        # The generated module sits next to the .proto with a _pb2.py suffix.
        pb2_path = full_filename.rsplit(".", 1)[0] + "_pb2.py"
        try:
          pb2_stat = os.stat(pb2_path)
          # Make-style mtime comparison: skip protos whose generated
          # module is already up to date.
          if pb2_stat.st_mtime >= proto_stat.st_mtime:
            continue
        except (OSError, IOError):
          # No generated module yet - it needs compiling.
          pass

        protos_to_compile.append(full_filename)

  if protos_to_compile:
    for proto in protos_to_compile:
      command = [
          sys.executable,
          "-m",
          "grpc_tools.protoc",
          # Write the python files next to the .proto files.
          "--python_out",
          ROOT,
          "--proto_path=%s" % ROOT,
          proto
      ]
      print(
          "Compiling %s with (cwd: %s): %s" % (proto, ROOT, " ".join(command)))
      # The protoc compiler is too dumb to deal with full paths - it expects a
      # relative path from the current working directory.
      subprocess.check_call(command, cwd=ROOT)
if __name__ == "__main__":
  # Clean first if requested, then (re)generate any out-of-date protos.
  if args.clean:
    Clean()
  MakeProto()
|
dunkhong/grr
|
grr/proto/makefile.py
|
Python
|
apache-2.0
| 2,468
|
from __future__ import print_function
from __future__ import unicode_literals
import re
import time
import socket
from netmiko.cisco_base_connection import CiscoSSHConnection
class HPProcurveSSH(CiscoSSHConnection):
    """Netmiko SSH driver for HP ProCurve switches."""

    def session_preparation(self):
        """
        Prepare the session after the connection has been established.

        Procurve uses - 'Press any key to continue'
        """
        # Send a couple of newlines (with generous, delay-factor-scaled
        # pauses) to get past the 'Press any key to continue' banner
        # before probing for the prompt.
        delay_factor = self.select_delay_factor(delay_factor=0)
        time.sleep(2 * delay_factor)
        self.write_channel("\n")
        time.sleep(2 * delay_factor)
        self.write_channel("\n")
        time.sleep(2 * delay_factor)

        # HP output contains VT100 escape codes
        self.ansi_escape_codes = True

        self.set_base_prompt()
        self.disable_paging(command="\nno page\n")
        self.set_terminal_width(command='terminal width 511')

    def enable(self, cmd='enable', pattern='password', re_flags=re.IGNORECASE,
               default_username='manager'):
        """Enter enable mode.

        ProCurve may prompt for a username before the password; when it
        does, *default_username* is supplied automatically, followed by
        the stored secret for the password prompt.
        """
        debug = False  # flip on for interactive troubleshooting
        output = self.send_command_timing(cmd)
        if 'username' in output.lower():
            output += self.send_command_timing(default_username)
        if 'password' in output.lower():
            output += self.send_command_timing(self.secret)
        if debug:
            print(output)
        self.clear_buffer()
        return output

    def cleanup(self):
        """Gracefully exit the SSH session."""
        self.exit_config_mode()
        self.write_channel("logout\n")
        # Answer the interactive logout dialog, polling the channel for
        # up to ~2.5 seconds (5 iterations x 0.5 s).
        count = 0
        while count <= 5:
            time.sleep(.5)
            output = self.read_channel()
            if 'Do you want to log out' in output:
                self.write_channel("y\n")
            # Don't automatically save the config (user's responsibility)
            elif 'Do you want to save the current' in output:
                self.write_channel("n\n")
            try:
                self.write_channel("\n")
            except socket.error:
                # Channel already closed - logout has completed.
                break
            count += 1
|
shamanu4/netmiko
|
netmiko/hp/hp_procurve_ssh.py
|
Python
|
mit
| 2,061
|
"""
WSGI config for locallibrary project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise

os.environ.setdefault("DJANGO_SETTINGS_MODULE", "locallibrary.settings")

# Create the WSGI application once and wrap it with WhiteNoise so static
# files are served efficiently in production.  (The original imported
# get_wsgi_application twice and built the application twice.)
application = DjangoWhiteNoise(get_wsgi_application())
|
evan-mp/django_local_library
|
locallibrary/wsgi.py
|
Python
|
gpl-3.0
| 582
|
# coding=utf-8
"""This file, docker_api.py, provides a wrapper abstraction for Docker containers (over a 3rd party abstraction)."""
from libraries.python.common_traits.trait_name import TraitName
from libraries.python.universal_utilities.system import colored_output as L
from time import sleep
class Container(TraitName):
    """Represents a single container."""

    def __init__(self, name: str, config: dict, project):
        TraitName.__init__(self, name)
        self.project = project
        self.z_index = int(config['z_index'])
        self.run_in_background = int(config['in_background']) == 1
        # Underlying 3rd-party container handle; set via set_instance().
        self.container = None

    def get_name(self) -> str:
        # Pad the label out to 35 characters so status lines align.
        return (' - Container{' + self.name + '}').ljust(35)

    def current_status(self) -> str:
        state = 'running' if self.is_running() else 'not running'
        return self.get_name() + ' is ' + state

    def run(self) -> None:
        self.project.run_container(self)

    def kill(self) -> None:
        self.container.kill()

    def print_status(self) -> None:
        if self.is_running():
            L.Yellow(self.current_status()).p()
            return
        L.Green(self.get_name() + ' is being ran').p()
        self.run()
        if 'postgres' in self.get_name():
            L.Green('Sleeping for 10 seconds to give the DB time to start up.').p()
            sleep(10)

    def is_running(self) -> bool:
        return self.container is not None

    def set_instance(self, container) -> None:
        self.container = container
|
utarsuno/quasar_source
|
libraries/python/docker/container.py
|
Python
|
mit
| 1,660
|
"""
Django settings for openshift project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os

# Detect whether we are running on OpenShift: the platform always sets
# OPENSHIFT_REPO_DIR in the environment.  ('in' works on both Python 2
# and 3, unlike the removed dict.has_key method the original used.)
ON_OPENSHIFT = 'OPENSHIFT_REPO_DIR' in os.environ

BASE_DIR = os.path.dirname(os.path.realpath(__file__))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# On OpenShift the default key is replaced by a deployment-specific
# secure key generated by openshiftlibs.
default_keys = { 'SECRET_KEY': 'vm4rl5*ymb@2&d_(gc$gb-^twq9w(u69hi--%$5xrh!xk(t%hw' }
use_keys = default_keys
if ON_OPENSHIFT:
    # imp is only needed on OpenShift, so import it lazily here
    # (it is also deprecated/removed on modern Python 3).
    import imp
    imp.find_module('openshiftlibs')
    import openshiftlibs
    use_keys = openshiftlibs.openshift_secure(default_keys)
SECRET_KEY = use_keys['SECRET_KEY']

# SECURITY WARNING: don't run with debug turned on in production!
# Debug locally, never on the OpenShift deployment.
DEBUG = not ON_OPENSHIFT
TEMPLATE_DEBUG = DEBUG

# In production accept any host (the platform routes requests);
# in debug mode Django's default local-host handling applies.
ALLOWED_HOSTS = [] if DEBUG else ['*']
# Application definition

# Standard Django contrib apps plus the project's own 'minesweep' app.
INSTALLED_APPS = (
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'minesweep',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# If you want configure the REDISCLOUD
# When all three REDISCLOUD_* variables are present, use Redis as the
# Django cache backend and enable full-page caching middleware.
if 'REDISCLOUD_URL' in os.environ and 'REDISCLOUD_PORT' in os.environ and 'REDISCLOUD_PASSWORD' in os.environ:
    redis_server = os.environ['REDISCLOUD_URL']
    redis_port = os.environ['REDISCLOUD_PORT']
    redis_password = os.environ['REDISCLOUD_PASSWORD']
    CACHES = {
        'default' : {
            'BACKEND' : 'redis_cache.RedisCache',
            'LOCATION' : '%s:%d'%(redis_server,int(redis_port)),
            'OPTIONS' : {
                'DB':0,
                'PARSER_CLASS' : 'redis.connection.HiredisParser',
                'PASSWORD' : redis_password,
            }
        }
    }
    # The cache middleware must sandwich the whole stack:
    # UpdateCacheMiddleware first, FetchFromCacheMiddleware last.
    MIDDLEWARE_CLASSES = ('django.middleware.cache.UpdateCacheMiddleware',) + MIDDLEWARE_CLASSES + ('django.middleware.cache.FetchFromCacheMiddleware',)
ROOT_URLCONF = 'urls'

WSGI_APPLICATION = 'wsgi.application'

TEMPLATE_DIRS = (
    os.path.join(BASE_DIR,'templates'),
)

# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
# SQLite in both cases; on OpenShift the database file lives in the
# platform's persistent data directory rather than the repo checkout.
if ON_OPENSHIFT:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(os.environ['OPENSHIFT_DATA_DIR'], 'db.sqlite3'),
        }
    }
else:
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
        }
    }

# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, '..', 'static')
STATIC_URL = '/static/'
|
snocorp/meansweep
|
wsgi/openshift/settings.py
|
Python
|
mit
| 3,625
|
"""Methods for running the Terminal GUI"""
import argparse
import json
import sys
import os
import datetime
import logging
import threading
from collections import OrderedDict as odict
import numpy as np
from PySide2 import QtCore, QtGui, QtSvg, QtWidgets
from autopilot import prefs
from autopilot.core import styles
if __name__ == '__main__':
    # Parse arguments - this should have been called with a .json prefs file passed
    # We'll try to look in the default location first
    parser = argparse.ArgumentParser(description="Run an autopilot Terminal")
    parser.add_argument('-f', '--prefs', help="Location of .json prefs file (created during setup_autopilot.py)")
    args = parser.parse_args()

    if not args.prefs:
        prefs_file = '/usr/autopilot/prefs.json'

        if not os.path.exists(prefs_file):
            raise Exception("No Prefs file passed, and file not in default location")

        # NOTE(review): raise Warning actually raises (Warning is an
        # Exception subclass), so execution stops here too - presumably
        # warnings.warn was intended; confirm before changing.
        raise Warning('No prefs file passed, loaded from default location. Should pass explicitly with -p')

    else:
        prefs_file = args.prefs

    # init prefs for module access
    prefs.init(prefs_file)

# These imports come after the prefs initialization above - presumably
# because the imported modules read prefs at import time; confirm.
from autopilot.core.subject import Subject
from autopilot.core.plots import Plot_Widget
from autopilot.core.networking import Terminal_Station, Net_Node
from autopilot.core.utils import InvokeEvent, Invoker, get_invoker
from autopilot.core.gui import Control_Panel, Protocol_Wizard, Weights, Reassign, Calibrate_Water, Bandwidth_Test
from autopilot.core.loggers import init_logger

# Try to import viz, but continue if that doesn't work
IMPORTED_VIZ = False
VIZ_ERROR = None
try:
    from autopilot import viz
    IMPORTED_VIZ = True
except ImportError as e:
    VIZ_ERROR = str(e)

import pdb

# TODO: Be more complete about generating logs
# TODO: Make exit graceful
# TODO: Make 'edit subject' button
# TODO: Make experiment tags, save and populate?

# http://zetcode.com/gui/pysidetutorial/layoutmanagement/
# https://wiki.qt.io/PySide_Tutorials

# Module-level handle on the singleton Terminal instance (set in __init__).
_TERMINAL = None
class Terminal(QtWidgets.QMainWindow):
"""
Central host to a swarm of :class:`.Pilot` s and user-facing
:mod:`~.core.gui` objects.
Called as a module with the -f flag to give the location of a prefs file, eg::
python terminal.py -f prefs_file.json
if the -f flag is not passed, looks in the default location for prefs
(ie. `/usr/autopilot/prefs.json`)
**Listens used by the internal :class:`.Net_Node` **
+---------------+--------------------------------+--------------------------------------------------------+
| Key | Method | Description |
+===============+================================+========================================================+
| `'STATE'` | :meth:`~.Terminal.l_state` | A Pi has changed state |
+---------------+--------------------------------+--------------------------------------------------------+
| `'PING'` | :meth:`~.Terminal.l_ping` | Someone wants to know if we're alive |
+---------------+--------------------------------+--------------------------------------------------------+
| `'DATA'` | :meth:`~.Terminal.l_data` | Receiving data to store |
+---------------+--------------------------------+--------------------------------------------------------+
| `'HANDSHAKE'` | :meth:`~.Terminal.l_handshake` | Pilot first contact, telling us it's alive and its IP |
+---------------+--------------------------------+--------------------------------------------------------+
.. note::
See :mod:`autopilot.prefs` for full list of prefs needed by terminal!
Attributes:
node (:class:`~.networking.Net_Node`): Our Net_Node we use to communicate with our main networking object
networking (:class:`~.networking.Terminal_Station`): Our networking object to communicate with the outside world
subjects (dict): A dictionary mapping subject ID to :class:`~.subject.Subject` object.
pilots (dict): A dictionary mapping pilot ID to a list of its subjects, its IP, and any other pilot attributes.
layout (:class:`QtWidgets.QGridLayout`): Layout used to organize widgets
control_panel (:class:`~.gui.Control_Panel`): Control Panel to manage pilots and subjects
data_panel (:class:`~.plots.Plot_Widget`): Plots for each pilot and subject.
logo (:class:`QtWidgets.QLabel`): Label holding our beautiful logo ;X
logger (:class:`logging.Logger`): Used to log messages and network events.
settings (:class:`PySide2.QtCore.QSettings`): QSettings used to store pyside configuration like window size,
stored in ``prefs.get("TERMINAL_SETTINGS_FN")``
"""
def __init__(self):
    """Construct the Terminal: load settings and the pilot db, build the
    GUI, then bring up internal and external networking."""
    # type: () -> None
    super(Terminal, self).__init__()

    # store instance in a module-level global so other code can reach
    # the running Terminal
    globals()['_TERMINAL'] = self

    # Load settings
    # Currently, the only setting is "geometry", but loading here
    # in case we start to use other ones in the future
    self.settings = QtCore.QSettings(prefs.get("TERMINAL_SETTINGS_FN"),
                                     QtCore.QSettings.NativeFormat)

    # networking
    self.node = None        # internal Net_Node, created below
    self.networking = None  # external Terminal_Station process, created below
    self.heartbeat_dur = 10 # check every n seconds whether our pis are around still

    # data
    self.subjects = {}  # Dict of our open subject objects
    self.pilots = None

    # gui - widgets are all built in initUI()
    self.layout = None
    self.widget = None
    self.file_menu = None
    self.tool_menu = None
    self.control_panel = None
    self.data_panel = None
    self.logo = None

    # logging
    self.logger = init_logger(self)

    # Load pilots db as ordered dictionary
    with open(prefs.get('PILOT_DB')) as pilot_file:
        self.pilots = json.load(pilot_file, object_pairs_hook=odict)

    # Listen dictionary - which methods to call for different messages
    # Methods are spawned in new threads using handle_message
    self.listens = {
        'STATE': self.l_state, # A Pi has changed state
        'PING' : self.l_ping,  # Someone wants to know if we're alive
        'DATA' : self.l_data,
        'CONTINUOUS': self.l_data, # handle continuous data same way as other data
        'STREAM': self.l_data,
        'HANDSHAKE': self.l_handshake # a pi is making first contact, telling us its IP
    }

    # Make invoker object to send GUI events back to the main thread
    # self.invoker = Invoker()
    self.invoker = get_invoker()
    # prefs.add('INVOKER', self.invoker)

    self.initUI()

    # Start Networking
    # Networking is in two parts,
    # "internal" networking for messages sent to and from the Terminal object itself
    # "external" networking for messages to and from all the other components,
    # The split is so the external networking can run in another process, do potentially time-consuming tasks
    # like resending & confirming message delivery without blocking or missing messages
    self.node = Net_Node(id="_T", upstream='T', port=prefs.get('MSGPORT'), listens=self.listens)
    self.logger.info("Net Node Initialized")

    # Start external communications in own process
    # Has to be after init_network so it makes a new context
    self.networking = Terminal_Station(self.pilots)
    self.networking.start()
    self.logger.info("Station object Initialized")

    # send an initial ping looking for our pilots
    self.node.send('T', 'INIT')

    # start beating ur heart
    # self.heartbeat_timer = threading.Timer(self.heartbeat_dur, self.heartbeat)
    # self.heartbeat_timer.daemon = True
    # self.heartbeat_timer.start()
    #self.heartbeat(once=True)

    self.logger.info('Terminal Initialized')
def initUI(self):
    """
    Initializes graphical elements of Terminal.

    Including...

    * Toolbar
    * :class:`.gui.Control_Panel`
    * :class:`.plots.Plot_Widget`
    """
    # Set central widget
    self.widget = QtWidgets.QWidget()
    self.setCentralWidget(self.widget)

    # Set the layout
    self.layout = QtWidgets.QGridLayout()
    self.layout.setSpacing(0)
    self.layout.setContentsMargins(0, 0, 0, 0)
    self.widget.setLayout(self.layout)

    # Set title
    self.setWindowTitle('Terminal')

    # This is the available geometry of the primary screen, excluding
    # window manager reserved areas such as task bars and system menus.
    primary_display = app.primaryScreen().availableGeometry()

    ## Initalize the menuBar
    # Linux: Set the menuBar to a fixed height
    # Darwin: Don't worry about menuBar
    if sys.platform == 'darwin':
        bar_height = 0
    else:
        # Qt expects an int here; on Python 3 the division yields a float,
        # which setFixedHeight() rejects - cast explicitly.
        bar_height = int((primary_display.height() / 30) + 5)
        self.menuBar().setFixedHeight(bar_height)

    # Create a File menu
    self.file_menu = self.menuBar().addMenu("&File")
    self.file_menu.setObjectName("file")

    # Add "New Pilot" and "New Protocol" actions to File menu
    new_pilot_act = QtWidgets.QAction("New &Pilot", self, triggered=self.new_pilot)
    new_prot_act = QtWidgets.QAction("New Pro&tocol", self, triggered=self.new_protocol)
    # TODO: Update pis
    self.file_menu.addAction(new_pilot_act)
    self.file_menu.addAction(new_prot_act)

    # Create a Tools menu
    self.tool_menu = self.menuBar().addMenu("&Tools")
    # Add actions to Tools menu
    subject_weights_act = QtWidgets.QAction("View Subject &Weights", self, triggered=self.subject_weights)
    update_protocol_act = QtWidgets.QAction("Update Protocols", self, triggered=self.update_protocols)
    reassign_act = QtWidgets.QAction("Batch Reassign Protocols", self, triggered=self.reassign_protocols)
    calibrate_act = QtWidgets.QAction("Calibrate &Water Ports", self, triggered=self.calibrate_ports)
    self.tool_menu.addAction(subject_weights_act)
    self.tool_menu.addAction(update_protocol_act)
    self.tool_menu.addAction(reassign_act)
    self.tool_menu.addAction(calibrate_act)

    # Create a Plots menu and add Psychometric Curve action
    self.plots_menu = self.menuBar().addMenu("&Plots")
    # BUGFIX: under PySide2 (Qt5) QAction lives in QtWidgets, not QtGui
    # (QtGui.QAction only exists in Qt6 bindings), and every other action
    # in this method already uses QtWidgets.QAction.
    psychometric = QtWidgets.QAction("Psychometric Curve", self, triggered=self.plot_psychometric)
    self.plots_menu.addAction(psychometric)

    # Create a Tests menu and add a Test Bandwidth action
    self.tests_menu = self.menuBar().addMenu("Test&s")
    bandwidth_test_act = QtWidgets.QAction("Test Bandwidth", self, triggered=self.test_bandwidth)
    self.tests_menu.addAction(bandwidth_test_act)

    ## Init main panels and add to layout
    # Control panel sits on the left, controls pilots & subjects
    self.control_panel = Control_Panel(pilots=self.pilots,
                                       subjects=self.subjects,
                                       start_fn=self.toggle_start)
    # Data panel sits on the right, plots stuff.
    self.data_panel = Plot_Widget()
    self.data_panel.init_plots(self.pilots.keys())

    # Set logo to corner widget
    # NOTE(review): self.logo is None here (never assigned a widget in
    # __init__), so this clears the corner widget - confirm intent.
    if sys.platform != 'darwin':
        self.menuBar().setCornerWidget(self.logo, QtCore.Qt.TopRightCorner)
        self.menuBar().adjustSize()

    # Add Control Panel and Data Panel to main layout
    self.layout.addWidget(self.control_panel, 0, 0, 1, 1)
    self.layout.addWidget(self.data_panel, 0, 1, 1, 1)
    self.layout.setColumnStretch(0, 1)
    self.layout.setColumnStretch(1, 3)

    ## Set window size
    # The window size behavior depends on TERMINAL_WINSIZE_BEHAVIOR pref
    # If 'remember': restore to the geometry from the last close
    # If 'maximum': restore to fill the entire screen
    # If 'custom': move/resize to the (x, y, w, h) given in prefs
    # If 'moderate' (or anything else): a reasonable (1000, 400) window
    terminal_winsize_behavior = prefs.get('TERMINAL_WINSIZE_BEHAVIOR')

    # Set geometry according to pref
    if terminal_winsize_behavior == 'maximum':
        # Fill the available screen area
        self.setGeometry(primary_display)
        self.setSizePolicy(
            QtWidgets.QSizePolicy.Maximum, QtWidgets.QSizePolicy.Maximum)
        # Move to top left corner of primary display
        self.move(primary_display.left(), primary_display.top())
        # Also set the maximum height of each panel
        self.control_panel.setMaximumHeight(primary_display.height())
        self.data_panel.setMaximumHeight(primary_display.height())
    elif terminal_winsize_behavior == 'remember':
        # Attempt to restore previous geometry
        if self.settings.value("geometry") is None:
            # Never saved before (e.g. first run) - use the moderate size
            self.move(primary_display.left(), primary_display.top())
            self.resize(1000, 400)
        else:
            # It was saved, so restore the last geometry
            self.restoreGeometry(self.settings.value("geometry"))
    elif terminal_winsize_behavior == "custom":
        custom_size = prefs.get('TERMINAL_CUSTOM_SIZE')
        self.move(custom_size[0], custom_size[1])
        self.resize(custom_size[2], custom_size[3])
    else:
        if terminal_winsize_behavior != 'moderate':
            self.logger.warning(f'TERMINAL_WINSIZE_BEHAVIOR {terminal_winsize_behavior} is not implemented, defaulting to "moderate"')
        # The moderate size
        self.move(primary_display.left(), primary_display.top())
        self.resize(1000, 400)

    ## Finalize some aesthetics
    # set stylesheet for main window
    self.setStyleSheet(styles.TERMINAL)
    # BUGFIX: QFont.setStyleStrategy() mutates the font in place and
    # returns None; the original passed that None straight to setFont().
    antialiased = self.font()
    antialiased.setStyleStrategy(QtGui.QFont.PreferAntialias)
    self.setFont(antialiased)

    ## Show, and log that initialization is complete
    self.show()
    # use the instance logger for consistency with the rest of the class
    self.logger.info('UI Initialized')
def reset_ui(self):
    """
    Clear Layout and call :meth:`~.Terminal.initUI` again
    """
    # type: () -> None
    # Build a fresh layout, reattach it to the central widget, and rerun
    # the whole UI construction from scratch.
    fresh_layout = QtWidgets.QGridLayout()
    fresh_layout.setSpacing(0)
    fresh_layout.setContentsMargins(0, 0, 0, 0)
    self.layout = fresh_layout
    self.widget.setLayout(fresh_layout)
    self.setCentralWidget(self.widget)
    self.initUI()
##########################3
# Listens & inter-object methods
def heartbeat(self, once=False):
    """
    Periodically send an ``INIT`` message that checks the status of
    connected pilots, rescheduling itself every
    :attr:`.Terminal.heartbeat_dur` seconds.

    Args:
        once (bool): if True, do a single heartbeat but don't start a thread to do more.
    """
    self.node.send('T', 'INIT', repeat=False, flags={'NOREPEAT': True})

    if once:
        return

    # Re-arm: a daemon timer re-invokes this method after heartbeat_dur.
    self.heartbeat_timer = threading.Timer(self.heartbeat_dur, self.heartbeat)
    self.heartbeat_timer.daemon = True
    self.heartbeat_timer.start()
def toggle_start(self, starting, pilot, subject=None):
    """Start or Stop running the currently selected subject's task. Sends a
    message containing the task information to the concerned pilot.

    Each :class:`Pilot_Panel` is given a lambda function that calls this
    one with the arguments specified See :class:`Pilot_Button`, as it is
    what calls this function.

    Args:
        starting (bool): Does this button press mean we are starting (True)
            or stopping (False) the task?
        pilot: Which Pilot is starting or stopping?
        subject: Which Subject is currently selected?
    """
    # stopping is the enemy of starting so we put them in the same function to learn about each other
    if starting is True:
        # Get Weights - modal dialog; 'ok' is False if the user cancelled
        start_weight, ok = QtWidgets.QInputDialog.getDouble(self, "Set Starting Weight",
                                                            "Starting Weight:")
        if ok:
            # Ope'nr up if she aint - lazily create the Subject object
            if subject not in self.subjects.keys():
                self.subjects[subject] = Subject(subject)

            task = self.subjects[subject].prepare_run()
            task['pilot'] = pilot

            # record the starting weight before kicking off the task
            self.subjects[subject].update_weights(start=float(start_weight))

            self.node.send(to=pilot, key="START", value=task)
            # also let the plot know to start
            self.node.send(to="P_{}".format(pilot), key="START", value=task)

        else:
            # pressed cancel, don't start
            return

    else:
        # Get Weights - modal dialog; 'ok' is False if the user cancelled
        stop_weight, ok = QtWidgets.QInputDialog.getDouble(self, "Set Stopping Weight",
                                                           "Stopping Weight:")

        if ok:
            # Send message to pilot to stop running,
            # it should initiate a coherence checking routine to make sure
            # its data matches what the Terminal got,
            # so the terminal will handle closing the subject object
            self.node.send(to=pilot, key="STOP")
            # also let the plot know to start
            self.node.send(to="P_{}".format(pilot), key="STOP")

            # TODO: Start coherence checking ritual
            # TODO: Auto-select the next subject in the list.

            self.subjects[subject].stop_run()
            self.subjects[subject].update_weights(stop=float(stop_weight))
        else:
            # pressed cancel
            return
############################
# MESSAGE HANDLING METHODS
def l_data(self, value):
"""
A Pilot has sent us data.
`value` field of message should have `subject` and `pilot` added to dictionary for identification.
Any key in `value` that matches a column in the subject's trial data table will be saved.
If the subject graduates after receiving this piece of data, stop the current
task running on the Pilot and send the new one.
Args:
value (dict): A dict of field-value pairs to save
"""
# A Pi has sent us data, let's save it huh?
subject_name = value['subject']
self.subjects[subject_name].save_data(value)
if self.subjects[subject_name].did_graduate.is_set() is True:
self.node.send(to=value['pilot'], key="STOP", value={'graduation':True})
self.subjects[subject_name].stop_run()
self.subjects[subject_name].graduate()
task = self.subjects[subject_name].prepare_run()
task['pilot'] = value['pilot']
self.node.send(to=value['pilot'], key="START", value=task)
    def l_ping(self, value):
        """
        TODO:
            Reminder to implement heartbeating.

        Note:
            Currently unused, as Terminal Net_Node stability hasn't been
            a problem and no universal system of heartbeating has been
            established (global stability has not been an issue).

        Args:
            value: (unused)
        """
        # Only our Station object should ever ping us, because
        # we otherwise want it handling any pings on our behalf.
        # self.send_message('ALIVE', value=b'T')
        pass
def l_state(self, value):
"""A Pilot has changed state, keep track of it.
Args:
value (dict): dict containing `state` .
"""
# TODO: If we are stopping, we enter into a cohere state
# TODO: If we are stopped, close the subject object.
# TODO: Also tell the relevant dataview to clear
# update the pilot button
if value['pilot'] in self.pilots.keys():
if 'state' not in self.pilots[value['pilot']].keys():
self.pilots[value['pilot']]['state'] = value['state']
#self.control_panel.panels[value['pilot']].button.set_state(value['state'])
elif value['state'] != self.pilots[value['pilot']]['state']:
#self.control_panel.panels[value['pilot']].button.set_state(value['state'])
self.pilots[value['pilot']]['state'] = value['state']
    def l_handshake(self, value):
        """
        Pilot is sending its IP and state on startup.

        If we haven't heard of this pilot before, make a new entry in
        :attr:`~.Terminal.pilots` and :meth:`.gui.Control_Panel.update_db`.

        Args:
            value (dict): dict containing `pilot`, and (for known pilots,
                optionally) `ip` and `state`
        """
        if value['pilot'] in self.pilots.keys():
            # known pilot: refresh whichever fields were sent
            if 'ip' in value.keys():
                self.pilots[value['pilot']]['ip'] = value['ip']
            if 'state' in value.keys():
                self.pilots[value['pilot']]['state'] = value['state']
        else:
            # NOTE(review): unknown pilots are assumed to always send 'ip'
            # (and 'state' below) -- a handshake missing either key raises
            # KeyError. Confirm against the Pilot-side handshake message.
            self.new_pilot(name=value['pilot'], ip=value['ip'])
        # update the pilot button to show the reported state
        if value['pilot'] in self.control_panel.panels.keys():
            self.control_panel.panels[value['pilot']].button.set_state(value['state'])
        self.control_panel.update_db()
#############################
# GUI & etc. methods
    def new_pilot(self, ip='', name=None):
        """
        Make a new entry in :attr:`.Terminal.pilots` and make appropriate
        GUI elements.

        Args:
            ip (str): Optional. if given, stored in db.
            name (str): If None, prompted for a name, otherwise used for entry in pilot DB.
        """
        if name is None:
            # NOTE(review): the dialog's `ok` flag is ignored -- cancelling
            # returns an empty string, which the check below rejects anyway.
            name, ok = QtWidgets.QInputDialog.getText(self, "Pilot ID", "Pilot ID:")
        # make sure we won't overwrite ourself
        if name in self.pilots.keys():
            # TODO: Pop a window confirming we want to overwrite
            pass
        if name != '':
            new_pilot = {name:{'subjects':[], 'ip':ip}}
            self.control_panel.update_db(new=new_pilot)
            self.reset_ui()
        else:
            # empty name (or cancelled dialog): silently do nothing
            pass
    def new_protocol(self):
        """
        Open a :class:`.gui.Protocol_Wizard` to create a new protocol.

        Prompts for name of protocol, then saves in `prefs.get('PROTOCOLDIR')`.
        If no name is given (or the name dialog is cancelled), a placeholder
        name based on today's date is used instead.
        """
        self.new_protocol_window = Protocol_Wizard()
        self.new_protocol_window.exec_()
        if self.new_protocol_window.result() == 1:
            steps = self.new_protocol_window.steps
            # The values useful to the step functions are stored with a 'value' key in the param_dict
            save_steps = []
            for s in steps:
                param_values = {}
                for k, v in s.items():
                    if 'value' in v.keys():
                        param_values[k] = v['value']
                    elif k == 'stim':
                        # TODO: Super hacky - don't do this. Refactor params already.
                        # stim params are nested one level deeper; copy them through
                        param_values[k] = {}
                        for stimtype, stim in v.items():
                            param_values[k][stimtype] = stim
                save_steps.append(param_values)
            # Name the protocol
            name, ok = QtWidgets.QInputDialog.getText(self, "Name Protocol", "Protocol Name:")
            if ok and name != '':
                protocol_file = os.path.join(prefs.get('PROTOCOLDIR'), name + '.json')
                with open(protocol_file, 'w') as pfile_open:
                    json.dump(save_steps, pfile_open, indent=4, separators=(',', ': '), sort_keys=True)
            elif name == '' or not ok:
                # no usable name: save under a dated placeholder so the work isn't lost
                placeholder_name = 'protocol_created_{}'.format(datetime.date.today().isoformat())
                protocol_file = os.path.join(prefs.get('PROTOCOLDIR'), placeholder_name + '.json')
                with open(protocol_file, 'w') as pfile_open:
                    json.dump(save_steps, pfile_open, indent=4, separators=(',', ': '), sort_keys=True)
@property
def subject_list(self):
"""
Get a list of all subject IDs
Returns:
list: list of all subject IDs present in :attr:`.Terminal.pilots`
"""
subjects = []
for pilot, vals in self.pilots.items():
subjects.extend(vals['subjects'])
# use sets to get a unique list
subjects = list(set(subjects))
return subjects
def subject_weights(self):
"""
Gets recent weights from all :attr:`~.Terminal.subjects` and
open a :class:`.gui.Weights` window to view or set weights.
"""
subjects = self.subject_list
# open objects if not already
for subject in subjects:
if subject not in self.subjects.keys():
self.subjects[subject] = Subject(subject)
# for each subject, get weight
weights = []
for subject in subjects:
weight = self.subjects[subject].get_weight(include_baseline=True)
weight['subject'] = subject
weights.append(weight)
self.weight_widget = Weights(weights, self.subjects)
self.weight_widget.show()
def update_protocols(self):
"""
If we change the protocol file, update the stored version in subject files
"""
#
# get list of protocol files
protocols = os.listdir(prefs.get('PROTOCOLDIR'))
protocols = [p for p in protocols if p.endswith('.json')]
updated_subjects = []
subjects = self.subject_list
for subject in subjects:
if subject not in self.subjects.keys():
self.subjects[subject] = Subject(subject)
protocol_bool = [self.subjects[subject].protocol_name == os.path.splitext(p)[0] for p in protocols]
if any(protocol_bool):
which_prot = np.where(protocol_bool)[0][0]
protocol = protocols[which_prot]
self.subjects[subject].assign_protocol(os.path.join(prefs.get('PROTOCOLDIR'), protocol), step_n=self.subjects[subject].step)
updated_subjects.append(subject)
msgbox = QtWidgets.QMessageBox()
msgbox.setText("Subject Protocols Updated for:")
msgbox.setDetailedText("\n".join(sorted(updated_subjects)))
msgbox.exec_()
@property
def protocols(self):
"""
Returns:
list: list of protocol files in ``prefs.get('PROTOCOLDIR')``
"""
# get list of protocol files
protocols = os.listdir(prefs.get('PROTOCOLDIR'))
protocols = [os.path.splitext(p)[0] for p in protocols if p.endswith('.json')]
return protocols
@property
def subject_protocols(self):
"""
Returns:
subject_protocols (dict): a dictionary of subjects: [protocol, step]
"""
# get subjects and current protocols
subjects = self.subject_list
subjects_protocols = {}
for subject in subjects:
if subject not in self.subjects.keys():
self.subjects[subject] = Subject(subject)
subjects_protocols[subject] = [self.subjects[subject].protocol_name, self.subjects[subject].step]
return subjects_protocols
    def reassign_protocols(self):
        """
        Batch reassign protocols and steps.

        Opens a :class:`.gui.Reassign` window after getting protocol data,
        and applies any changes made in the window.
        """
        reassign_window = Reassign(self.subject_protocols, self.protocols)
        reassign_window.exec_()
        if reassign_window.result() == 1:
            subject_protocols = reassign_window.subjects
            for subject, protocol in subject_protocols.items():
                # protocol is a [name, step] pair from the dialog
                step = protocol[1]
                protocol = protocol[0]
                # since assign_protocol also changes the step, stash the step number here to tell if it's changed
                subject_orig_step = self.subjects[subject].step
                # if the protocol is the blank protocol, do nothing
                if not protocol:
                    self.logger.info(f'Protocol for {subject} set to blank, not setting')
                    continue
                if self.subjects[subject].protocol_name != protocol:
                    self.logger.info('Setting {} protocol from {} to {}'.format(subject, self.subjects[subject].protocol_name, protocol))
                    protocol_file = os.path.join(prefs.get('PROTOCOLDIR'), protocol + '.json')
                    self.subjects[subject].assign_protocol(protocol_file, step)
                if subject_orig_step != step:
                    self.logger.info('Setting {} step from {} to {}'.format(subject, subject_orig_step, step))
                    step_name = self.subjects[subject].protocol[step]['step_name']
                    #update history also flushes current - aka it also actually changes the step number
                    self.subjects[subject].update_history('step', step_name, step)
        else:
            self.logger.debug('reassign cancelled')
    def calibrate_ports(self):
        """
        Calibrate :class:`.hardware.gpio.Solenoid` objects.

        See :class:`.gui.Calibrate_Water`.

        After calibration routine, send results to pilot for storage.
        """
        calibrate_window = Calibrate_Water(self.pilots)
        calibrate_window.exec_()
        if calibrate_window.result() == 1:
            for pilot, p_widget in calibrate_window.pilot_widgets.items():
                p_results = p_widget.volumes
                # p_results are [port][dur] = {params} so running the same duration will
                # overwrite a previous run. unnest here so pi can keep a record
                unnested_results = {}
                for port, result in p_results.items():
                    unnested_results[port] = []
                    # result is [dur] = {params}; move the dur key into each
                    # params dict so the list entries are self-describing
                    for dur, inner_result in result.items():
                        inner_result['dur'] = dur
                        unnested_results[port].append(inner_result)
                # send to pi
                self.node.send(to=pilot, key="CALIBRATE_RESULT",
                               value = unnested_results)
            msgbox = QtWidgets.QMessageBox()
            msgbox.setText("Calibration results sent!")
            msgbox.exec_()
def test_bandwidth(self):
"""
Test bandwidth of Pilot connection with variable sized arrays as paylods
See :class:`.gui.Bandwidth_Test`
"""
# turn off logging while we run
prev_networking_loglevel = self.networking.logger.level
prev_node_loglevel = self.node.logger.level
self.networking.logger.setLevel(logging.ERROR)
self.node.logger.setLevel(logging.ERROR)
bandwidth_test = Bandwidth_Test(self.pilots)
bandwidth_test.exec_()
self.networking.logger.setLevel(prev_networking_loglevel)
self.node.logger.setLevel(prev_node_loglevel)
def plot_psychometric(self):
"""
Select subject, step, and variables to plot a psychometric curve
"""
if not IMPORTED_VIZ:
_ = pop_dialog("Vizualisation function couldn't be imported!", "error", VIZ_ERROR)
return
psychometric_dialog = Psychometric(self.subject_protocols)
psychometric_dialog.exec_()
# if user cancels, return
if psychometric_dialog.result() != 1:
return
chart = viz.plot_psychometric(psychometric_dialog.plot_params)
text, ok = QtGui.QInputDialog.getText(self, 'save plot?', 'what to call this thing')
if ok:
chart.save(text)
#chart.serve()
#viz.plot_psychometric(self.subjects_protocols)
#result = psychometric_dialog.exec_()
    def closeEvent(self, event):
        """
        When Closing the Terminal Window, close any running subject objects,
        'KILL' our networking object.

        Since the :class:`.Net_Node` keeping us alive is a `daemon`, no need
        to explicitly kill it.

        Args:
            event: the Qt close event; accepted unconditionally.
        """
        # Save the window geometry, to be optionally restored next time
        self.settings.setValue("geometry", self.saveGeometry())
        # TODO: Check if any subjects are currently running, pop dialog asking if we want to stop
        # Close all subjects files
        for m in self.subjects.values():
            if m.running is True:
                m.stop_run()
        # Stop networking
        # send message to kill networking process
        self.node.send(key="KILL")
        event.accept()
# Create the QApplication and run it
# Prefs were already loaded at the very top
if __name__ == "__main__":
    app = QtWidgets.QApplication(sys.argv)
    #app.setGraphicsSystem("opengl")
    app.setStyle('GTK+') # Keeps some GTK errors at bay
    # keep a module-level reference so the Terminal isn't garbage collected
    ex = Terminal()
    sys.exit(app.exec_())
|
wehr-lab/RPilot
|
autopilot/core/terminal.py
|
Python
|
gpl-3.0
| 33,971
|
# -----------------------------------------------------------------------------
# Copyright (c) 2014--, The American Gut Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
import posixpath
import urlparse
from json import loads, dumps
from collections import defaultdict
from future.utils import viewitems
from tornado.escape import url_escape
from wtforms import Form
from amgut import media_locale, text_locale
from amgut.lib.data_access.sql_connection import TRN
from amgut.connections import redis
from amgut.lib.vioscreen import encrypt_key
class PartitionResponse(object):
    """Partition survey responses by whether they map to a foreign key.

    SINGLE and MULTIPLE choice responses reference rows in the database and
    are collected in ``with_fk``; free-form TEXT and STRING responses are
    collected in ``without_fk``. Any other question type raises KeyError on
    assignment.
    """
    # question types whose responses are foreign keys into the DB
    _FK_TYPES = frozenset(['SINGLE', 'MULTIPLE'])
    # question types whose responses are stored verbatim
    _NO_FK_TYPES = frozenset(['TEXT', 'STRING'])

    def __init__(self, question_types):
        self.with_fk = {}
        self.without_fk = {}
        self._question_types = question_types

    def __setitem__(self, qid, value):
        qtype = self._question_types[qid]
        if qtype in self._FK_TYPES:
            self.with_fk[qid] = value
        elif qtype in self._NO_FK_TYPES:
            self.without_fk[qid] = value
        else:
            # unknown question type: same KeyError the original dispatch
            # dict would have raised
            raise KeyError(qtype)
def make_survey_class(group, survey_type):
    """Create a WTForms form class for a group of questions.

    The top-level attributes of the generated class correspond to the
    question_ids from amgut.lib.human_survey_supp structures. Select fields
    are generated for questions that require a single response, and sets of
    checkboxes for questions that can have multiple responses.
    """
    attrs = {}
    prompts = {}
    triggers = defaultdict(list)
    triggered = defaultdict(list)
    for question in group.questions:
        for eid, element in zip(question.interface_element_ids,
                                question.interface_elements):
            attrs[eid] = element
            prompts[eid] = question.question
            if not question.triggers:
                continue
            # remember which responses to this element cascade into which
            # other elements
            for trig_id, trig_responses in question.triggers.items():
                triggers[eid].extend(trig_responses)
                triggered[eid].extend(group.id_to_eid[trig_id])
    attrs.update(prompts=prompts, triggers=triggers, triggered=triggered,
                 supplemental_eids=group.supplemental_eids)
    return type(survey_type, (Form,), attrs)
def store_survey(survey, survey_id):
    """Store the survey

    Reads the in-progress responses out of redis, partitions them into
    foreign-key and free-text responses, and hands them to the survey
    object for persistence.

    Parameters
    ----------
    survey : amgut.lib.data_access.survey.Survey
        The corresponding survey
    survey_id : str
        The corresponding survey ID to retreive from redis
    """
    def get_survey_question_id(key):
        # redis keys look like "..._<question_id>_<suffix>"; pull the id out
        return int(key.split('_')[-2])
    data = redis.hgetall(survey_id)
    to_store = PartitionResponse(survey.question_types)
    # consent is stored alongside the pages but handled separately
    consent_details = loads(data.pop('consent'))
    if 'existing' in data:
        data.pop('existing')
    for page in data:
        page_data = loads(data[page])
        questions = page_data['questions']
        for quest, resps in viewitems(questions):
            qid = get_survey_question_id(quest)
            qtype = survey.question_types[qid]
            if resps is None:
                resps = {-1} # unspecified multiple choice
            elif qtype in ['SINGLE', 'MULTIPLE']:
                # choice responses arrive as strings; normalize to int indices
                resps = set([int(i) for i in resps])
            else:
                pass
            to_store[qid] = resps
    # expand choice indices into the actual response rows for insertion
    with_fk_inserts = []
    for qid, indices in viewitems(to_store.with_fk):
        question = survey.questions[qid]
        for idx in indices:
            resp = question.responses[idx] if idx != -1 else survey.unspecified
            with_fk_inserts.append((survey_id, qid, resp))
    # free-text responses are serialized to JSON as-is
    without_fk_inserts = [(survey_id, qid, dumps(v))
                          for qid, v in viewitems(to_store.without_fk)]
    survey.store_survey(consent_details, with_fk_inserts, without_fk_inserts)
def survey_vioscreen(survey_id, consent_info):
    """Return a formatted text block and URL for the external survey"""
    # the survey id is encrypted before being embedded in the remote login URL
    template = text_locale['human_survey_completed.html']['SURVEY_VIOSCREEN']
    key = url_escape(encrypt_key(survey_id))
    url = "https://vioscreen.com/remotelogin.aspx?Key=%s&RegCode=KLUCB" % key
    return template % url
def survey_asd(survey_id, consent_info):
    """Return a formatted text block and URL for the external survey"""
    template = text_locale['human_survey_completed.html']['SURVEY_ASD']
    url = media_locale['SURVEY_ASD_URL'] % {'survey_id': survey_id}
    return template % url
def survey_fermented(survey_id, consent_info):
    """Return a formatted text block and URL for the external survey"""
    template = text_locale['human_survey_completed.html']['SURVEY_FERMENTED']
    url = ('/authed/secondary_survey/?type=fermented&participant_name=%s' %
           consent_info['participant_name'])
    return template % url
def survey_surf(survey_id, consent_info):
    """Return a formatted text block and URL for the external survey"""
    template = text_locale['human_survey_completed.html']['SURVEY_SURF']
    url = ('/authed/secondary_survey/?type=surf&participant_name=%s' %
           consent_info['participant_name'])
    return template % url
external_surveys = (survey_vioscreen, survey_fermented, survey_surf)
def rollback(f):
    """Decorator for test functions to rollback on complete.

    Runs *f* inside a transaction and rolls the transaction back before
    returning *f*'s result, so test functions leave no trace in the DB.
    """
    # local import keeps the module's import block untouched
    from functools import wraps

    @wraps(f)  # preserve f's name/docstring for introspection and test tools
    def inner(*args, **kwargs):
        with TRN:
            x = f(*args, **kwargs)
            TRN.rollback()
        return x
    return inner
def basejoin(base, url):
    """
    Add the specified relative URL to the supplied base URL.
    >>> tests = [
    ...     ('https://abc.xyz', 'd/e'),
    ...     ('https://abc.xyz/', 'd/e'),
    ...     ('https://abc.xyz', '/d/e'),
    ...     ('https://abc.xyz/', '/d/e'),
    ...
    ...     ('https://abc.xyz', '/d/e?a=b'),
    ...     ('https://abc.xyz/', '/d/e?a=b'),
    ...
    ...     ('https://abc.xyz', 'd/e/'),
    ...     ('https://abc.xyz/', 'd/e/'),
    ...     ('https://abc.xyz', '/d/e/'),
    ...     ('https://abc.xyz/', '/d/e/'),
    ...
    ...     ('https://abc.xyz', 'd/e/?a=b'),
    ...     ('https://abc.xyz/', 'd/e/?a=b'),
    ...
    ...     ('https://abc.xyz/f', 'd/e/'),
    ...     ('https://abc.xyz/f/', 'd/e/'),
    ...     ('https://abc.xyz/f', '/d/e/'),
    ...     ('https://abc.xyz/f/', '/d/e/'),
    ...
    ...     ('https://abc.xyz/f', './e/'),
    ...     ('https://abc.xyz/f/', './e/'),
    ...
    ...     ('https://abc.xyz/f', '../e/'),
    ...     ('https://abc.xyz/f/', '../e/'),
    ...
    ...     ('https://abc.xyz/f', 'd/../e/'),
    ...     ('https://abc.xyz/f/', 'd/../e/'),
    ...     ('https://abc.xyz/f', '/d/../e/'),
    ...     ('https://abc.xyz/f/', '/d/../e/'),
    ... ]
    >>> for result in [basejoin(a, b) for a, b in tests]:
    ...     print result
    https://abc.xyz/d/e
    https://abc.xyz/d/e
    https://abc.xyz/d/e
    https://abc.xyz/d/e
    https://abc.xyz/d/e?a=b
    https://abc.xyz/d/e?a=b
    https://abc.xyz/d/e/
    https://abc.xyz/d/e/
    https://abc.xyz/d/e/
    https://abc.xyz/d/e/
    https://abc.xyz/d/e/?a=b
    https://abc.xyz/d/e/?a=b
    https://abc.xyz/f/d/e/
    https://abc.xyz/f/d/e/
    https://abc.xyz/f/d/e/
    https://abc.xyz/f/d/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    https://abc.xyz/f/e/
    """
    # The base URL is authoritative: a relative URL like '../' must never
    # strip components off the base, so the base always ends with '/'.
    anchored_base = base if base.endswith('/') else base + '/'
    # Collapse internal navigation, e.g. "./e/../d/" becomes "d"
    rel = posixpath.normpath(url)
    # Authoritativeness again: drop a leading '..' so it can't climb above
    # the base ...
    if rel.startswith('..'):
        rel = rel[2:]
    # ... and turn an absolute path into a relative one.
    if rel.startswith('/'):
        rel = '.' + rel
    # normpath strips a trailing slash; restore it when the caller had one
    if url.endswith('/') and not rel.endswith('/'):
        rel += '/'
    parts = urlparse.urlparse(urlparse.urljoin(anchored_base, rel))
    return urlparse.urlunparse((parts.scheme,
                                parts.netloc,
                                parts.path,
                                parts.params,
                                parts.query,
                                parts.fragment))
if __name__ == '__main__':
    # Run the basejoin doctests above when executed as a script
    import doctest
    doctest.testmod(verbose=True, optionflags=(doctest.NORMALIZE_WHITESPACE |
                                               doctest.REPORT_NDIFF))
|
squirrelo/american-gut-web
|
amgut/lib/util.py
|
Python
|
bsd-3-clause
| 9,073
|
from optparse import OptionParser
import os,sys
from oldowan.mitotype.matcher import HVRMatcher
from oldowan.mitotype.prevalidate import prevalidate_submission
def run_command():
    """Perform automated human mtDNA haplotype identification.

    Command-line entry point: parses options, reads the query sequence from
    the command line or a FASTA file, matches it against known haplotype
    motifs, and prints the results (optionally as CSV and/or to a file).
    """
    # Set up the options parser
    usage = "usage: %prog [options] sequence|filename"
    parser = OptionParser(usage=usage)
    parser.add_option('-f',
                      '--file',
                      action='store_true',
                      default=False,
                      help='load sequences from FASTA file',
                      dest='use_file')
    parser.add_option('-c',
                      '--csv',
                      action='store_true',
                      dest='csv',
                      default=False,
                      help='output in comma-separated-value format')
    parser.add_option('-n',
                      '--no-csv-header',
                      action='store_false',
                      dest='csv_header',
                      default=True,
                      help='output a csv header')
    parser.add_option('-o',
                      '--out',
                      dest='outfile',
                      help='write results to FILE',
                      default=False,
                      metavar='FILE')
    # Parse the options
    (options, args) = parser.parse_args()
    # At least one argument is always required.
    # It will be either the sequence to be tested, or
    # When the -f flag is used, the filename of the fasta file
    # to be tested
    if len(args) != 1:
        if options.use_file:
            print 'You must provide a filename!'
            print "Type 'mitotype -h' for help."
        else:
            print 'You must provide a sequence to test'
            print "Type 'mitotype -h' for help."
        sys.exit(1)
    # If we've made it this far we're probably going to have to do some
    # actual work; initialize the matcher.
    hvrm = HVRMatcher()
    # Do the work, either:
    # (1) load the fasta file
    # (2) use sequence passed on the command line
    working_text = ''
    if options.use_file:
        if os.path.exists(args[0]):
            f = open(args[0], 'r')
            working_text = f.read()
            f.close()
        else:
            print 'ERROR: Could not find file: %s' % args[0]
            sys.exit(1)
    else:
        working_text = args[0]
    vi = prevalidate_submission(working_text)
    if not vi.valid:
        print 'ERROR: Could not validate input: %s' % vi.problem
    # NOTE(review): execution continues into matching even when prevalidation
    # fails above -- confirm whether a sys.exit(1) is intended there.
    results = hvrm.match(working_text, vi)
    # If outfile option is used, make stdout point to that file
    if options.outfile:
        outf = open(options.outfile, 'w')
        sys.stdout = outf
    # If we're outputing to CSV, spit out a header
    if options.csv and options.csv_header:
        print 'Query Label,Query Defining Positions,Motif Label,Match Score,Motif Defining Positions,Source'
    # Output the results
    for r in results:
        if options.csv:
            for row in r.csv_rows():
                print row
        else:
            print r
    sys.stdout.flush()
|
ryanraaum/oldowan.mitotype
|
oldowan/mitotype/commandline.py
|
Python
|
mit
| 3,160
|
from sip import *
from random import *
###################### defines: ############################
# Legacy lowercase boolean aliases used throughout this startup script.
true=1
false=0
###################### MAIN WIDGETS: #######################
# Singleton accessors for BALLView's main widgets: each widget class exposes
# a getInstance(index) classmethod, and this script always wants instance 0.
def getMainControl():
    return MainControl.getInstance(0)
def getMolecularStructure():
    return MolecularStructure.getInstance(0)
def getMolecularControl():
    return MolecularControl.getInstance(0)
def getDisplayProperties():
    return DisplayProperties.getInstance(0)
def getScene():
    return Scene.getInstance(0)
#def getEScene():
#	return EditableScene.getInstance(0)
def getGeometricControl():
    return GeometricControl.getInstance(0)
def getLogView():
    return LogView.getInstance(0)
def getPyWidget():
    return PyWidget.getInstance(0)
def getDatasetControl():
    return DatasetControl.getInstance(0)
def hideAllWidgets():
    # Hide every dockable widget; used before showing a single one.
    getScene().setWidgetVisible(0)
    getGeometricControl().setWidgetVisible(0)
    getMolecularControl().setWidgetVisible(0)
    getLogView().setWidgetVisible(0)
    getDatasetControl().setWidgetVisible(0)
    getPyWidget().setWidgetVisible(0)
def showOnlyScene():
    # Show the 3D scene alone and let pending Qt events run (5000 ms budget).
    hideAllWidgets()
    getScene().setWidgetVisible(1)
    getMainControl().processEvents(5000)
def setSceneSize(width, height):
    # Resize the main window content, then redraw the scene alone.
    getMainControl().setContentSize(width, height)
    showOnlyScene()
    getScene().update(false)
###################### SHORTCUTS: #######################
# map a key to a python command:
# modifier can be "" or None, Shift, Ctrl
# NOTE: `map`, `quit` and `run` deliberately shadow Python builtins -- this
# script is BALLView's interactive console API, where short names win.
def map(modifier, key, command):
    getPyWidget().map(modifier, key, command)
def log(to_log):
    # echo to the status bar and to stdout
    getMainControl().setStatusbarText(to_log, 1)
    print to_log
def abortScript():
    getPyWidget().abortScript()
def quit():
    getMainControl().quit()
def run(file):
    getPyWidget().openFile(file, true)
def openFile(file):
    return getMainControl().openFile(file)
def getSystems():
    # all loaded molecular systems (composites)
    return getMainControl().getCompositeManager().getComposites()
def getSystem(nr):
    return getSystems()[nr]
def getSelection():
    return getMainControl().getSelection()
def getMolecularControlSelection():
    return getMainControl().getMolecularControlSelection()
# get a list with the Highlighted System or first one
def getOneSystem():
    """Return a one-element list holding the root System of the highlighted
    item (falling back to the first loaded System), and highlight it in the
    MolecularControl."""
    selection = getMolecularControlSelection()
    if len(selection) != 1:
        log("Warning: One System should be highlighted!")
        selection.append(getSystem(0))
    chosen = [selection[0].getRoot()]
    getMolecularControl().highlight(chosen)
    return chosen
def getRepresentations():
    # all graphical representations currently managed by BALLView
    return getMainControl().getRepresentationManager().getRepresentations()
def getRepresentationByName(name):
    """Return the first representation with the given name, or None."""
    # short-circuits on the first match, like the original loop
    return next((rep for rep in getRepresentations()
                 if rep.getName() == name), None)
def getForceField():
    return getMolecularStructure().getForceField()
def setCamera(camera):
    getScene().setCamera(camera)
###################### HOTKEYS: #######################
def hideAllRepresentations():
    # hide (but keep) every representation, notifying the GUI of each change
    for i in range(len(getRepresentations())):
        getRepresentations()[i].setHidden(1)
        getMainControl().update(getRepresentations()[i])
def clearRepresentations():
    # remove representations one at a time; the list shrinks as we go
    while len(getRepresentations()) > 0:
        getMainControl().remove(getRepresentations()[0])
def clearMolecules():
    while len(getSystems()) > 0:
        getMainControl().remove(getSystem(0))
def clearAll():
    clearMolecules()
    clearRepresentations()
def setMultithreading(mode):
    getMainControl().setMultithreading(mode)
def runScript(filename):
    getPyWidget().openFile(filename, true)
def runCurrentScript():
    getPyWidget().runCurrentScript()
def quickSave():
    getMainControl().quickSave()
def quickLoad():
    getMainControl().quickLoad()
def removeWater():
    # Select all water residues and cut them out. Multithreading is disabled
    # around the selector/cut so the operations run synchronously.
    getMainControl().clearSelection()
    setMultithreading(0)
    if getMolecularControl().applySelector("residue(HOH)") == 0:
        # nothing matched: restore threading and bail
        setMultithreading(1)
        return
    getMolecularControl().highlightSelection()
    getMolecularControl().cut()
    setMultithreading(1)
def addOptimizedHydrogens():
    # Add hydrogens to the highlighted system, then energy-minimize only the
    # (selected) hydrogens.
    getOneSystem()
    setMultithreading(0)
    getMolecularStructure().addHydrogens()
    getMolecularControl().applySelector("element(H)")
    setMultithreading(1)
    getMolecularStructure().runMinimization(false)
def relaxStructure():
    # getOneSystem() is called for its side effect (highlighting a single
    # system); its return value is unused here.
    s = getOneSystem()
    getScene().optimizeStructure()
def highlightLigand():
    # Highlight and return all non-amino-acid residues (the "ligand") of the
    # one highlighted system, after stripping water.
    s = getOneSystem()
    removeWater()
    S = s[0]
    l = []
    for r in residues(S):
        if not r.isAminoAcid():
            l.append(r)
    getMolecularControl().highlight(l)
    return l
def showCartoonAndLigand():
    # Ligand as VDW spheres colored by element, protein as a cartoon colored
    # by residue index.
    s = getOneSystem()
    S = s[0]
    clearRepresentations()
    highlightLigand()
    getDisplayProperties().selectModel(MODEL_VDW)
    getDisplayProperties().selectColoringMethod(COLORING_ELEMENT)
    getDisplayProperties().apply()
    l = []
    l.append(S)
    getMolecularControl().highlight(l)
    getMolecularControl().centerCamera()
    getDisplayProperties().selectModel(MODEL_CARTOON)
    getDisplayProperties().selectColoringMethod(COLORING_RESIDUE_INDEX)
    getDisplayProperties().apply()
def printAtomTypesForHighlighted():
    # Log the force-field atom type of every highlighted atom (descending
    # into non-atom selections).
    s = getMolecularControlSelection()
    log("Atom types for highlighted Items:")
    for r in s:
        if r.__class__ == BALL.Atom:
            log(str(r.getFullName(Atom.ADD_RESIDUE_ID))+" : "+str(r.getType()))
        else:
            for a in atoms(r):
                log(a.getFullName(Atom.ADD_RESIDUE_ID) +" : "+str(a.getType()))
def printAtomTypesForLigands():
    highlightLigand()
    printAtomTypesForHighlighted()
def randomizeAtoms(md):
    # Jitter every atom position by a uniform offset in [-md, md] per axis.
    atoml = atoms(getOneSystem()[0])
    for i in range(0, len(atoml)):
        atoml[i].setPosition(atoml[i].getPosition() + Vector3(uniform(-md,md), uniform(-md,md), uniform(-md,md)))
    getMainControl().update(getOneSystem()[0])
###################### EXAMPLES: #######################
def createStickModel():
    # NOTE(review): despite the name, this configures a VDW (space-filling)
    # model, not a stick model -- confirm whether MODEL_VDW is intended.
    dp = getDisplayProperties()
    dp.setDrawingPrecision(DRAWING_PRECISION_HIGH)
    dp.selectMode(DRAWING_MODE_SOLID)
    dp.selectModel(MODEL_VDW)
    dp.selectColoringMethod(COLORING_ELEMENT)
    dp.setTransparency(0)
    dp.apply()
def createChainSurfaces():
    # One solvent-excluded surface per chain, colored by chain.
    clearRepresentations()
    s = getOneSystem()[0]
    getDisplayProperties().selectModel(MODEL_SE_SURFACE)
    getDisplayProperties().selectColoringMethod(COLORING_CHAIN)
    for c in chains(s):
        l = []
        l.append(c)
        getDisplayProperties().createRepresentation(l)
def addPlane(plane_specifier, height, boundary, bottom = True):
    """Insert a rectangular plane mesh next to the loaded systems.

    The plane is axis-aligned: plane_specifier is 'x', 'y' or 'z' (the axis
    the plane is perpendicular to). `height` offsets the plane along that
    axis from the systems' joint bounding box; `boundary` extends the plane
    past the bounding box in the other two axes. With bottom=False the plane
    is placed on the upper side of the bounding box instead of the lower.
    """
    # Start from the bounding box of the first system ...
    systems = getSystems()
    system = systems[0]
    bbp = BoundingBoxProcessor()
    system.apply(bbp)
    v_low = Vector3(bbp.getLower().x, bbp.getLower().y, bbp.getLower().z)
    v_upp = Vector3(bbp.getUpper().x, bbp.getUpper().y, bbp.getUpper().z)
    #
    # ... and grow it to cover every other system's bounding box.
    if (len(systems) > 1):
        for system in systems[1:]:
            system.apply(bbp)
            low = bbp.getLower()
            upp = bbp.getUpper()
            #
            if v_low.x > low.x:
                v_low.x = low.x
            if v_low.y > low.y:
                v_low.y = low.y
            if v_low.z > low.z:
                v_low.z = low.z
            #
            if v_upp.x < upp.x:
                v_upp.x = upp.x
            if v_upp.y < upp.y:
                v_upp.y = upp.y
            if v_upp.z < upp.z:
                v_upp.z = upp.z
    #
    # For a top plane, swap the corners and mirror the offsets.
    if (not bottom):
        v_tmp = v_low
        v_low = v_upp
        v_upp = v_tmp
        height = height*(-1)
        boundary = boundary*(-1)
    #
    v_low_left = Vector3()
    v_low_right = Vector3()
    v_upp_right = Vector3()
    v_upp_left = Vector3()
    #
    normal = Vector3()
    #
    # Compute the four corners and the normal for the chosen axis.
    if (plane_specifier == 'x'):
        v_low = v_low - Vector3(height, boundary, boundary)
        v_upp = v_upp + Vector3(height, boundary, boundary)
        #
        v_low_left = Vector3(v_low.x, v_low.y, v_low.z)
        v_low_right = Vector3(v_low.x, v_upp.y, v_low.z)
        v_upp_right = Vector3(v_low.x, v_upp.y, v_upp.z)
        v_upp_left = Vector3(v_low.x, v_low.y, v_upp.z)
        #
        normal = Vector3(1, 0, 0)
        #
    elif (plane_specifier == 'y'):
        v_low = v_low - Vector3(boundary, height, boundary)
        v_upp = v_upp + Vector3(boundary, height, boundary)
        #
        v_low_left = Vector3(v_low.x, v_low.y, v_low.z)
        v_low_right = Vector3(v_low.x, v_low.y, v_upp.z)
        v_upp_right = Vector3(v_upp.x, v_low.y, v_upp.z)
        v_upp_left = Vector3(v_upp.x, v_low.y, v_low.z)
        #
        normal = Vector3(0, 1, 0)
        #
    elif (plane_specifier == 'z'):
        v_low = v_low - Vector3(boundary, boundary, height)
        v_upp = v_upp + Vector3(boundary, boundary, height)
        #
        v_low_left = Vector3(v_low.x, v_low.y, v_low.z)
        v_low_right = Vector3(v_low.x, v_upp.y, v_low.z)
        v_upp_right = Vector3(v_upp.x, v_upp.y, v_low.z)
        v_upp_left = Vector3(v_upp.x, v_low.y, v_low.z)
        #
        normal = Vector3(0, 0, 1)
    #
    # Build the quad as a two-triangle mesh.
    plane = Mesh()
    #
    # the vertices
    plane.pushBackVertex(v_low_left)
    plane.pushBackVertex(v_low_right)
    plane.pushBackVertex(v_upp_right)
    plane.pushBackVertex(v_upp_left)
    #
    # the triangles
    t1 = Mesh.Triangle()
    t1.v1 = 0 # v_low_left
    t1.v2 = 1 # v_low_right
    t1.v3 = 2 # v_upp_right
    #
    plane.pushBackTriangle(t1)
    #
    t2 = Mesh.Triangle()
    t2.v1 = 2 # v_upp_right
    t2.v2 = 3 # v_upp_left
    t2.v3 = 0 # v_low_left
    #
    plane.pushBackTriangle(t2)
    #
    # the normals (one per vertex, all identical for a flat plane)
    for i in range(4):
        plane.pushBackNormal(normal)
    #
    # dark grey, fully opaque
    color = ColorRGBA(0.2,0.2,0.2,1)
    #
    colors = plane.getColors()
    colors.append(color)
    plane.setColors(colors)
    #
    # Wrap the mesh in a representation and hand it to the GUI.
    r = Representation()
    r.insert(plane)
    r.setModelType(MODEL_PLANE)
    #
    getMainControl().insert(r)
    getMainControl().update(r)
import os
# Optional startup behavior driven by environment variables.
# NOTE: dict.has_key is Python 2 only, consistent with this script's
# Python 2 print statements above.
if os.environ.has_key('BALLVIEW_ENTER_STEREO') and (os.environ['BALLVIEW_ENTER_STEREO'].upper() == "TRUE"):
    Scene.getInstance(0).enterStereo()
if os.environ.has_key('BALLVIEW_FULLSCREEN') and (os.environ['BALLVIEW_FULLSCREEN'].upper() == "TRUE"):
    getMainControl().showFullScreen()
|
tkemmer/ball
|
data/startup.py
|
Python
|
lgpl-2.1
| 9,387
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""yamltodb - generate SQL statements to update a PostgreSQL database
to match the schema specified in a YAML file"""
from __future__ import print_function
import sys
from argparse import FileType
import yaml
from pyrseas import __version__
from pyrseas.database import Database
from pyrseas.cmdargs import cmd_parser, parse_args
from pyrseas.lib.pycompat import PY2
def main():
    """Convert YAML specifications to database DDL.

    Parses command-line options, diffs the YAML-described schema against the
    live database, prints the resulting SQL statements, and optionally applies
    them inside a single transaction.
    """
    parser = cmd_parser("Generate SQL statements to update a PostgreSQL "
                        "database to match the schema specified in a "
                        "YAML-formatted file(s)", __version__)
    parser.add_argument('-m', '--multiple-files', action='store_true',
                        help='input from multiple files (metadata directory)')
    parser.add_argument('spec', nargs='?', type=FileType('r'),
                        default=sys.stdin, help='YAML specification')
    parser.add_argument('-1', '--single-transaction', action='store_true',
                        dest='onetrans', help="wrap commands in BEGIN/COMMIT")
    parser.add_argument('-u', '--update', action='store_true',
                        help="apply changes to database (implies -1)")
    parser.add_argument('--revert', action='store_true',
                        help="generate SQL to revert changes")
    parser.add_argument('--quote-reserved', action='store_true',
                        help="quote SQL reserved words")
    parser.add_argument('-n', '--schema', metavar='SCHEMA', dest='schemas',
                        action='append', default=[],
                        help="process only named schema(s) (default all)")
    config = parse_args(parser)
    out_file = config['files']['output']
    opts = config['options']
    database = Database(config)
    # Input is either a metadata directory or a single YAML stream.
    if opts.multiple_files:
        in_map = database.map_from_dir()
    else:
        in_map = yaml.safe_load(opts.spec)
    statements = database.diff_map(in_map)
    if statements:
        dest = out_file or sys.stdout
        wrap_in_transaction = opts.onetrans or opts.update
        if wrap_in_transaction:
            print("BEGIN;", file=dest)
        for statement in statements:
            # Tuples are COPY directives; anything else is a plain statement.
            if isinstance(statement, tuple):
                text = "".join(statement) + '\n'
            else:
                text = "%s;\n" % statement
            if PY2:
                text = text.encode('utf-8')
            print(text, file=dest)
        if wrap_in_transaction:
            print("COMMIT;", file=dest)
        if opts.update:
            try:
                for statement in statements:
                    if isinstance(statement, tuple):
                        # expected format: (\copy, table, from, path, csv)
                        database.dbconn.copy_from(statement[3], statement[1])
                    else:
                        database.dbconn.execute(statement)
            except:
                # Roll back on *any* failure (including interrupts), then
                # propagate the original error.
                database.dbconn.rollback()
                raise
            else:
                database.dbconn.commit()
                print("Changes applied", file=sys.stderr)
    if out_file:
        out_file.close()


if __name__ == '__main__':
    main()
|
dvarrazzo/Pyrseas
|
pyrseas/yamltodb.py
|
Python
|
bsd-3-clause
| 3,088
|
from abc import ABCMeta, abstractmethod
from typing import List
from minesweeper.common.values import Board, CellType, InvalidBoard
from minesweeper.create_minefield import CreateMinefieldUseCase
class CreateBoardUseCaseObserver(metaclass=ABCMeta):
    """Callback interface invoked by CreateBoardUseCase once a board exists."""

    @abstractmethod
    def did_create_board(self,
                         board: Board,
                         board_snapshot: List[List[CellType]]):
        """Receive the newly created board and a row-major snapshot of its cells."""
        pass
class CreateBoardUseCase(object):
    """Validate requested board dimensions and create a new board.

    On success the observer is notified with the created Board and a snapshot
    in which every cell is CellType.Unknown.
    """

    def __init__(self, minimum_column_count=5, minimum_row_count=5):
        # Smallest board this use case will agree to create.
        self.minimum_column_count = minimum_column_count
        self.minimum_row_count = minimum_row_count

    def __call__(self,
                 row_count: int,
                 column_count: int,
                 observer: CreateBoardUseCaseObserver):
        """Create a row_count x column_count board and notify *observer*.

        :raises InvalidBoard: if either dimension is below its configured minimum
        """
        if column_count < self.minimum_column_count or row_count < self.minimum_row_count:
            raise InvalidBoard(requested_row_count=row_count,
                               requested_column_count=column_count,
                               minimum_row_count=self.minimum_row_count,
                               minimum_column_count=self.minimum_column_count)
        board = Board(row_count=row_count, column_count=column_count)
        # Loop indices are unused; '_' makes that explicit (was 'c'/'r').
        board_snapshot = [[CellType.Unknown for _ in range(column_count)]
                          for _ in range(row_count)]
        observer.did_create_board(board=board, board_snapshot=board_snapshot)
|
wileykestner/minesweeper
|
minesweeper/create_board.py
|
Python
|
mit
| 1,404
|
from django.core.urlresolvers import reverse
from django.http import HttpResponseBadRequest
from django.views.generic import View
from pulp.server import exceptions as pulp_exceptions
from pulp.server.auth import authorization
from pulp.server.db import model
from pulp.server.db.model.consumer import ConsumerGroup
from pulp.server.db.model.criteria import Criteria
from pulp.server.managers import factory
from pulp.server.managers.consumer.group.cud import bind, unbind
from pulp.server.managers.consumer.group import query
from pulp.server.webservices.views.decorators import auth_required
from pulp.server.webservices.views import search
from pulp.server.webservices.views.util import (generate_json_response,
generate_json_response_with_pulp_encoder,
generate_redirect_response,
json_body_allow_empty,
json_body_required)
def serialize(group):
    """
    Attach an '_href' link pointing at the consumer group resource view.

    :param group: consumer group to serialize
    :type group: dict
    :return: the same group dict, with '_href' added
    :rtype: dict
    """
    href = reverse('consumer_group_resource',
                   kwargs={'consumer_group_id': group['id']})
    group['_href'] = href
    return group
class ConsumerGroupView(View):
    """
    Views for consumer groups.
    """

    @auth_required(authorization.READ)
    def get(self, request):
        """
        List the available consumer groups.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :return: Response containing a list of consumer groups
        :rtype: django.http.HttpResponse
        """
        all_groups = [serialize(group)
                      for group in ConsumerGroup.get_collection().find({})]
        return generate_json_response_with_pulp_encoder(all_groups)

    @auth_required(authorization.CREATE)
    @json_body_required
    def post(self, request):
        """
        Create a consumer group and return a serialized object for the new group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :return: Response containing the consumer group
        :rtype: django.http.HttpResponse
        :raises: MissingValue if group ID is not provided
        :raises: InvalidValue if some parameters are invalid
        """
        body = request.body_as_json
        group_id = body.pop('id', None)
        if group_id is None:
            raise pulp_exceptions.MissingValue(['id'])
        known_fields = ('display_name', 'description', 'consumer_ids', 'notes')
        field_values = {name: body.pop(name, None) for name in known_fields}
        if body:
            # Anything left over was not a recognized parameter.
            raise pulp_exceptions.InvalidValue(body.keys())
        group = factory.consumer_group_manager().create_consumer_group(
            group_id,
            field_values['display_name'],
            field_values['description'],
            field_values['consumer_ids'],
            field_values['notes'])
        link = {"_href": reverse('consumer_group_resource',
                                 kwargs={'consumer_group_id': group['id']})}
        group.update(link)
        response = generate_json_response_with_pulp_encoder(group)
        return generate_redirect_response(response, link['_href'])
class ConsumerGroupResourceView(View):
    """
    Views for a specific consumer group.
    """

    @auth_required(authorization.READ)
    def get(self, request, consumer_group_id):
        """
        Return a serialized object representing the requested group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: id for the requested group
        :type consumer_group_id: str
        :return: Response containing data for the requested group
        :rtype: django.http.HttpResponse
        :raises: MissingResource if group ID does not exist
        """
        found = ConsumerGroup.get_collection().find_one({'id': consumer_group_id})
        if found is None:
            raise pulp_exceptions.MissingResource(consumer_group=consumer_group_id)
        return generate_json_response_with_pulp_encoder(serialize(found))

    @auth_required(authorization.DELETE)
    def delete(self, request, consumer_group_id):
        """
        Delete a specified consumer group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: id for the requested group
        :type consumer_group_id: str
        :return: An empty response
        :rtype: django.http.HttpResponse
        """
        deletion_result = factory.consumer_group_manager().delete_consumer_group(
            consumer_group_id)
        return generate_json_response(deletion_result)

    @auth_required(authorization.UPDATE)
    @json_body_allow_empty
    def put(self, request, consumer_group_id):
        """
        Update a specified consumer group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: id for the requested group
        :type consumer_group_id: str
        :return: Response representing the updated group
        :rtype: django.http.HttpResponse
        """
        delta = request.body_as_json
        updated = factory.consumer_group_manager().update_consumer_group(
            consumer_group_id, **delta)
        return generate_json_response_with_pulp_encoder(serialize(updated))
class ConsumerGroupSearchView(search.SearchView):
    """
    This view provides GET and POST searching on Consumer Groups.
    """
    # Encode Mongo/Pulp types (e.g. ObjectId, datetime) in the JSON response.
    response_builder = staticmethod(generate_json_response_with_pulp_encoder)
    # Query manager SearchView delegates the actual search criteria to.
    manager = query.ConsumerGroupQueryManager()
    # Post-process each result to attach its '_href' link.
    serializer = staticmethod(serialize)
class ConsumerGroupAssociateActionView(View):
    """
    Views for consumer association to the group.
    """

    @auth_required(authorization.EXECUTE)
    @json_body_allow_empty
    def post(self, request, consumer_group_id):
        """
        Associate a consumer to the group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: id for the requested group
        :type consumer_group_id: str
        :return: Response containing consumers bound to the group
        :rtype: django.http.HttpResponse
        """
        search_criteria = Criteria.from_client_input(
            request.body_as_json.get('criteria', {}))
        factory.consumer_group_manager().associate(consumer_group_id, search_criteria)
        group = factory.consumer_group_query_manager().get_group(consumer_group_id)
        return generate_json_response_with_pulp_encoder(group['consumer_ids'])
class ConsumerGroupUnassociateActionView(View):
    """
    Views for consumer unassociation from the group.
    """

    @auth_required(authorization.EXECUTE)
    @json_body_allow_empty
    def post(self, request, consumer_group_id):
        """
        Unassociate a consumer from the group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: id for the requested group
        :type consumer_group_id: str
        :return: Response containing consumers bound to the group
        :rtype: django.http.HttpResponse
        """
        search_criteria = Criteria.from_client_input(
            request.body_as_json.get('criteria', {}))
        factory.consumer_group_manager().unassociate(consumer_group_id, search_criteria)
        group = factory.consumer_group_query_manager().get_group(consumer_group_id)
        return generate_json_response_with_pulp_encoder(group['consumer_ids'])
class ConsumerGroupContentActionView(View):
    """
    Views for content manipulation on consumer group.
    """

    @auth_required(authorization.CREATE)
    @json_body_allow_empty
    def post(self, request, consumer_group_id, action):
        """
        Install/update/uninstall content unit/s on each consumer in the group.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: A consumer group ID.
        :type consumer_group_id: str
        :param action: type of action to perform
        :type action: str
        """
        # Reflection dispatch: 'action' must name one of the handler methods
        # below (install / update / uninstall); anything else is a 400.
        method = getattr(self, action, None)
        if method:
            return method(request, consumer_group_id)
        else:
            return HttpResponseBadRequest('bad request')

    def install(self, request, consumer_group_id):
        """
        Install content (units) on the consumers in a consumer group.

        Expected body: {units:[], options:<dict>}
        where unit is: {type_id:<str>, unit_key={}} and the
        options is a dict of install options.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: A consumer group ID.
        :type consumer_group_id: str
        :raises: OperationPostponed when an async operation is performed
        """
        body = request.body_as_json
        units = body.get('units')
        options = body.get('options')
        task = factory.consumer_group_manager().install_content(consumer_group_id,
                                                                units, options)
        # The result is delivered asynchronously; the raised exception carries
        # the spawned task back to the caller.
        raise pulp_exceptions.OperationPostponed(task)

    def update(self, request, consumer_group_id):
        """
        Update content (units) on the consumer in a consumer group.

        Expected body: {units:[], options:<dict>}
        where unit is: {type_id:<str>, unit_key={}} and the
        options is a dict of update options.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: A consumer group ID.
        :type consumer_group_id: str
        :raises: OperationPostponed when an async operation is performed
        """
        body = request.body_as_json
        units = body.get('units')
        options = body.get('options')
        task = factory.consumer_group_manager().update_content(consumer_group_id,
                                                               units, options)
        raise pulp_exceptions.OperationPostponed(task)

    def uninstall(self, request, consumer_group_id):
        """
        Uninstall content (units) from the consumers in a consumer group.

        Expected body: {units:[], options:<dict>}
        where unit is: {type_id:<str>, unit_key={}} and the
        options is a dict of uninstall options.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: A consumer group ID.
        :type consumer_group_id: str
        :raises: OperationPostponed when an async operation is performed
        """
        body = request.body_as_json
        units = body.get('units')
        options = body.get('options')
        task = factory.consumer_group_manager().uninstall_content(consumer_group_id,
                                                                  units, options)
        raise pulp_exceptions.OperationPostponed(task)
class ConsumerGroupBindingsView(View):
    """
    Views for repository binding to the group.
    """

    @auth_required(authorization.CREATE)
    @json_body_required
    def post(self, request, consumer_group_id):
        """
        Create a bind association between the consumers belonging to the given
        consumer group by id included in the URL path and a repo-distributor
        specified in the POST body: {repo_id:<str>, distributor_id:<str>}.
        Designed to be idempotent so only MissingResource is expected to
        be raised by manager.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: The consumer group ID to bind.
        :type consumer_group_id: str
        :raises: MissingResource if group id does not exist
        :raises: InvalidValue some parameters are invalid
        :raises: OperationPostponed when an async operation is performed
        """
        body = request.body_as_json
        repo_id = body.get('repo_id')
        distributor_id = body.get('distributor_id')
        binding_config = body.get('binding_config', None)
        options = body.get('options', {})
        notify_agent = body.get('notify_agent', True)
        missing = verify_group_resources(consumer_group_id, repo_id, distributor_id)
        if missing:
            # A missing group is the caller's resource; anything else means
            # the POSTed parameters were bad.
            if 'group_id' in missing:
                raise pulp_exceptions.MissingResource(**missing)
            raise pulp_exceptions.InvalidValue(list(missing))
        bind_task = bind.apply_async(
            (consumer_group_id, repo_id, distributor_id, notify_agent,
             binding_config, options))
        raise pulp_exceptions.OperationPostponed(bind_task)
class ConsumerGroupBindingView(View):
    """
    Represents a specific consumer group binding.
    """

    @auth_required(authorization.DELETE)
    def delete(self, request, consumer_group_id, repo_id, distributor_id):
        """
        Delete a bind association between the consumers belonging to the specified
        consumer group and repo-distributor. Designed to be idempotent.

        :param request: WSGI request object
        :type request: django.core.handlers.wsgi.WSGIRequest
        :param consumer_group_id: A consumer group ID.
        :type consumer_group_id: str
        :param repo_id: A repo ID.
        :type repo_id: str
        :param distributor_id: A distributor ID.
        :type distributor_id: str
        :raises: MissingResource if repo, distributor, group id is missing
        :raises: OperationPostponed when an async operation is performed
        """
        missing = verify_group_resources(consumer_group_id, repo_id, distributor_id)
        if missing:
            raise pulp_exceptions.MissingResource(**missing)
        unbind_task = unbind.apply_async(
            (consumer_group_id, repo_id, distributor_id, {}))
        raise pulp_exceptions.OperationPostponed(unbind_task)
def verify_group_resources(group_id, repo_id, distributor_id):
    """
    Confirm the group, repository, and distributor exist.

    :param group_id: The consumer group id to verify the existence of
    :type group_id: str
    :param repo_id: The repository id to confirm the existence of
    :type repo_id: str
    :param distributor_id: The distributor id to confirm the existence of on the repository
    :type distributor_id: str
    :return: A dictionary of the missing resources
    :rtype: dict
    """
    missing_resources = {}
    group_manager = factory.consumer_group_query_manager()
    distributor_manager = factory.repo_distributor_manager()
    try:
        group_manager.get_group(group_id)
    except pulp_exceptions.MissingResource:
        missing_resources['group_id'] = group_id
    # BUG FIX: Repository.objects is a mongoengine QuerySet manager, so the id
    # must be passed as a keyword filter. The previous positional call
    # `objects(repo_id)` would fail (positional args must be Q objects)
    # instead of filtering by id.
    repo = model.Repository.objects(repo_id=repo_id).first()
    if repo is None:
        missing_resources['repo_id'] = repo_id
    try:
        distributor_manager.get_distributor(repo_id, distributor_id)
    except pulp_exceptions.MissingResource:
        missing_resources['distributor_id'] = distributor_id
    return missing_resources
|
rbramwell/pulp
|
server/pulp/server/webservices/views/consumer_groups.py
|
Python
|
gpl-2.0
| 15,945
|
import os, json, logging
import shared
from subprocess import Popen, CalledProcessError
import multiprocessing
from tools.shared import check_call
# Streams handed to spawned subprocesses; calculate() rebinds them.
stdout = None
stderr = None


def call_process(cmd):
    """Run *cmd* as a subprocess, wiring it to the module-level streams.

    Raises a plain Exception on a non-zero exit status.
    """
    child = Popen(cmd, stdout=stdout, stderr=stderr)
    child.communicate()
    returncode = child.returncode
    if returncode == 0:
        return
    # Deliberately do not use CalledProcessError, see issue #2944
    raise Exception('Command \'%s\' returned non-zero exit status %s' % (cmd, returncode))
# Worker count: honour EMCC_CORES if set, else use every available core.
CORES = int(os.environ.get('EMCC_CORES') or multiprocessing.cpu_count())


def run_commands(commands):
    """Run each command line in *commands*, in parallel when it pays off."""
    worker_count = min(len(commands), CORES)
    if worker_count > 1:
        # Fan the commands out over a process pool, one command per task.
        multiprocessing.Pool(processes=worker_count).map(call_process, commands, chunksize=1)
    else:
        for command_line in commands:
            call_process(command_line)
def calculate(temp_files, in_temp, stdout_, stderr_, forced=[]):
  """Work out which system libraries the given bitcode files need, build
  (and cache) each required library, and return the list of library files
  to link in.

  temp_files: bitcode files scanned (via llvm-nm) for unresolved symbols.
  in_temp:    callback mapping a filename into the temporary build directory.
  stdout_/stderr_: streams forwarded to the subprocesses we spawn.
  forced:     extra library names to force-include regardless of symbols.
              (NOTE: mutable default argument, but it is only read here,
              never mutated, so the shared default is harmless.)
  """
  global stdout, stderr
  stdout = stdout_
  stderr = stderr_

  # Check if we need to include some libraries that we compile. (We implement libc ourselves in js, but
  # compile a malloc implementation and stdlibc++.)

  def read_symbols(path, exclude=None):
    # Each line of a .symbols file is '<kind> <name>'; keep just the names.
    symbols = map(lambda line: line.strip().split(' ')[1], open(path).readlines())
    if exclude:
      symbols = filter(lambda symbol: symbol not in exclude, symbols)
    return set(symbols)

  default_opts = []
  # If we're building tracing, we should build the system libraries that way too.
  if shared.Settings.EMSCRIPTEN_TRACING:
    default_opts.append('--tracing')

  # XXX We also need to add libc symbols that use malloc, for example strdup. It's very rare to use just them and not
  # a normal malloc symbol (like free, after calling strdup), so we haven't hit this yet, but it is possible.
  libc_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libc.symbols'))
  libcextra_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libcextra.symbols'))
  libcxx_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libcxx', 'symbols'), exclude=libc_symbols)
  libcxxabi_symbols = read_symbols(shared.path_from_root('system', 'lib', 'libcxxabi', 'symbols'), exclude=libc_symbols)
  gl_symbols = read_symbols(shared.path_from_root('system', 'lib', 'gl.symbols'))

  # XXX we should disable EMCC_DEBUG when building libs, just like in the relooper

  def build_libc(lib_filename, files, lib_opts):
    # Compile each C source with emcc and link the results into one bitcode file.
    o_s = []
    prev_cxx = os.environ.get('EMMAKEN_CXX')
    if prev_cxx: os.environ['EMMAKEN_CXX'] = ''
    musl_internal_includes = ['-I', shared.path_from_root('system', 'lib', 'libc', 'musl', 'src', 'internal'), '-I', shared.path_from_root('system', 'lib', 'libc', 'musl', 'arch', 'js')]
    commands = []
    # Hide several musl warnings that produce a lot of spam to unit test build server logs.
    # TODO: When updating musl the next time, feel free to recheck which of their warnings might have been fixed, and which ones of these could be cleaned up.
    c_opts = ['-Wno-dangling-else', '-Wno-unknown-pragmas', '-Wno-shift-op-parentheses', '-Wno-string-plus-int', '-Wno-logical-op-parentheses', '-Wno-bitwise-op-parentheses', '-Wno-visibility']
    for src in files:
      o = in_temp(os.path.basename(src) + '.o')
      commands.append([shared.PYTHON, shared.EMCC, shared.path_from_root('system', 'lib', src), '-o', o] + musl_internal_includes + default_opts + c_opts + lib_opts)
      o_s.append(o)
    run_commands(commands)
    if prev_cxx: os.environ['EMMAKEN_CXX'] = prev_cxx
    shared.Building.link(o_s, in_temp(lib_filename))
    return in_temp(lib_filename)

  def build_libcxx(src_dirname, lib_filename, files, lib_opts):
    # Same idea as build_libc, but for C++ sources (compiled with em++).
    o_s = []
    commands = []
    for src in files:
      o = in_temp(src + '.o')
      srcfile = shared.path_from_root(src_dirname, src)
      commands.append([shared.PYTHON, shared.EMXX, srcfile, '-o', o, '-std=c++11'] + default_opts + lib_opts)
      o_s.append(o)
    run_commands(commands)
    shared.Building.link(o_s, in_temp(lib_filename))
    return in_temp(lib_filename)

  # libc
  def create_libc():
    logging.debug(' building libc for cache')
    libc_files = [
      'dlmalloc.c',
    ]
    musl_files = [
      ['ctype', [
        'isdigit.c',
        'isspace.c',
        'isupper.c',
        'isxdigit.c',
        'tolower.c',
      ]],
      ['internal', [
        'intscan.c',
        'floatscan.c',
        'shgetc.c',
      ]],
      ['math', [
        'frexp.c',
        'frexpf.c',
        'frexpl.c',
        'scalbn.c',
        'scalbnl.c',
      ]],
      ['multibyte', [
        'wctomb.c',
        'wcrtomb.c',
      ]],
      ['prng', [
        '__rand48_step.c',
        '__seed48.c',
        'drand48.c',
        'lcong48.c',
        'lrand48.c',
        'mrand48.c',
        'rand_r.c',
        'rand.c',
        'random.c',
        'seed48.c',
        'srand48.c'
      ]],
      ['stdio', [
        '__overflow.c',
        '__toread.c',
        '__towrite.c',
        '__uflow.c',
        'fwrite.c',
        'snprintf.c',
        'sprintf.c',
        'vfprintf.c',
        'vsnprintf.c',
        'vsprintf.c',
      ]],
      ['stdlib', [
        'atof.c',
        'atoi.c',
        'atol.c',
        'strtod.c',
        'strtol.c',
      ]],
      ['string', [
        'memchr.c',
        'memcmp.c',
        'strcasecmp.c',
        'strcmp.c',
        'strncasecmp.c',
        'strncmp.c',
      ]]
    ]
    for directory, sources in musl_files:
      libc_files += [os.path.join('libc', 'musl', 'src', directory, source) for source in sources]
    return build_libc('libc.bc', libc_files, ['-O2'])

  def apply_libc(need):
    # libc needs some sign correction. # If we are in mode 0, switch to 2. We will add our lines
    try:
      if shared.Settings.CORRECT_SIGNS == 0: raise Exception('we need to change to 2')
    except: # we fail if equal to 0 - so we need to switch to 2 - or if CORRECT_SIGNS is not even in Settings
      shared.Settings.CORRECT_SIGNS = 2
    if shared.Settings.CORRECT_SIGNS == 2:
      shared.Settings.CORRECT_SIGNS_LINES = [shared.path_from_root('src', 'dlmalloc.c') + ':' + str(i+4) for i in [4816, 4191, 4246, 4199, 4205, 4235, 4227]]
    # If we are in mode 1, we are correcting everything anyhow. If we are in mode 3, we will be corrected
    # so all is well anyhow too.
    return True

  # libcextra
  def create_libcextra():
    logging.debug('building libcextra for cache')
    musl_files = [
      ['compat', [
        'strlwr.c',
        'strtol_l.c',
        'strupr.c'
      ]],
      ['ctype', [
        'isalnum.c',
        'isalpha.c',
        'isascii.c',
        'isblank.c',
        'iscntrl.c',
        'isgraph.c',
        'islower.c',
        'isprint.c',
        'ispunct.c',
        'iswalnum.c',
        'iswalpha.c',
        'iswblank.c',
        'iswcntrl.c',
        'iswctype.c',
        'iswdigit.c',
        'iswgraph.c',
        'iswlower.c',
        'iswprint.c',
        'iswpunct.c',
        'iswspace.c',
        'iswupper.c',
        'iswxdigit.c',
        'toascii.c',
        'toupper.c',
        'towctrans.c',
        'wcswidth.c',
        'wctrans.c',
        'wcwidth.c',
      ]],
      ['legacy', [
        'err.c',
      ]],
      ['locale', [
        'iconv.c',
        'isalnum_l.c',
        'isalpha_l.c',
        'isblank_l.c',
        'iscntrl_l.c',
        'isdigit_l.c',
        'isgraph_l.c',
        'islower_l.c',
        'isprint_l.c',
        'ispunct_l.c',
        'isspace_l.c',
        'isupper_l.c',
        'isxdigit_l.c',
        'iswalnum_l.c',
        'iswalpha_l.c',
        'iswblank_l.c',
        'iswcntrl_l.c',
        'iswctype_l.c',
        'iswdigit_l.c',
        'iswgraph_l.c',
        'iswlower_l.c',
        'iswprint_l.c',
        'iswpunct_l.c',
        'iswspace_l.c',
        'iswupper_l.c',
        'iswxdigit_l.c',
        'strcoll.c',
        'strcasecmp_l.c',
        'strfmon.c',
        'strncasecmp_l.c',
        'strxfrm.c',
        'tolower_l.c',
        'toupper_l.c',
        'towctrans_l.c',
        'towlower_l.c',
        'towupper_l.c',
        'wcscoll.c',
        'wcscoll_l.c',
        'wcsxfrm.c',
        'wcsxfrm_l.c',
        'wctrans_l.c',
        'wctype_l.c',
      ]],
      ['math', [
        '__cos.c',
        '__cosdf.c',
        '__sin.c',
        '__sindf.c',
        'ilogb.c',
        'ilogbf.c',
        'ilogbl.c',
        'j0.c',
        'j0f.c',
        'j1.c',
        'j1f.c',
        'jn.c',
        'jnf.c',
        'ldexp.c',
        'ldexpf.c',
        'ldexpl.c',
        'logb.c',
        'logbf.c',
        'logbl.c',
        'lgamma.c',
        'lgamma_r.c',
        'lgammaf.c',
        'lgammaf_r.c',
        'lgammal.c',
        'scalbnf.c',
        'signgam.c',
        'tgamma.c',
        'tgammaf.c',
        'tgammal.c'
      ]],
      ['misc', [
        'ffs.c',
        'getopt.c',
        'getopt_long.c',
      ]],
      ['multibyte', [
        'btowc.c',
        'internal.c',
        'mblen.c',
        'mbrlen.c',
        'mbrtowc.c',
        'mbsinit.c',
        'mbsnrtowcs.c',
        'mbsrtowcs.c',
        'mbstowcs.c',
        'mbtowc.c',
        'wcsnrtombs.c',
        'wcsrtombs.c',
        'wcstombs.c',
        'wctob.c',
      ]],
      ['regex', [
        'fnmatch.c',
        'glob.c',
        'regcomp.c',
        'regerror.c',
        'regexec.c',
        'tre-mem.c',
      ]],
      ['stdio', [
        '__string_read.c',
        'asprintf.c',
        'fwprintf.c',
        'swprintf.c',
        'vfwprintf.c',
        'vswprintf.c',
        'vwprintf.c',
        'wprintf.c',
        'fputwc.c',
        'fputws.c',
        'sscanf.c',
        'vasprintf.c',
        'vfscanf.c',
        'vsscanf.c',
      ]],
      ['stdlib', [
        'atoll.c',
        'bsearch.c',
        'ecvt.c',
        'fcvt.c',
        'gcvt.c',
        'qsort.c',
        'wcstod.c',
        'wcstol.c',
      ]],
      ['string', [
        'bcmp.c',
        'bcopy.c',
        'bzero.c',
        'index.c',
        'memccpy.c',
        'memmem.c',
        'mempcpy.c',
        'memrchr.c',
        'rindex.c',
        'stpcpy.c',
        'strcasestr.c',
        'strchr.c',
        'strchrnul.c',
        'strcspn.c',
        'strdup.c',
        'strlcat.c',
        'strlcpy.c',
        'strncat.c',
        'strndup.c',
        'strnlen.c',
        'strpbrk.c',
        'strrchr.c',
        'strsep.c',
        'strsignal.c',
        'strspn.c',
        'strstr.c',
        'strtok.c',
        'strtok_r.c',
        'strverscmp.c',
        'wcpcpy.c',
        'wcpncpy.c',
        'wcscasecmp.c',
        'wcscasecmp_l.c',
        'wcscat.c',
        'wcschr.c',
        'wcscmp.c',
        'wcscpy.c',
        'wcscspn.c',
        'wcsdup.c',
        'wcslen.c',
        'wcsncasecmp.c',
        'wcsncasecmp_l.c',
        'wcsncat.c',
        'wcsncmp.c',
        'wcsncpy.c',
        'wcsnlen.c',
        'wcspbrk.c',
        'wcsrchr.c',
        'wcsspn.c',
        'wcsstr.c',
        'wcstok.c',
        'wcswcs.c',
        'wmemchr.c',
        'wmemcmp.c',
        'wmemcpy.c',
        'wmemmove.c',
        'wmemset.c',
      ]]
    ]
    libcextra_files = []
    for directory, sources in musl_files:
      libcextra_files += [os.path.join('libc', 'musl', 'src', directory, source) for source in sources]
    return build_libc('libcextra.bc', libcextra_files, ['-O2'])

  # libcxx
  def create_libcxx():
    logging.debug('building libcxx for cache')
    libcxx_files = [
      'algorithm.cpp',
      'condition_variable.cpp',
      'future.cpp',
      'iostream.cpp',
      'memory.cpp',
      'random.cpp',
      'stdexcept.cpp',
      'system_error.cpp',
      'utility.cpp',
      'bind.cpp',
      'debug.cpp',
      'hash.cpp',
      'mutex.cpp',
      'string.cpp',
      'thread.cpp',
      'valarray.cpp',
      'chrono.cpp',
      'exception.cpp',
      'ios.cpp',
      'locale.cpp',
      'regex.cpp',
      'strstream.cpp'
    ]
    return build_libcxx(os.path.join('system', 'lib', 'libcxx'), 'libcxx.bc', libcxx_files, ['-Oz', '-Wno-warn-absolute-paths', '-I' + shared.path_from_root('system', 'lib', 'libcxxabi', 'include')])

  def apply_libcxx(need):
    assert shared.Settings.QUANTUM_SIZE == 4, 'We do not support libc++ with QUANTUM_SIZE == 1'
    # libcxx might need corrections, so turn them all on. TODO: check which are actually needed
    shared.Settings.CORRECT_SIGNS = shared.Settings.CORRECT_OVERFLOWS = shared.Settings.CORRECT_ROUNDINGS = 1
    #logging.info('using libcxx turns on CORRECT_* options')
    return True

  # libcxxabi - just for dynamic_cast for now
  def create_libcxxabi():
    logging.debug('building libcxxabi for cache')
    libcxxabi_files = [
      'abort_message.cpp',
      'cxa_aux_runtime.cpp',
      'cxa_default_handlers.cpp',
      'cxa_demangle.cpp',
      'cxa_exception_storage.cpp',
      'cxa_new_delete.cpp',
      'cxa_handlers.cpp',
      'exception.cpp',
      'stdexcept.cpp',
      'typeinfo.cpp',
      'private_typeinfo.cpp',
      os.path.join('..', '..', 'libcxx', 'new.cpp'),
    ]
    return build_libcxx(os.path.join('system', 'lib', 'libcxxabi', 'src'), 'libcxxabi.bc', libcxxabi_files, ['-Oz', '-Wno-warn-absolute-paths', '-I' + shared.path_from_root('system', 'lib', 'libcxxabi', 'include')])

  def apply_libcxxabi(need):
    assert shared.Settings.QUANTUM_SIZE == 4, 'We do not support libc++abi with QUANTUM_SIZE == 1'
    #logging.info('using libcxxabi, this may need CORRECT_* options')
    #shared.Settings.CORRECT_SIGNS = shared.Settings.CORRECT_OVERFLOWS = shared.Settings.CORRECT_ROUNDINGS = 1
    return True

  # gl
  def create_gl():
    prev_cxx = os.environ.get('EMMAKEN_CXX')
    if prev_cxx: os.environ['EMMAKEN_CXX'] = ''
    o = in_temp('gl.o')
    check_call([shared.PYTHON, shared.EMCC, shared.path_from_root('system', 'lib', 'gl.c'), '-o', o])
    if prev_cxx: os.environ['EMMAKEN_CXX'] = prev_cxx
    return o

  # Setting this in the environment will avoid checking dependencies and make building big projects a little faster
  # 1 means include everything; otherwise it can be the name of a lib (libcxx, etc.)
  # You can provide 1 to include everything, or a comma-separated list with the ones you want
  force = os.environ.get('EMCC_FORCE_STDLIBS')
  force_all = force == '1'
  force = set((force.split(',') if force else []) + forced)
  if force: logging.debug('forcing stdlibs: ' + str(force))

  # Setting this will only use the forced libs in EMCC_FORCE_STDLIBS. This avoids spending time checking
  # for unresolved symbols in your project files, which can speed up linking, but if you do not have
  # the proper list of actually needed libraries, errors can occur. See below for how we must
  # export all the symbols in deps_info when using this option.
  only_forced = os.environ.get('EMCC_ONLY_FORCED_STDLIBS')
  if only_forced:
    temp_files = []

  # Add in some hacks for js libraries. If a js lib depends on a symbol provided by a C library, it must be
  # added to here, because our deps go only one way (each library here is checked, then we check the next
  # in order - libcxx, libcxextra, etc. - and then we run the JS compiler and provide extra symbols from
  # library*.js files. But we cannot then go back to the C libraries if a new dep was added!
  # TODO: Move all __deps from src/library*.js to deps_info.json, and use that single source of info
  # both here and in the JS compiler.
  deps_info = json.loads(open(shared.path_from_root('src', 'deps_info.json')).read())
  added = set()

  def add_back_deps(need):
    # Fixed-point expansion: pull in C symbols that JS libraries depend on.
    more = False
    for ident, deps in deps_info.iteritems():
      if ident in need.undefs and not ident in added:
        added.add(ident)
        more = True
        for dep in deps:
          need.undefs.add(dep)
          shared.Settings.EXPORTED_FUNCTIONS.append('_' + dep)
    if more:
      add_back_deps(need) # recurse to get deps of deps

  # Scan symbols
  symbolses = map(lambda temp_file: shared.Building.llvm_nm(temp_file), temp_files)

  if len(symbolses) == 0:
    # No inputs scanned (e.g. only_forced): use an empty stand-in symbol table.
    class Dummy:
      defs = set()
      undefs = set()
    symbolses.append(Dummy())

  # depend on exported functions
  for export in shared.Settings.EXPORTED_FUNCTIONS:
    if shared.Settings.VERBOSE: logging.debug('adding dependency on export %s' % export)
    symbolses[0].undefs.add(export[1:])

  for symbols in symbolses:
    add_back_deps(symbols)

  # If we are only doing forced stdlibs, then we don't know the actual symbols we need,
  # and must assume all of deps_info must be exported. Note that this might cause
  # warnings on exports that do not exist.
  if only_forced:
    for key, value in deps_info.iteritems():
      for dep in value:
        shared.Settings.EXPORTED_FUNCTIONS.append('_' + dep)

  # NOTE(review): all_needed is computed here but never read below in this
  # function — looks like leftover/dead bookkeeping; confirm before removing.
  all_needed = set()
  for symbols in symbolses:
    all_needed.update(symbols.undefs)
  for symbols in symbolses:
    all_needed.difference_update(symbols.defs)

  # Go over libraries to figure out which we must include
  ret = []
  has = need = None

  for name, create, apply_, library_symbols, deps in [('libcxx', create_libcxx, apply_libcxx, libcxx_symbols, ['libcextra', 'libcxxabi']),
                                                      ('libcextra', create_libcextra, lambda x: True, libcextra_symbols, ['libc']),
                                                      ('libcxxabi', create_libcxxabi, apply_libcxxabi, libcxxabi_symbols, ['libc']),
                                                      ('gl', create_gl, lambda x: True, gl_symbols, ['libc']),
                                                      ('libc', create_libc, apply_libc, libc_symbols, [])]:
    force_this = force_all or name in force
    if not force_this:
      need = set()
      has = set()
      for symbols in symbolses:
        if shared.Settings.VERBOSE: logging.debug('undefs: ' + str(symbols.undefs))
        for library_symbol in library_symbols:
          if library_symbol in symbols.undefs:
            need.add(library_symbol)
          if library_symbol in symbols.defs:
            has.add(library_symbol)
      for haz in has: # remove symbols that are supplied by another of the inputs
        if haz in need:
          need.remove(haz)
      if shared.Settings.VERBOSE: logging.debug('considering %s: we need %s and have %s' % (name, str(need), str(has)))
    if force_this or (len(need) > 0 and not only_forced):
      if apply_(need):
        # We need to build and link the library in
        logging.debug('including %s' % name)
        libfile = shared.Cache.get(name, create)
        ret.append(libfile)
        # Including this library forces its own dependencies in as well.
        force = force.union(deps)
  return ret
#---------------------------------------------------------------------------
# emscripten-ports library management (https://github.com/emscripten-ports)
#---------------------------------------------------------------------------
import ports
class Ports:
  """Manages emscripten-ports libraries (https://github.com/emscripten-ports).

  Handles fetching a port's zip archive into the ports directory,
  unpacking it, checking its version against the expected one, and
  building it into the shared cache.
  """
  @staticmethod
  def run_commands(commands): # make easily available for port objects
    run_commands(commands)
  @staticmethod
  def get_dir():
    # Root directory holding all downloaded/unpacked ports; overridable
    # via the EM_PORTS environment variable.
    dirname = os.environ.get('EM_PORTS') or os.path.expanduser(os.path.join('~', '.emscripten_ports'))
    shared.safe_ensure_dirs(dirname)
    return dirname
  @staticmethod
  def erase():
    # Delete the entire ports directory (all downloaded/unpacked ports).
    shared.try_delete(Ports.get_dir())
  @staticmethod
  def get_build_dir():
    return shared.Cache.get_path('ports-builds')
  # Ports already mentioned in the log during this invocation (avoid spam).
  name_cache = set()
  @staticmethod
  def fetch_project(name, url, expected_version):
    """Ensure port *name* is downloaded, unpacked and recent enough.

    Retrieves the zip from *url* if missing, unpacks it, and re-fetches
    when the unpacked version.txt reports a version older than
    *expected_version*. When a fresh unpack happened, the port's cached
    build is invalidated so it gets rebuilt.
    """
    fullname = os.path.join(Ports.get_dir(), name)
    if name not in Ports.name_cache: # only mention each port once in log
      logging.warning('including port: ' + name)
      logging.debug(' (at ' + fullname + ')')
      Ports.name_cache.add(name)
    # Mutable flags shared by the nested helpers below.
    class State:
      retrieved = False
      unpacked = False
    def retrieve():
      # Download the port archive to <ports dir>/<name>.zip.
      logging.warning('retrieving port: ' + name + ' from ' + url)
      import urllib2
      f = urllib2.urlopen(url)
      data = f.read()
      open(fullname + '.zip', 'wb').write(data)
      State.retrieved = True
    def unpack():
      # Extract the downloaded zip into <ports dir>/<name>/.
      logging.warning('unpacking port: ' + name)
      import zipfile
      shared.safe_ensure_dirs(fullname)
      z = zipfile.ZipFile(fullname + '.zip', 'r')
      try:
        cwd = os.getcwd()
        os.chdir(fullname)
        z.extractall()
      finally:
        os.chdir(cwd)
      State.unpacked = True
    def check_version(expected_version):
      # Read <port>/<singleton subdir>/version.txt and compare.
      # NOTE(review): the early `return False` paths leave ok == False, so
      # the finally clause logs "error when checking port version" even for
      # the ordinary "not downloaded yet" case — confirm whether intended.
      try:
        ok = False
        if not os.path.exists(fullname): return False
        subdir = os.listdir(fullname)
        if len(subdir) != 1: return False
        subdir = subdir[0] # each port has a singleton subdir
        f = os.path.join(fullname, subdir, 'version.txt')
        if not os.path.exists(f): return False # no version, need an update
        version = open(f).read()
        version = int(version)
        ok = True
      finally:
        if not ok: logging.error('error when checking port version for ' + name)
      return version >= expected_version
    # main logic
    if not os.path.exists(fullname + '.zip'):
      retrieve()
    if not os.path.exists(fullname):
      unpack()
    if not check_version(expected_version):
      # fetch a newer version
      assert not State.retrieved, 'just retrieved port ' + name + ', but not a new enough version?'
      shared.try_delete(fullname)
      shared.try_delete(fullname + '.zip')
      retrieve()
      unpack()
      assert check_version(expected_version), 'just retrieved replacement port ' + name + ', but not a new enough version?'
    if State.unpacked:
      # we unpacked a new version, clear the build in the cache
      Ports.clear_project_build(name)
  @staticmethod
  def build_project(name, subdir, configure, generated_libs, post_create=None):
    """Build the port's single library (cached) and return its path."""
    def create():
      logging.warning('building port: ' + name + '...')
      port_build_dir = Ports.get_build_dir()
      shared.safe_ensure_dirs(port_build_dir)
      libs = shared.Building.build_library(name, port_build_dir, None, generated_libs, source_dir=os.path.join(Ports.get_dir(), name, subdir), copy_project=True,
                                           configure=configure, make=['make', '-j' + str(CORES)])
      assert len(libs) == 1
      if post_create: post_create()
      return libs[0]
    return shared.Cache.get(name, create)
  @staticmethod
  def clear_project_build(name):
    # Remove both the build tree and the cached .bc for this port.
    shared.try_delete(os.path.join(Ports.get_build_dir(), name))
    shared.try_delete(shared.Cache.get_path(name + '.bc'))
def get_ports(settings):
  """Collect the link inputs contributed by every registered port.

  Returns the files gathered from each port's get() hook. If any port
  raises, an advisory cache/ports-reset message is logged before the
  exception propagates to the caller.
  """
  collected = []
  succeeded = False
  try:
    for port in ports.ports:
      collected.extend(port.get(Ports, settings, shared))
    succeeded = True
  finally:
    if not succeeded:
      logging.error('a problem occurred when using an emscripten-ports library. try to run emcc --clear-cache --clear-ports and then run this command again')
  return collected
def process_args(args, settings):
  """Give every registered port a chance to rewrite the compiler args."""
  result = args
  for port in ports.ports:
    result = port.process_args(Ports, result, settings, shared)
  return result
def show_ports():
  # Print a human-readable list of every registered port (one per line).
  print 'Available ports:'
  for port in ports.ports:
    print ' ', port.show()
|
slightperturbation/Cobalt
|
ext/emsdk_portable/emscripten/1.27.0/tools/system_libs.py
|
Python
|
apache-2.0
| 23,035
|
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example gets all line items that need creatives for the given
order.
To create line items, run create_line_items.py."""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
# Set the id of the order to get line items from.
ORDER_ID = 'INSERT_ORDER_ID_HERE'
def main(client, order_id):
# Initialize appropriate service.
line_item_service = client.GetService('LineItemService', version='v201505')
# Create statement object to only select line items that need creatives from a
# given order.
values = [{
'key': 'orderId',
'value': {
'xsi_type': 'NumberValue',
'value': order_id
}
}, {
'key': 'isMissingCreatives',
'value': {
'xsi_type': 'BooleanValue',
'value': 'true'
}
}]
query = 'WHERE orderId = :orderId AND '
'isMissingCreatives = :isMissingCreatives'
statement = dfp.FilterStatement(query, values)
while True:
# Get line items by statement.
response = line_item_service.getLineItemsByStatement(
statement.ToStatement())
if 'results' in response:
# Display results.
for line_item in response['results']:
print ('Line item with id \'%s\', belonging to order id \'%s\', and '
'named \'%s\' was found.' % (line_item['id'],
line_item['orderId'],
line_item['name']))
statement.offset += dfp.SUGGESTED_PAGE_LIMIT
else:
break
print '\nNumber of results found: %s' % response['totalResultSetSize']
if __name__ == '__main__':
  # Initialize client object from stored credentials and run the example.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client, ORDER_ID)
|
cctaylor/googleads-python-lib
|
examples/dfp/v201505/line_item_service/get_line_items_by_statement.py
|
Python
|
apache-2.0
| 2,425
|
#!/cygdrive/c/Python27
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import os.path
import sys
from xml.dom import minidom
from xml.parsers.expat import ExpatError
# Pseudo user id for the root administrator; all commands are documented
# under this single user today.
ROOT_ADMIN = 'r'

# Per-user name of the generated Java populate method.
user_to_func = {
    ROOT_ADMIN: 'populateForApi',
}

# Per-user name of the generated Java Set that collects command names.
user_to_cns = {
    ROOT_ADMIN: 'allCommandNames',
}

# Maps an input directory name to the user whose docs it belongs to.
dirname_to_user = {
    'apis': ROOT_ADMIN,
}

# Maps an input directory name to the output directory used in links.
dirname_to_dirname = {
    'apis': 'apis',
}
# Maps a substring of a command XML filename to its documentation category.
# choose_category() scans these keys in order and returns the first match.
# FIX: removed the duplicate 'SystemVm' and 'UnmanagedInstance' keys (each
# appeared twice with the same value; in a dict literal the later entry
# silently overwrites the earlier one).
known_categories = {
    'Cisco' : 'External Device',
    'SystemVm': 'System VM',
    'VirtualMachine': 'Virtual Machine',
    'VM': 'Virtual Machine',
    'Domain': 'Domain',
    'Template': 'Template',
    'Iso': 'ISO',
    'Volume': 'Volume',
    'Vlan': 'VLAN',
    'IpAddress': 'Address',
    'PortForwarding': 'Firewall',
    'Firewall': 'Firewall',
    'StaticNat': 'NAT',
    'IpForwarding': 'NAT',
    'Host': 'Host',
    'OutOfBand': 'Out-of-band Management',
    'Cluster': 'Cluster',
    'Account': 'Account',
    'Role': 'Role',
    'Snapshot': 'Snapshot',
    'User': 'User',
    'Os': 'Guest OS',
    'ServiceOffering': 'Service Offering',
    'DiskOffering': 'Disk Offering',
    'LoadBalancer': 'Load Balancer',
    'SslCert': 'Load Balancer',
    'Router': 'Router',
    'Configuration': 'Configuration',
    'Capabilities': 'Configuration',
    'Pod': 'Pod',
    'PublicIpRange': 'Network',
    'Zone': 'Zone',
    'Vmware' : 'Zone',
    'NetworkOffering': 'Network Offering',
    'NetworkACL': 'Network ACL',
    'Network': 'Network',
    'CiscoNexus': 'Network',
    'OpenDaylight': 'Network',
    'createServiceInstance': 'Network',
    'addGloboDnsHost': 'Network',
    'Vpn': 'VPN',
    'Limit': 'Limit',
    'ResourceCount': 'Limit',
    'CloudIdentifier': 'Cloud Identifier',
    'InstanceGroup': 'VM Group',
    'StorageMaintenance': 'Storage Pool',
    'StoragePool': 'Storage Pool',
    'StorageProvider': 'Storage Pool',
    'SecurityGroup': 'Security Group',
    'SSH': 'SSH',
    'register': 'Registration',
    'AsyncJob': 'Async job',
    'Certificate': 'Certificate',
    'Hypervisor': 'Hypervisor',
    'Alert': 'Alert',
    'Event': 'Event',
    'login': 'Authentication',
    'logout': 'Authentication',
    'saml': 'Authentication',
    'getSPMetadata': 'Authentication',
    'listIdps': 'Authentication',
    'authorizeSamlSso': 'Authentication',
    'listSamlAuthorization': 'Authentication',
    'quota': 'Quota',
    'emailTemplate': 'Quota',
    'Capacity': 'System Capacity',
    'NetworkDevice': 'Network Device',
    'ExternalLoadBalancer': 'Ext Load Balancer',
    'ExternalFirewall': 'Ext Firewall',
    'Usage': 'Usage',
    'TrafficMonitor': 'Usage',
    'TrafficType': 'Usage',
    'Product': 'Product',
    'LB': 'Load Balancer',
    'ldap': 'LDAP',
    'Ldap': 'LDAP',
    'Swift': 'Swift',
    'S3' : 'S3',
    'SecondaryStorage': 'Host',
    'Project': 'Project',
    'Lun': 'Storage',
    'Pool': 'Pool',
    'VPC': 'VPC',
    'PrivateGateway': 'VPC',
    'migrateVpc': 'VPC',
    'Simulator': 'simulator',
    'StaticRoute': 'VPC',
    'Tags': 'Resource tags',
    'NiciraNvpDevice': 'Nicira NVP',
    'BrocadeVcsDevice': 'Brocade VCS',
    'BigSwitchBcfDevice': 'BigSwitch BCF',
    'AutoScale': 'AutoScale',
    'Counter': 'AutoScale',
    'Condition': 'AutoScale',
    'Api': 'API Discovery',
    'Region': 'Region',
    'Detail': 'Resource metadata',
    'addIpToNic': 'Nic',
    'removeIpFromNic': 'Nic',
    'updateVmNicIp': 'Nic',
    'listNics':'Nic',
    'AffinityGroup': 'Affinity Group',
    'addImageStore': 'Image Store',
    'listImageStore': 'Image Store',
    'deleteImageStore': 'Image Store',
    'createSecondaryStagingStore': 'Image Store',
    'deleteSecondaryStagingStore': 'Image Store',
    'listSecondaryStagingStores': 'Image Store',
    'updateImageStore': 'Image Store',
    'InternalLoadBalancer': 'Internal LB',
    'DeploymentPlanners': 'Configuration',
    'ObjectStore': 'Image Store',
    'PortableIp': 'Portable IP',
    'dedicateHost': 'Dedicate Resources',
    'releaseDedicatedHost': 'Dedicate Resources',
    'Baremetal' : 'Baremetal',
    'UCS' : 'UCS',
    'Ucs' : 'UCS',
    'CacheStores' : 'Cache Stores',
    'CacheStore' : 'Cache Store',
    'OvsElement' : 'Ovs Element',
    'StratosphereSsp' : ' Stratosphere SSP',
    'Metrics' : 'Metrics',
    'Infrastructure' : 'Metrics',
    'listNetscalerControlCenter' : 'Load Balancer',
    'listRegisteredServicePackages': 'Load Balancer',
    'listNsVpx' : 'Load Balancer',
    'destroyNsVPx': 'Load Balancer',
    'deployNetscalerVpx' : 'Load Balancer',
    'deleteNetscalerControlCenter' : 'Load Balancer',
    'stopNetScalerVpx' : 'Load Balancer',
    'deleteServicePackageOffering' : 'Load Balancer',
    'destroyNsVpx' : 'Load Balancer',
    'startNsVpx' : 'Load Balancer',
    'listAnnotations' : 'Annotations',
    'addAnnotation' : 'Annotations',
    'removeAnnotation' : 'Annotations',
    'CA': 'Certificate',
    'listElastistorInterface': 'Misc',
    'cloudian': 'Cloudian',
    'Sioc' : 'Sioc',
    'Diagnostics': 'Diagnostics',
    'Management': 'Management',
    'Backup' : 'Backup and Recovery',
    'Restore' : 'Backup and Recovery',
    'UnmanagedInstance': 'Virtual Machine',
    'KubernetesSupportedVersion': 'Kubernetes Service',
    'KubernetesCluster': 'Kubernetes Service',
    'Rolling': 'Rolling Maintenance',
    'importVsphereStoragePolicies' : 'vSphere storage policies',
    'listVsphereStoragePolicies' : 'vSphere storage policies'
    }

# Accumulates {category: [command dicts]} while the argv loop below runs.
categories = {}
def choose_category(fn):
    """Map an API command XML filename to its documentation category.

    Returns the value for the first known_categories key that occurs as a
    substring of *fn*. Raises Exception when nothing matches, so that new
    commands force an explicit entry in the mapping table.
    """
    for k, v in known_categories.items():
        if k in fn:
            return v
    # FIX: removed the unreachable `sys.exit(1)` that followed this raise.
    raise Exception('Need to add a category for %s to %s:known_categories' %
                    (fn, __file__))
# Parse every command XML file named on the command line and bucket the
# commands into `categories` for the generators below.
for f in sys.argv:
    dirname, fn = os.path.split(f)
    # Skip non-XML inputs and generated summary/alert files.
    if not fn.endswith('.xml'):
        continue
    if fn.endswith('Summary.xml'):
        continue
    if fn.endswith('SummarySorted.xml'):
        continue
    if fn == 'alert_types.xml':
        continue
    # Normalize a leading "./" so dirname matches the lookup tables.
    if dirname.startswith('./'):
        dirname = dirname[2:]
    try:
        with open(f) as data:
            dom = minidom.parse(data)
            name = dom.getElementsByTagName('name')[0].firstChild.data
            isAsync = dom.getElementsByTagName('isAsync')[0].firstChild.data
            category = choose_category(fn)
            if category not in categories:
                categories[category] = []
            categories[category].append({
                'name': name,
                'dirname': dirname_to_dirname[dirname],
                'async': isAsync == 'true',
                'user': dirname_to_user[dirname],
            })
    except ExpatError as e:
        # Malformed XML: skip silently (this loop is best-effort over argv,
        # which also contains the script name itself).
        pass
    except IndexError as e:
        # Missing <name>/<isAsync> element; report the file and continue.
        print(fn)
def xml_for(command):
    """Render one <xsl:if> table-of-contents entry for *command*.

    Async commands get an " (A)" marker appended to their link text.
    """
    name = command['name']
    # FIX: the local was called `async`, which is a reserved keyword from
    # Python 3.7 on (SyntaxError). Rename it and feed the template an
    # explicit mapping instead of locals().
    async_marker = command['async'] and ' (A)' or ''
    dirname = command['dirname']
    return '''<xsl:if test="name=\'%(name)s\'">
<li><a href="%(dirname)s/%(name)s.html"><xsl:value-of select="name"/>%(async)s</a></li>
</xsl:if>
''' % {'name': name, 'dirname': dirname, 'async': async_marker}
def write_xml(out, user):
    """Write the XSL table-of-contents include for *user* to file *out*.

    Categories are emitted biggest-first and grouped four per
    <div class="apismallsections"> row.
    """
    with open(out, 'w') as f:
        cat_strings = []
        for category in categories.keys():
            strings = []
            for command in categories[category]:
                if command['user'] == user:
                    strings.append(xml_for(command))
            if strings:
                all_strings = ''.join(strings)
                cat_strings.append((len(strings), category, all_strings))
        # Sort descending by entry count (tuple order breaks ties by name).
        cat_strings.sort(reverse=True)
        # i counts categories inside the current row of four.
        i = 0
        for _1, category, all_strings in cat_strings:
            if i == 0:
                # Open a new row.
                f.write('<div class="apismallsections">\n')
            f.write('''<div class="apismallbullet_box">
<h5>%(category)s</h5>
<ul>
<xsl:for-each select="commands/command">
%(all_strings)s
</xsl:for-each>
</ul>
</div>
''' % locals())
            if i == 3:
                # Fourth box of the row: close it and start counting anew.
                f.write('</div>\n')
                i = 0
            else:
                i += 1
        if i != 0:
            # Close a partially filled final row.
            f.write('</div>\n')
def java_for(command, user):
    """Render one Java statement adding *command*'s name to *user*'s set."""
    return '%s.add("%s");\n' % (user_to_cns[user], command['name'])
def java_for_user(user):
    """Render the Java populate method registering every command of *user*."""
    statements = [java_for(command, user)
                  for commands in categories.values()
                  for command in commands
                  if command['user'] == user]
    func = user_to_func[user]
    all_strings = ''.join(statements)
    return '''
public void %(func)s() {
%(all_strings)s
}
''' % {'func': func, 'all_strings': all_strings}
def write_java(out):
    """Write the generated XmlToHtmlConverterData.java class to file *out*.

    The class holds the set of all known command names, filled in by the
    populate method produced by java_for_user().
    """
    with open(out, 'w') as f:
        f.write('''/* Generated using gen_toc.py. Do not edit. */
import java.util.HashSet;
import java.util.Set;
public class XmlToHtmlConverterData {
    Set<String> allCommandNames = new HashSet<String>();
''')
        f.write(java_for_user(ROOT_ADMIN) + "\n")
        f.write('''
}
''')
# Emit both outputs for the root-admin view.
write_xml('generatetoc_include.xsl', ROOT_ADMIN)
write_java('XmlToHtmlConverterData.java')
|
GabrielBrascher/cloudstack
|
tools/apidoc/gen_toc.py
|
Python
|
apache-2.0
| 9,699
|
from odoo import api, models
class AccountMove(models.Model):
    """Extend account.move so the shipping-address tax flag follows the partner."""

    _inherit = "account.move"

    @api.onchange("partner_id")
    def _onchange_partner_id(self):
        """Re-run the shipping-address onchange whenever the partner changes."""
        result = super(AccountMove, self)._onchange_partner_id()
        self._onchange_partner_shipping_id()
        return result

    @api.onchange("partner_shipping_id")
    def _onchange_partner_shipping_id(self):
        """Enable tax_on_shipping_address exactly when a delivery address is set."""
        result = super(AccountMove, self)._onchange_partner_shipping_id()
        self.tax_on_shipping_address = bool(self.partner_shipping_id)
        return result
|
OCA/account-fiscal-rule
|
account_avatax_sale/models/account_move.py
|
Python
|
agpl-3.0
| 536
|
# -*- coding: utf-8 -*-
import hr_employee
import hr_operational_department
import res_users
|
xcgd/hr_streamline
|
model/__init__.py
|
Python
|
agpl-3.0
| 93
|
"""
URLs for the CCX Feature.
"""
from django.conf.urls import url
import ccx.views
# Coach dashboard and CCX lifecycle endpoints.
urlpatterns = [
    url(r'^ccx_coach$', ccx.views.dashboard, name='ccx_coach_dashboard'),
    url(r'^create_ccx$', ccx.views.create_ccx, name='create_ccx'),
    url(r'^save_ccx$', ccx.views.save_ccx, name='save_ccx'),
    url(r'^ccx_invite$', ccx.views.ccx_invite, name='ccx_invite'),
    url(r'^ccx_schedule$', ccx.views.ccx_schedule, name='ccx_schedule'),
    url(r'^ccx_manage_student$', ccx.views.ccx_student_management, name='ccx_manage_student'),

    # Grade book (both routes share the view; the second adds a paging offset).
    url(r'^ccx_gradebook$', ccx.views.ccx_gradebook, name='ccx_gradebook'),
    url(r'^ccx_gradebook/(?P<offset>[0-9]+)$', ccx.views.ccx_gradebook, name='ccx_gradebook'),
    url(r'^ccx_grades.csv$', ccx.views.ccx_grades_csv, name='ccx_grades_csv'),

    # Grading policy management for the coach.
    url(r'^ccx_set_grading_policy$', ccx.views.set_grading_policy, name='ccx_set_grading_policy'),
]
|
lduarte1991/edx-platform
|
lms/djangoapps/ccx/urls.py
|
Python
|
agpl-3.0
| 909
|
"""Visualize the outcome of computer experiment.
TODOs
-----
Explore Parameterspace3D
see: http://matplotlib.org/examples/mplot3d/contour3d_demo3.html
fig = plt.figure()
ax = fig.gca(projection="3d")
ax.plot_surface(X, Y, values, rstride=1, cstride=1, alpha=0.3,
cmap=cm.PiYG)
cset = ax.contour(X, Y, values, zdir='y', offset=0.9, cmap=cm.Blues)
cset = ax.contour(X, Y, values, zdir='z', offset=0.0, cmap=cm.Blues)
ax.set_xlabel("tau")
ax.set_ylabel("sigma")
"""
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
# mpl.use('Agg')
# mpl.style.use("ggplot")
def explore_Parameterspace(TwoDFrame, title="",
                           cmap='viridis', norm=None, vmin=None, vmax=None):
    """
    Explore variables in a 2-dim Parameterspace.

    Plots *TwoDFrame* as a pcolormesh whose cells are centered on the
    frame's column/index coordinates.

    Parameters
    ----------
    TwoDFrame : 2D pandas.DataFrame with index and column names
        The data to plot
    title : string
        Title of the plot (Default: "")
    cmap : string
        Colormap to use (Default: "viridis")
    norm : matplotlib.colors.Normalize, optional
        Normalization for the color mapping (Default: None)
    vmin : float
        Minimum value of the colormap (Default: None)
    vmax : float
        Maximum value of the colormap (Default: None)

    Returns
    -------
    matplotlib.figure.Figure
        The newly created figure.

    Examples
    --------
    >>> import init_data
    >>> data = init_data.get_Data("phi")
    >>> explore_Parameterspace(data.unstack(level="deltaT")["<safe>",0.5].
    >>>                        unstack(level="phi"))
    """
    xparams = TwoDFrame.columns.values
    yparams = TwoDFrame.index.values
    # Multi-indexed axes would break the meshgrid construction below.
    assert type(yparams[0]) != tuple, "Be aware of multi indicies"
    assert type(xparams[0]) != tuple, "Be aware of multi indicies"
    values = TwoDFrame.values
    X, Y = _create_meshgrid(xparams, yparams)

    fig = plt.figure()
    c = plt.pcolormesh(X, Y, values, cmap=cmap, norm=norm, vmin=vmin,
                       vmax=vmax)
    plt.colorbar(c, orientation="vertical")
    plt.xlim(np.min(X), np.max(X))
    plt.ylim(np.min(Y), np.max(Y))
    plt.xlabel(TwoDFrame.columns.name)
    plt.ylabel(TwoDFrame.index.name)
    plt.title(title)
    plt.tight_layout()
    return fig
def _create_meshgrid(x, y):
"""
Proper spaced meshgrid.
Create a meshgrid out of the array-like types x and y. We assume that x and
y are equally spaced. The funciton positions the values of x and y into the
middle of the return value.
Parameters
----------
x : 1D array like
The x values
y : 1D array like
The y values
Returns
-------
meshgrid : 2D np.array
widened meshgrid of x and y
Example
-------
>>> _create_meshgrid([1,2], [10, 12])
>>> [array([[ 0.5, 1.5, 2.5],
[ 0.5, 1.5, 2.5],
[ 0.5, 1.5, 2.5]]),
array([[ 9., 9., 9.],
[ 11., 11., 11.],
[ 13., 13., 13.]])]
"""
x = np.array(x)
y = np.array(y)
def broaden_grid(x):
"""Extend the x,y grid for proper spacing."""
dx = x[1:] - x[:-1]
xx = np.concatenate(([x[0]], x))
dxx = np.concatenate(([-dx[0]], dx, [dx[-1]]))
return xx + dxx/2.
X = broaden_grid(x)
Y = broaden_grid(y)
return np.meshgrid(X, Y)
|
wbarfuss/pymofa
|
pymofa/experiment_visualization.py
|
Python
|
mit
| 3,206
|
# -*- coding: utf-8 -*-
# © 2016-TODAY LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from odoo.tests.common import TransactionCase
import mock
model_file = 'openerp.addons.base_fax.models.fax_adapter'
model = '%s.FaxAdapter' % model_file
class TestFaxAdapter(TransactionCase):
    """Unit tests for fax.adapter, mocking out the concrete adapter record."""

    def setUp(self, *args, **kwargs):
        super(TestFaxAdapter, self).setUp(*args, **kwargs)
        self.model_obj = self.env['fax.adapter']
        # Any existing res.partner model/record serves as the adapter target.
        self.test_model = self.env['ir.model'].search(
            [('model', '=', 'res.partner')], limit=1
        )
        self.test_adapter = self.env['res.partner'].search([], limit=1)
        self.vals = {
            'name': 'Test Adapter',
            'adapter_model_id': self.test_model.id,
            'adapter_pk': self.test_adapter.id,
        }

    def _new_record(self):
        # Helper: create a fax.adapter from the standard fixture values.
        return self.model_obj.create(self.vals)

    def test_compute_adapter_name(self):
        # adapter_name must mirror the name of the record it points at.
        rec_id = self._new_record()
        self.assertEqual(
            self.test_adapter.name, rec_id.adapter_name,
            'Adapter name computed incorrectly. Expect "%s", Got "%s"' % (
                self.test_adapter.name, rec_id.adapter_name
            )
        )

    def testget_adapter(self):
        # get_adapter must resolve adapter_model_id/adapter_pk to the record.
        # NOTE(review): name is missing an underscore (test_get_adapter); it
        # is still discovered by unittest because it starts with "test".
        rec_id = self._new_record()
        res = rec_id.get_adapter()
        self.assertEqual(
            self.test_adapter, res,
            'Did not return correct adapter. Expect %s, Got %s' % (
                self.test_adapter, res,
            )
        )

    @mock.patch('%s.get_adapter' % model)
    def test_action_fetch_payloads(self, mk):
        # action_fetch_payloads must delegate to the concrete adapter.
        rec_id = self._new_record()
        expect = ['Payloads']
        rec_id.action_fetch_payloads(expect)
        mk().action_fetch_payloads.assert_called_once_with(expect)

    @mock.patch('%s.get_adapter' % model)
    def test_action_send_passthru(self, mk):
        # action_send must forward its arguments to the adapter unchanged.
        rec_id = self._new_record()
        expect = ['dialable', ['payload_ids'], 'send_name']
        mk().action_send.side_effect = Exception  # Stops the write
        try:
            rec_id.action_send(*expect)
        except Exception:
            pass
        mk().action_send.assert_called_once_with(*expect)

    @mock.patch('%s.get_adapter' % model)
    @mock.patch('%s.write' % model)
    def test_action_send_write(self, write_mk, mk):
        # The adapter's action_send result is linked as a new transmission.
        rec_id = self._new_record()
        expect = 'Expect'
        mk().action_send.return_value = expect
        rec_id.action_send(0, 0, 0)
        write_mk.assert_called_once_with(
            {'transmission_ids': [(0, 0, expect)]}
        )
|
laslabs/odoo-telephony
|
base_fax/tests/test_fax_adapter.py
|
Python
|
agpl-3.0
| 2,569
|
from enum import Enum
from typing import Sequence, List
from PyQt5 import QtWidgets, QtCore
from .spacerselector_ui import Ui_Dialog
class SpacerSelectorDialog(QtWidgets.QDialog, Ui_Dialog):
    """Modal dialog for picking spacers / flight pipes from a list widget."""

    class TargetTypes(Enum):
        L1 = 1
        L2 = 2
        FlightPipes = 3

    spacers: List[float]
    title: str
    target: TargetTypes

    def __init__(self, parent, availablespacers: Sequence[float], currentspacers: Sequence[float], target: TargetTypes):
        super().__init__(parent, QtCore.Qt.Dialog)
        self.spacers = sorted(availablespacers)
        self.target = target
        self.setupUi(self)
        # Pre-select one list entry per currently mounted spacer. Duplicate
        # spacer values are handled by always taking the first matching item
        # that is not yet selected.
        for spacer in currentspacers:
            unselected = [entry
                          for entry in self.listWidget.findItems(f'{spacer:.0f}', QtCore.Qt.MatchExactly)
                          if not entry.isSelected()]
            unselected[0].setSelected(True)

    def setupUi(self, Dialog):
        super().setupUi(Dialog)
        prompts = {
            self.TargetTypes.L1: 'Select spacers between pinholes #1 and #2',
            self.TargetTypes.L2: 'Select spacers between pinholes #2 and #3',
            self.TargetTypes.FlightPipes: 'Select flight pipes',
        }
        if self.target in prompts:
            self.label.setText(prompts[self.target])
        self.listWidget.addItems([f'{value:.0f}' for value in self.spacers])

    def selectedSpacers(self) -> List[float]:
        """Return the spacer values corresponding to the selected rows."""
        picked = []
        for index in self.listWidget.selectedIndexes():
            picked.append(self.spacers[index.row()])
        return picked
|
awacha/cct
|
cct/qtgui2/setup/geometry/spacerselector.py
|
Python
|
bsd-3-clause
| 1,431
|
#!/usr/bin/env python
from pyqlight import QLight
from collections import OrderedDict
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
def main():
    """Parse command-line flags and drive an attached Q-Light warning tower."""
    parser = ArgumentParser(description="control a Q-Light warning tower. "
                                        "All lamps should have red, yellow and green "
                                        "lights available. Blue and white lights may or "
                                        "may not be available depending on exact model.",
                            epilog="Written by Chris Jowett, cryptk@gmail.com",
                            formatter_class=ArgumentDefaultsHelpFormatter)
    # One flag per lamp; "pass" presumably leaves that lamp unchanged
    # (handled inside pyqlight — TODO confirm).
    lights = parser.add_argument_group('Light Controls', 'Valid states are '
                                       '"off", "on", "blink", "pass"')
    light_choices = ['off','on','blink','pass']
    lights.add_argument("-r", "--red", help="Desired state of red lamp.",
                        type=str, nargs='?', default='pass', metavar='STATE',
                        choices=light_choices)
    lights.add_argument("-y", "--yellow", help="Desired state of yellow lamp.",
                        type=str, nargs='?', default='pass', metavar='STATE',
                        choices=light_choices)
    lights.add_argument("-g", "--green", help="Desired state of green lamp.",
                        type=str, nargs='?', default='pass', metavar='STATE',
                        choices=light_choices)
    lights.add_argument("-b", "--blue", help="Desired state of blue lamp.",
                        type=str, nargs='?', default='pass', metavar='STATE',
                        choices=light_choices)
    lights.add_argument("-w", "--white", help="Desired state of white lamp.",
                        type=str, nargs='?', default='pass', metavar='STATE',
                        choices=light_choices)
    # -a overrides the per-lamp flags below (no default: None when absent).
    lights.add_argument("-a", "--all-lights", help="State of all lamps.",
                        type=str, nargs='?', metavar='STATE',
                        choices=light_choices)
    tone = parser.add_argument_group('Tone Controls', 'valid tone options are '
                                     '"off", "tone_1", "tone_2", "tone_3", '
                                     '"tone_4", "tone_5", "pass"')
    tone.add_argument("-t", "--tone", help="Desired tone to play.",
                      type=str, nargs='?', metavar='TONE', default='pass',
                      choices=['off',
                               'tone_1',
                               'tone_2',
                               'tone_3',
                               'tone_4',
                               'tone_5',
                               'pass'])
    tone.add_argument("-d", "--duration",
                      help="Duration to play tone (in ms).",
                      type=int, nargs='?', default=0)
    args = parser.parse_args()
    ql = QLight()
    if args.all_lights is not None:
        ql.set_all_lights(args.all_lights)
    else:
        ql.lights = OrderedDict([
            ('red', args.red),
            ('yellow', args.yellow),
            ('green', args.green),
            ('blue', args.blue),
            ('white', args.white)
        ])
        ql.update_lamp()
    # NOTE(review): the default is 'pass' (truthy), so set_sound is called
    # even when -t was not given — presumably QLight treats 'pass' as a
    # no-op; a bare `-t` with no value yields None and skips this. Confirm.
    if args.tone:
        ql.set_sound(args.tone, args.duration)
|
cryptk/pyqlight
|
pyqlight/command_line.py
|
Python
|
mit
| 3,294
|
# Copyright (c) 2020, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import fields
import cybox.bindings.unix_network_route_entry_object as unix_network_route_entry_binding
from cybox.common import String, UnsignedInteger, UnsignedLong
from cybox.objects.network_route_entry_object import NetworkRouteEntry
class UnixNetworkRouteEntry(NetworkRouteEntry):
    """CybOX object type for a Unix routing-table entry.

    Extends NetworkRouteEntry with the Unix-specific columns defined by the
    UnixNetworkRouteEntryObject-2 schema.
    """
    _binding = unix_network_route_entry_binding
    _binding_class = unix_network_route_entry_binding.UnixNetworkRouteEntryObjectType
    _namespace = "http://cybox.mitre.org/objects#UnixNetworkRouteEntryObject-2"
    _XSI_NS = "UnixNetworkRouteEntryObj"
    _XSI_TYPE = "UnixNetworkRouteEntryObjectType"

    # Typed fields map one-to-one onto the schema's child elements.
    flags = fields.TypedField("Flags", String)
    mss = fields.TypedField("MSS", UnsignedInteger)
    ref = fields.TypedField("Ref", UnsignedLong)
    use = fields.TypedField("Use", UnsignedLong)
    window = fields.TypedField("Window", UnsignedInteger)
|
CybOXProject/python-cybox
|
cybox/objects/unix_network_route_entry_object.py
|
Python
|
bsd-3-clause
| 967
|
from Screens.Wizard import WizardSummary
from Screens.WizardLanguage import WizardLanguage
from Screens.Wizard import wizardManager
from Screens.Rc import Rc
from Screens.Screen import Screen
from Components.Label import Label
from Components.MenuList import MenuList
from Components.PluginComponent import plugins
from Plugins.Plugin import PluginDescriptor
from Tools.Directories import fileExists, resolveFilename, SCOPE_PLUGINS, SCOPE_SKIN_IMAGE
from Components.Pixmap import Pixmap, MovingPixmap, MultiPixmap
from os import popen, path, makedirs, listdir, access, stat, rename, remove, W_OK, R_OK
from enigma import eEnv
from boxbranding import getBoxType
from Components.config import config, getConfigListEntry, ConfigSubsection, ConfigText, ConfigLocations, ConfigBoolean
from Components.Harddisk import harddiskmanager
config.misc.firstrun = ConfigBoolean(default = True)
config.plugins.configurationbackup = ConfigSubsection()
config.plugins.configurationbackup.backuplocation = ConfigText(default = '/media/hdd/', visible_width = 50, fixed_size = False)
config.plugins.configurationbackup.backupdirs = ConfigLocations(default=[eEnv.resolve('${sysconfdir}/enigma2/'), '/etc/network/interfaces', '/etc/wpa_supplicant.conf', '/etc/wpa_supplicant.ath0.conf', '/etc/wpa_supplicant.wlan0.conf', '/etc/resolv.conf', '/etc/default_gw', '/etc/hostname'])
backupfile = "enigma2settingsbackup.tar.gz"
def checkConfigBackup():
    """Look for an existing settings backup on any mounted partition.

    Scans every mounted partition except the root filesystem for
    <mount>/backup_<boxtype>/enigma2settingsbackup.tar.gz or
    <mount>/backup/enigma2settingsbackup.tar.gz. On the first hit, the
    configured backup location is updated to that partition and its
    (description, mountpoint) tuple is returned; returns None otherwise.
    """
    parts = [(r.description, r.mountpoint) for r in harddiskmanager.getMountedPartitions(onlyhotplug = False)]
    box = getBoxType()
    # FIX: the original removed entries from `parts` while iterating it,
    # which skips the element that follows each removal. Filter instead.
    parts = [x for x in parts if x[1] != '/']
    for x in parts:
        # Normalize the trailing slash once instead of duplicating the four
        # nearly identical path checks of the original implementation.
        mountpoint = x[1] if x[1].endswith('/') else x[1] + '/'
        # Check the box-specific directory first, then the generic one.
        for backupdir in ('backup_' + box, 'backup'):
            fullbackupfile = mountpoint + backupdir + '/' + backupfile
            if fileExists(fullbackupfile):
                config.plugins.configurationbackup.backuplocation.setValue(str(x[1]))
                config.plugins.configurationbackup.backuplocation.save()
                config.plugins.configurationbackup.save()
                return x
    return None
def checkBackupFile():
    """Return True when a settings backup exists at the configured location."""
    backuplocation = config.plugins.configurationbackup.backuplocation.getValue()
    # Normalize the trailing slash instead of duplicating both branches,
    # and return the fileExists() result directly rather than the
    # redundant `if ...: return True else: return False`.
    if not backuplocation.endswith('/'):
        backuplocation += '/'
    return fileExists(backuplocation + 'backup/' + backupfile)
# Probe for a backup once at import time; the wizard registration at the
# bottom of this module passes this flag to wizardManager.
if checkConfigBackup() is None:
    backupAvailable = 0
else:
    backupAvailable = 1
class ImageWizard(WizardLanguage, Rc):
    """First-run wizard asking the user where configuration backups live.

    The wizard steps themselves are described in imagewizard.xml; this
    class supplies the skin plus the device-selection callbacks that the
    XML refers to by name.
    """
    skin = """
        <screen name="ImageWizard" position="0,0" size="720,576" title="Welcome..." flags="wfNoBorder" >
            <widget name="text" position="153,40" size="340,330" font="Regular;22" />
            <widget source="list" render="Listbox" position="43,340" size="490,180" scrollbarMode="showOnDemand" >
                <convert type="StringList" />
            </widget>
            <widget name="config" position="53,340" zPosition="1" size="440,180" transparent="1" scrollbarMode="showOnDemand" />
            <ePixmap pixmap="skin_default/buttons/button_red.png" position="40,225" zPosition="0" size="15,16" transparent="1" alphatest="on" />
            <widget name="languagetext" position="55,225" size="95,30" font="Regular;18" />
            <widget name="wizard" pixmap="skin_default/wizard.png" position="40,50" zPosition="10" size="110,174" alphatest="on" />
            <widget name="rc" pixmaps="skin_default/rc.png,skin_default/rcold.png" position="530,50" zPosition="10" size="154,500" alphatest="on" />
            <widget name="arrowdown" pixmap="skin_default/arrowdown.png" position="-100,-100" zPosition="11" size="37,70" alphatest="on" />
            <widget name="arrowdown2" pixmap="skin_default/arrowdown.png" position="-100,-100" zPosition="11" size="37,70" alphatest="on" />
            <widget name="arrowup" pixmap="skin_default/arrowup.png" position="-100,-100" zPosition="11" size="37,70" alphatest="on" />
            <widget name="arrowup2" pixmap="skin_default/arrowup.png" position="-100,-100" zPosition="11" size="37,70" alphatest="on" />
        </screen>"""

    def __init__(self, session):
        """Load the wizard steps XML and set up the shared wizard widgets."""
        self.xmlfile = resolveFilename(SCOPE_PLUGINS, "SystemPlugins/SoftwareManager/imagewizard.xml")
        WizardLanguage.__init__(self, session, showSteps = False, showStepSlider = False)
        Rc.__init__(self)
        self.session = session
        self["wizard"] = Pixmap()
        Screen.setTitle(self, _("Welcome…"))
        self.selectedDevice = None

    def markDone(self):
        # Hook called by the wizard framework on completion; nothing to do.
        pass

    def listDevices(self):
        """Return (description, mountpoint) pairs usable as backup targets.

        Excludes the root filesystem, autofs mounts, and partitions that
        are not both readable and writable. FIX: builds a filtered list
        instead of calling remove() on the list being iterated (also under
        the builtin-shadowing name `list`), which silently skipped the
        entry following each removed one.
        """
        candidates = [(r.description, r.mountpoint) for r in harddiskmanager.getMountedPartitions(onlyhotplug = False)]
        usable = []
        for device in candidates:
            mountpoint = device[1]
            if mountpoint == '/' or mountpoint.startswith('/autofs/'):
                continue
            if access(mountpoint, W_OK) and access(mountpoint, R_OK):
                usable.append(device)
        return usable

    def deviceSelectionMade(self, index):
        # Invoked by the wizard XML when the user confirms a selection.
        self.deviceSelect(index)

    def deviceSelectionMoved(self):
        # Invoked by the wizard XML when the highlighted entry changes.
        self.deviceSelect(self.selection)

    def deviceSelect(self, device):
        """Remember *device* and persist it as the backup location."""
        self.selectedDevice = device
        config.plugins.configurationbackup.backuplocation.value = self.selectedDevice
        config.plugins.configurationbackup.backuplocation.save()
        config.plugins.configurationbackup.save()
# Only offer the wizard on a factory-fresh box; backupAvailable was
# computed once at import time above.
if config.misc.firstrun.value:
    wizardManager.registerWizard(ImageWizard, backupAvailable, priority = 10)
|
postla/e2-gui
|
lib/python/Plugins/SystemPlugins/SoftwareManager/ImageWizard.py
|
Python
|
gpl-2.0
| 6,107
|
import unittest
from collections import namedtuple
from io import BytesIO
import codecs
import sha2
import hmac
class TestSHA2(unittest.TestCase):
    """HMAC test vectors for the local SHA-2 implementations.

    Each vector pairs one of the project's digest classes with a NIST
    example message/key and the expected MAC, covering keys shorter than,
    equal to and longer than the digest's block size.
    """
    # test vectors from https://csrc.nist.gov/projects/cryptographic-standards-and-guidelines/example-values
    TestVector = namedtuple('TestVector', ['digestcls', 'text', 'key', 'mac'])
    TEST_VECTORS = (
        # SHA-224 based HMACs
        TestVector(
            digestcls=sha2.SHA224,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F'
                '10111213' '14151617' '18191A1B' '1C1D1E1F'
                '20212223' '24252627' '28292A2B' '2C2D2E2F'
                '30313233' '34353637' '38393A3B' '3C3D3E3F',
                'hex',
            ),
            mac=codecs.decode(
                'C7405E3A' 'E058E8CD' '30B08B41' '40248581' 'ED174CB3'
                '4E1224BC' 'C1EFC81B',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA224,
            text=b'Sample message for keylen<blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B',
                'hex',
            ),
            mac=codecs.decode(
                'E3D249A8' 'CFB67EF8' 'B7A169E9' 'A0A59971' '4A2CECBA'
                '65999A51' 'BEB8FBBE',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA224,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F'
                '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263',
                'hex',
            ),
            mac=codecs.decode(
                '91C52509' 'E5AF8531' '601AE623' '0099D90B' 'EF88AAEF'
                'B961F408' '0ABC014D',
                'hex',
            ),
        ),
        # SHA-256 based HMACs
        TestVector(
            digestcls=sha2.SHA256,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F10111213' '14151617'
                '18191A1B' '1C1D1E1F' '20212223' '2425262728292A2B' '2C2D2E2F'
                '30313233' '34353637' '38393A3B' '3C3D3E3F',
                'hex',
            ),
            mac=codecs.decode(
                '8BB9A1DB' '9806F20DF7F77B82' '138C7914' 'D174D59E' '13DC4D01'
                '69C9057B' '133E1D62',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA256,
            text=b'Sample message for keylen<blocklen',
            key=codecs.decode(
                '00010203' '0405060708090A0B' '0C0D0E0F' '10111213' '14151617'
                '18191A1B' '1C1D1E1F',
                'hex',
            ),
            mac=codecs.decode(
                'A28CF431' '30EE696A98F14A37' '678B56BC' 'FCBDD9E5' 'CF69717F'
                'ECF5480F' '0EBDF790',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA256,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F'
                '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263',
                'hex',
            ),
            mac=codecs.decode(
                'BDCCB6C7' '2DDEADB5' '00AE7683' '86CB38CC' '41C63DBB'
                '0878DDB9' 'C7A38A43' '1B78378D',
                'hex',
            ),
        ),
        # SHA-384 based HMACs
        TestVector(
            digestcls=sha2.SHA384,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F'
                '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263'
                '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677'
                '78797A7B' '7C7D7E7F',
                'hex',
            ),
            mac=codecs.decode(
                '63C5DAA5' 'E651847C' 'A897C958' '14AB830B' 'EDEDC7D2'
                '5E83EEF9' '195CD458' '57A37F44' '8947858F' '5AF50CC2'
                'B1B730DD' 'F29671A9',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA384,
            text=b'Sample message for keylen<blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '1415161718191A1B' '1C1D1E1F' '20212223' '24252627' '28292A2B'
                '2C2D2E2F',
                'hex',
            ),
            mac=codecs.decode(
                '6EB242BD' 'BB582CA1' '7BEBFA48' '1B1E2321' '1464D2B7'
                'F8C20B9FF2201637' 'B93646AF' '5AE9AC31' '6E98DB45' 'D9CAE773'
                '675EEED0',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA384,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F'
                '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263'
                '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677'
                '78797A7B' '7C7D7E7F' '80818283' '84858687' '88898A8B'
                '8C8D8E8F' '90919293' '94959697' '98999A9B' '9C9D9E9F'
                'A0A1A2A3' 'A4A5A6A7' 'A8A9AAAB' 'ACADAEAF' 'B0B1B2B3'
                'B4B5B6B7' 'B8B9BABB' 'BCBDBEBF' 'C0C1C2C3' 'C4C5C6C7',
                'hex',
            ),
            mac=codecs.decode(
                '5B664436' 'DF69B0CA' '22551231' 'A3F0A3D5' 'B4F97991'
                '713CFA84' 'BFF4D079' '2EFF96C2' '7DCCBBB6' 'F79B65D5'
                '48B40E85' '64CEF594',
                'hex',
            ),
        ),
        # SHA-512 based HMACs
        TestVector(
            digestcls=sha2.SHA512,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F'
                '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263'
                '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677'
                '78797A7B' '7C7D7E7F',
                'hex',
            ),
            mac=codecs.decode(
                'FC25E240' '658CA785' 'B7A811A8' 'D3F7B4CA' '48CFA26A'
                '8A366BF2' 'CD1F836B' '05FCB024' 'BD368530' '81811D6C'
                'EA4216EB' 'AD79DA1C' 'FCB95EA4' '586B8A0C' 'E356596A'
                '55FB1347',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA512,
            text=b'Sample message for keylen<blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F',
                'hex',
            ),
            mac=codecs.decode(
                'FD44C18B' 'DA0BB0A6' 'CE0E82B0' '31BF2818' 'F6539BD5'
                '6EC00BDC' '10A8A2D7' '30B3634D' 'E2545D63' '9B0F2CF7'
                '10D0692C' '72A1896F' '1F211C2B' '922D1A96' 'C392E07E'
                '7EA9FEDC',
                'hex',
            ),
        ),
        TestVector(
            digestcls=sha2.SHA512,
            text=b'Sample message for keylen=blocklen',
            key=codecs.decode(
                '00010203' '04050607' '08090A0B' '0C0D0E0F' '10111213'
                '14151617' '18191A1B' '1C1D1E1F' '20212223' '24252627'
                '28292A2B' '2C2D2E2F' '30313233' '34353637' '38393A3B'
                '3C3D3E3F' '40414243' '44454647' '48494A4B' '4C4D4E4F'
                '50515253' '54555657' '58595A5B' '5C5D5E5F' '60616263'
                '64656667' '68696A6B' '6C6D6E6F' '70717273' '74757677'
                '78797A7B' '7C7D7E7F' '80818283' '84858687' '88898A8B'
                '8C8D8E8F' '90919293' '94959697' '98999A9B' '9C9D9E9F'
                'A0A1A2A3' 'A4A5A6A7' 'A8A9AAAB' 'ACADAEAF' 'B0B1B2B3'
                'B4B5B6B7' 'B8B9BABB' 'BCBDBEBF' 'C0C1C2C3' 'C4C5C6C7',
                'hex',
            ),
            mac=codecs.decode(
                'D93EC8D2' 'DE1AD2A9' '957CB9B8' '3F14E76A' 'D6B5E0CC'
                'E285079A' '127D3B14' 'BCCB7AA7' '286D4AC0' 'D4CE6421'
                '5F2BC9E6' '870B33D9' '7438BE4A' 'AA20CDA5' 'C5A912B4'
                '8B8E27F3',
                'hex',
            ),
        ),
    )

    def test_hmac(self):
        """Check hmac.digest() against every vector, reusing one buffer."""
        iio = BytesIO()
        for tv in self.__class__.TEST_VECTORS:
            # Reset and refill the shared buffer with this vector's message;
            # hmac.digest() apparently reads the message from a file object.
            iio.truncate(0)
            iio.seek(0)
            iio.write(tv.text)
            iio.seek(0)
            digest = hmac.digest(iio, tv.key, digestcls=tv.digestcls)
            self.assertEqual(tv.mac, digest,
                             "{}{}".format(tv.digestcls, tv.text))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
olbat/o1b4t
|
coding/crypto/test_hmac.py
|
Python
|
gpl-3.0
| 10,162
|
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core.client_options import ClientOptions
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.oauth2 import service_account # type: ignore
# Older google-api-core releases lack gapic_v1.method._MethodDefault;
# fall back to a plain ``object`` sentinel in the Union when missing.
try:
    OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError:  # pragma: NO COVER
    OptionalRetry = Union[retries.Retry, object]  # type: ignore
from google.api_core import operation # type: ignore
from google.api_core import operation_async # type: ignore
from google.cloud.aiplatform_v1.services.migration_service import pagers
from google.cloud.aiplatform_v1.types import migratable_resource
from google.cloud.aiplatform_v1.types import migration_service
from .transports.base import MigrationServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import MigrationServiceGrpcAsyncIOTransport
from .client import MigrationServiceClient
class MigrationServiceAsyncClient:
    """A service that migrates resources from automl.googleapis.com,
    datalabeling.googleapis.com and ml.googleapis.com to Vertex AI.
    """

    # All calls are delegated to the synchronous client's transport.
    _client: MigrationServiceClient

    DEFAULT_ENDPOINT = MigrationServiceClient.DEFAULT_ENDPOINT
    DEFAULT_MTLS_ENDPOINT = MigrationServiceClient.DEFAULT_MTLS_ENDPOINT

    # NOTE(review): the dataset_path/model_path pairs below are assigned
    # repeatedly by the code generator; the duplicates are harmless rebinds.
    annotated_dataset_path = staticmethod(MigrationServiceClient.annotated_dataset_path)
    parse_annotated_dataset_path = staticmethod(
        MigrationServiceClient.parse_annotated_dataset_path
    )
    dataset_path = staticmethod(MigrationServiceClient.dataset_path)
    parse_dataset_path = staticmethod(MigrationServiceClient.parse_dataset_path)
    dataset_path = staticmethod(MigrationServiceClient.dataset_path)
    parse_dataset_path = staticmethod(MigrationServiceClient.parse_dataset_path)
    dataset_path = staticmethod(MigrationServiceClient.dataset_path)
    parse_dataset_path = staticmethod(MigrationServiceClient.parse_dataset_path)
    model_path = staticmethod(MigrationServiceClient.model_path)
    parse_model_path = staticmethod(MigrationServiceClient.parse_model_path)
    model_path = staticmethod(MigrationServiceClient.model_path)
    parse_model_path = staticmethod(MigrationServiceClient.parse_model_path)
    version_path = staticmethod(MigrationServiceClient.version_path)
    parse_version_path = staticmethod(MigrationServiceClient.parse_version_path)
    common_billing_account_path = staticmethod(
        MigrationServiceClient.common_billing_account_path
    )
    parse_common_billing_account_path = staticmethod(
        MigrationServiceClient.parse_common_billing_account_path
    )
    common_folder_path = staticmethod(MigrationServiceClient.common_folder_path)
    parse_common_folder_path = staticmethod(
        MigrationServiceClient.parse_common_folder_path
    )
    common_organization_path = staticmethod(
        MigrationServiceClient.common_organization_path
    )
    parse_common_organization_path = staticmethod(
        MigrationServiceClient.parse_common_organization_path
    )
    common_project_path = staticmethod(MigrationServiceClient.common_project_path)
    parse_common_project_path = staticmethod(
        MigrationServiceClient.parse_common_project_path
    )
    common_location_path = staticmethod(MigrationServiceClient.common_location_path)
    parse_common_location_path = staticmethod(
        MigrationServiceClient.parse_common_location_path
    )

    @classmethod
    def from_service_account_info(cls, info: dict, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            info.

        Args:
            info (dict): The service account private key info.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            MigrationServiceAsyncClient: The constructed client.
        """
        return MigrationServiceClient.from_service_account_info.__func__(MigrationServiceAsyncClient, info, *args, **kwargs)  # type: ignore

    @classmethod
    def from_service_account_file(cls, filename: str, *args, **kwargs):
        """Creates an instance of this client using the provided credentials
            file.

        Args:
            filename (str): The path to the service account private key json
                file.
            args: Additional arguments to pass to the constructor.
            kwargs: Additional arguments to pass to the constructor.

        Returns:
            MigrationServiceAsyncClient: The constructed client.
        """
        return MigrationServiceClient.from_service_account_file.__func__(MigrationServiceAsyncClient, filename, *args, **kwargs)  # type: ignore

    from_service_account_json = from_service_account_file

    @classmethod
    def get_mtls_endpoint_and_cert_source(
        cls, client_options: Optional[ClientOptions] = None
    ):
        """Return the API endpoint and client cert source for mutual TLS.

        The client cert source is determined in the following order:
        (1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
        client cert source is None.
        (2) if `client_options.client_cert_source` is provided, use the provided one; if the
        default client cert source exists, use the default one; otherwise the client cert
        source is None.

        The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise if client cert source exists, use the default mTLS endpoint, otherwise
        use the default API endpoint.

        More details can be found at https://google.aip.dev/auth/4114.

        Args:
            client_options (google.api_core.client_options.ClientOptions): Custom options for the
                client. Only the `api_endpoint` and `client_cert_source` properties may be used
                in this method.

        Returns:
            Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
                client cert source to use.

        Raises:
            google.auth.exceptions.MutualTLSChannelError: If any errors happen.
        """
        return MigrationServiceClient.get_mtls_endpoint_and_cert_source(client_options)  # type: ignore

    @property
    def transport(self) -> MigrationServiceTransport:
        """Returns the transport used by the client instance.

        Returns:
            MigrationServiceTransport: The transport used by the client instance.
        """
        return self._client.transport

    get_transport_class = functools.partial(
        type(MigrationServiceClient).get_transport_class, type(MigrationServiceClient)
    )

    def __init__(
        self,
        *,
        credentials: ga_credentials.Credentials = None,
        transport: Union[str, MigrationServiceTransport] = "grpc_asyncio",
        client_options: ClientOptions = None,
        client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
    ) -> None:
        """Instantiates the migration service client.

        Args:
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            transport (Union[str, ~.MigrationServiceTransport]): The
                transport to use. If set to None, a transport is chosen
                automatically.
            client_options (ClientOptions): Custom options for the client. It
                won't take effect if a ``transport`` instance is provided.
                (1) The ``api_endpoint`` property can be used to override the
                default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
                environment variable can also be used to override the endpoint:
                "always" (always use the default mTLS endpoint), "never" (always
                use the default regular endpoint) and "auto" (auto switch to the
                default mTLS endpoint if client certificate is present, this is
                the default value). However, the ``api_endpoint`` property takes
                precedence if provided.
                (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
                is "true", then the ``client_cert_source`` property can be used
                to provide client certificate for mutual TLS transport. If
                not provided, the default SSL client certificate will be used if
                present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
                set, no client certificate will be used.

        Raises:
            google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
                creation failed for any reason.
        """
        self._client = MigrationServiceClient(
            credentials=credentials,
            transport=transport,
            client_options=client_options,
            client_info=client_info,
        )

    async def search_migratable_resources(
        self,
        request: Union[migration_service.SearchMigratableResourcesRequest, dict] = None,
        *,
        parent: str = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> pagers.SearchMigratableResourcesAsyncPager:
        r"""Searches all of the resources in
        automl.googleapis.com, datalabeling.googleapis.com and
        ml.googleapis.com that can be migrated to Vertex AI's
        given location.

        .. code-block:: python

            from google.cloud import aiplatform_v1

            def sample_search_migratable_resources():
                # Create a client
                client = aiplatform_v1.MigrationServiceClient()

                # Initialize request argument(s)
                request = aiplatform_v1.SearchMigratableResourcesRequest(
                    parent="parent_value",
                )

                # Make the request
                page_result = client.search_migratable_resources(request=request)

                # Handle the response
                for response in page_result:
                    print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1.types.SearchMigratableResourcesRequest, dict]):
                The request object. Request message for
                [MigrationService.SearchMigratableResources][google.cloud.aiplatform.v1.MigrationService.SearchMigratableResources].
            parent (:class:`str`):
                Required. The location that the migratable resources
                should be searched from. It's the Vertex AI location
                that the resources can be migrated to, not the
                resources' original location. Format:
                ``projects/{project}/locations/{location}``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.cloud.aiplatform_v1.services.migration_service.pagers.SearchMigratableResourcesAsyncPager:
                Response message for
                [MigrationService.SearchMigratableResources][google.cloud.aiplatform.v1.MigrationService.SearchMigratableResources].

                Iterating over this object will yield results and
                resolve additional pages automatically.

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        request = migration_service.SearchMigratableResourcesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.search_migratable_resources,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # This method is paged; wrap the response in a pager, which provides
        # an `__aiter__` convenience method.
        response = pagers.SearchMigratableResourcesAsyncPager(
            method=rpc, request=request, response=response, metadata=metadata,
        )

        # Done; return the response.
        return response

    async def batch_migrate_resources(
        self,
        request: Union[migration_service.BatchMigrateResourcesRequest, dict] = None,
        *,
        parent: str = None,
        migrate_resource_requests: Sequence[
            migration_service.MigrateResourceRequest
        ] = None,
        retry: OptionalRetry = gapic_v1.method.DEFAULT,
        timeout: float = None,
        metadata: Sequence[Tuple[str, str]] = (),
    ) -> operation_async.AsyncOperation:
        r"""Batch migrates resources from ml.googleapis.com,
        automl.googleapis.com, and datalabeling.googleapis.com
        to Vertex AI.

        .. code-block:: python

            from google.cloud import aiplatform_v1

            def sample_batch_migrate_resources():
                # Create a client
                client = aiplatform_v1.MigrationServiceClient()

                # Initialize request argument(s)
                migrate_resource_requests = aiplatform_v1.MigrateResourceRequest()
                migrate_resource_requests.migrate_ml_engine_model_version_config.endpoint = "endpoint_value"
                migrate_resource_requests.migrate_ml_engine_model_version_config.model_version = "model_version_value"
                migrate_resource_requests.migrate_ml_engine_model_version_config.model_display_name = "model_display_name_value"

                request = aiplatform_v1.BatchMigrateResourcesRequest(
                    parent="parent_value",
                    migrate_resource_requests=migrate_resource_requests,
                )

                # Make the request
                operation = client.batch_migrate_resources(request=request)

                print("Waiting for operation to complete...")

                response = operation.result()

                # Handle the response
                print(response)

        Args:
            request (Union[google.cloud.aiplatform_v1.types.BatchMigrateResourcesRequest, dict]):
                The request object. Request message for
                [MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources].
            parent (:class:`str`):
                Required. The location of the migrated resource will
                live in. Format:
                ``projects/{project}/locations/{location}``

                This corresponds to the ``parent`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            migrate_resource_requests (:class:`Sequence[google.cloud.aiplatform_v1.types.MigrateResourceRequest]`):
                Required. The request messages
                specifying the resources to migrate.
                They must be in the same location as the
                destination. Up to 50 resources can be
                migrated in one batch.

                This corresponds to the ``migrate_resource_requests`` field
                on the ``request`` instance; if ``request`` is provided, this
                should not be set.
            retry (google.api_core.retry.Retry): Designation of what errors, if any,
                should be retried.
            timeout (float): The timeout for this request.
            metadata (Sequence[Tuple[str, str]]): Strings which should be
                sent along with the request as metadata.

        Returns:
            google.api_core.operation_async.AsyncOperation:
                An object representing a long-running operation.

                The result type for the operation will be
                :class:`google.cloud.aiplatform_v1.types.BatchMigrateResourcesResponse`
                Response message for
                [MigrationService.BatchMigrateResources][google.cloud.aiplatform.v1.MigrationService.BatchMigrateResources].

        """
        # Create or coerce a protobuf request object.
        # Quick check: If we got a request object, we should *not* have
        # gotten any keyword arguments that map to the request.
        has_flattened_params = any([parent, migrate_resource_requests])
        if request is not None and has_flattened_params:
            raise ValueError(
                "If the `request` argument is set, then none of "
                "the individual field arguments should be set."
            )

        request = migration_service.BatchMigrateResourcesRequest(request)

        # If we have keyword arguments corresponding to fields on the
        # request, apply these.
        if parent is not None:
            request.parent = parent
        if migrate_resource_requests:
            request.migrate_resource_requests.extend(migrate_resource_requests)

        # Wrap the RPC method; this adds retry and timeout information,
        # and friendly error handling.
        rpc = gapic_v1.method_async.wrap_method(
            self._client._transport.batch_migrate_resources,
            default_timeout=None,
            client_info=DEFAULT_CLIENT_INFO,
        )

        # Certain fields should be provided within the metadata header;
        # add these here.
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)),
        )

        # Send the request.
        response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)

        # Wrap the response in an operation future.
        response = operation_async.from_gapic(
            response,
            self._client._transport.operations_client,
            migration_service.BatchMigrateResourcesResponse,
            metadata_type=migration_service.BatchMigrateResourcesOperationMetadata,
        )

        # Done; return the response.
        return response

    async def __aenter__(self):
        return self

    async def __aexit__(self, exc_type, exc, tb):
        # Close the underlying transport when leaving the async context.
        await self.transport.close()
# Report the installed google-cloud-aiplatform version in client telemetry;
# fall back to an anonymous ClientInfo when the distribution is not installed.
try:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
        gapic_version=pkg_resources.get_distribution(
            "google-cloud-aiplatform",
        ).version,
    )
except pkg_resources.DistributionNotFound:
    DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()

__all__ = ("MigrationServiceAsyncClient",)
|
googleapis/python-aiplatform
|
google/cloud/aiplatform_v1/services/migration_service/async_client.py
|
Python
|
apache-2.0
| 21,005
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
#------------------------------------------------------------
import selenium
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
import time
import urlparse,urllib2,urllib,re,xbmcplugin,xbmcgui,xbmcaddon,xbmc
import os, sys
from core import logger
from core import config
from core import scrapertools
from core.item import Item
import cookielib
import requests
import os.path
# pelisalacarta channel descriptor fields.
__channel__ = "itastreaming"
__category__ = "F"
__type__ = "generic"
__title__ = "itastreaming"
__language__ = "IT"
# NOTE(review): hard-coded, user-specific path; breaks on any other machine.
# Should be derived from a writable config/temp directory.
COOKIEFILE = "/Users/arturo/itacookie.lwp"
# Desktop Firefox UA; must match the UA PhantomJS uses when the cookies
# are created, or CloudFlare rejects the cached cookies.
h = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0'}
baseUrl = "http://itastreaming.co"
def createCookies():
if not os.path.isfile(COOKIEFILE):
print "File not exists"
#get cookies!
dcap = dict(DesiredCapabilities.PHANTOMJS)
dcap["phantomjs.page.settings.userAgent"] = ("Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0")
browser = webdriver.PhantomJS(executable_path='/bin/phantomjs',desired_capabilities = dcap, service_log_path=os.path.devnull)
browser.get(baseUrl)
time.sleep(10)
a = browser.get_cookies()
print 'Got cloudflare cookies:\n'
browser.close()
b = cookielib.MozillaCookieJar()
for i in a:
# create the cf_session_cookie
ck = cookielib.Cookie(name=i['name'], value=i['value'], domain=i['domain'], path=i['path'], secure=i['secure'], rest=False, version=0,port=None,port_specified=False,domain_specified=False,domain_initial_dot=False,path_specified=True,expires=i['expiry'],discard=True,comment=None,comment_url=None,rfc2109=False)
b.set_cookie(ck)
# save into a file
print b
b.save(filename=COOKIEFILE, ignore_discard=True, ignore_expires=False)
else:
print "found it, do nothing!"
b = True
return b
def isGeneric():
    """Flag this module as a generic pelisalacarta channel."""
    generic = True
    return generic
def mainlist(item):
    """Build the channel's top-level menu: latest, search, genres, qualities.

    Entries and their order are identical to the hand-written listing.
    """
    logger.info("pelisalacarta.itastreaming mainlist")
    genres = [
        "animazione", "avventura", "azione", "biografico", "comico",
        "commedia", "documentario", "drammatico", "erotico", "fantascienza",
        "fantasy", "gangstar", "giallo", "guerra", "horror", "musical",
        "romantico", "storico", "thriller", "western",
    ]
    qualities = [
        ("HD", "hd"),
        ("DVD-RIP", "dvdripac3"),
        ("CAM", "cam"),
        ("HD-MD", "hd-md"),
        ("HD-TS", "hd-ts"),
    ]
    menu = [
        Item(channel=__channel__, action="movies", title="ultimi film inseriti...", url="http://itastreaming.co"),
        Item(channel=__channel__, action="search", title="Cerca Film"),
    ]
    for genre in genres:
        menu.append(Item(channel=__channel__, action="movies", title=genre,
                         url="http://itastreaming.co/genere/" + genre))
    for label, slug in qualities:
        menu.append(Item(channel=__channel__, action="movies", title=label,
                         url="http://itastreaming.co/qualita/" + slug))
    return menu
#searching for films
def search(item, text):
    """Search itastreaming.co for *text* and return matching movie Items.

    Returns an empty list (after logging the exception info) on any
    scrape/network error.

    Bug fixed: the bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; narrowed to ``except Exception``. The redundant
    function-local ``import sys`` was dropped (sys is imported at module
    level).
    """
    createCookies()
    itemlist = []
    text = text.replace(" ", "%20")
    item.url = "http://itastreaming.co/?s=" + text
    try:
        biscotto = cookielib.MozillaCookieJar()
        biscotto.load(COOKIEFILE)
        data = requests.get(item.url, cookies=biscotto, headers=h)
        data = data.text.encode('utf-8')
        # Normalise typographic dash/apostrophe before regex matching.
        data = data.replace('–','-').replace('’',' ')
        pattern = '<img class="imx" style="margin-top:0px;" src="?([^>"]+)"?.*?alt="?([^>"]+)"?.*?'
        pattern += '<h3><a href="?([^>"]+)"?.*?</h3>'
        matches = re.compile(pattern, re.DOTALL).findall(data)
        for scrapedthumbnail, scrapedtitle, scrapedurl in matches:
            title = scrapedtitle.strip()
            url = urlparse.urljoin(item.url, scrapedurl)
            # Thumbnail is looked up on themoviedb rather than taken
            # from the (often placeholder) on-site image.
            thumbnail = scrapthumb(title)
            itemlist.append(Item(channel=__channel__, action="grabing", title=title, url=url, thumbnail=thumbnail, folder=True))
        return itemlist
    except Exception:
        for line in sys.exc_info():
            logger.error("%s" % line)
        return []
#azione "movies" server per estrerre i titoli
def movies(item):
    """Scrape one listing page into Items, plus a "Next Page" entry.

    item.url: the listing/genre/quality page to scrape. When the movie
    regex finds nothing, the cached CloudFlare cookies are assumed to be
    expired and COOKIEFILE is deleted so the next call recreates them.
    """
    createCookies()
    itemlist = []
    biscotto = cookielib.MozillaCookieJar()
    biscotto.load(COOKIEFILE)
    data = requests.get(item.url, cookies=biscotto, headers=h)
    data = data.text.encode('utf-8')
    # Normalise typographic dash/apostrophe before regex matching.
    data = data.replace('–','-').replace('’',' ')
    patron = '<div class="item">\s*'
    patron += '<a href="?([^>"]+)"?.*?title="?([^>"]+)"?.*?'
    patron += '<div class="img">\s*'
    patron += '<img.*?src="([^>"]+)'
    matches = re.compile(patron,re.DOTALL).findall(data)
    if not matches:
        # No items usually means CloudFlare served a challenge page.
        print "Coockies expired!, delete it"
        os.remove(COOKIEFILE)
    for scrapedurl,scrapedtitle,scrapedthumbnail in matches:
        title = scrapedtitle.strip()
        url = urlparse.urljoin(item.url,scrapedurl)
        # Thumbnail comes from themoviedb, not the scraped on-site image.
        thumbnail = scrapthumb(title)
        scrapedplot = ""
        itemlist.append( Item(channel=__channel__, action="grabing", title=title , url=url , thumbnail=thumbnail , plot=scrapedplot , folder=True) )
    # next page: try the "Seguente" link first, then the numbered pager.
    patternpage = '<a rel="nofollow" class="previouspostslink\'" href="(.*?)">Seguente \›</a>'
    matches = re.compile(patternpage,re.DOTALL).findall(data)
    #print matches
    if not matches:
        patternpage = "<span class='current'.*?</span>"
        patternpage += "<a rel='nofollow' class='page larger' href='([^']+)'>.*?</a>"
        matches = re.compile(patternpage,re.DOTALL).findall(data)
        #print matches
    if len(matches)>0:
        scrapedurl = urlparse.urljoin(item.url,matches[0])
        itemlist.append( Item(channel=__channel__, action="movies", title="Next Page >>" , url=scrapedurl , folder=True) )
    return itemlist
def grabing(item):
    """Resolve the playable stream URLs for a film page.

    Drives a headless PhantomJS browser to the page so the site's JavaScript
    can populate ``nData`` (a list of dicts with 'url', 'width', 'height'),
    then returns one "playit" Item per available quality.
    """
    itemlist = []
    biscotto = cookielib.MozillaCookieJar()
    biscotto.load(COOKIEFILE)
    data = requests.get(item.url, cookies=biscotto, headers=h)
    data = data.text.encode('utf-8')
    # executed only when the user clicks on a film title
    if item.title:
        filmtitle = str(item.title)
        filmtitle = filmtitle.replace('–','')
        # Spoof a desktop Firefox user agent for the headless browser.
        dcap = dict(DesiredCapabilities.PHANTOMJS)
        dcap["phantomjs.page.settings.userAgent"] = (
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:37.0) Gecko/20100101 Firefox/37.0")
        browser = webdriver.PhantomJS(executable_path='/bin/phantomjs',desired_capabilities = dcap, service_log_path=os.path.devnull)
        browser.get(item.url)
        # Give the page's JavaScript time to define nData.
        time.sleep(7)
        try:
            nData = browser.execute_script("return nData")
            print nData
            for block in nData:
                itemlist.append( Item(channel=__channel__, action="playit", title=filmtitle + " quality: " + block['width'] + " x " + block['height'] , url=block['url'] ))
            browser.close()
        except:
            # nData was not on this page: follow the embedded hdpass.link
            # frame URL found in the raw HTML and retry there.
            fakeurl = re.findall('"((http)s?://.*?hdpass.link.*?)"', data)
            print fakeurl
            url = fakeurl[0][0]
            browser.get(url)
            time.sleep(7)
            nData = browser.execute_script("return nData")
            print nData
            print filmtitle
            for block in nData:
                print block['url']
                itemlist.append( Item(channel=__channel__, action="playit", title=filmtitle + " quality: " + block['width'] + " x " + block['height'] , url=block['url'] ))
            browser.close()
    return itemlist
def playit(item):
itemlist = []
print item.url
itemlist.append( Item(channel=__channel__, action="playit", title=item.title , url=item.url ))
if not xbmc.Player().isPlayingVideo():
xbmc.Player(xbmc.PLAYER_CORE_DVDPLAYER).play(item.url)
return itemlist
def scrapthumb(title):
    """Look up a poster thumbnail for ``title`` on themoviedb.org.

    Returns the full-resolution poster URL of the first search result, or
    "" when nothing is found.
    """
    # Normalise the title into the dash-separated form used in the query.
    title = title.strip().replace('–','').replace('’','-').replace('à','a')
    title = title.replace(' ','-')
    # NOTE(review): drops the last 7 characters -- presumably a " (yyyy)"
    # year suffix; verify against the titles produced by movies().
    title = title[:-7]
    #print title
    mdburl = 'https://www.themoviedb.org/search/movie?query=' + title
    req = urllib2.Request(mdburl)
    response = urllib2.urlopen(req)
    data = response.read()
    pattern = '<div class="poster">\s*'
    pattern += '<a.*?src="(.*?)"'
    matches = re.compile(pattern,re.DOTALL).findall(data)
    thumbnail = ""
    if matches:
        # Take the first poster and request the original-size image.
        thumbnail = matches[0]
        thumbnail = thumbnail.replace('w92','original')
    else:
        print "thumb not found for: " + mdburl
    return thumbnail
|
Reat0ide/plugin.video.pelisalacarta
|
pelisalacarta/channels/itastreaming.py
|
Python
|
gpl-3.0
| 11,292
|
from dotenv import load_dotenv

# Load environment variables from the project-local .env file at import time.
load_dotenv('./.env')
|
mbernson/iscp-search-engine
|
retrouve/__init__.py
|
Python
|
gpl-3.0
| 53
|
"""
Management commands for third_party_auth
"""
import logging
from django.core.management.base import BaseCommand, CommandError
from common.djangoapps.third_party_auth.tasks import fetch_saml_metadata
class Command(BaseCommand):
    """ manage.py commands to manage SAML/Shibboleth SSO """
    help = '''Configure/maintain/update SAML-based SSO'''

    def add_arguments(self, parser):
        parser.add_argument('--pull', action='store_true', help="Pull updated metadata from external IDPs")

    def handle(self, *args, **options):
        # The command is a no-op without --pull, so refuse to run silently.
        if not options.get('pull', False):
            raise CommandError("Command can only be used with '--pull' option.")

        # Mirror the fetch task's log output onto this command's stdout.
        stream_handler = logging.StreamHandler(self.stdout)
        stream_handler.setLevel(logging.DEBUG)
        task_logger = logging.getLogger('common.djangoapps.third_party_auth.tasks')
        task_logger.propagate = False
        task_logger.addHandler(stream_handler)

        total, skipped, attempted, updated, failed, failure_messages = fetch_saml_metadata()
        self.stdout.write(
            "\nDone."
            "\n{total} provider(s) found in database."
            "\n{skipped} skipped and {attempted} attempted."
            "\n{updated} updated and {failed} failed.\n".format(
                total=total,
                skipped=skipped, attempted=attempted,
                updated=updated, failed=failed,
            )
        )
        # Surface task failures as a non-zero exit via CommandError.
        if failed > 0:
            raise CommandError(
                "Command finished with the following exceptions:\n\n{failures}".format(
                    failures="\n\n".join(failure_messages)
                )
            )
|
edx/edx-platform
|
common/djangoapps/third_party_auth/management/commands/saml.py
|
Python
|
agpl-3.0
| 1,687
|
# Now we refactor star_thompsonlike.py into lower-level code. Instead
# of a set, the states are a list, together with a boolean array
# ('occupied') saying for each state whether it's on the list. We
# represent the set with this combo of datastructures to make it
# efficient both to enumerate the states in the set and to deduplicate
# them.
# The other change is to precompute the accepts() function to make it
# just an array access at match time.
def search(re, chars):
    """Return True iff regex combinator `re` matches some prefix of `chars`."""
    return run(re(accept), chars)

def run(start, chars):
    """Simulate the NFA from node `start` over `chars`; True on first accept."""
    if accepts[start]:
        return True
    states = [start]
    # occupied[i] is True while node i is in the current state list; it lets
    # us deduplicate states in O(1) while still enumerating them in order.
    occupied = [False] * len(nodes)
    for ch in chars:
        next_states = []
        for state in states:
            after(ch, state, accept, next_states, occupied)
        states = next_states
        for state in states:
            if accepts[state]:
                return True
            # Clear the occupancy mark so the array is reusable next round.
            occupied[state] = False
    return False

def after(ch, start, end, next_states, occupied):
    """Collect into next_states all states reachable from `start` by
    consuming `ch`, following epsilon-successors up to (excluding) `end`."""
    while start != end:
        tag, r, s = nodes[start]
        if tag == 'literal':
            if r == ch and not occupied[s]:
                next_states.append(s)
                occupied[s] = True
            break
        elif tag == 'either':
            after(ch, r, end, next_states, occupied)
            start = s
        elif tag == 'star':
            # The star node itself is the loop-back boundary for its body.
            after(ch, r, start, next_states, occupied)
            start = s
        else: assert False

accept = 0  # A sentinel node that's never accessed.
nodes = [None]
accepts = [True]

def add(node, accepting):
    """Append `node`, record whether it accepts, and return its index."""
    nodes.append(node)
    accepts.append(accepting)
    return len(nodes) - 1

# Regex combinators: each takes a continuation node index k and returns
# the index of the node that starts the construct.
def literal(ch): return lambda k: add(('literal', ch, k), False)
def chain(r, s): return lambda k: r(s(k))

def either(r, s):
    def r_or_s(k):
        rk, sk = r(k), s(k)
        return add(('either', rk, sk), accepts[rk] or accepts[sk])
    return r_or_s

def star(r):
    def rstar(k):
        # Allocate the node first so the body can loop back to it.
        j = add(None, accepts[k])
        nodes[j] = ('star', r(j), k)
        return j
    return rstar
|
darius/regexercise_solutions
|
star_thompsonlike_lowlevel.py
|
Python
|
gpl-3.0
| 2,044
|
from django.conf.urls import url
from django.contrib.admin.utils import quote
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from wagtail.contrib.modeladmin.options import ModelAdmin
from wagtail.contrib.modeladmin.helpers import ButtonHelper
from wagtailmenus.conf import settings
from wagtailmenus import views
class MainMenuAdmin(ModelAdmin):
    """Wagtail ModelAdmin for the main menu.

    Registered under the Settings menu; only index and edit views are
    exposed (no create/delete URLs are registered below).
    """
    model = settings.models.MAIN_MENU_MODEL
    menu_label = _('Main menu')
    menu_icon = settings.MAINMENU_MENU_ICON
    index_view_class = views.MainMenuIndexView
    edit_view_class = views.MainMenuEditView
    add_to_settings_menu = True

    def get_form_view_extra_css(self):
        # Optionally ship CSS that overrides the default editor styling.
        if settings.ADD_EDITOR_OVERRIDE_STYLES:
            return ['wagtailmenus/css/menu-edit.css']
        return []

    def get_admin_urls_for_registration(self):
        # Only the index and edit actions are routed for main menus.
        return (
            url(self.url_helper.get_action_url_pattern('index'),
                self.index_view,
                name=self.url_helper.get_action_url_name('index')),
            url(self.url_helper.get_action_url_pattern('edit'),
                self.edit_view,
                name=self.url_helper.get_action_url_name('edit')),
        )
class FlatMenuButtonHelper(ButtonHelper):
    """ButtonHelper that adds a 'Copy' button to flat menu listings."""

    def copy_button(self, pk, classnames_add=None, classnames_exclude=None):
        """Return the button definition dict for the 'copy' action.

        ``None`` defaults (normalised to fresh empty lists) replace the
        previous mutable-default-argument lists, which were shared between
        calls -- a classic Python pitfall.
        """
        if classnames_add is None:
            classnames_add = []
        if classnames_exclude is None:
            classnames_exclude = []
        cn = self.finalise_classname(classnames_add, classnames_exclude)
        return {
            'url': self.url_helper.get_action_url('copy', quote(pk)),
            'label': _('Copy'),
            'classname': cn,
            'title': _('Copy this %(model_name)s') % {
                'model_name': self.verbose_name,
            },
        }

    def get_buttons_for_obj(self, obj, exclude=None, classnames_add=None,
                            classnames_exclude=None):
        """Return the standard buttons for ``obj`` plus 'Copy' when the
        user has create permission and 'copy' is not excluded."""
        # Normalise None defaults to lists before passing them on, so the
        # behaviour matches the old (mutable) defaults exactly.
        if exclude is None:
            exclude = []
        if classnames_add is None:
            classnames_add = []
        if classnames_exclude is None:
            classnames_exclude = []
        ph = self.permission_helper
        usr = self.request.user
        pk = quote(getattr(obj, self.opts.pk.attname))
        btns = super().get_buttons_for_obj(
            obj, exclude, classnames_add, classnames_exclude)
        # Copying creates a new menu, so it requires create permission.
        if 'copy' not in exclude and ph.user_can_create(usr):
            btns.append(
                self.copy_button(pk, classnames_add, classnames_exclude)
            )
        return btns
class FlatMenuAdmin(ModelAdmin):
    """Wagtail ModelAdmin for flat menus, with an extra 'copy' action.

    Listing columns and filters adapt depending on whether menus exist for
    more than one site (see is_multisite_listing()).
    """
    model = settings.models.FLAT_MENU_MODEL
    menu_label = _('Flat menus')
    menu_icon = settings.FLATMENU_MENU_ICON
    button_helper_class = FlatMenuButtonHelper
    ordering = ('-site__is_default_site', 'site__hostname', 'handle')
    create_view_class = views.FlatMenuCreateView
    edit_view_class = views.FlatMenuEditView
    add_to_settings_menu = True

    def get_form_view_extra_css(self):
        # Optionally ship CSS that overrides the default editor styling.
        if settings.ADD_EDITOR_OVERRIDE_STYLES:
            return ['wagtailmenus/css/menu-edit.css']
        return []

    def copy_view(self, request, instance_pk):
        # Instantiate the copy view per request, mirroring the way
        # ModelAdmin wires its built-in views.
        kwargs = {'model_admin': self, 'instance_pk': instance_pk}
        return views.FlatMenuCopyView.as_view(**kwargs)(request)

    def get_admin_urls_for_registration(self):
        # Register the extra 'copy' URL alongside the standard ones.
        urls = super().get_admin_urls_for_registration()
        urls += (
            url(self.url_helper.get_action_url_pattern('copy'),
                self.copy_view,
                name=self.url_helper.get_action_url_name('copy')),
        )
        return urls

    def get_list_filter(self, request):
        # Site/handle filters only make sense when several sites have menus.
        if self.is_multisite_listing(request):
            return ('site', 'handle')
        return ()

    def get_list_display(self, request):
        if self.is_multisite_listing(request):
            return ('title', 'handle_formatted', 'site', 'items')
        return ('title', 'handle_formatted', 'items')

    def handle_formatted(self, obj):
        # Render the handle in a monospace <code> element in listings.
        return mark_safe('<code>%s</code>' % obj.handle)
    handle_formatted.short_description = _('handle')
    handle_formatted.admin_order_field = 'handle'

    def is_multisite_listing(self, request):
        # True when the visible menus span more than one site.
        return self.get_queryset(request).values('site').distinct().count() > 1

    def items(self, obj):
        # Number of menu items attached to this menu (listing column).
        return obj.get_menu_items_manager().count()
    items.short_description = _('no. of items')
|
rkhleics/wagtailmenus
|
wagtailmenus/modeladmin.py
|
Python
|
mit
| 4,086
|
""" SQLAlchemy support. """
from __future__ import absolute_import
import datetime
from types import GeneratorType
import decimal
from sqlalchemy import func
# from sqlalchemy.orm.interfaces import MANYTOONE
from sqlalchemy.orm.collections import InstrumentedList
from sqlalchemy.sql.type_api import TypeDecorator
try:
from sqlalchemy.orm.relationships import RelationshipProperty
except ImportError:
from sqlalchemy.orm.properties import RelationshipProperty
from sqlalchemy.types import (
BIGINT, BOOLEAN, BigInteger, Boolean, CHAR, DATE, DATETIME, DECIMAL, Date,
DateTime, FLOAT, Float, INT, INTEGER, Integer, NCHAR, NVARCHAR, NUMERIC,
Numeric, SMALLINT, SmallInteger, String, TEXT, TIME, Text, Time, Unicode,
UnicodeText, VARCHAR, Enum)
from .. import mix_types as t
from ..main import (
SKIP_VALUE, LOGGER, TypeMixer as BaseTypeMixer, GenFactory as BaseFactory,
Mixer as BaseMixer, partial, faker)
class GenFactory(BaseFactory):
    """ Map a sqlalchemy classes to simple types. """

    # Keys are tuples of SQLAlchemy column types; values are the simple
    # Python/mixer types used to generate data for those columns.
    types = {
        (String, VARCHAR, Unicode, NVARCHAR, NCHAR, CHAR): str,
        (Text, UnicodeText, TEXT): t.Text,
        (Boolean, BOOLEAN): bool,
        (Date, DATE): datetime.date,
        (DateTime, DATETIME): datetime.datetime,
        (Time, TIME): datetime.time,
        (DECIMAL, Numeric, NUMERIC): decimal.Decimal,
        (Float, FLOAT): float,
        (Integer, INTEGER, INT): int,
        (BigInteger, BIGINT): t.BigInteger,
        (SmallInteger, SMALLINT): t.SmallInteger,
    }
class TypeMixer(BaseTypeMixer):
    """ TypeMixer for SQLAlchemy. """

    factory = GenFactory

    def __init__(self, cls, **params):
        """ Init TypeMixer and save the mapper. """
        super(TypeMixer, self).__init__(cls, **params)
        # NOTE: double-underscore attributes (``__scheme``, ``__mixer``,
        # ``__fields``, ...) are name-mangled; they line up with the base
        # class because it is also named ``TypeMixer`` in its own module
        # (imported here as BaseTypeMixer).
        self.mapper = self.__scheme._sa_class_manager.mapper

    def postprocess(self, target, postprocess_values):
        """ Fill postprocess values. """
        mixed = []
        for name, deffered in postprocess_values:
            value = deffered.value
            if isinstance(value, GeneratorType):
                value = next(value)
            # Mix values are resolved only after all plain values are set.
            if isinstance(value, t.Mix):
                mixed.append((name, value))
                continue
            # Relationship collections expect a list, even for one value.
            if isinstance(getattr(target, name), InstrumentedList) and not isinstance(value, list):
                value = [value]
            setattr(target, name, value)
        for name, mix in mixed:
            setattr(target, name, mix & target)
        if self.__mixer:
            target = self.__mixer.postprocess(target)
        return target

    @staticmethod
    def get_default(field):
        """ Get default value from field.
        :return value: A default value or NO_VALUE
        """
        column = field.scheme
        # For relationships, inspect the underlying local column.
        if isinstance(column, RelationshipProperty):
            column = column.local_remote_pairs[0][0]
        if not column.default:
            return SKIP_VALUE
        if column.default.is_callable:
            return column.default.arg(None)
        return getattr(column.default, 'arg', SKIP_VALUE)

    def gen_select(self, field_name, select):
        """ Select an existing value from the database.
        :param field_name: Name of field for generation.
        :return : None or (name, value) for later use
        """
        # A DB session is required to query existing rows.
        if not self.__mixer or not self.__mixer.params.get('session'):
            return field_name, SKIP_VALUE
        relation = self.mapper.get_property(field_name)
        session = self.__mixer.params.get('session')
        # Pick a random row matching the select choices.
        value = session.query(
            relation.mapper.class_
        ).filter(*select.choices).order_by(func.random()).first()
        return self.get_value(field_name, value)

    @staticmethod
    def is_unique(field):
        """ Return True if field's value should be unique.
        :return bool:
        """
        scheme = field.scheme
        if isinstance(scheme, RelationshipProperty):
            scheme = scheme.local_remote_pairs[0][0]
        return scheme.unique

    @staticmethod
    def is_required(field):
        """ Return True if field's value should be defined.
        :return bool:
        """
        column = field.scheme
        if isinstance(column, RelationshipProperty):
            column = column.local_remote_pairs[0][0]
        if field.params:
            return True
        # According to the SQLAlchemy docs, autoincrement "only has an effect for columns which are
        # Integer derived (i.e. INT, SMALLINT, BIGINT) [and] Part of the primary key [...]".
        return not column.nullable and not (column.autoincrement and column.primary_key and
                                            isinstance(column.type, Integer))

    def get_value(self, field_name, field_value):
        """ Get `value` as `field_name`.
        :return : None or (name, value) for later use
        """
        field = self.__fields.get(field_name)
        # Relationship values are deferred until postprocess().
        if field and isinstance(field.scheme, RelationshipProperty):
            return field_name, t._Deffered(field_value, field.scheme)
        return super(TypeMixer, self).get_value(field_name, field_value)

    def make_fabric(self, column, field_name=None, fake=False, kwargs=None):  # noqa
        """ Make values fabric for column.
        :param column: SqlAlchemy column
        :param field_name: Field name
        :param fake: Force fake data
        :return function:
        """
        kwargs = {} if kwargs is None else kwargs
        # Relationships are filled by blending the related model.
        if isinstance(column, RelationshipProperty):
            return partial(type(self)(
                column.mapper.class_, mixer=self.__mixer, fake=self.__fake, factory=self.__factory
            ).blend, **kwargs)
        ftype = type(column.type)
        # augmented types created with TypeDecorator
        # don't directly inherit from the base types
        if TypeDecorator in ftype.__bases__:
            ftype = ftype.impl
        stype = self.__factory.cls_to_simple(ftype)
        if stype is str:
            fab = super(TypeMixer, self).make_fabric(
                stype, field_name=field_name, fake=fake, kwargs=kwargs)
            # Respect the column's maximum string length.
            return lambda: fab()[:column.type.length]
        if ftype is Enum:
            return partial(faker.random_element, column.type.enums)
        return super(TypeMixer, self).make_fabric(
            stype, field_name=field_name, fake=fake, kwargs=kwargs)

    def guard(self, *args, **kwargs):
        """ Look up objects in the database.
        :returns: A found object (or list of objects) or False
        """
        try:
            session = self.__mixer.params.get('session')
            assert session
        except (AttributeError, AssertionError):
            raise ValueError('Cannot make request to DB.')
        qs = session.query(self.mapper).filter(*args, **kwargs)
        count = qs.count()
        if count == 1:
            return qs.first()
        if count:
            return qs.all()
        return False

    def reload(self, obj):
        """ Reload object from database. """
        try:
            session = self.__mixer.params.get('session')
            session.expire(obj)
            session.refresh(obj)
            return obj
        except (AttributeError, AssertionError):
            raise ValueError('Cannot make request to DB.')

    def __load_fields(self):
        """ Prepare SQLALchemyTypeMixer.
        Select columns and relations for data generation.
        """
        mapper = self.__scheme._sa_class_manager.mapper
        relations = set()
        if hasattr(mapper, 'relationships'):
            for rel in mapper.relationships:
                relations |= rel.local_columns
                yield rel.key, t.Field(rel, rel.key)
        # Plain columns, excluding those already covered by relationships.
        for key, column in mapper.columns.items():
            if column not in relations:
                yield key, t.Field(column, key)
class Mixer(BaseMixer):
    """ Integration with SQLAlchemy. """

    type_mixer_cls = TypeMixer

    def __init__(self, session=None, commit=True, **params):
        """Initialize the SQLAlchemy Mixer.
        :param fake: (True) Generate fake data instead of random data.
        :param session: SQLAlchemy session. Using for commits.
        :param commit: (True) Commit instance to session after creation.
        """
        super(Mixer, self).__init__(**params)
        self.params['session'] = session
        # Commit only makes sense when a session was actually provided.
        self.params['commit'] = bool(session) and commit

    def postprocess(self, target):
        """ Save objects in db.
        :return value: A generated value
        """
        if self.params.get('commit'):
            session = self.params.get('session')
            if not session:
                # NOTE(review): Logger.warn is a deprecated alias of
                # Logger.warning in the stdlib logging module.
                LOGGER.warn("'commit' set true but session not initialized.")
            else:
                session.add(target)
                session.commit()
        return target
# Default mixer
# Module-level convenience instance (no session, so commit is disabled).
mixer = Mixer()

# pylama:ignore=E1120,E0611
|
Nebucatnetzer/tamagotchi
|
pygame/lib/python3.4/site-packages/mixer/backend/sqlalchemy.py
|
Python
|
gpl-2.0
| 8,887
|
from rest_framework import viewsets
from web_db.models import Album, Song
from web_endpoints.serializers import AlbumSerializer, SongSerializer
class AlbumViewSet(viewsets.ModelViewSet):
    """CRUD REST endpoints for Album objects."""
    queryset = Album.objects.all()
    serializer_class = AlbumSerializer
class SongViewSet(viewsets.ModelViewSet):
    """CRUD REST endpoints for Song objects."""
    queryset = Song.objects.all()
    serializer_class = SongSerializer
|
octaflop/django_music
|
apps/web_endpoints/views.py
|
Python
|
mit
| 381
|
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art

# Generated benchmark case: an artificial daily series of length 1024 with a
# polynomial trend, cycle length 7, Anscombe transform, AR order 12, 100
# exogenous variables and sigma 0.
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "PolyTrend", cycle_length = 7, transform = "Anscombe", sigma = 0.0, exog_count = 100, ar_order = 12);
|
antoinecarme/pyaf
|
tests/artificial/transf_Anscombe/trend_PolyTrend/cycle_7/ar_12/test_artificial_1024_Anscombe_PolyTrend_7_12_100.py
|
Python
|
bsd-3-clause
| 266
|
# -*- coding: utf-8 -*-
import numpy as np
from scipy import linalg
from copy import deepcopy
from ..bem import _check_origin
from ..io.constants import FIFF
from ..io.pick import pick_types, pick_info
from ..surface import get_head_surf, get_meg_helmet_surf
from ..io.proj import _has_eeg_average_ref_proj, make_projector
from ..transforms import transform_surface_to, read_trans, _find_trans
from ._make_forward import _create_meg_coils, _create_eeg_els, _read_coil_defs
from ._lead_dots import (_do_self_dots, _do_surface_dots, _get_legen_table,
_get_legen_lut_fast, _get_legen_lut_accurate,
_do_cross_dots)
from ..parallel import check_n_jobs
from ..utils import logger, verbose
from ..fixes import partial
def _is_axial_coil(coil):
    """Return True if `coil` belongs to one of the axial MEG coil classes."""
    axial_classes = (FIFF.FWD_COILC_MAG,
                     FIFF.FWD_COILC_AXIAL_GRAD,
                     FIFF.FWD_COILC_AXIAL_GRAD2)
    return coil['coil_class'] in axial_classes
def _ad_hoc_noise(coils, ch_type='meg'):
    """Build an ad hoc diagonal noise covariance dict for the given coils."""
    n_coils = len(coils)
    variances = np.empty(n_coils)
    if ch_type == 'meg':
        # Different variances for axial vs. planar coils.
        is_axial = np.array([_is_axial_coil(c) for c in coils], dtype=bool)
        variances[is_axial] = 4e-28  # 20e-15 ** 2
        variances[~is_axial] = 2.5e-25  # 5e-13 ** 2
    else:
        variances.fill(1e-12)  # 1e-6 ** 2
    return dict(diag=True, data=variances, eig=None, eigvec=None)
def _setup_dots(mode, coils, ch_type):
    """Set up integration radius, noise and Legendre lookup for dot products."""
    int_rad = 0.06
    noise = _ad_hoc_noise(coils, ch_type)
    if mode == 'fast':
        # 50 coefficients with nearest-neighbor interpolation.
        n_coeff, lut_fun = 50, _get_legen_lut_fast
    else:  # 'accurate'
        # 100 coefficients with linear interpolation.
        n_coeff, lut_fun = 100, _get_legen_lut_accurate
    lut, n_fact = _get_legen_table(ch_type, False, n_coeff, verbose=False)
    return int_rad, noise, partial(lut_fun, lut=lut), n_fact
def _compute_mapping_matrix(fmd, info):
    """Compute the field-mapping matrix from precomputed dot products.

    Whitens and projects the self-dot matrix, inverts it via a truncated
    SVD (keeping enough singular values to explain ``1 - fmd['miss']`` of
    the cumulative spectrum), and sandwiches the inverse between the
    whitener and the surface dots.
    """
    logger.info(' Preparing the mapping matrix...')
    # assemble a projector and apply it to the data
    ch_names = fmd['ch_names']
    projs = info.get('projs', list())
    proj_op = make_projector(projs, ch_names)[0]
    proj_dots = np.dot(proj_op.T, np.dot(fmd['self_dots'], proj_op))
    noise_cov = fmd['noise']
    # Whiten
    if not noise_cov['diag']:
        raise NotImplementedError  # this shouldn't happen
    whitener = np.diag(1.0 / np.sqrt(noise_cov['data'].ravel()))
    whitened_dots = np.dot(whitener.T, np.dot(proj_dots, whitener))
    # SVD is numerically better than the eigenvalue composition even if
    # mat is supposed to be symmetric and positive definite
    uu, sing, vv = linalg.svd(whitened_dots, full_matrices=False,
                              overwrite_a=True)
    # Eigenvalue truncation
    sumk = np.cumsum(sing)
    sumk /= sumk[-1]
    fmd['nest'] = np.where(sumk > (1.0 - fmd['miss']))[0][0]
    logger.info(' [Truncate at %d missing %g]' % (fmd['nest'], fmd['miss']))
    sing = 1.0 / sing[:fmd['nest']]
    # Put the inverse together
    inv = np.dot(uu[:, :fmd['nest']] * sing, vv[:fmd['nest']]).T
    # Sandwich with the whitener
    inv_whitened = np.dot(whitener.T, np.dot(inv, whitener))
    # Take into account that the lead fields used to compute
    # d->surface_dots were unprojected
    inv_whitened_proj = (np.dot(inv_whitened.T, proj_op)).T
    # Finally sandwich in the selection matrix
    # This one picks up the correct lead field projection
    mapping_mat = np.dot(fmd['surface_dots'], inv_whitened_proj)
    # Optionally apply the average electrode reference to the final field map
    if fmd['kind'] == 'eeg':
        if _has_eeg_average_ref_proj(projs):
            logger.info(' The map will have average electrode reference')
            mapping_mat -= np.mean(mapping_mat, axis=0)[np.newaxis, :]
    return mapping_mat
def _map_meg_channels(info_from, info_to, mode='fast', origin=(0., 0., 0.04)):
    """Find mapping from one set of channels to another.
    Parameters
    ----------
    info_from : mne.io.MeasInfo
        The measurement data to interpolate from.
    info_to : mne.io.MeasInfo
        The measurement info to interpolate to.
    mode : str
        Either `'accurate'` or `'fast'`, determines the quality of the
        Legendre polynomial expansion used. `'fast'` should be sufficient
        for most applications.
    origin : array-like, shape (3,)
        Origin of the multipolar moment space in head coordinates (meters).
    Returns
    -------
    mapping : array
        A mapping matrix of shape len(pick_to) x len(pick_from).
    """
    # no need to apply trans because both from and to coils are in device
    # coordinates
    templates = _read_coil_defs(verbose=False)
    coils_from = _create_meg_coils(info_from['chs'], 'normal',
                                   info_from['dev_head_t'], templates)
    coils_to = _create_meg_coils(info_to['chs'], 'normal',
                                 info_to['dev_head_t'], templates)
    miss = 1e-4  # Smoothing criterion for MEG
    origin = _check_origin(origin, info_from)
    #
    # Step 2. Calculate the dot products
    #
    int_rad, noise, lut_fun, n_fact = _setup_dots(mode, coils_from, 'meg')
    logger.info(' Computing dot products for %i coils...'
                % (len(coils_from)))
    self_dots = _do_self_dots(int_rad, False, coils_from, origin, 'meg',
                              lut_fun, n_fact, n_jobs=1)
    logger.info(' Computing cross products for coils %i x %i coils...'
                % (len(coils_from), len(coils_to)))
    cross_dots = _do_cross_dots(int_rad, False, coils_from, coils_to,
                                origin, 'meg', lut_fun, n_fact).T
    ch_names = [c['ch_name'] for c in info_from['chs']]
    fmd = dict(kind='meg', ch_names=ch_names,
               origin=origin, noise=noise, self_dots=self_dots,
               surface_dots=cross_dots, int_rad=int_rad, miss=miss)
    #
    # Step 3. Compute the mapping matrix
    #
    mapping = _compute_mapping_matrix(fmd, info_from)
    return mapping
def _as_meg_type_evoked(evoked, ch_type='grad', mode='fast'):
    """Compute virtual evoked using interpolated fields in mag/grad channels.
    Parameters
    ----------
    evoked : instance of mne.Evoked
        The evoked object.
    ch_type : str
        The destination channel type. It can be 'mag' or 'grad'.
    mode : str
        Either `'accurate'` or `'fast'`, determines the quality of the
        Legendre polynomial expansion used. `'fast'` should be sufficient
        for most applications.
    Returns
    -------
    evoked : instance of mne.Evoked
        The transformed evoked object containing only virtual channels.
    """
    evoked = evoked.copy()
    if ch_type not in ['mag', 'grad']:
        raise ValueError('to_type must be "mag" or "grad", not "%s"'
                         % ch_type)
    # pick the original and destination channels
    pick_from = pick_types(evoked.info, meg=True, eeg=False,
                           ref_meg=False)
    pick_to = pick_types(evoked.info, meg=ch_type, eeg=False,
                         ref_meg=False)
    if len(pick_to) == 0:
        # BUG FIX: the concatenated message previously read "containingboth"
        # (missing space between fragments).
        raise ValueError('No channels matching the destination channel type'
                         ' found in info. Please pass an evoked containing'
                         ' both the original and destination channels. Only'
                         ' the locations of the destination channels will be'
                         ' used for interpolation.')
    info_from = pick_info(evoked.info, pick_from, copy=True)
    info_to = pick_info(evoked.info, pick_to, copy=True)
    # BUG FIX: the `mode` argument was previously ignored (the call
    # hard-coded mode='fast'); honor the caller's choice.
    mapping = _map_meg_channels(info_from, info_to, mode=mode)
    # compute evoked data by multiplying by the 'gain matrix' from
    # original sensors to virtual sensors
    data = np.dot(mapping, evoked.data[pick_from])
    # keep only the destination channel types
    evoked.pick_types(meg=ch_type, eeg=False, ref_meg=False)
    evoked.data = data
    # change channel names to emphasize they contain interpolated data
    for ch in evoked.info['chs']:
        ch['ch_name'] += '_virtual'
    evoked.info['ch_names'] = [ch['ch_name'] for ch in evoked.info['chs']]
    return evoked
@verbose
def _make_surface_mapping(info, surf, ch_type='meg', trans=None, mode='fast',
                          n_jobs=1, origin=(0., 0., 0.04), verbose=None):
    """Re-map M/EEG data to a surface
    Parameters
    ----------
    info : instance of io.meas_info.Info
        Measurement info.
    surf : dict
        The surface to map the data to. The required fields are `'rr'`,
        `'nn'`, and `'coord_frame'`. Must be in head coordinates.
    ch_type : str
        Must be either `'meg'` or `'eeg'`, determines the type of field.
    trans : None | dict
        If None, no transformation applied. Should be a Head<->MRI
        transformation.
    mode : str
        Either `'accurate'` or `'fast'`, determines the quality of the
        Legendre polynomial expansion used. `'fast'` should be sufficient
        for most applications.
    n_jobs : int
        Number of permutations to run in parallel (requires joblib package).
    origin : array-like, shape (3,)
        Origin of internal and external multipolar moment space in head
        coords and in meters. The default is ``(0., 0., 0.04)``.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).
    Returns
    -------
    mapping : array
        A n_vertices x n_sensors array that remaps the MEG or EEG data,
        as `new_data = np.dot(mapping, data)`.
    """
    if not all(key in surf for key in ['rr', 'nn']):
        raise KeyError('surf must have both "rr" and "nn"')
    if 'coord_frame' not in surf:
        raise KeyError('The surface coordinate frame must be specified '
                       'in surf["coord_frame"]')
    if mode not in ['accurate', 'fast']:
        raise ValueError('mode must be "accurate" or "fast", not "%s"' % mode)
    # deal with coordinate frames here -- always go to "head" (easiest)
    orig_surf = surf
    surf = transform_surface_to(deepcopy(surf), 'head', trans)
    n_jobs = check_n_jobs(n_jobs)
    origin = _check_origin(origin, info)
    #
    # Step 1. Prepare the coil definitions
    # Do the dot products, assume surf in head coords
    #
    if ch_type not in ('meg', 'eeg'):
        raise ValueError('unknown coil type "%s"' % ch_type)
    if ch_type == 'meg':
        picks = pick_types(info, meg=True, eeg=False, ref_meg=False)
        logger.info('Prepare MEG mapping...')
    else:
        picks = pick_types(info, meg=False, eeg=True, ref_meg=False)
        logger.info('Prepare EEG mapping...')
    if len(picks) == 0:
        raise RuntimeError('cannot map, no channels found')
    chs = pick_info(info, picks, copy=True)['chs']
    # create coil defs in head coordinates
    if ch_type == 'meg':
        # Put them in head coordinates
        coils = _create_meg_coils(chs, 'normal', info['dev_head_t'])
        type_str = 'coils'
        miss = 1e-4  # Smoothing criterion for MEG
    else:  # EEG
        coils = _create_eeg_els(chs)
        type_str = 'electrodes'
        miss = 1e-3  # Smoothing criterion for EEG
    #
    # Step 2. Calculate the dot products
    #
    int_rad, noise, lut_fun, n_fact = _setup_dots(mode, coils, ch_type)
    logger.info('Computing dot products for %i %s...' % (len(coils), type_str))
    self_dots = _do_self_dots(int_rad, False, coils, origin, ch_type,
                              lut_fun, n_fact, n_jobs)
    sel = np.arange(len(surf['rr']))  # eventually we should do sub-selection
    logger.info('Computing dot products for %i surface locations...'
                % len(sel))
    surface_dots = _do_surface_dots(int_rad, False, coils, surf, sel,
                                    origin, ch_type, lut_fun, n_fact,
                                    n_jobs)
    #
    # Step 3. Compute the mapping matrix and return the result
    #
    ch_names = [c['ch_name'] for c in chs]
    fmd = dict(kind=ch_type, surf=surf, ch_names=ch_names, coils=coils,
               origin=origin, noise=noise, self_dots=self_dots,
               surface_dots=surface_dots, int_rad=int_rad, miss=miss)
    logger.info('Field mapping data ready')
    fmd['data'] = _compute_mapping_matrix(fmd, info)
    # bring the original back, whatever coord frame it was in
    fmd['surf'] = orig_surf
    # Remove some unecessary fields
    del fmd['self_dots']
    del fmd['surface_dots']
    del fmd['int_rad']
    del fmd['miss']
    return fmd
@verbose
def make_field_map(evoked, trans='auto', subject=None, subjects_dir=None,
                   ch_type=None, mode='fast', meg_surf='helmet',
                   origin=(0., 0., 0.04), n_jobs=1, verbose=None):
    """Compute surface maps used for field display in 3D
    Parameters
    ----------
    evoked : Evoked | Epochs | Raw
        The measurement file. Need to have info attribute.
    trans : str | 'auto' | None
        The full path to the `*-trans.fif` file produced during
        coregistration. If present or found using 'auto'
        the maps will be in MRI coordinates.
        If None, map for EEG data will not be available.
    subject : str | None
        The subject name corresponding to FreeSurfer environment
        variable SUBJECT. If None, map for EEG data will not be available.
    subjects_dir : str
        The path to the freesurfer subjects reconstructions.
        It corresponds to Freesurfer environment variable SUBJECTS_DIR.
    ch_type : None | 'eeg' | 'meg'
        If None, a map for each available channel type will be returned.
        Else only the specified type will be used.
    mode : str
        Either `'accurate'` or `'fast'`, determines the quality of the
        Legendre polynomial expansion used. `'fast'` should be sufficient
        for most applications.
    meg_surf : str
        Should be ``'helmet'`` or ``'head'`` to specify in which surface
        to compute the MEG field map. The default value is ``'helmet'``
    origin : array-like, shape (3,)
        Origin of internal and external multipolar moment space in head
        coords and in meters. The default is ``(0., 0., 0.04)``.
        .. versionadded:: 0.11
    n_jobs : int
        The number of jobs to run in parallel.
    verbose : bool, str, int, or None
        If not None, override default verbose level (see mne.verbose).
        .. versionadded:: 0.11
    Returns
    -------
    surf_maps : list
        The surface maps to be used for field plots. The list contains
        separate ones for MEG and EEG (if both MEG and EEG are present).
    """
    info = evoked.info
    # Which channel types to map: both available ones, or the requested one.
    if ch_type is None:
        types = [t for t in ['eeg', 'meg'] if t in evoked]
    else:
        if ch_type not in ['eeg', 'meg']:
            raise ValueError("ch_type should be 'eeg' or 'meg' (got %s)"
                             % ch_type)
        types = [ch_type]
    if trans == 'auto':
        # let's try to do this in MRI coordinates so they're easy to plot
        trans = _find_trans(subject, subjects_dir)
    # EEG mapping requires a head<->MRI transform; drop it otherwise.
    if 'eeg' in types and trans is None:
        logger.info('No trans file available. EEG data ignored.')
        types.remove('eeg')
    if len(types) == 0:
        raise RuntimeError('No data available for mapping.')
    if trans is not None:
        trans = read_trans(trans)
    if meg_surf not in ['helmet', 'head']:
        raise ValueError('Surface to plot MEG fields must be '
                         '"helmet" or "head"')
    # Pick the target surface for each channel type.
    surfs = []
    for this_type in types:
        if this_type == 'meg' and meg_surf == 'helmet':
            surf = get_meg_helmet_surf(info, trans)
        else:
            surf = get_head_surf(subject, subjects_dir=subjects_dir)
        surfs.append(surf)
    surf_maps = list()
    for this_type, this_surf in zip(types, surfs):
        this_map = _make_surface_mapping(evoked.info, this_surf, this_type,
                                         trans, n_jobs=n_jobs, origin=origin)
        surf_maps.append(this_map)
    return surf_maps
|
cmoutard/mne-python
|
mne/forward/_field_interpolation.py
|
Python
|
bsd-3-clause
| 16,212
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals

try:
    import boto
except ImportError:  # pragma: no cover
    # boto is an optional dependency: provide inert placeholders so this
    # module can still be imported (and its names referenced) without it.
    boto = Attributes = BatchResults = None  # noqa

    class _void(object):
        pass
    regions = SQSConnection = Queue = _void
    RawMessage = Message = MHMessage = \
        EncodedMHMessage = JSONMessage = _void
else:
    # boto is available: re-export the real SQS classes.
    from boto.sqs.attributes import Attributes
    from boto.sqs.batchresults import BatchResults
    from boto.sqs.message import (
        EncodedMHMessage, Message, MHMessage, RawMessage,
    )
    from boto.sqs import regions
    from boto.sqs.jsonmessage import JSONMessage
    from boto.sqs.connection import SQSConnection
    from boto.sqs.queue import Queue

__all__ = [
    'Attributes', 'BatchResults', 'EncodedMHMessage', 'MHMessage',
    'Message', 'RawMessage', 'JSONMessage', 'SQSConnection',
    'Queue', 'regions',
]
|
Elastica/kombu
|
kombu/async/aws/sqs/ext.py
|
Python
|
bsd-3-clause
| 907
|
#***************************************************************************
#* (c) Juergen Riegel (juergen.riegel@web.de) 2002 *
#* *
#* This file is part of the FreeCAD CAx development system. *
#* *
#* This program is free software; you can redistribute it and/or modify *
#* it under the terms of the GNU Lesser General Public License (LGPL) *
#* as published by the Free Software Foundation; either version 2 of *
#* the License, or (at your option) any later version. *
#* for detail see the LICENCE text file. *
#* *
#* FreeCAD is distributed in the hope that it will be useful, *
#* but WITHOUT ANY WARRANTY; without even the implied warranty of *
#* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
#* GNU Library General Public License for more details. *
#* *
#* You should have received a copy of the GNU Library General Public *
#* License along with FreeCAD; if not, write to the Free Software *
#* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
#* USA *
#* *
#* Juergen Riegel 2002 *
#***************************************************************************/
import FreeCAD
import sys
import unittest
#---------------------------------------------------------------------------
# define the functions to test the FreeCAD base code
#---------------------------------------------------------------------------
def tryLoadingTest(testName):
    """Load and return the test suite named *testName*.

    If the module cannot be imported, a synthetic TestCase is returned
    instead; running it reports the load failure without aborting the
    whole test run.
    """
    try:
        return unittest.defaultTestLoader.loadTestsFromName(testName)
    except ImportError:
        class LoadFailed(unittest.TestCase):
            def __init__(self, testName):
                methodName = "failed_to_load_" + testName
                # Bind the method attribute before calling the TestCase
                # constructor, which validates that methodName exists.
                setattr(self, methodName, self._runTest)
                super(LoadFailed, self).__init__(methodName)
                self.testName = testName
            def __name__(self):
                return "Loading " + self.testName
            def _runTest(self):
                self.fail("Couldn't load " + self.testName)
        return LoadFailed(testName)
def All():
    """Build a TestSuite containing every known FreeCAD test module.

    GUI test modules are only added when the GUI is up
    (``FreeCAD.GuiUp == 1``); modules that fail to import become
    synthetic failing tests via tryLoadingTest().
    """
    # Base system tests.
    names = ["UnicodeTests",
             "Document",
             "UnitTests",
             "BaseTests"]
    # Base system GUI tests.
    if FreeCAD.GuiUp == 1:
        names += ["Workbench",
                  "Menu"]
    # Module tests.
    names += ["TestFem",
              "MeshTestsApp",
              "TestSketcherApp",
              "TestPartApp",
              "TestPartDesignApp",
              "TestSpreadsheet",
              "TestTechDrawApp",
              "TestPathApp"]
    # GUI tests of modules.
    if FreeCAD.GuiUp == 1:
        names += ["TestSketcherGui",
                  "TestPartGui",
                  "TestPartDesignGui",
                  "TestDraft",
                  "TestArch"]
    suite = unittest.TestSuite()
    suite.addTests(tryLoadingTest(name) for name in names)
    return suite
def TestText(s):
    """Run tests with a verbose text runner writing to stdout.

    Parameters
    ----------
    s : str or unittest.TestSuite or unittest.TestCase
        Either a dotted test name to load, or an already-loaded
        suite/case. Previously an already-loaded suite crashed here
        because it was passed to loadTestsFromName() unconditionally
        (which calls ``name.split('.')``) -- exactly what testUnit()
        and testDocument() do.

    Returns
    -------
    unittest.TestResult
        The result of running the suite.
    """
    if isinstance(s, str):
        # Only dotted names need loading; suites/cases run as-is.
        s = unittest.defaultTestLoader.loadTestsFromName(s)
    r = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
    return r.run(s)
def Test(s):
    # Backwards-compatible alias kept for callers of the old name;
    # discards TestText()'s return value, as before.
    TestText(s)
def testAll():
    """Run the complete FreeCAD test suite and return the TestResult."""
    runner = unittest.TextTestRunner(stream=sys.stdout, verbosity=2)
    return runner.run(All())
def testUnit():
    """Run the "UnitTests" module with the verbose text runner.

    Passes the dotted module *name* straight to TestText(), which
    performs the loadTestsFromName() call itself. The previous code
    pre-loaded the suite and handed that suite to TestText(), whose
    second loadTestsFromName() call then failed on a non-string
    argument (``'TestSuite' object has no attribute 'split'``).
    """
    TestText("UnitTests")
def testDocument():
    """Run the "Document" test module with the verbose text runner.

    Hands the module *name* to TestText(), which loads the suite
    itself. The previous code built a TestSuite here and passed it to
    TestText(), which then called loadTestsFromName() on the suite and
    crashed before any test ran.
    """
    TestText("Document")
|
bblacey/FreeCAD-MacOS-CI
|
src/Mod/Test/TestApp.py
|
Python
|
lgpl-2.1
| 4,356
|
import numpy as np
""" This is an adhoc integrator made for DDE"""
class myintegrator:
    """Ad-hoc fixed-step (explicit Euler) integrator for a delay
    differential equation (DDE) with one discrete delay ``D``.

    The delayed state is kept in a rolling ``memory`` buffer of
    ``npts`` past samples that is shifted by one entry per step.
    NOTE(review): this file is Python 2 (``print`` statement below).
    """
    def __init__(self,D,dt,N,tmax):
        # Number of history samples covering one delay interval.
        # NOTE(review): np.floor() returns a float; it is later used as a
        # sample count in np.linspace -- confirm on modern numpy.
        self.npts=np.floor(D/dt)+1 # points delay
        self.dt=dt
        self.N=N
        self.tmax=tmax
        self.D=D
        # When True, the full W history is not stored (see DDE_Inh).
        self.fast=True
    def DDE_Inh(self,f,x0,uI0,W0,WEI0,stim):
        '''In this method we use the 'brute force' aditive normalization
        and include adaptation'''
        # This method subtracts the delay: ``memory`` holds the past
        # samples used to evaluate the delayed term.
        memory=list(x0)# i.e. use the info from the past
        myu=list(x0)
        myuI=list(uI0)
        myW=list(W0)
        myWEI=list(WEI0)
        # Number of Euler steps from t = D up to t = tmax.
        n=int(np.floor((self.tmax-self.D)/self.dt)+1)
        t=self.D
        time=list(np.linspace(0,self.D,self.npts))
        #initial conditions
        un=memory[-1]
        uIn=uI0[-1]
        Wn=W0[-1]
        WEIn=WEI0[-1]
        memory=np.array(memory)
        for i in range(0,n):
            # Explicit Euler update. ``f`` is presumably expected to
            # return the four derivatives (du, duI, dW, dWEI) given the
            # delayed history -- TODO confirm against callers. Note the
            # updates below reuse the freshly-updated uIn/Wn/WEIn within
            # the same step (Gauss-Seidel-like ordering).
            un=un+self.dt*f(t,memory,uIn,Wn,WEIn,stim)[0]
            uIn=uIn+self.dt*f(t,memory,uIn,Wn,WEIn,stim)[1]
            Wn=Wn+self.dt*f(t,memory,uIn,Wn,WEIn,stim)[2]
            WEIn=WEIn+self.dt*f(t,memory,uIn,Wn,WEIn,stim)[3]
            myu.append(un)
            myuI.append(uIn)
            myWEI.append(WEIn)
            t=t+self.dt
            if self.fast==False:
                # The full W trajectory is recorded only in "slow" mode.
                myW.append(Wn)
            time.append(t)
            # Slide the delay window: drop the oldest sample and append
            # the newest state.
            memory=np.delete(memory,(0),axis=0)
            memory=np.vstack((memory,un))
            print "Porcentage of the simulation done:",round(100.*float(i)/float(n),2)
        myW[-1]=Wn # the last entry is the last connectivity matrix
        return np.array(myu),np.array(myuI),np.array(myW),np.array(myWEI),np.array(time)
|
ulisespereira/PereiraBrunel2016
|
figure7/myintegrator.py
|
Python
|
gpl-2.0
| 1,422
|
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import distribute_setup
distribute_setup.use_setuptools()
import os
import sys
from distutils.util import convert_path
from fnmatch import fnmatchcase
from setuptools import setup, find_packages
# Absolute, normalized directory containing this setup.py.
HERE = os.path.abspath(os.path.normpath(os.path.dirname(__file__)))
from synnefo.versions.webproject import __version__
# Package info
VERSION = __version__
SHORT_DESCRIPTION = "Synnefo common infrastructure for Django"
PACKAGES_ROOT = "."
PACKAGES = find_packages(PACKAGES_ROOT)
# Package meta
CLASSIFIERS = []
# Package requirements
INSTALL_REQUIRES = [
    'Django>=1.4, <1.5',
    'snf-common',
]
EXTRAS_REQUIRES = {
}
TESTS_REQUIRES = [
]
# Provided as an attribute, so you can append to these instead
# of replicating them:
# Default exclusion patterns used by find_package_data() below.
standard_exclude = ["*.py", "*.pyc", "*$py.class", "*~", ".*", "*.bak"]
standard_exclude_directories = [
    ".*", "CVS", "_darcs", "./build", "./dist", "EGG-INFO", "*.egg-info",
    "snf-0.7"
]
# (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org)
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
# Note: you may want to copy this into your setup.py file verbatim, as
# you can't import this from another package, when you don't know if
# that package is installed yet.
def find_package_data(
    where=".",
    package="",
    exclude=standard_exclude,
    exclude_directories=standard_exclude_directories,
    only_in_packages=True,
    show_ignored=False):
    """
    Return a dictionary suitable for use in ``package_data``
    in a distutils ``setup.py`` file.
    The dictionary looks like::
        {"package": [files]}
    Where ``files`` is a list of all the files in that package that
    don"t match anything in ``exclude``.
    If ``only_in_packages`` is true, then top-level directories that
    are not packages won"t be included (but directories under packages
    will).
    Directories matching any pattern in ``exclude_directories`` will
    be ignored; by default directories with leading ``.``, ``CVS``,
    and ``_darcs`` will be ignored.
    If ``show_ignored`` is true, then all the files that aren"t
    included in package data are shown on stderr (for debugging
    purposes).
    Note patterns use wildcards, or can be exact paths (including
    leading ``./``), and all searching is case-insensitive.
    """
    # NOTE(review): this is vendored Paste code and uses Python 2
    # ``print >>`` syntax; it is not Python 3 compatible as-is.
    out = {}
    # Breadth-first walk. Each stack entry is:
    # (directory, path-prefix within the package, dotted package name,
    #  whether we are still outside any package).
    stack = [(convert_path(where), "", package, only_in_packages)]
    while stack:
        where, prefix, package, only_in_packages = stack.pop(0)
        for name in os.listdir(where):
            fn = os.path.join(where, name)
            if os.path.isdir(fn):
                bad_name = False
                for pattern in exclude_directories:
                    if (fnmatchcase(name, pattern)
                        or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            print >> sys.stderr, (
                                "Directory %s ignored by pattern %s"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                # A directory with __init__.py (and no file prefix yet)
                # starts a new dotted package; otherwise descend with an
                # extended path prefix.
                if (os.path.isfile(os.path.join(fn, "__init__.py"))
                    and not prefix):
                    if not package:
                        new_package = name
                    else:
                        new_package = package + "." + name
                    stack.append((fn, "", new_package, False))
                else:
                    stack.append((fn, prefix + name + "/", package, only_in_packages))
            elif package or not only_in_packages:
                # is a file
                bad_name = False
                for pattern in exclude:
                    if (fnmatchcase(name, pattern)
                        or fn.lower() == pattern.lower()):
                        bad_name = True
                        if show_ignored:
                            print >> sys.stderr, (
                                "File %s ignored by pattern %s"
                                % (fn, pattern))
                        break
                if bad_name:
                    continue
                out.setdefault(package, []).append(prefix+name)
    return out
# Register the snf-webproject package with setuptools; non-Python data
# files are discovered at build time via find_package_data() above.
setup(
    name='snf-webproject',
    version=VERSION,
    license='GNU GPLv3',
    url='http://www.synnefo.org/',
    description=SHORT_DESCRIPTION,
    classifiers=CLASSIFIERS,
    author='Synnefo development team',
    author_email='synnefo-devel@googlegroups.com',
    maintainer='Synnefo development team',
    maintainer_email='synnefo-devel@googlegroups.com',
    namespace_packages=['synnefo', 'synnefo.versions'],
    packages=PACKAGES,
    package_dir={'': PACKAGES_ROOT},
    include_package_data=True,
    package_data=find_package_data('.'),
    zip_safe=False,
    install_requires=INSTALL_REQUIRES,
    extras_require=EXTRAS_REQUIRES,
    tests_require=TESTS_REQUIRES,
    dependency_links=['http://www.synnefo.org/packages/pypi'],
    entry_points={
        # Console entry point for the snf-manage management command and
        # the synnefo plugin group providing default Django settings.
        'console_scripts': [
            'snf-manage = synnefo.webproject.manage:main',
        ],
        'synnefo': [
            'default_settings = synnefo.webproject.settings'
        ]
    },
)
|
apyrgio/synnefo
|
snf-webproject/setup.py
|
Python
|
gpl-3.0
| 5,921
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# thumbor imaging service
# https://github.com/globocom/thumbor/wiki
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2011 globo.com timehome@corp.globo.com
'''This is the main module in thumbor'''
# Public package version string (PEP 396 style).
__version__ = "5.0.6"
|
lfalcao/thumbor
|
thumbor/__init__.py
|
Python
|
mit
| 316
|
import time
from bitalino import BITalino
# The macAddress variable on Windows can be "XX:XX:XX:XX:XX:XX" or "COMX"
# while on Mac OS can be "/dev/tty.BITalino-XX-XX-DevB" for devices ending with the last 4 digits of the MAC address or "/dev/tty.BITalino-DevB" for the remaining
macAddress = "00:00:00:00:00:00"
# This example will collect data for 5 sec.
running_time = 5
# Battery threshold (device units) passed to BITalino.battery() below.
batteryThreshold = 30
# Analog channels (zero-indexed) to acquire.
acqChannels = [0, 1, 2, 3, 4, 5]
# Acquisition rate in samples per second.
samplingRate = 1000
# Number of samples fetched per read() call in the loop below.
nSamples = 10
# Digital output states written by trigger(): on / off pairs.
digitalOutput_on = [1, 1]
digitalOutput_off = [0, 0]
# Connect to BITalino
device = BITalino(macAddress)
# Set battery threshold
device.battery(batteryThreshold)
# Read BITalino version
print(device.version())
# Start Acquisition
device.start(samplingRate, acqChannels)
start = time.time()
end = time.time()
# Poll the device until running_time seconds have elapsed.
while (end - start) < running_time:
    # Read samples
    print(device.read(nSamples))
    end = time.time()
# Turn BITalino led and buzzer on
device.trigger(digitalOutput_on)
# Script sleeps for n seconds
time.sleep(running_time)
# Turn BITalino led and buzzer off
device.trigger(digitalOutput_off)
# Stop acquisition
device.stop()
# Close connection
device.close()
|
BITalinoWorld/revolution-python-api
|
samples/sample.py
|
Python
|
gpl-3.0
| 1,152
|
class Manager(object):
    """Holds the current user and enabled/required flags used when
    recording object versions.

    ``get_user()`` enforces that a valid, non-anonymous user has been
    set whenever ``_user_required`` is true.
    """
    _user = None
    _enabled = True
    _user_required = True
    def set_user_required(self, user_required):
        self._user_required = user_required
    def get_user_required(self):
        return self._user_required
    def set_user(self, user):
        self._user = user
    def get_user(self):
        # When a user is not required, hand back whatever is stored.
        if not self._user_required:
            return self._user
        # A required user must exist and must not be anonymous.
        if self._user and not self._user.is_anonymous():
            return self._user
        raise Exception('To store version valid user is required on '
                        'version_context_manager')
    def set_enabled(self, enabled):
        self._enabled = enabled
    def get_enabled(self):
        return self._enabled
# Module-level singleton holding the versioning user/enabled state.
version_context_manager = Manager()
class VersionContextAwareApiViewMixin(object):
    """API-view mixin that publishes the request user to the module's
    version_context_manager during request initialization."""
    def initialize_request(self, request, *args, **kargs):
        parent = super(VersionContextAwareApiViewMixin, self)
        request = parent.initialize_request(request, *args, **kargs)
        # Make the authenticated user available to version recording.
        version_context_manager.set_user(request.user)
        return request
|
dz0ny/vaultier
|
vaultier/libs/version/context.py
|
Python
|
bsd-3-clause
| 1,091
|
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Options/PathOption.py 4369 2009/09/19 15:58:29 scons"
__doc__ = """Place-holder for the old SCons.Options module hierarchy
This is for backwards compatibility. The new equivalent is the Variables/
class hierarchy. These will have deprecation warnings added (some day),
and will then be removed entirely (some day).
"""
import SCons.Variables
import SCons.Warnings
# One-shot flag: ensures the deprecation warning below fires only once.
warned = False
class _PathOptionClass:
    """Deprecated facade over SCons.Variables.PathVariable.

    Each entry point emits a one-time DeprecatedOptionsWarning and then
    forwards its arguments to the PathVariable equivalent.
    """
    def warn(self):
        # Emit the deprecation warning at most once per process.
        global warned
        if warned:
            return
        SCons.Warnings.warn(
            SCons.Warnings.DeprecatedOptionsWarning,
            "The PathOption() function is deprecated; use the PathVariable() function instead.")
        warned = True
    def __call__(self, *args, **kw):
        self.warn()
        return SCons.Variables.PathVariable(*args, **kw)
    def PathAccept(self, *args, **kw):
        self.warn()
        return SCons.Variables.PathVariable.PathAccept(*args, **kw)
    def PathIsDir(self, *args, **kw):
        self.warn()
        return SCons.Variables.PathVariable.PathIsDir(*args, **kw)
    def PathIsDirCreate(self, *args, **kw):
        self.warn()
        return SCons.Variables.PathVariable.PathIsDirCreate(*args, **kw)
    def PathIsFile(self, *args, **kw):
        self.warn()
        return SCons.Variables.PathVariable.PathIsFile(*args, **kw)
    def PathExists(self, *args, **kw):
        self.warn()
        return SCons.Variables.PathVariable.PathExists(*args, **kw)
# Callable singleton: callers keep using PathOption(...) like a function.
PathOption = _PathOptionClass()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
looooo/pivy
|
scons/scons-local-1.2.0.d20090919/SCons/Options/PathOption.py
|
Python
|
isc
| 2,773
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPytimeparse(PythonPackage):
    """A small Python library to parse various kinds of time expressions."""
    # Upstream project page and PyPI source-distribution location.
    homepage = "https://github.com/wroberts/pytimeparse"
    pypi = "pytimeparse/pytimeparse-1.1.8.tar.gz"
    version('1.1.8', sha256='e86136477be924d7e670646a98561957e8ca7308d44841e21f5ddea757556a0a')
    # Pure-Python package: setuptools is needed only at build time.
    depends_on('py-setuptools', type='build')
|
LLNL/spack
|
var/spack/repos/builtin/packages/py-pytimeparse/package.py
|
Python
|
lgpl-2.1
| 585
|
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2013, 2014 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from .data_layer import DataLayer
from eve.utils import ParsedRequest, config, document_etag
from eve import ID_FIELD
class BaseProxy(DataLayer):
    """Data layer implementation used to connect the models to the data layer.

    Translates the model-facing data layer API into calls on an
    underlying Eve data layer instance.
    """
    def __init__(self, data_layer):
        self.data_layer = data_layer
    @staticmethod
    def _parsed_request(projection):
        # Build an argument-less ParsedRequest carrying only *projection*.
        req = ParsedRequest()
        req.args = {}
        req.projection = projection
        return req
    def etag(self, doc):
        # Prefer the stored etag; otherwise compute one from the document.
        return doc.get(config.ETAG, document_etag(doc))
    def find_one(self, resource, filter, projection):
        req = self._parsed_request(projection)
        return self.data_layer.find_one(resource, req, **filter)
    def find(self, resource, lookup, projection, **options):
        req = self._parsed_request(projection)
        return self.data_layer.get(resource, req, lookup)
    def create(self, resource, docs):
        return self.data_layer.create(resource, docs)
    def update(self, resource, filter, doc):
        return self._update(resource, filter, doc)
    def replace(self, resource, filter, doc):
        return self._update(resource, filter, doc, method='replace')
    def delete(self, resource, filter):
        return self.data_layer.delete(resource, filter)
    def _update(self, resource, filter, doc, method='update'):
        # Pull the id out so it is not sent as part of the update payload.
        _id = doc.pop(ID_FIELD, None)
        original = self.find_one(resource, filter, None)
        filter[ID_FIELD] = original[ID_FIELD]  # make sure it's correct type
        updates = doc.copy()
        res = getattr(self.data_layer, method)(resource, filter[ID_FIELD], updates, original)
        # Restore the id on the caller's document (popped above).
        doc.setdefault(ID_FIELD, _id)
        return res
|
nistormihai/superdesk-core
|
apps/common/models/io/base_proxy.py
|
Python
|
agpl-3.0
| 2,002
|
#!/usr/bin/env python
"""
Test of the HDF storage for the Tables API.
Copyright 2009 Glencoe Software, Inc. All rights reserved.
Use is subject to license terms supplied in LICENSE.txt
"""
import unittest, os, tempfile, exceptions, time
import omero.columns
import omero.tables
import portalocker
import logging
import tables
import Ice
from tablestest.library import TestCase
from path import path
logging.basicConfig(level=logging.CRITICAL)
class MockAdapter(object):
    """Minimal stand-in for an Ice object adapter: it only needs to
    expose the communicator it was built with."""
    def __init__(self, ic):
        self.ic = ic
    def getCommunicator(self):
        return self.ic
class TestHdfStorage(TestCase):
    """Exercises omero.tables.HdfStorage against temporary HDF5 files:
    creation, file locking, row appends/updates, reads, string and mask
    columns. NOTE(review): Python 2 syntax (``except X, e:`` below).
    """
    def setUp(self):
        self.ic = Ice.initialize()
        self.current = Ice.Current()
        self.current.adapter = MockAdapter(self.ic)
        # Register the column object factories so column types can be
        # (de)serialized via the communicator.
        for of in omero.columns.ObjectFactories.values():
            of.register(self.ic)
    def cols(self):
        # Three long columns named a, b, c used by most tests below.
        a = omero.columns.LongColumnI('a','first',None)
        b = omero.columns.LongColumnI('b','first',None)
        c = omero.columns.LongColumnI('c','first',None)
        return [a,b,c]
    def init(self, hdf, meta=False):
        # Initialize *hdf* with the standard columns, optionally adding
        # a small metadata dictionary.
        if meta:
            m = {"analysisA":1,"analysisB":"param","analysisC":4.1}
        else:
            m = None
        hdf.initialize(self.cols(), m)
    def append(self, hdf, map):
        # Append a single row built from *map*; columns whose name is
        # missing from the map get an empty value list.
        cols = self.cols()
        for col in cols:
            try:
                col.values = [map[col.name]]
            except KeyError:
                col.values = []
        hdf.append(cols)
    def hdfpath(self):
        # Fresh .h5 path inside a per-test temporary directory.
        tmpdir = self.tmpdir()
        return path(tmpdir) / "test.h5"
    def testInvalidFile(self):
        # None, empty string and a missing parent directory must all be
        # rejected with ApiUsageException.
        self.assertRaises(omero.ApiUsageException, omero.tables.HdfStorage, None)
        self.assertRaises(omero.ApiUsageException, omero.tables.HdfStorage, '')
        bad = path(self.tmpdir()) / "doesntexist" / "test.h5"
        self.assertRaises(omero.ApiUsageException, omero.tables.HdfStorage, bad)
    def testValidFile(self):
        omero.tables.HdfStorage(self.hdfpath())
    def testLocking(self):
        # A second storage on the same path must fail while the first
        # still holds the lock, and succeed again after cleanup().
        tmp = str(self.hdfpath())
        hdf1 = omero.tables.HdfStorage(tmp)
        try:
            hdf2 = omero.tables.HdfStorage(tmp)
            self.fail("should be locked")
        except omero.LockTimeout, lt:
            pass
        hdf1.cleanup()
        hdf3 = omero.tables.HdfStorage(tmp)
    def testSimpleCreation(self):
        hdf = omero.tables.HdfStorage(self.hdfpath())
        self.init(hdf, False)
        hdf.cleanup()
    def testCreationWithMetadata(self):
        hdf = omero.tables.HdfStorage(self.hdfpath())
        self.init(hdf, True)
        hdf.cleanup()
    def testAddSingleRow(self):
        hdf = omero.tables.HdfStorage(self.hdfpath())
        self.init(hdf, True)
        self.append(hdf, {"a":1,"b":2,"c":3})
        hdf.cleanup()
    def testModifyRow(self):
        # Read two rows back, modify values in place, write them with
        # update() and re-read.
        hdf = omero.tables.HdfStorage(self.hdfpath())
        self.init(hdf, True)
        self.append(hdf, {"a":1,"b":2,"c":3})
        self.append(hdf, {"a":5,"b":6,"c":7})
        data = hdf.readCoordinates(hdf._stamp, [0,1], self.current)
        data.columns[0].values[0] = 100
        data.columns[0].values[1] = 200
        data.columns[1].values[0] = 300
        data.columns[1].values[1] = 400
        hdf.update(hdf._stamp, data)
        data2 = hdf.readCoordinates(hdf._stamp, [0,1], self.current)
        hdf.cleanup()
    def testReadTicket1951(self):
        hdf = omero.tables.HdfStorage(self.hdfpath())
        self.init(hdf, True)
        self.append(hdf, {"a":1,"b":2,"c":3})
        data = hdf.readCoordinates(hdf._stamp, [0], self.current)
        data2 = hdf.read(hdf._stamp, [0,1,2], 0, 1, self.current)
        hdf.cleanup()
    def testSorting(self): # Probably shouldn't work
        hdf = omero.tables.HdfStorage(self.hdfpath())
        self.init(hdf, True)
        self.append(hdf, {"a":0,"b":2,"c":3})
        self.append(hdf, {"a":4,"b":4,"c":4})
        self.append(hdf, {"a":0,"b":1,"c":0})
        self.append(hdf, {"a":0,"b":0,"c":0})
        self.append(hdf, {"a":0,"b":4,"c":0})
        self.append(hdf, {"a":0,"b":0,"c":0})
        rows = hdf.getWhereList(time.time(), '(a==0)', None, 'b', None, None, None)
        # Doesn't work yet.
        hdf.cleanup()
    def testInitializationOnInitializedFileFails(self):
        # Re-initializing an already-initialized file must raise.
        p = self.hdfpath()
        hdf = omero.tables.HdfStorage(p)
        self.init(hdf, True)
        hdf.cleanup()
        hdf = omero.tables.HdfStorage(p)
        try:
            self.init(hdf, True)
            self.fail()
        except omero.ApiUsageException:
            pass
        hdf.cleanup()
    """
    Hard fails disabled. See #2067
    def testAddColumn(self):
        self.fail("NYI")
    def testMergeFiles(self):
        self.fail("NYI")
    def testVersion(self):
        self.fail("NYI")
    """
    def testHandlesExistingDirectory(self):
        t = path(self.tmpdir())
        h = t / "test.h5"
        self.assertTrue(t.exists())
        hdf = omero.tables.HdfStorage(h)
        hdf.cleanup()
    def testStringCol(self):
        hdf = omero.tables.HdfStorage(self.hdfpath())
        cols = [omero.columns.StringColumnI("name","description",16,None)]
        hdf.initialize(cols)
        cols[0].settable(hdf._HdfStorage__mea) # Needed for size
        cols[0].values = ["foo"]
        hdf.append(cols)
        rows = hdf.getWhereList(time.time(), '(name=="foo")', None, 'b', None, None, None)
        self.assertEquals(1, len(rows))
        self.assertEquals(16, hdf.readCoordinates(time.time(), [0], self.current).columns[0].size)
        # Doesn't work yet.
        hdf.cleanup()
    #
    # ROIs
    #
    def testMaskColumn(self):
        # Round-trips a two-row mask column and checks every attribute.
        hdf = omero.tables.HdfStorage(self.hdfpath())
        mask = omero.columns.MaskColumnI('mask', 'desc', None)
        hdf.initialize([mask], None)
        mask.imageId = [1, 2]
        mask.theZ = [2, 2]
        mask.theT = [3, 3]
        mask.x = [4, 4]
        mask.y = [5, 5]
        mask.w = [6, 6]
        mask.h = [7, 7]
        mask.bytes = [[0],[0,1,2,3,4]]
        hdf.append([mask])
        data = hdf.readCoordinates(hdf._stamp, [0,1], self.current)
        test = data.columns[0]
        self.assertEquals(1, test.imageId[0])
        self.assertEquals(2, test.theZ[0])
        self.assertEquals(3, test.theT[0])
        self.assertEquals(4, test.x[0])
        self.assertEquals(5, test.y[0])
        self.assertEquals(6, test.w[0])
        self.assertEquals(7, test.h[0])
        self.assertEquals([0], test.bytes[0])
        self.assertEquals(2, test.imageId[1])
        self.assertEquals(2, test.theZ[1])
        self.assertEquals(3, test.theT[1])
        self.assertEquals(4, test.x[1])
        self.assertEquals(5, test.y[1])
        self.assertEquals(6, test.w[1])
        self.assertEquals(7, test.h[1])
        self.assertEquals([0,1,2,3,4], test.bytes[1])
        hdf.cleanup()
def test_suite():
    # Legacy hook expected by the old build harness; the value is unused.
    return 1
if __name__ == '__main__':
unittest.main()
|
joshmoore/openmicroscopy
|
components/tools/OmeroPy/test/tablestest/hdfstorage.py
|
Python
|
gpl-2.0
| 6,978
|
__author__ = 'cag2as'
from helper import greeting
# Demo entry point: emit a greeting via the local helper module.
greeting("hello")
|
cag2as/cs3240-labdemo
|
hello.py
|
Python
|
mit
| 90
|
# -*- coding: utf-8 -
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
#
# Copyright 2011 Cloudant, Inc.
from bucky.metrics.metric import Metric, MetricValue as MV
class Gauge(Metric):
    """A metric that reports only the most recently observed value.

    ``update()`` overwrites the stored value; ``clear()`` is a no-op
    because a gauge keeps no history to reset.
    """
    def __init__(self, name):
        self.name = name
        self.value = 0.0
    def update(self, value):
        # Latest observation wins; no aggregation is performed.
        self.value = value
    def clear(self):
        # Intentionally empty: a gauge has nothing to clear.
        pass
    def metrics(self):
        # Emit a single MetricValue snapshot of the current reading.
        return [MV(self.name, self.value)]
|
JoseKilo/bucky
|
bucky/metrics/gauge.py
|
Python
|
apache-2.0
| 922
|
# Version string following PEP 386
# http://www.python.org/dev/peps/pep-0386/
__version__ = "0.6"
|
seanbrant/django-queued-storage
|
queued_storage/__init__.py
|
Python
|
bsd-3-clause
| 98
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.db.models.deletion
import modelcluster.fields
import wagtail.wagtailcore.blocks
import wagtail.wagtailcore.fields
import wagtail.wagtailembeds.blocks
import wagtail.wagtailimages.blocks
from django.db import migrations, models
import articles.fields
class Migration(migrations.Migration):
    """Initial schema for the articles app: article/series pages and
    list pages (Wagtail Page subclasses), Topic, and their link tables.

    Auto-generated by Django; do not hand-edit the operations.
    """
    dependencies = [
        ('wagtailimages', '0006_add_verbose_names'),
        ('wagtailcore', '0001_squashed_0016_change_page_url_path_to_text_field'),
    ]
    operations = [
        migrations.CreateModel(
            name='ArticleAuthorLink',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('sort_order', models.IntegerField(null=True, editable=False, blank=True)),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='ArticleListPage',
            fields=[
                ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='ArticlePage',
            fields=[
                ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('subtitle', wagtail.wagtailcore.fields.RichTextField(default='', blank=True)),
                ('body', articles.fields.BodyField([('Heading', wagtail.wagtailcore.blocks.CharBlock(classname='heading', icon='title')), ('Paragraph', wagtail.wagtailcore.blocks.RichTextBlock(icon='doc-full')), ('Image', wagtail.wagtailimages.blocks.ImageChooserBlock(icon='image')), ('Embed', wagtail.wagtailembeds.blocks.EmbedBlock(icon='site')), ('List', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.RichTextBlock(label='item'), icon='list-ul')), ('Sharable', articles.fields.SharableBlock())])),
                ('excerpt', wagtail.wagtailcore.fields.RichTextField(default='', blank=True)),
                ('main_image', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='ArticleTopicLink',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('article', modelcluster.fields.ParentalKey(related_name='topic_links', to='articles.ArticlePage')),
            ],
        ),
        migrations.CreateModel(
            name='SeriesArticleLink',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('sort_order', models.IntegerField(null=True, editable=False, blank=True)),
                ('override_text', wagtail.wagtailcore.fields.RichTextField(default='', help_text='This field is optional. If not provided, the text will be pulled from the article page automatically. This field allows you to override the automatic text.', blank=True)),
                ('article', models.ForeignKey(related_name='series_links', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='articles.ArticlePage', null=True)),
            ],
            options={
                'ordering': ['sort_order'],
                'abstract': False,
            },
        ),
        migrations.CreateModel(
            name='SeriesListPage',
            fields=[
                ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='SeriesPage',
            fields=[
                ('page_ptr', models.OneToOneField(parent_link=True, auto_created=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
                ('body', articles.fields.BodyField([('Heading', wagtail.wagtailcore.blocks.CharBlock(classname='heading', icon='title')), ('Paragraph', wagtail.wagtailcore.blocks.RichTextBlock(icon='doc-full')), ('Image', wagtail.wagtailimages.blocks.ImageChooserBlock(icon='image')), ('Embed', wagtail.wagtailembeds.blocks.EmbedBlock(icon='site')), ('List', wagtail.wagtailcore.blocks.ListBlock(wagtail.wagtailcore.blocks.RichTextBlock(label='item'), icon='list-ul')), ('Sharable', articles.fields.SharableBlock())], default='', blank=True)),
                ('image', models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', null=True)),
            ],
            options={
                'abstract': False,
            },
            bases=('wagtailcore.page',),
        ),
        migrations.CreateModel(
            name='Topic',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=1024)),
            ],
        ),
        migrations.AddField(
            model_name='seriespage',
            name='primary_topic',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='articles.Topic', null=True),
        ),
        migrations.AddField(
            model_name='seriesarticlelink',
            name='series',
            field=modelcluster.fields.ParentalKey(related_name='related_article_links', to='articles.SeriesPage'),
        ),
        migrations.AddField(
            model_name='seriesarticlelink',
            name='override_image',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='wagtailimages.Image', help_text='This field is optional. If not provided, the image will be pulled from the article page automatically. This field allows you to override the automatic image.', null=True),
        ),
        migrations.AddField(
            model_name='articletopiclink',
            name='topic',
            field=models.ForeignKey(related_name='article_links', to='articles.Topic'),
        ),
        migrations.AddField(
            model_name='articlepage',
            name='primary_topic',
            field=models.ForeignKey(related_name='+', on_delete=django.db.models.deletion.SET_NULL, blank=True, to='articles.Topic', null=True),
        ),
        migrations.AddField(
            model_name='articleauthorlink',
            name='article',
            field=modelcluster.fields.ParentalKey(related_name='author_links', to='articles.ArticlePage'),
        ),
    ]
|
albertoconnor/website
|
articles/migrations/0001_initial.py
|
Python
|
mit
| 7,175
|
import numpy as np
from falass import readwrite, dataformat
import os
class Job():
    """The catch all.
    This class is used for setting up the falass job -- and is generally a catch all for inputs that do not fit into
    other parts.
    Parameters
    ----------
    files: falass.readwrite.Files
        A Files class item.
    layer_thickness: float
        The thickness of the layers that the simulation cell should be sliced into.
    cut_off_size: float
        The size of the simulation cell that should be ignored from the bottom -- this is to allow for the use
        of a vacuum gap at the bottom of the cell.
    """
    def __init__(self, files, layer_thickness, cut_off_size):
        self.files = files
        self.layer_thickness = layer_thickness
        self.cut_off_size = cut_off_size
        self.times = np.asarray(self.files.times)
        # Becomes True when scattering lengths were collected interactively and
        # therefore need to be written out to a (new) lgt file.
        self.new_file = False
    def set_run(self, files=None, layer_thickness=None, cut_off_size=None):
        """Edit job inputs.
        This allows parts of the class to be assigned after the initial assignment or changed.
        Parameters
        ----------
        files: falass.readwrite.Files
            A Files class item.
        layer_thickness: float
            The thickness of the layers that the simulation cell should be sliced into.
        cut_off_size: float
            The size of the simulation cell that should be ignored from the bottom -- this is to allow for the use
            of a vacuum gap at the bottom of the cell.
        """
        if files:
            self.files = files
            # Keep the cached times array in sync with the new Files object.
            self.times = np.asarray(self.files.times)
        if layer_thickness:
            self.layer_thickness = layer_thickness
        if cut_off_size:
            self.cut_off_size = cut_off_size
    def set_lgts(self):
        """Assign scattering lengths.
        Assigns the scattering lengths from the lgtfile to the different atom types. If no lgtfile is defined falass
        will help the user to build one by working through the atom types in the pdb file and requesting input of the
        real and imaginary scattering lengths. This will also occur if an atom type is found in the pdbfile but not in
        the given lgtfile. falass will write the lgtfile to disk if atom types do not feature in the given lgtfile or
        one is written from scratch.
        """
        if self.files.lgtfile:
            lines = len(self.files.atoms)
            print("Setting atoms lengths")
            percentage = 0
            readwrite.print_update(percentage)
            path, extension = os.path.splitext(self.files.lgtfile)
            lgtfile_name = path + extension
            for i in range(0, len(self.files.atoms)):
                percentage_new = np.floor(i / lines * 100)
                percentage = readwrite.check_update(percentage, percentage_new)
                for j in range(0, len(self.files.atoms[i])):
                    duplicate = readwrite.check_duplicates(self.files.scat_lens, self.files.atoms[i][j].atom)
                    if not duplicate:
                        # An atom type missing from the lgt file forces a new
                        # file to be written once all lengths are known.
                        self.new_file = True
                        real_scat_len = input('The following atom type has no scattering length given '
                                              'in the lgt file {} \nPlease define a real scattering length for '
                                              'this atom type: '.format(self.files.atoms[i][j].atom))
                        imag_scat_len = input('\nPlease define a imaginary scattering length for '
                                              'this atom type: '.format(self.files.atoms[i][j].atom))
                        self.files.scat_lens.append(dataformat.ScatLens(self.files.atoms[i][j].atom, float(real_scat_len),
                                                                        float(imag_scat_len)))
            readwrite.print_update(100)
        else:
            self.new_file = True
            print('There was no lgt file defined, falass will help you define one and save it for future use.')
            for i in range(0, len(self.files.atoms)):
                for j in range(0, len(self.files.atoms[i])):
                    duplicate = readwrite.check_duplicates(self.files.scat_lens, self.files.atoms[i][j].atom)
                    if not duplicate:
                        real_scat_len = input('The following atom type has no scattering length given '
                                              'in the lgt file {} \nPlease define a real scattering length for '
                                              'this atom type: '.format(self.files.atoms[i][j].atom))
                        imag_scat_len = input('\nPlease define a imaginary scattering length for '
                                              'this atom type: '.format(self.files.atoms[i][j].atom))
                        self.files.scat_lens.append(dataformat.ScatLens(self.files.atoms[i][j].atom, float(real_scat_len),
                                                                        float(imag_scat_len)))
            lgtfile_name = input("What should the lgt file be named? ")
            path, extension = os.path.splitext(lgtfile_name)
            if extension != '.lgt':
                lgtfile_name = path + '.lgt'
        if self.new_file:
            # Never overwrite an existing file -- append a numeric suffix until
            # an unused name is found.
            i = 0
            while os.path.isfile(lgtfile_name):
                i += 1
                lgtfile_name = path + '_' + str(i) + '.lgt'
            # BUG FIX: the file handle was previously opened and never closed
            # (or flushed), so buffered output could be lost; the context
            # manager guarantees the data reaches disk.
            with open(lgtfile_name, 'w') as lgtsf:
                for i in range(0, len(self.files.scat_lens)):
                    lgtsf.write('{} {} {}\n'.format(self.files.scat_lens[i].atom, self.files.scat_lens[i].real * 1e5,
                                                    self.files.scat_lens[i].imag * 1e5))
            print('A new lgtfile has been written with the name {}'.format(lgtfile_name))
    def set_times(self, times=None):
        """Assign times to analyse.
        The assignment of the simulation timesteps that should be analysed. If none are given, the user is
        prompted interactively for the first and last timestep and the interval.
        Parameters
        ----------
        times: array_like float
            (first, last, interval) timesteps to analyse, in the unit of time present in the pdbfile.
        """
        if times:
            # np.arange's stop is exclusive, so add one interval to include 'last'.
            self.times = np.arange(float(times[0]), float(times[1]) + float(times[2]), float(times[2]))
        else:
            first_times = float(input(
                "Please define the first timestep to be analysed, the first in the pdb file was {} ps: ".format(
                    self.files.times[0])))
            while check_array(self.files.times, first_times) is not True:
                first_times = float(input("TIMESTEP NOT FOUND. Please define the first timestep to be analysed, "
                                          "the first in the pdb file was {} ps: ".format(self.files.times[0])))
            last_times = float(input(
                "Please define the last timestep to be analysed, the last in the pdb file was {} ps: ".format(
                    self.files.times[-1])))
            while check_array(self.files.times, last_times) is not True:
                last_times = float(input("TIMESTEP NOT FOUND. Please define the last timestep to be analysed, "
                                         "the last in the pdb file was {} ps: ".format(self.files.times[-1])))
            interval_times = float(input("Please define time interval for analysis, the smallest interval in the pdb "
                                         "file was {} ps: ".format(self.files.times[1] - self.files.times[0])))
            while interval_times > last_times:
                interval_times = float(input("NOT A VALID INTERVAL. Please define time interval for analysis, "
                                             "the smallest interval in the pdb file was {} ps: ".format(self.files.times[1] -
                                                                                                        self.files.times[0])))
            self.times = np.arange(first_times, last_times + interval_times, interval_times)
def check_array(array, check):
    """Report whether *check* already occurs in *array*.
    Parameters
    ----------
    array: array-type
        The collection to search through.
    check: str
        The item to look for.
    Returns
    -------
    bool
        True if the item is already present in the array, False otherwise.
    """
    return any(check == entry for entry in array)
|
arm61/falass
|
falass/job.py
|
Python
|
mit
| 8,517
|
import unittest
from stratuslab_usecases.cli.TestUtils import sshConnectionOrTimeout
import BasicVmLifecycleTestBase
class testVmIsAccessibleViaSsh(BasicVmLifecycleTestBase.BasicVmTestBase):
    """Use case: a running 'ttylinux' VM accepts an SSH connection."""
    # Name of the VM/image exercised by the base-class lifecycle machinery.
    vmName = 'ttylinux'
    def test_usecase(self):
        # Block until SSH answers on the VM's first IP address (or the helper
        # times out).  self.ip_addresses is populated by the base class --
        # presumably during VM startup; TODO confirm.
        sshConnectionOrTimeout(self.ip_addresses[0])
def suite():
    """Return a TestSuite holding every test of testVmIsAccessibleViaSsh."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(testVmIsAccessibleViaSsh)
|
StratusLab/use-cases
|
src/main/python/stratuslab_usecases/api/testVmIsAccessibleViaSsh.py
|
Python
|
apache-2.0
| 398
|
import os
import shutil
from bears.vcs.CommitBear import _CommitBear
from coala_utils.ContextManagers import change_directory
from coalib.misc.Shell import run_shell_command
class HgCommitBear(_CommitBear):
    """Commit-message bear for Mercurial repositories."""
    LANGUAGES = {'Hg'}
    CAN_DETECT = {'Formatting'}
    ASCIINEMA_URL = 'https://asciinema.org/a/3Kfn2EDjYLmsbPoL7lRuLyhlN'
    @classmethod
    def check_prerequisites(cls):
        """Return True if the ``hg`` executable is on PATH, else a message."""
        if shutil.which('hg') is None:
            return 'hg is not installed.'
        else:
            return True
    @staticmethod
    def get_remotes():
        """Return the configured Mercurial remotes (``hg paths`` output).
        BUG FIX: this was declared as an instance method without ``self``,
        so ``instance.get_remotes()`` raised TypeError; ``@staticmethod``
        keeps both instance- and class-level calls working.
        """
        remotes, _ = run_shell_command('hg paths')
        return remotes
    def get_head_commit(self):
        """Return the description of the most recent commit (hg log -l 1)."""
        with change_directory(self.get_config_dir() or os.getcwd()):
            return run_shell_command('hg log -l 1 --template "{desc}"')
|
coala/coala-bears
|
bears/vcs/mercurial/HgCommitBear.py
|
Python
|
agpl-3.0
| 779
|
# uWSGI build configuration for the "capture" plugin.
# Plugin name as registered with the uWSGI build system.
NAME='capture'
# Items handed to gcc by the uWSGI plugin build -- presumably the v4l
# source/library for Video4Linux; TODO confirm against uwsgiconfig.py.
GCC_LIST=['v4l']
|
unbit/uwsgi-capture
|
uwsgiplugin.py
|
Python
|
mit
| 32
|
#-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Designed For QingDao Xiangjie Company
# Powered By Rainsoft(QingDao) Author:Kevin Kong 2014 (kfx2007@163.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv,fields
class rainsoft_partner_update_credit(osv.Model):
    """Wizard model that bulk-updates the credit limit of selected partners."""
    _name="rainsoft.partner.update.credit"
    _columns={
        'credit':fields.float('Credit',size=10),
    }
    def btn_update(self,cr,uid,ids,context=None):
        """Write a new ``credit_limit`` onto every partner in ``active_ids``.
        The value written is taken from ``context['credit']`` (0 if absent).
        NOTE(review): the wizard's own 'credit' column is never read here --
        confirm the caller really passes the value through the context.
        """
        if context is None:  # BUG FIX: was ``context == None``; identity test is correct for None
            context = {}
        # The spurious parentheses around the .get() keys were removed; the
        # keys are plain strings, behavior is unchanged.
        partners = self.pool.get('res.partner').browse(cr, uid, context.get('active_ids', []), context=context)
        for partner in partners:
            partner.write({'credit_limit': context.get('credit', 0)})
|
kevin8909/xjerp
|
openerp/addons/Rainsoft_Xiangjie/wizard/rainsoft_partner_update_credit.py
|
Python
|
agpl-3.0
| 1,527
|
from django.core.urlresolvers import reverse # noqa
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
from novaclient.v1_1 import flavors
class FlavorsTests(test.BaseAdminViewTests):
    """Admin-panel view tests for flavor create/edit.
    All nova calls are stubbed with mox: the expected API calls are recorded
    on ``api.nova`` before ``self.mox.ReplayAll()`` and verified by the test
    base class at teardown, so the ORDER of the recorded calls is part of
    each test's contract.
    """
    @test.create_stubs({api.nova: ('flavor_list', 'flavor_create'), })
    def test_create_new_flavor_when_none_exist(self):
        """POSTing the create form calls flavor_create and redirects to index."""
        flavor = self.flavors.first()
        eph = getattr(flavor, 'OS-FLV-EXT-DATA:ephemeral')
        # no pre-existing flavors
        api.nova.flavor_create(IsA(http.HttpRequest),
                               flavor.name,
                               flavor.ram,
                               flavor.vcpus,
                               flavor.disk,
                               flavorid=flavor.id,
                               swap=flavor.swap,
                               ephemeral=eph).AndReturn(flavor)
        api.nova.flavor_list(IsA(http.HttpRequest))
        api.nova.flavor_list(IsA(http.HttpRequest))
        self.mox.ReplayAll()
        url = reverse('horizon:admin:flavors:create')
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed(resp, "admin/flavors/create.html")
        data = {'name': flavor.name,
                'flavor_id': flavor.id,
                'vcpus': flavor.vcpus,
                'memory_mb': flavor.ram,
                'disk_gb': flavor.disk,
                'swap_mb': flavor.swap,
                'eph_gb': eph}
        resp = self.client.post(url, data)
        self.assertRedirectsNoFollow(resp,
                                     reverse("horizon:admin:flavors:index"))
    # keeping the 2 edit tests separate to aid debug breaks
    @test.create_stubs({api.nova: ('flavor_list',
                                   'flavor_create',
                                   'flavor_delete',
                                   'flavor_get_extras',
                                   'flavor_get'), })
    def test_edit_flavor(self):
        """Editing (no extra specs) deletes the flavor and re-creates it."""
        flavor = self.flavors.first() # has no extra specs
        eph = getattr(flavor, 'OS-FLV-EXT-DATA:ephemeral')
        extra_specs = getattr(flavor, 'extra_specs')
        new_flavor = flavors.Flavor(flavors.FlavorManager(None),
                                    {'id':
                                     "cccccccc-cccc-cccc-cccc-cccccccccccc",
                                     'name': flavor.name,
                                     'vcpus': flavor.vcpus + 1,
                                     'disk': flavor.disk,
                                     'ram': flavor.ram,
                                     'swap': 0,
                                     'OS-FLV-EXT-DATA:ephemeral': eph,
                                     'extra_specs': extra_specs})
        # GET
        api.nova.flavor_get(IsA(http.HttpRequest), flavor.id).AndReturn(flavor)
        # POST
        api.nova.flavor_list(IsA(http.HttpRequest))
        api.nova.flavor_get(IsA(http.HttpRequest), flavor.id).AndReturn(flavor)
        api.nova.flavor_get_extras(IsA(http.HttpRequest), flavor.id, raw=True)\
            .AndReturn(extra_specs)
        api.nova.flavor_delete(IsA(http.HttpRequest), flavor.id)
        api.nova.flavor_create(IsA(http.HttpRequest),
                               new_flavor.name,
                               new_flavor.ram,
                               new_flavor.vcpus,
                               new_flavor.disk,
                               swap=flavor.swap,
                               ephemeral=eph).AndReturn(new_flavor)
        self.mox.ReplayAll()
        # get_test
        url = reverse('horizon:admin:flavors:edit', args=[flavor.id])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed(resp, "admin/flavors/edit.html")
        # post test
        data = {'flavor_id': flavor.id,
                'name': flavor.name,
                'vcpus': flavor.vcpus + 1,
                'memory_mb': flavor.ram,
                'disk_gb': flavor.disk,
                'swap_mb': flavor.swap,
                'eph_gb': eph}
        resp = self.client.post(url, data)
        self.assertNoFormErrors(resp)
        self.assertMessageCount(success=1)
        self.assertRedirectsNoFollow(resp,
                                     reverse("horizon:admin:flavors:index"))
    @test.create_stubs({api.nova: ('flavor_list',
                                   'flavor_create',
                                   'flavor_delete',
                                   'flavor_get_extras',
                                   'flavor_extra_set',
                                   'flavor_get'), })
    def test_edit_flavor_with_extra_specs(self):
        """Extra specs survive the delete/re-create cycle via flavor_extra_set."""
        flavor = self.flavors.list()[1] # the second element has extra specs
        eph = getattr(flavor, 'OS-FLV-EXT-DATA:ephemeral')
        extra_specs = getattr(flavor, 'extra_specs')
        new_vcpus = flavor.vcpus + 1
        new_flavor = flavors.Flavor(flavors.FlavorManager(None),
                                    {'id':
                                     "cccccccc-cccc-cccc-cccc-cccccccccccc",
                                     'name': flavor.name,
                                     'vcpus': new_vcpus,
                                     'disk': flavor.disk,
                                     'ram': flavor.ram,
                                     'swap': flavor.swap,
                                     'OS-FLV-EXT-DATA:ephemeral': eph,
                                     'extra_specs': extra_specs})
        # GET
        api.nova.flavor_get(IsA(http.HttpRequest), flavor.id).AndReturn(flavor)
        # POST
        api.nova.flavor_list(IsA(http.HttpRequest))
        api.nova.flavor_get(IsA(http.HttpRequest), flavor.id).AndReturn(flavor)
        api.nova.flavor_get_extras(IsA(http.HttpRequest), flavor.id, raw=True)\
            .AndReturn(extra_specs)
        api.nova.flavor_delete(IsA(http.HttpRequest), flavor.id)
        api.nova.flavor_create(IsA(http.HttpRequest),
                               flavor.name,
                               flavor.ram,
                               new_vcpus,
                               flavor.disk,
                               swap=flavor.swap,
                               ephemeral=eph).AndReturn(new_flavor)
        api.nova.flavor_extra_set(IsA(http.HttpRequest),
                                  new_flavor.id,
                                  extra_specs)
        self.mox.ReplayAll()
        # get test
        url = reverse('horizon:admin:flavors:edit', args=[flavor.id])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed(resp, "admin/flavors/edit.html")
        # post test
        data = {'flavor_id': flavor.id,
                'name': flavor.name,
                'vcpus': new_vcpus,
                'memory_mb': flavor.ram,
                'disk_gb': flavor.disk,
                'swap_mb': flavor.swap,
                'eph_gb': eph}
        resp = self.client.post(url, data)
        self.assertNoFormErrors(resp)
        self.assertMessageCount(success=1)
        self.assertRedirectsNoFollow(resp,
                                     reverse("horizon:admin:flavors:index"))
    @test.create_stubs({api.nova: ('flavor_list',
                                   'flavor_get'), })
    def test_edit_flavor_set_invalid_name(self):
        """A name with forbidden characters is rejected with a form error."""
        flavor_a = self.flavors.list()[0]
        eph = getattr(flavor_a, 'OS-FLV-EXT-DATA:ephemeral')
        invalid_flavor_name = "m1.tiny()"
        # GET
        api.nova.flavor_get(IsA(http.HttpRequest),
                            flavor_a.id).AndReturn(flavor_a)
        # POST
        api.nova.flavor_get(IsA(http.HttpRequest),
                            flavor_a.id).AndReturn(flavor_a)
        self.mox.ReplayAll()
        # get_test
        url = reverse('horizon:admin:flavors:edit', args=[flavor_a.id])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed(resp, "admin/flavors/edit.html")
        # post test
        data = {'flavor_id': flavor_a.id,
                'name': invalid_flavor_name,
                'vcpus': flavor_a.vcpus + 1,
                'memory_mb': flavor_a.ram,
                'disk_gb': flavor_a.disk,
                'swap_mb': flavor_a.swap,
                'eph_gb': eph}
        resp = self.client.post(url, data)
        self.assertFormErrors(resp, 1, 'Name may only contain letters, '
                              'numbers, underscores, periods and hyphens.')
    @test.create_stubs({api.nova: ('flavor_list',
                                   'flavor_get'), })
    def test_edit_flavor_set_existing_name(self):
        """Renaming a flavor to another flavor's name raises a form error."""
        flavor_a = self.flavors.list()[0]
        flavor_b = self.flavors.list()[1]
        eph = getattr(flavor_a, 'OS-FLV-EXT-DATA:ephemeral')
        # GET
        api.nova.flavor_get(IsA(http.HttpRequest),
                            flavor_a.id).AndReturn(flavor_a)
        # POST
        api.nova.flavor_list(IsA(http.HttpRequest)) \
            .AndReturn(self.flavors.list())
        api.nova.flavor_get(IsA(http.HttpRequest),
                            flavor_a.id).AndReturn(flavor_a)
        self.mox.ReplayAll()
        # get_test
        url = reverse('horizon:admin:flavors:edit', args=[flavor_a.id])
        resp = self.client.get(url)
        self.assertEqual(resp.status_code, 200)
        self.assertTemplateUsed(resp, "admin/flavors/edit.html")
        # post test
        data = {'flavor_id': flavor_a.id,
                'name': flavor_b.name,
                'vcpus': flavor_a.vcpus + 1,
                'memory_mb': flavor_a.ram,
                'disk_gb': flavor_a.disk,
                'swap_mb': flavor_a.swap,
                'eph_gb': eph}
        resp = self.client.post(url, data)
        self.assertFormErrors(resp, 1, 'The name "m1.massive" '
                              'is already used by another flavor.')
|
deepakselvaraj/federated-horizon
|
openstack_dashboard/dashboards/admin/flavors/tests.py
|
Python
|
apache-2.0
| 10,073
|
# -*- test-case-name: twisted.test.test_ftp -*-
# Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An FTP protocol implementation
@author: Itamar Shtull-Trauring
@author: Jp Calderone
@author: Andrew Bennetts
"""
# System Imports
import os
import time
import re
import operator
import stat
import errno
import fnmatch
import warnings
try:
import pwd, grp
except ImportError:
pwd = grp = None
from zope.interface import Interface, implements
# Twisted Imports
from twisted import copyright
from twisted.internet import reactor, interfaces, protocol, error, defer
from twisted.protocols import basic, policies
from twisted.python import log, failure, filepath
from twisted.python.compat import reduce
from twisted.cred import error as cred_error, portal, credentials, checkers
# constants
# response codes
# NOTE: several keys share one numeric wire code (e.g. "200.1"/"200.2"); the
# ".N" suffix only disambiguates the dictionary keys -- the code actually sent
# to the client is embedded in the RESPONSE text below.
RESTART_MARKER_REPLY = "100"
SERVICE_READY_IN_N_MINUTES = "120"
DATA_CNX_ALREADY_OPEN_START_XFR = "125"
FILE_STATUS_OK_OPEN_DATA_CNX = "150"
CMD_OK = "200.1"
TYPE_SET_OK = "200.2"
ENTERING_PORT_MODE = "200.3"
CMD_NOT_IMPLMNTD_SUPERFLUOUS = "202"
SYS_STATUS_OR_HELP_REPLY = "211"
DIR_STATUS = "212"
FILE_STATUS = "213"
HELP_MSG = "214"
NAME_SYS_TYPE = "215"
SVC_READY_FOR_NEW_USER = "220.1"
WELCOME_MSG = "220.2"
SVC_CLOSING_CTRL_CNX = "221"
GOODBYE_MSG = "221"
DATA_CNX_OPEN_NO_XFR_IN_PROGRESS = "225"
CLOSING_DATA_CNX = "226"
TXFR_COMPLETE_OK = "226"
ENTERING_PASV_MODE = "227"
ENTERING_EPSV_MODE = "229"
USR_LOGGED_IN_PROCEED = "230.1"  # v1 of code 230
GUEST_LOGGED_IN_PROCEED = "230.2"  # v2 of code 230
REQ_FILE_ACTN_COMPLETED_OK = "250"
PWD_REPLY = "257.1"
MKD_REPLY = "257.2"
USR_NAME_OK_NEED_PASS = "331.1"  # v1 of Code 331
GUEST_NAME_OK_NEED_EMAIL = "331.2"  # v2 of code 331
NEED_ACCT_FOR_LOGIN = "332"
REQ_FILE_ACTN_PENDING_FURTHER_INFO = "350"
SVC_NOT_AVAIL_CLOSING_CTRL_CNX = "421.1"
TOO_MANY_CONNECTIONS = "421.2"
CANT_OPEN_DATA_CNX = "425"
CNX_CLOSED_TXFR_ABORTED = "426"
REQ_ACTN_ABRTD_FILE_UNAVAIL = "450"
REQ_ACTN_ABRTD_LOCAL_ERR = "451"
REQ_ACTN_ABRTD_INSUFF_STORAGE = "452"
SYNTAX_ERR = "500"
SYNTAX_ERR_IN_ARGS = "501"
CMD_NOT_IMPLMNTD = "502"
BAD_CMD_SEQ = "503"
CMD_NOT_IMPLMNTD_FOR_PARAM = "504"
NOT_LOGGED_IN = "530.1"  # v1 of code 530 - please log in
AUTH_FAILURE = "530.2"  # v2 of code 530 - authorization failure
NEED_ACCT_FOR_STOR = "532"
FILE_NOT_FOUND = "550.1"  # no such file or directory
PERMISSION_DENIED = "550.2"  # permission denied
ANON_USER_DENIED = "550.3"  # anonymous users can't alter filesystem
IS_NOT_A_DIR = "550.4"  # rmd called on a path that is not a directory
REQ_ACTN_NOT_TAKEN = "550.5"
FILE_EXISTS = "550.6"
IS_A_DIR = "550.7"
PAGE_TYPE_UNK = "551"
EXCEEDED_STORAGE_ALLOC = "552"
FILENAME_NOT_ALLOWED = "553"
# Maps each response-code key above to the reply template sent to the client
# (most templates take %-interpolation arguments).
RESPONSE = {
    # -- 100's --
    RESTART_MARKER_REPLY: '110 MARK yyyy-mmmm', # TODO: this must be fixed
    SERVICE_READY_IN_N_MINUTES: '120 service ready in %s minutes',
    DATA_CNX_ALREADY_OPEN_START_XFR: '125 Data connection already open, starting transfer',
    FILE_STATUS_OK_OPEN_DATA_CNX: '150 File status okay; about to open data connection.',
    # -- 200's --
    CMD_OK: '200 Command OK',
    TYPE_SET_OK: '200 Type set to %s.',
    ENTERING_PORT_MODE: '200 PORT OK',
    CMD_NOT_IMPLMNTD_SUPERFLUOUS: '202 Command not implemented, superfluous at this site',
    SYS_STATUS_OR_HELP_REPLY: '211 System status reply',
    DIR_STATUS: '212 %s',
    FILE_STATUS: '213 %s',
    HELP_MSG: '214 help: %s',
    NAME_SYS_TYPE: '215 UNIX Type: L8',
    WELCOME_MSG: "220 %s",
    SVC_READY_FOR_NEW_USER: '220 Service ready',
    GOODBYE_MSG: '221 Goodbye.',
    DATA_CNX_OPEN_NO_XFR_IN_PROGRESS: '225 data connection open, no transfer in progress',
    CLOSING_DATA_CNX: '226 Abort successful',
    TXFR_COMPLETE_OK: '226 Transfer Complete.',
    ENTERING_PASV_MODE: '227 Entering Passive Mode (%s).',
    ENTERING_EPSV_MODE: '229 Entering Extended Passive Mode (|||%s|).', # where is epsv defined in the rfc's?
    USR_LOGGED_IN_PROCEED: '230 User logged in, proceed',
    GUEST_LOGGED_IN_PROCEED: '230 Anonymous login ok, access restrictions apply.',
    REQ_FILE_ACTN_COMPLETED_OK: '250 Requested File Action Completed OK', #i.e. CWD completed ok
    PWD_REPLY: '257 "%s"',
    MKD_REPLY: '257 "%s" created',
    # -- 300's --
    'userotp': '331 Response to %s.', # ???
    USR_NAME_OK_NEED_PASS: '331 Password required for %s.',
    GUEST_NAME_OK_NEED_EMAIL: '331 Guest login ok, type your email address as password.',
    REQ_FILE_ACTN_PENDING_FURTHER_INFO: '350 Requested file action pending further information.',
    # -- 400's --
    SVC_NOT_AVAIL_CLOSING_CTRL_CNX: '421 Service not available, closing control connection.',
    TOO_MANY_CONNECTIONS: '421 Too many users right now, try again in a few minutes.',
    CANT_OPEN_DATA_CNX: "425 Can't open data connection.",
    CNX_CLOSED_TXFR_ABORTED: '426 Transfer aborted. Data connection closed.',
    REQ_ACTN_ABRTD_LOCAL_ERR: '451 Requested action aborted. Local error in processing.',
    # -- 500's --
    SYNTAX_ERR: "500 Syntax error: %s",
    SYNTAX_ERR_IN_ARGS: '501 syntax error in argument(s) %s.',
    CMD_NOT_IMPLMNTD: "502 Command '%s' not implemented",
    BAD_CMD_SEQ: '503 Incorrect sequence of commands: %s',
    CMD_NOT_IMPLMNTD_FOR_PARAM: "504 Not implemented for parameter '%s'.",
    NOT_LOGGED_IN: '530 Please login with USER and PASS.',
    AUTH_FAILURE: '530 Sorry, Authentication failed.',
    NEED_ACCT_FOR_STOR: '532 Need an account for storing files',
    FILE_NOT_FOUND: '550 %s: No such file or directory.',
    PERMISSION_DENIED: '550 %s: Permission denied.',
    ANON_USER_DENIED: '550 Anonymous users are forbidden to change the filesystem',
    IS_NOT_A_DIR: '550 Cannot rmd, %s is not a directory',
    FILE_EXISTS: '550 %s: File exists',
    IS_A_DIR: '550 %s: is a directory',
    REQ_ACTN_NOT_TAKEN: '550 Requested action not taken: %s',
    EXCEEDED_STORAGE_ALLOC: '552 Requested file action aborted, exceeded file storage allocation',
    FILENAME_NOT_ALLOWED: '553 Requested action not taken, file name not allowed'
}
class InvalidPath(Exception):
    """
    Internal exception used to signify an error during parsing a path.
    Raised by L{toSegments}; instantiated with (cwd, path).
    """
def toSegments(cwd, path):
    """
    Normalize a path, as represented by a list of strings each
    representing one segment of the path.
    """
    # An absolute path restarts at the root; a relative one extends a copy
    # of the current working directory's segments.
    segments = [] if path.startswith('/') else list(cwd)
    for piece in path.split('/'):
        if piece in ('', '.'):
            # Empty components ('//') and '.' are no-ops.
            continue
        if piece == '..':
            # Pop one level; stepping above the root is an error.
            if not segments:
                raise InvalidPath(cwd, path)
            segments.pop()
        elif '\0' in piece or '/' in piece:
            # Reject embedded NUL bytes (and, defensively, separators).
            raise InvalidPath(cwd, path)
        else:
            segments.append(piece)
    return segments
def errnoToFailure(e, path):
    """
    Map C{OSError} and C{IOError} to standard FTP errors.
    """
    mapping = {
        errno.ENOENT: FileNotFoundError,
        errno.EACCES: PermissionDeniedError,
        errno.EPERM: PermissionDeniedError,
        errno.ENOTDIR: IsNotADirectoryError,
        errno.EEXIST: FileExistsError,
        errno.EISDIR: IsADirectoryError,
    }
    errorType = mapping.get(e)
    if errorType is None:
        # Unknown errno: wrap whichever exception is currently being handled.
        return defer.fail()
    return defer.fail(errorType(path))
class FTPCmdError(Exception):
    """
    Generic exception for FTP commands.
    Subclasses set C{errorCode} to one of the response-code keys so that
    L{response} can render the matching server reply.
    """
    def __init__(self, *msg):
        Exception.__init__(self, *msg)
        # Kept as a tuple: used as %-interpolation args by response().
        self.errorMessage = msg
    def response(self):
        """
        Generate a FTP response message for this error.
        """
        return RESPONSE[self.errorCode] % self.errorMessage
class FileNotFoundError(FTPCmdError):
    """
    Raised when trying to access a non existent file or directory.
    NOTE(review): shadows Python 3's builtin of the same name inside this
    module (this code targets Python 2 -- see the ``xrange`` use below).
    """
    # Renders as '550 %s: No such file or directory.'
    errorCode = FILE_NOT_FOUND
class AnonUserDeniedError(FTPCmdError):
    """
    Raised when an anonymous user issues a command that will alter the
    filesystem
    """
    def __init__(self):
        # No message
        FTPCmdError.__init__(self, None)
    # Renders as the argument-free '550 Anonymous users are forbidden...' reply.
    errorCode = ANON_USER_DENIED
class PermissionDeniedError(FTPCmdError):
    """
    Raised when access is attempted to a resource to which access is
    not allowed.
    """
    # Renders as '550 %s: Permission denied.'
    errorCode = PERMISSION_DENIED
class IsNotADirectoryError(FTPCmdError):
    """
    Raised when RMD is called on a path that isn't a directory.
    """
    # Renders as '550 Cannot rmd, %s is not a directory'
    errorCode = IS_NOT_A_DIR
class FileExistsError(FTPCmdError):
    """
    Raised when attempted to override an existing resource.
    NOTE(review): shadows Python 3's builtin of the same name in this module.
    """
    # Renders as '550 %s: File exists'
    errorCode = FILE_EXISTS
class IsADirectoryError(FTPCmdError):
    """
    Raised when DELE is called on a path that is a directory.
    """
    # Renders as '550 %s: is a directory'
    errorCode = IS_A_DIR
class CmdSyntaxError(FTPCmdError):
    """
    Raised when a command syntax is wrong.
    """
    # Renders as '500 Syntax error: %s'
    errorCode = SYNTAX_ERR
class CmdArgSyntaxError(FTPCmdError):
    """
    Raised when a command is called with wrong value or a wrong number of
    arguments.
    """
    # Renders as '501 syntax error in argument(s) %s.'
    errorCode = SYNTAX_ERR_IN_ARGS
class CmdNotImplementedError(FTPCmdError):
    """
    Raised when an unimplemented command is given to the server.
    """
    # Renders as "502 Command '%s' not implemented"
    errorCode = CMD_NOT_IMPLMNTD
class CmdNotImplementedForArgError(FTPCmdError):
    """
    Raised when the handling of a parameter for a command is not implemented by
    the server.
    """
    # Renders as "504 Not implemented for parameter '%s'."
    errorCode = CMD_NOT_IMPLMNTD_FOR_PARAM
class FTPError(Exception):
    """Generic FTP error (not tied to a specific response code)."""
    pass
class PortConnectionError(Exception):
    """Raised when the data-connection (DTP) setup fails or times out."""
    pass
class BadCmdSequenceError(FTPCmdError):
    """
    Raised when a client sends a series of commands in an illogical sequence.
    """
    # Renders as '503 Incorrect sequence of commands: %s'
    errorCode = BAD_CMD_SEQ
class AuthorizationError(FTPCmdError):
    """
    Raised when client authentication fails.
    """
    # Renders as '530 Sorry, Authentication failed.'
    errorCode = AUTH_FAILURE
def debugDeferred(self, *_):
    # Module-level debug callback for attaching to Deferreds; despite the
    # name, 'self' is simply the first positional argument (the result).
    log.msg('debugDeferred(): %s' % str(_), debug=True)
# -- DTP Protocol --
# 1-based month-number -> abbreviated-name lookup used when formatting LIST
# dates (index 0 is a placeholder so tm_mon can index directly).
_months = [
    None,
    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
class DTP(object, protocol.Protocol):
    """
    Data Transfer Process protocol.
    Acts as an IConsumer proxy to its transport for outgoing transfers, and
    as a buffering producer for inbound data (buffered until a consumer is
    registered via L{registerConsumer}).
    """
    implements(interfaces.IConsumer)
    # True while the data connection is open.
    isConnected = False
    # Consumer currently receiving inbound data (None until registered).
    _cons = None
    # Deferred fired when the connection is lost while a consumer is attached.
    _onConnLost = None
    # Holds inbound chunks received before a consumer is registered.
    _buffer = None
    def connectionMade(self):
        # Notify the factory (and through it the PI) that the channel is up.
        self.isConnected = True
        self.factory.deferred.callback(None)
        self._buffer = []
    def connectionLost(self, reason):
        self.isConnected = False
        if self._onConnLost is not None:
            self._onConnLost.callback(None)
    def sendLine(self, line):
        # FTP lines are CRLF terminated.
        self.transport.write(line + '\r\n')
    def _formatOneListResponse(self, name, size, directory, permissions, hardlinks, modified, owner, group):
        # Render one 'ls -l'-style line for a LIST response.
        def formatMode(mode):
            # Expand a 9-bit permission mask into e.g. 'rwxr-xr--'.
            return ''.join([mode & (256 >> n) and 'rwx'[n % 3] or '-' for n in range(9)])
        def formatDate(mtime):
            now = time.gmtime()
            info = {
                'month': _months[mtime.tm_mon],
                'day': mtime.tm_mday,
                'year': mtime.tm_year,
                'hour': mtime.tm_hour,
                'minute': mtime.tm_min
            }
            # ls convention: show the year instead of the clock time for
            # entries from a different year.
            if now.tm_year != mtime.tm_year:
                return '%(month)s %(day)02d %(year)5d' % info
            else:
                return '%(month)s %(day)02d %(hour)02d:%(minute)02d' % info
        format = ('%(directory)s%(permissions)s%(hardlinks)4d '
                  '%(owner)-9s %(group)-9s %(size)15d %(date)12s '
                  '%(name)s')
        return format % {
            'directory': directory and 'd' or '-',
            'permissions': formatMode(permissions),
            'hardlinks': hardlinks,
            'owner': owner[:8],
            'group': group[:8],
            'size': size,
            'date': formatDate(time.gmtime(modified)),
            'name': name}
    def sendListResponse(self, name, response):
        self.sendLine(self._formatOneListResponse(name, *response))
    # Proxy IConsumer to our transport
    def registerProducer(self, producer, streaming):
        return self.transport.registerProducer(producer, streaming)
    def unregisterProducer(self):
        self.transport.unregisterProducer()
        self.transport.loseConnection()
    def write(self, data):
        if self.isConnected:
            return self.transport.write(data)
        raise Exception("Crap damn crap damn crap damn")
    # Pretend to be a producer, too.
    def _conswrite(self, bytes):
        try:
            self._cons.write(bytes)
        except:
            self._onConnLost.errback()
    def dataReceived(self, bytes):
        # Deliver inbound data to the consumer, or buffer it until one is
        # registered.
        if self._cons is not None:
            self._conswrite(bytes)
        else:
            self._buffer.append(bytes)
    def _unregConsumer(self, ignored):
        self._cons.unregisterProducer()
        self._cons = None
        del self._onConnLost
        return ignored
    def registerConsumer(self, cons):
        # Attach the consumer for inbound data and flush anything buffered so
        # far; only one consumer may ever be registered.
        assert self._cons is None
        self._cons = cons
        self._cons.registerProducer(self, True)
        for chunk in self._buffer:
            self._conswrite(chunk)
        self._buffer = None
        if self.isConnected:
            self._onConnLost = d = defer.Deferred()
            d.addBoth(self._unregConsumer)
            return d
        else:
            self._cons.unregisterProducer()
            self._cons = None
            return defer.succeed(None)
    def resumeProducing(self):
        self.transport.resumeProducing()
    def pauseProducing(self):
        self.transport.pauseProducing()
    def stopProducing(self):
        self.transport.stopProducing()
class DTPFactory(protocol.ClientFactory):
    """
    Client factory for I{data transfer process} protocols.
    @ivar peerCheck: perform checks to make sure the ftp-pi's peer is the same
        as the dtp's
    @ivar pi: a reference to this factory's protocol interpreter
    @ivar _state: Indicates the current state of the DTPFactory. Initially,
        this is L{_IN_PROGRESS}. If the connection fails or times out, it is
        L{_FAILED}. If the connection succeeds before the timeout, it is
        L{_FINISHED}.
    """
    # Sentinel state objects, compared by identity.
    _IN_PROGRESS = object()
    _FAILED = object()
    _FINISHED = object()
    _state = _IN_PROGRESS
    # -- configuration variables --
    peerCheck = False
    # -- class variables --
    def __init__(self, pi, peerHost=None, reactor=None):
        """Constructor
        @param pi: this factory's protocol interpreter
        @param peerHost: if peerCheck is True, this is the tuple that the
            generated instance will use to perform security checks
        """
        self.pi = pi # the protocol interpreter that is using this factory
        self.peerHost = peerHost # the from FTP.transport.peerHost()
        self.deferred = defer.Deferred() # deferred will fire when instance is connected
        self.delayedCall = None
        if reactor is None:
            from twisted.internet import reactor
        self._reactor = reactor
    def buildProtocol(self, addr):
        log.msg('DTPFactory.buildProtocol', debug=True)
        # Only the first connection wins; later attempts are refused.
        if self._state is not self._IN_PROGRESS:
            return None
        self._state = self._FINISHED
        self.cancelTimeout()
        p = DTP()
        p.factory = self
        p.pi = self.pi
        self.pi.dtpInstance = p
        return p
    def stopFactory(self):
        log.msg('dtpFactory.stopFactory', debug=True)
        self.cancelTimeout()
    def timeoutFactory(self):
        # No DTP connection arrived in time: fail the pending deferred.
        log.msg('timed out waiting for DTP connection')
        if self._state is not self._IN_PROGRESS:
            return
        self._state = self._FAILED
        d = self.deferred
        self.deferred = None
        d.errback(
            PortConnectionError(defer.TimeoutError("DTPFactory timeout")))
    def cancelTimeout(self):
        # Safe to call repeatedly; only cancels a still-pending timeout.
        if self.delayedCall is not None and self.delayedCall.active():
            log.msg('cancelling DTP timeout', debug=True)
            self.delayedCall.cancel()
    def setTimeout(self, seconds):
        log.msg('DTPFactory.setTimeout set to %s seconds' % seconds)
        self.delayedCall = self._reactor.callLater(seconds, self.timeoutFactory)
    def clientConnectionFailed(self, connector, reason):
        if self._state is not self._IN_PROGRESS:
            return
        self._state = self._FAILED
        d = self.deferred
        self.deferred = None
        d.errback(PortConnectionError(reason))
# -- FTP-PI (Protocol Interpreter) --
class ASCIIConsumerWrapper(object):
    """
    Consumer proxy that converts the platform's line separator to CRLF
    ("ASCII mode" transfers).  When the platform separator is already CRLF
    the wrapped consumer's write is used directly (the instance attribute
    assigned in __init__ shadows the method).
    """
    def __init__(self, cons):
        self.cons = cons
        self.registerProducer = cons.registerProducer
        self.unregisterProducer = cons.unregisterProducer
        assert os.linesep == "\r\n" or len(os.linesep) == 1, "Unsupported platform (yea right like this even exists)"
        # Fast path: no translation needed when the platform already uses CRLF.
        if os.linesep == "\r\n":
            self.write = cons.write
    def write(self, bytes):
        return self.cons.write(bytes.replace(os.linesep, "\r\n"))
class FileConsumer(object):
    """
    A consumer for FTP input that writes data to a file.
    @ivar fObj: a file object opened for writing, used to write data received.
    @type fObj: C{file}
    """
    implements(interfaces.IConsumer)
    def __init__(self, fObj):
        self.fObj = fObj
    def registerProducer(self, producer, streaming):
        self.producer = producer
        # Only streaming (push) producers are supported.
        assert streaming
    def unregisterProducer(self):
        # Transfer finished: drop the producer and close the file.
        self.producer = None
        self.fObj.close()
    def write(self, bytes):
        self.fObj.write(bytes)
class FTPOverflowProtocol(basic.LineReceiver):
    """FTP mini-protocol for when there are too many connections."""
    def connectionMade(self):
        # Tell the client the server is full, then hang up immediately.
        self.sendLine(RESPONSE[TOO_MANY_CONNECTIONS])
        self.transport.loseConnection()
class FTP(object, basic.LineReceiver, policies.TimeoutMixin):
    """
    Protocol Interpreter for the File Transfer Protocol
    @ivar state: The current server state. One of L{UNAUTH},
    L{INAUTH}, L{AUTHED}, L{RENAMING}.
    @ivar shell: The connected avatar
    @ivar binary: The transfer mode. If false, ASCII.
    @ivar dtpFactory: Generates a single DTP for this session
    @ivar dtpPort: Port returned from listenTCP
    @ivar listenFactory: A callable with the signature of
    L{twisted.internet.interfaces.IReactorTCP.listenTCP} which will be used
    to create Ports for passive connections (mainly for testing).
    @ivar passivePortRange: iterator used as source of passive port numbers.
    @type passivePortRange: C{iterator}
    """
    # Set to True by ftp_QUIT; stops lineReceived's allDone hook from
    # resuming a transport that has already been closed.
    disconnected = False
    # States an FTP can be in
    UNAUTH, INAUTH, AUTHED, RENAMING = range(4)
    # how long the DTP waits for a connection
    dtpTimeout = 10
    portal = None
    shell = None
    dtpFactory = None
    dtpPort = None
    dtpInstance = None
    binary = True
    # Single entry, port 0: ask the OS for any free port.  Normally
    # overridden per-instance by FTPFactory.buildProtocol.
    passivePortRange = xrange(0, 1)
    listenFactory = reactor.listenTCP
    def reply(self, key, *args):
        """
        Format the canned response template identified by C{key} with
        C{args} and write it to the control channel.
        """
        msg = RESPONSE[key] % args
        self.sendLine(msg)
    def connectionMade(self):
        """
        Enter the unauthenticated state, arm the idle timeout (the timeOut
        attribute is set on the instance by the factory) and greet the
        client.
        """
        self.state = self.UNAUTH
        self.setTimeout(self.timeOut)
        self.reply(WELCOME_MSG, self.factory.welcomeMessage)
    def connectionLost(self, reason):
        """
        Clean up any data connection, cancel the idle timeout, and log the
        avatar out.
        """
        # if we have a DTP protocol instance running and
        # we lose connection to the client's PI, kill the
        # DTP connection and close the port
        if self.dtpFactory:
            self.cleanupDTP()
        self.setTimeout(None)
        # NOTE(review): ftp_PASS's _cbLogin stores the portal logout
        # callback on self.logout, while this looks for a logout attribute
        # on the shell avatar -- confirm which object is meant to carry it.
        if hasattr(self.shell, 'logout') and self.shell.logout is not None:
            self.shell.logout()
        self.shell = None
        self.transport = None
    def timeoutConnection(self):
        """Drop the control connection when the idle timeout expires."""
        self.transport.loseConnection()
    def lineReceived(self, line):
        """
        Process one command line.  Reading from the transport is paused
        while the command runs and resumed (via the reactor) once the
        command's Deferred has fired.
        """
        self.resetTimeout()
        self.pauseProducing()
        def processFailed(err):
            # Translate known failures into FTP error replies; anything
            # else is logged and reported as an internal server error.
            if err.check(FTPCmdError):
                self.sendLine(err.value.response())
            # Message-sniffing the TypeError distinguishes "handler called
            # with the wrong number of arguments" from other TypeErrors.
            elif (err.check(TypeError) and
                  err.value.args[0].find('takes exactly') != -1):
                self.reply(SYNTAX_ERR, "%s requires an argument." % (cmd,))
            else:
                log.msg("Unexpected FTP error")
                log.err(err)
                self.reply(REQ_ACTN_NOT_TAKEN, "internal server error")
        def processSucceeded(result):
            # Handlers may return a (key, args...) tuple, a bare reply key,
            # or None (meaning the reply was already sent directly).
            if isinstance(result, tuple):
                self.reply(*result)
            elif result is not None:
                self.reply(result)
        def allDone(ignored):
            if not self.disconnected:
                self.resumeProducing()
        # Split "CMD arg text" into the verb and a one-tuple of argument
        # text; commands without an argument get an empty tuple.
        spaceIndex = line.find(' ')
        if spaceIndex != -1:
            cmd = line[:spaceIndex]
            args = (line[spaceIndex + 1:],)
        else:
            cmd = line
            args = ()
        d = defer.maybeDeferred(self.processCommand, cmd, *args)
        d.addCallbacks(processSucceeded, processFailed)
        d.addErrback(log.err)
        # XXX It burnsss
        # LineReceiver doesn't let you resumeProducing inside
        # lineReceived atm
        from twisted.internet import reactor
        reactor.callLater(0, d.addBoth, allDone)
    def processCommand(self, cmd, *params):
        """
        Validate C{cmd} against the current session state and dispatch to
        the matching C{ftp_*} handler.
        @return: a reply key, a (key, args...) tuple, or a Deferred firing
            with one of those.
        """
        cmd = cmd.upper()
        if self.state == self.UNAUTH:
            if cmd == 'USER':
                return self.ftp_USER(*params)
            elif cmd == 'PASS':
                return BAD_CMD_SEQ, "USER required before PASS"
            else:
                return NOT_LOGGED_IN
        elif self.state == self.INAUTH:
            if cmd == 'PASS':
                return self.ftp_PASS(*params)
            else:
                return BAD_CMD_SEQ, "PASS required after USER"
        elif self.state == self.AUTHED:
            method = getattr(self, "ftp_" + cmd, None)
            if method is not None:
                return method(*params)
            return defer.fail(CmdNotImplementedError(cmd))
        elif self.state == self.RENAMING:
            if cmd == 'RNTO':
                return self.ftp_RNTO(*params)
            else:
                return BAD_CMD_SEQ, "RNTO required after RNFR"
    def getDTPPort(self, factory):
        """
        Return a port for passive access, using C{self.passivePortRange}
        attribute.
        """
        for portn in self.passivePortRange:
            try:
                dtpPort = self.listenFactory(portn, factory)
            except error.CannotListenError:
                continue
            else:
                return dtpPort
        raise error.CannotListenError('', portn,
                                      "No port available in range %s" %
                                      (self.passivePortRange,))
    def ftp_USER(self, username):
        """
        First part of login.  Get the username the peer wants to
        authenticate as.
        """
        if not username:
            return defer.fail(CmdSyntaxError('USER requires an argument'))
        self._user = username
        self.state = self.INAUTH
        if self.factory.allowAnonymous and self._user == self.factory.userAnonymous:
            return GUEST_NAME_OK_NEED_EMAIL
        else:
            return (USR_NAME_OK_NEED_PASS, username)
    # TODO: add max auth try before timeout from ip...
    # TODO: need to implement minimal ABOR command
    def ftp_PASS(self, password):
        """
        Second part of login.  Get the password the peer wants to
        authenticate with.
        """
        if self.factory.allowAnonymous and self._user == self.factory.userAnonymous:
            # anonymous login
            creds = credentials.Anonymous()
            reply = GUEST_LOGGED_IN_PROCEED
        else:
            # user login
            creds = credentials.UsernamePassword(self._user, password)
            reply = USR_LOGGED_IN_PROCEED
        del self._user
        def _cbLogin((interface, avatar, logout)):
            # The realm must hand back an IFTPShell avatar; anything else
            # is a programming error in the realm.
            assert interface is IFTPShell, "The realm is busted, jerk."
            self.shell = avatar
            self.logout = logout
            self.workingDirectory = []
            self.state = self.AUTHED
            return reply
        def _ebLogin(failure):
            failure.trap(cred_error.UnauthorizedLogin, cred_error.UnhandledCredentials)
            self.state = self.UNAUTH
            raise AuthorizationError
        d = self.portal.login(creds, None, IFTPShell)
        d.addCallbacks(_cbLogin, _ebLogin)
        return d
    def ftp_PASV(self):
        """Request for a passive connection
        from the rfc::
            This command requests the server-DTP to \"listen\" on a data port
            (which is not its default data port) and to wait for a connection
            rather than initiate one upon receipt of a transfer command. The
            response to this command includes the host and port address this
            server is listening on.
        """
        # if we have a DTP port set up, lose it.
        if self.dtpFactory is not None:
            # cleanupDTP sets dtpFactory to none. Later we'll do
            # cleanup here or something.
            self.cleanupDTP()
        self.dtpFactory = DTPFactory(pi=self)
        self.dtpFactory.setTimeout(self.dtpTimeout)
        self.dtpPort = self.getDTPPort(self.dtpFactory)
        host = self.transport.getHost().host
        port = self.dtpPort.getHost().port
        self.reply(ENTERING_PASV_MODE, encodeHostPort(host, port))
        return self.dtpFactory.deferred.addCallback(lambda ign: None)
    def ftp_PORT(self, address):
        """
        Active mode: connect a DTP out to the client at the host and port
        encoded in C{address} (six comma-separated decimal bytes).
        """
        addr = map(int, address.split(','))
        ip = '%d.%d.%d.%d' % tuple(addr[:4])
        port = addr[4] << 8 | addr[5]
        # if we have a DTP port set up, lose it.
        if self.dtpFactory is not None:
            self.cleanupDTP()
        self.dtpFactory = DTPFactory(pi=self, peerHost=self.transport.getPeer().host)
        self.dtpFactory.setTimeout(self.dtpTimeout)
        self.dtpPort = reactor.connectTCP(ip, port, self.dtpFactory)
        def connected(ignored):
            return ENTERING_PORT_MODE
        def connFailed(err):
            err.trap(PortConnectionError)
            return CANT_OPEN_DATA_CNX
        return self.dtpFactory.deferred.addCallbacks(connected, connFailed)
    def ftp_LIST(self, path=''):
        """ This command causes a list to be sent from the server to the
        passive DTP.  If the pathname specifies a directory or other
        group of files, the server should transfer a list of files
        in the specified directory.  If the pathname specifies a
        file then the server should send current information on the
        file.  A null argument implies the user's current working or
        default directory.
        """
        # For now, require an already-established data connection.
        if self.dtpInstance is None or not self.dtpInstance.isConnected:
            return defer.fail(BadCmdSequenceError('must send PORT or PASV before RETR'))
        # Several clients pass "ls"-style flags instead of a path; treat
        # those as a listing of the current directory.
        # bug in konqueror
        if path == "-a":
            path = ''
        # bug in gFTP 2.0.15
        if path == "-aL":
            path = ''
        # bug in Nautilus 2.10.0
        if path == "-L":
            path = ''
        # bug in ange-ftp
        if path == "-la":
            path = ''
        def gotListing(results):
            self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
            for (name, attrs) in results:
                self.dtpInstance.sendListResponse(name, attrs)
            self.dtpInstance.transport.loseConnection()
            return (TXFR_COMPLETE_OK,)
        try:
            segments = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        d = self.shell.list(
            segments,
            ('size', 'directory', 'permissions', 'hardlinks',
             'modified', 'owner', 'group'))
        d.addCallback(gotListing)
        return d
    def ftp_NLST(self, path):
        """
        This command causes a directory listing to be sent from the server to
        the client. The pathname should specify a directory or other
        system-specific file group descriptor. An empty path implies the current
        working directory. If the path is non-existent, send nothing. If the
        path is to a file, send only the file name.
        @type path: C{str}
        @param path: The path for which a directory listing should be returned.
        @rtype: L{Deferred}
        @return: a L{Deferred} which will be fired when the listing request
            is finished.
        """
        # XXX: why is this check different from ftp_RETR/ftp_STOR? See #4180
        if self.dtpInstance is None or not self.dtpInstance.isConnected:
            return defer.fail(
                BadCmdSequenceError('must send PORT or PASV before RETR'))
        try:
            segments = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        def cbList(results):
            """
            Send, line by line, each file in the directory listing, and then
            close the connection.
            @type results: A C{list} of C{tuple}. The first element of each
                C{tuple} is a C{str} and the second element is a C{list}.
            @param results: The names of the files in the directory.
            @rtype: C{tuple}
            @return: A C{tuple} containing the status code for a successful
                transfer.
            """
            self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
            for (name, ignored) in results:
                self.dtpInstance.sendLine(name)
            self.dtpInstance.transport.loseConnection()
            return (TXFR_COMPLETE_OK,)
        def cbGlob(results):
            # Listing of the parent directory filtered by the glob pattern
            # in the final segment.
            self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
            for (name, ignored) in results:
                if fnmatch.fnmatch(name, segments[-1]):
                    self.dtpInstance.sendLine(name)
            self.dtpInstance.transport.loseConnection()
            return (TXFR_COMPLETE_OK,)
        def listErr(results):
            """
            RFC 959 specifies that an NLST request may only return directory
            listings. Thus, send nothing and just close the connection.
            @type results: L{Failure}
            @param results: The L{Failure} wrapping a L{FileNotFoundError} that
                occurred while trying to list the contents of a nonexistent
                directory.
            @rtype: C{tuple}
            @returns: A C{tuple} containing the status code for a successful
                transfer.
            """
            self.dtpInstance.transport.loseConnection()
            return (TXFR_COMPLETE_OK,)
        # XXX This globbing may be incomplete: see #4181
        if segments and (
            '*' in segments[-1] or '?' in segments[-1] or
            ('[' in segments[-1] and ']' in segments[-1])):
            d = self.shell.list(segments[:-1])
            d.addCallback(cbGlob)
        else:
            d = self.shell.list(segments)
            d.addCallback(cbList)
            # self.shell.list will generate an error if the path is invalid
            d.addErrback(listErr)
        return d
    def ftp_CWD(self, path):
        """
        Change the working directory, after checking that the shell grants
        access to the target.
        """
        try:
            segments = toSegments(self.workingDirectory, path)
        except InvalidPath:
            # XXX Eh, what to fail with here?
            return defer.fail(FileNotFoundError(path))
        def accessGranted(result):
            self.workingDirectory = segments
            return (REQ_FILE_ACTN_COMPLETED_OK,)
        return self.shell.access(segments).addCallback(accessGranted)
    def ftp_CDUP(self):
        """Change to the parent of the current working directory."""
        return self.ftp_CWD('..')
    def ftp_PWD(self):
        """Reply with the current working directory as an absolute path."""
        return (PWD_REPLY, '/' + '/'.join(self.workingDirectory))
    def ftp_RETR(self, path):
        """
        Transmit the file at C{path} over the established data connection.
        """
        if self.dtpInstance is None:
            raise BadCmdSequenceError('PORT or PASV required before RETR')
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        # XXX For now, just disable the timeout.  Later we'll want to
        # leave it active and have the DTP connection reset it
        # periodically.
        self.setTimeout(None)
        # Put it back later
        def enableTimeout(result):
            self.setTimeout(self.factory.timeOut)
            return result
        # And away she goes
        if not self.binary:
            cons = ASCIIConsumerWrapper(self.dtpInstance)
        else:
            cons = self.dtpInstance
        def cbSent(result):
            return (TXFR_COMPLETE_OK,)
        def ebSent(err):
            log.msg("Unexpected error attempting to transmit file to client:")
            log.err(err)
            return (CNX_CLOSED_TXFR_ABORTED,)
        def cbOpened(file):
            # Tell them what to doooo
            if self.dtpInstance.isConnected:
                self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
            else:
                self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
            d = file.send(cons)
            d.addCallbacks(cbSent, ebSent)
            return d
        def ebOpened(err):
            if not err.check(PermissionDeniedError, FileNotFoundError, IsNotADirectoryError):
                log.msg("Unexpected error attempting to open file for transmission:")
                log.err(err)
            if err.check(FTPCmdError):
                return (err.value.errorCode, '/'.join(newsegs))
            return (FILE_NOT_FOUND, '/'.join(newsegs))
        d = self.shell.openForReading(newsegs)
        d.addCallbacks(cbOpened, ebOpened)
        d.addBoth(enableTimeout)
        # Pass back Deferred that fires when the transfer is done
        return d
    def ftp_STOR(self, path):
        """
        Receive a file from the data connection and store it at C{path}.
        """
        if self.dtpInstance is None:
            raise BadCmdSequenceError('PORT or PASV required before STOR')
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        # XXX For now, just disable the timeout.  Later we'll want to
        # leave it active and have the DTP connection reset it
        # periodically.
        self.setTimeout(None)
        # Put it back later
        def enableTimeout(result):
            self.setTimeout(self.factory.timeOut)
            return result
        def cbSent(result):
            return (TXFR_COMPLETE_OK,)
        def ebSent(err):
            log.msg("Unexpected error receiving file from client:")
            log.err(err)
            return (CNX_CLOSED_TXFR_ABORTED,)
        def cbConsumer(cons):
            # In ASCII mode the consumer is wrapped to translate line
            # endings before bytes reach the file.
            if not self.binary:
                cons = ASCIIConsumerWrapper(cons)
            d = self.dtpInstance.registerConsumer(cons)
            # Tell them what to doooo
            if self.dtpInstance.isConnected:
                self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
            else:
                self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
            return d
        def cbOpened(file):
            d = file.receive()
            d.addCallback(cbConsumer)
            d.addCallback(lambda ignored: file.close())
            d.addCallbacks(cbSent, ebSent)
            return d
        def ebOpened(err):
            if not err.check(PermissionDeniedError, FileNotFoundError, IsNotADirectoryError):
                log.msg("Unexpected error attempting to open file for upload:")
                log.err(err)
            if isinstance(err.value, FTPCmdError):
                return (err.value.errorCode, '/'.join(newsegs))
            return (FILE_NOT_FOUND, '/'.join(newsegs))
        d = self.shell.openForWriting(newsegs)
        d.addCallbacks(cbOpened, ebOpened)
        d.addBoth(enableTimeout)
        # Pass back Deferred that fires when the transfer is done
        return d
    def ftp_SIZE(self, path):
        """Reply with the size in bytes of the file at C{path}."""
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        def cbStat((size,)):
            return (FILE_STATUS, str(size))
        return self.shell.stat(newsegs, ('size',)).addCallback(cbStat)
    def ftp_MDTM(self, path):
        """Reply with the modification time of C{path} as YYYYMMDDHHMMSS."""
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        def cbStat((modified,)):
            return (FILE_STATUS, time.strftime('%Y%m%d%H%M%S', time.gmtime(modified)))
        return self.shell.stat(newsegs, ('modified',)).addCallback(cbStat)
    def ftp_TYPE(self, type):
        """
        Set the transfer type; dispatches to a type_* handler keyed on the
        first character of the argument.
        """
        p = type.upper()
        if p:
            f = getattr(self, 'type_' + p[0], None)
            if f is not None:
                return f(p[1:])
            return self.type_UNKNOWN(p)
        return (SYNTAX_ERR,)
    def type_A(self, code):
        """Select ASCII mode ('A' or 'AN')."""
        if code == '' or code == 'N':
            self.binary = False
            return (TYPE_SET_OK, 'A' + code)
        else:
            return defer.fail(CmdArgSyntaxError(code))
    def type_I(self, code):
        """Select binary (image) mode ('I')."""
        if code == '':
            self.binary = True
            return (TYPE_SET_OK, 'I')
        else:
            return defer.fail(CmdArgSyntaxError(code))
    def type_UNKNOWN(self, code):
        """Reject an unsupported TYPE argument."""
        return defer.fail(CmdNotImplementedForArgError(code))
    def ftp_SYST(self):
        """Reply with the server's system type."""
        return NAME_SYS_TYPE
    def ftp_STRU(self, structure):
        """Only the default file structure ('F') is supported."""
        p = structure.upper()
        if p == 'F':
            return (CMD_OK,)
        return defer.fail(CmdNotImplementedForArgError(structure))
    def ftp_MODE(self, mode):
        """Only the default stream mode ('S') is supported."""
        p = mode.upper()
        if p == 'S':
            return (CMD_OK,)
        return defer.fail(CmdNotImplementedForArgError(mode))
    def ftp_MKD(self, path):
        """Create the directory named by C{path}."""
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        return self.shell.makeDirectory(newsegs).addCallback(lambda ign: (MKD_REPLY, path))
    def ftp_RMD(self, path):
        """Remove the directory named by C{path}."""
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        return self.shell.removeDirectory(newsegs).addCallback(lambda ign: (REQ_FILE_ACTN_COMPLETED_OK,))
    def ftp_DELE(self, path):
        """Delete the file named by C{path}."""
        try:
            newsegs = toSegments(self.workingDirectory, path)
        except InvalidPath:
            return defer.fail(FileNotFoundError(path))
        return self.shell.removeFile(newsegs).addCallback(lambda ign: (REQ_FILE_ACTN_COMPLETED_OK,))
    def ftp_NOOP(self):
        """Do nothing; acknowledge."""
        return (CMD_OK,)
    def ftp_RNFR(self, fromName):
        """
        First half of a rename: remember the source name and enter the
        RENAMING state so only RNTO is accepted next.
        """
        self._fromName = fromName
        self.state = self.RENAMING
        return (REQ_FILE_ACTN_PENDING_FURTHER_INFO,)
    def ftp_RNTO(self, toName):
        """
        Second half of a rename: rename the path remembered by RNFR to
        C{toName} and return to the AUTHED state.
        """
        fromName = self._fromName
        del self._fromName
        self.state = self.AUTHED
        try:
            fromsegs = toSegments(self.workingDirectory, fromName)
            tosegs = toSegments(self.workingDirectory, toName)
        except InvalidPath:
            return defer.fail(FileNotFoundError(fromName))
        return self.shell.rename(fromsegs, tosegs).addCallback(lambda ign: (REQ_FILE_ACTN_COMPLETED_OK,))
    def ftp_QUIT(self):
        """Say goodbye and close the control connection."""
        self.reply(GOODBYE_MSG)
        self.transport.loseConnection()
        self.disconnected = True
    def cleanupDTP(self):
        """call when DTP connection exits
        Shuts down the listening port or outgoing connector, stops the DTP
        factory and forgets the DTP instance.
        """
        log.msg('cleanupDTP', debug=True)
        log.msg(self.dtpPort)
        dtpPort, self.dtpPort = self.dtpPort, None
        if interfaces.IListeningPort.providedBy(dtpPort):
            dtpPort.stopListening()
        elif interfaces.IConnector.providedBy(dtpPort):
            dtpPort.disconnect()
        else:
            assert False, "dtpPort should be an IListeningPort or IConnector, instead is %r" % (dtpPort,)
        self.dtpFactory.stopFactory()
        self.dtpFactory = None
        if self.dtpInstance is not None:
            self.dtpInstance = None
class FTPFactory(policies.LimitTotalConnectionsFactory):
    """
    A factory for producing ftp protocol instances
    @ivar timeOut: the protocol interpreter's idle timeout time in seconds,
        default is 600 seconds.
    @ivar passivePortRange: value forwarded to C{protocol.passivePortRange}.
    @type passivePortRange: C{iterator}
    @ivar allowAnonymous: whether anonymous logins are permitted.
    @ivar userAnonymous: the username treated as anonymous.
    """
    protocol = FTP
    overflowProtocol = FTPOverflowProtocol
    allowAnonymous = True
    userAnonymous = 'anonymous'
    timeOut = 600
    welcomeMessage = "Twisted %s FTP Server" % (copyright.version,)
    # Single entry, port 0: let the OS choose the passive port.
    passivePortRange = xrange(0, 1)
    def __init__(self, portal=None, userAnonymous='anonymous'):
        self.portal = portal
        self.userAnonymous = userAnonymous
        self.instances = []
    def buildProtocol(self, addr):
        """
        Build an FTP protocol and push the factory-level configuration
        (portal, timeout, passive port range) onto the wrapped protocol.
        """
        p = policies.LimitTotalConnectionsFactory.buildProtocol(self, addr)
        if p is not None:
            p.wrappedProtocol.portal = self.portal
            p.wrappedProtocol.timeOut = self.timeOut
            p.wrappedProtocol.passivePortRange = self.passivePortRange
        return p
    def stopFactory(self):
        # make sure ftp instance's timeouts are set to None
        # to avoid reactor complaints.  Use a plain loop: the original
        # list comprehension was executed only for its side effects and
        # allocated a throwaway list.
        for p in self.instances:
            if p.timeOut is not None:
                p.setTimeout(None)
        policies.LimitTotalConnectionsFactory.stopFactory(self)
# -- Cred Objects --
class IFTPShell(Interface):
    """
    An abstraction of the shell commands used by the FTP protocol for
    a given user account.
    All path names must be absolute.
    """
    def makeDirectory(path):
        """
        Create a directory.
        @param path: The path, as a list of segments, to create
        @type path: C{list} of C{unicode}
        @return: A Deferred which fires when the directory has been
        created, or which fails if the directory cannot be created.
        """
    def removeDirectory(path):
        """
        Remove a directory.
        @param path: The path, as a list of segments, to remove
        @type path: C{list} of C{unicode}
        @return: A Deferred which fires when the directory has been
        removed, or which fails if the directory cannot be removed.
        """
    def removeFile(path):
        """
        Remove a file.
        @param path: The path, as a list of segments, to remove
        @type path: C{list} of C{unicode}
        @return: A Deferred which fires when the file has been
        removed, or which fails if the file cannot be removed.
        """
    def rename(fromPath, toPath):
        """
        Rename a file or directory.
        @param fromPath: The current name of the path.
        @type fromPath: C{list} of C{unicode}
        @param toPath: The desired new name of the path.
        @type toPath: C{list} of C{unicode}
        @return: A Deferred which fires when the path has been
        renamed, or which fails if the path cannot be renamed.
        """
    def access(path):
        """
        Determine whether access to the given path is allowed.
        @param path: The path, as a list of segments
        @return: A Deferred which fires with None if access is allowed
        or which fails with a specific exception type if access is
        denied.
        """
    def stat(path, keys=()):
        """
        Retrieve information about the given path.
        This is like list, except it will never return results about
        child paths.
        @param path: The path, as a list of segments, to query.
        @param keys: A tuple of stat keys (see L{list}) to retrieve.
        """
    def list(path, keys=()):
        """
        Retrieve information about the given path.
        If the path represents a non-directory, the result list should
        have only one entry with information about that non-directory.
        Otherwise, the result list should have an element for each
        child of the directory.
        @param path: The path, as a list of segments, to list
        @type path: C{list} of C{unicode}
        @param keys: A tuple of keys desired in the resulting
        dictionaries.
        @return: A Deferred which fires with a list of (name, list),
        where the name is the name of the entry as a unicode string
        and each list contains values corresponding to the requested
        keys.  The following are possible elements of keys, and the
        values which should be returned for them:
            - C{'size'}: size in bytes, as an integer (this is kinda required)
            - C{'directory'}: boolean indicating the type of this entry
            - C{'permissions'}: a bitvector (see os.stat(foo).st_mode)
            - C{'hardlinks'}: Number of hard links to this entry
            - C{'modified'}: number of seconds since the epoch since entry was
              modified
            - C{'owner'}: string indicating the user owner of this entry
            - C{'group'}: string indicating the group owner of this entry
        """
    def openForReading(path):
        """
        Open a file for transmission to the client (RETR).
        @param path: The path, as a list of segments, to open
        @type path: C{list} of C{unicode}
        @rtype: C{Deferred} which will fire with L{IReadFile}
        """
    def openForWriting(path):
        """
        Open a file for receiving data from the client (STOR).
        @param path: The path, as a list of segments, to open
        @type path: C{list} of C{unicode}
        @rtype: C{Deferred} which will fire with L{IWriteFile}
        """
class IReadFile(Interface):
    """
    A file out of which bytes may be read.
    """
    def send(consumer):
        """
        Produce the contents of the given path to the given consumer.  This
        method may only be invoked once on each provider.
        @type consumer: C{IConsumer}
        @return: A Deferred which fires when the file has been
        consumed completely.
        """
class IWriteFile(Interface):
    """
    A file into which bytes may be written.
    """
    def receive():
        """
        Create a consumer which will write to this file.  This method may
        only be invoked once on each provider.
        @rtype: C{Deferred} of C{IConsumer}
        """
    def close():
        """
        Perform any post-write work that needs to be done.  This method may
        only be invoked once on each provider, and will always be invoked
        after receive().
        @rtype: C{Deferred} of anything: the value is ignored.  The FTP client
        will not see their upload request complete until this Deferred has
        been fired.
        """
def _getgroups(uid):
    """
    Return the primary and supplementary group ids for the given UID.
    The primary group (from the password database entry) comes first,
    followed by every group whose member list names this user, in the
    order C{grp.getgrall()} reports them.
    @type uid: C{int}
    """
    pwent = pwd.getpwuid(uid)
    supplementary = [grent.gr_gid
                     for grent in grp.getgrall()
                     if pwent.pw_name in grent.gr_mem]
    return [pwent.pw_gid] + supplementary
def _testPermissions(uid, gid, spath, mode='r'):
    """
    Check whether the given credentials may access a path.
    @type uid: C{int}
    @param uid: numeric user id
    @type gid: C{int}
    @param gid: numeric group id
    @type spath: C{str}
    @param spath: the path on the server to test
    @type mode: C{str}
    @param mode: 'r' or 'w' (read or write)
    @rtype: C{bool}
    @return: True if the given credentials have the specified form of
        access to the given path
    """
    # Select the permission bits and os.access() mode for the request.
    # (Named to avoid shadowing the grp module used elsewhere.)
    if mode == 'r':
        userBit, groupBit, otherBit = stat.S_IRUSR, stat.S_IRGRP, stat.S_IROTH
        accessMode = os.R_OK
    elif mode == 'w':
        userBit, groupBit, otherBit = stat.S_IWUSR, stat.S_IWGRP, stat.S_IWOTH
        accessMode = os.W_OK
    else:
        raise ValueError("Invalid mode %r: must specify 'r' or 'w'" % (mode,))
    access = False
    if os.path.exists(spath):
        if uid == 0:
            # root may do anything
            access = True
        else:
            st = os.stat(spath)
            if userBit & st.st_mode and uid == st.st_uid:
                access = True
            elif groupBit & st.st_mode and gid in _getgroups(uid):
                access = True
            elif otherBit & st.st_mode:
                access = True
    if access:
        # The mode bits may grant access the process itself cannot use
        # (e.g. we are running as a different UID); double-check.
        if not os.access(spath, accessMode):
            access = False
            log.msg("Filesystem grants permission to UID %d but it is inaccessible to me running as UID %d" % (
                uid, os.getuid()))
    return access
class FTPAnonymousShell(object):
"""
An anonymous implementation of IFTPShell
@type filesystemRoot: L{twisted.python.filepath.FilePath}
@ivar filesystemRoot: The path which is considered the root of
this shell.
"""
implements(IFTPShell)
def __init__(self, filesystemRoot):
self.filesystemRoot = filesystemRoot
def _path(self, path):
return reduce(filepath.FilePath.child, path, self.filesystemRoot)
def makeDirectory(self, path):
return defer.fail(AnonUserDeniedError())
def removeDirectory(self, path):
return defer.fail(AnonUserDeniedError())
def removeFile(self, path):
return defer.fail(AnonUserDeniedError())
def rename(self, fromPath, toPath):
return defer.fail(AnonUserDeniedError())
def receive(self, path):
path = self._path(path)
return defer.fail(AnonUserDeniedError())
def openForReading(self, path):
"""
Open C{path} for reading.
@param path: The path, as a list of segments, to open.
@type path: C{list} of C{unicode}
@return: A L{Deferred} is returned that will fire with an object
implementing L{IReadFile} if the file is successfully opened. If
C{path} is a directory, or if an exception is raised while trying
to open the file, the L{Deferred} will fire with an error.
"""
p = self._path(path)
if p.isdir():
# Normally, we would only check for EISDIR in open, but win32
# returns EACCES in this case, so we check before
return defer.fail(IsADirectoryError(path))
try:
f = p.open('r')
except (IOError, OSError), e:
return errnoToFailure(e.errno, path)
except:
return defer.fail()
else:
return defer.succeed(_FileReader(f))
def openForWriting(self, path):
"""
Reject write attempts by anonymous users with
L{PermissionDeniedError}.
"""
return defer.fail(PermissionDeniedError("STOR not allowed"))
def access(self, path):
p = self._path(path)
if not p.exists():
# Again, win32 doesn't report a sane error after, so let's fail
# early if we can
return defer.fail(FileNotFoundError(path))
# For now, just see if we can os.listdir() it
try:
p.listdir()
except (IOError, OSError), e:
return errnoToFailure(e.errno, path)
except:
return defer.fail()
else:
return defer.succeed(None)
def stat(self, path, keys=()):
p = self._path(path)
if p.isdir():
try:
statResult = self._statNode(p, keys)
except (IOError, OSError), e:
return errnoToFailure(e.errno, path)
except:
return defer.fail()
else:
return defer.succeed(statResult)
else:
return self.list(path, keys).addCallback(lambda res: res[0][1])
def list(self, path, keys=()):
"""
Return the list of files at given C{path}, adding C{keys} stat
informations if specified.
@param path: the directory or file to check.
@type path: C{str}
@param keys: the list of desired metadata
@type keys: C{list} of C{str}
"""
filePath = self._path(path)
if filePath.isdir():
entries = filePath.listdir()
fileEntries = [filePath.child(p) for p in entries]
elif filePath.isfile():
entries = [os.path.join(*filePath.segmentsFrom(self.filesystemRoot))]
fileEntries = [filePath]
else:
return defer.fail(FileNotFoundError(path))
results = []
for fileName, filePath in zip(entries, fileEntries):
ent = []
results.append((fileName, ent))
if keys:
try:
ent.extend(self._statNode(filePath, keys))
except (IOError, OSError), e:
return errnoToFailure(e.errno, fileName)
except:
return defer.fail()
return defer.succeed(results)
def _statNode(self, filePath, keys):
"""
Shortcut method to get stat info on a node.
@param filePath: the node to stat.
@type filePath: C{filepath.FilePath}
@param keys: the stat keys to get.
@type keys: C{iterable}
"""
filePath.restat()
return [getattr(self, '_stat_' + k)(filePath.statinfo) for k in keys]
_stat_size = operator.attrgetter('st_size')
_stat_permissions = operator.attrgetter('st_mode')
_stat_hardlinks = operator.attrgetter('st_nlink')
_stat_modified = operator.attrgetter('st_mtime')
def _stat_owner(self, st):
if pwd is not None:
try:
return pwd.getpwuid(st.st_uid)[0]
except KeyError:
pass
return str(st.st_uid)
def _stat_group(self, st):
if grp is not None:
try:
return grp.getgrgid(st.st_gid)[0]
except KeyError:
pass
return str(st.st_gid)
def _stat_directory(self, st):
return bool(st.st_mode & stat.S_IFDIR)
class _FileReader(object):
    """
    An L{IReadFile} provider wrapping an already-open file object.  The
    file is sent at most once and is closed when the transfer Deferred
    fires, whether it succeeds or fails.
    """
    implements(IReadFile)
    def __init__(self, fObj):
        self.fObj = fObj
        # Guards send() against being invoked a second time.
        self._send = False
    def _close(self, passthrough):
        # Fired from the transfer Deferred (success or failure): close the
        # underlying file and forward the result unchanged.
        self._send = True
        self.fObj.close()
        return passthrough
    def send(self, consumer):
        """
        Stream the file's contents to C{consumer}; may only be called once.
        """
        assert not self._send, "Can only call IReadFile.send *once* per instance"
        self._send = True
        d = basic.FileSender().beginFileTransfer(self.fObj, consumer)
        d.addBoth(self._close)
        return d
class FTPShell(FTPAnonymousShell):
    """
    An authenticated implementation of L{IFTPShell}.
    Extends the anonymous shell with the write operations it denies,
    translating OS errors to FTP failures via C{errnoToFailure}.
    """
    def makeDirectory(self, path):
        """
        Create the directory (and any missing parents) at C{path}.
        """
        p = self._path(path)
        try:
            p.makedirs()
        except (IOError, OSError), e:
            return errnoToFailure(e.errno, path)
        except:
            return defer.fail()
        else:
            return defer.succeed(None)
    def removeDirectory(self, path):
        """
        Remove the directory at C{path}; fails for non-directories.
        """
        p = self._path(path)
        if p.isfile():
            # Win32 returns the wrong errno when rmdir is called on a file
            # instead of a directory, so as we have the info here, let's fail
            # early with a pertinent error
            return defer.fail(IsNotADirectoryError(path))
        try:
            os.rmdir(p.path)
        except (IOError, OSError), e:
            return errnoToFailure(e.errno, path)
        except:
            return defer.fail()
        else:
            return defer.succeed(None)
    def removeFile(self, path):
        """
        Remove the file at C{path}; fails for directories.
        """
        p = self._path(path)
        if p.isdir():
            # Win32 returns the wrong errno when remove is called on a
            # directory instead of a file, so as we have the info here,
            # let's fail early with a pertinent error
            return defer.fail(IsADirectoryError(path))
        try:
            p.remove()
        except (IOError, OSError), e:
            return errnoToFailure(e.errno, path)
        except:
            return defer.fail()
        else:
            return defer.succeed(None)
    def rename(self, fromPath, toPath):
        """
        Rename C{fromPath} to C{toPath} with C{os.rename}.
        """
        fp = self._path(fromPath)
        tp = self._path(toPath)
        try:
            os.rename(fp.path, tp.path)
        except (IOError, OSError), e:
            return errnoToFailure(e.errno, fromPath)
        except:
            return defer.fail()
        else:
            return defer.succeed(None)
    def openForWriting(self, path):
        """
        Open C{path} for writing.
        @param path: The path, as a list of segments, to open.
        @type path: C{list} of C{unicode}
        @return: A L{Deferred} is returned that will fire with an object
            implementing L{IWriteFile} if the file is successfully opened.  If
            C{path} is a directory, or if an exception is raised while trying
            to open the file, the L{Deferred} will fire with an error.
        """
        p = self._path(path)
        if p.isdir():
            # Normally, we would only check for EISDIR in open, but win32
            # returns EACCES in this case, so we check before
            return defer.fail(IsADirectoryError(path))
        try:
            fObj = p.open('w')
        except (IOError, OSError), e:
            return errnoToFailure(e.errno, path)
        except:
            return defer.fail()
        return defer.succeed(_FileWriter(fObj))
class _FileWriter(object):
    """
    An L{IWriteFile} provider wrapping an already-open file object; hands
    the file to a L{FileConsumer} exactly once.
    """
    implements(IWriteFile)
    def __init__(self, fObj):
        self.fObj = fObj
        # Guards receive() against being invoked a second time.
        self._receive = False
    def receive(self):
        """
        Return (in a fired Deferred) a consumer that writes to the file;
        may only be called once.
        """
        assert not self._receive, "Can only call IWriteFile.receive *once* per instance"
        self._receive = True
        # FileConsumer will close the file object
        return defer.succeed(FileConsumer(self.fObj))
    def close(self):
        """No post-write work is needed; fire immediately."""
        return defer.succeed(None)
class BaseFTPRealm:
    """
    Base class for simple FTP realms which provides an easy hook for specifying
    the home directory for each user.
    Subclasses override L{getHomeDirectory}; anonymous logins are served
    from C{anonymousRoot}.
    """
    implements(portal.IRealm)
    def __init__(self, anonymousRoot):
        self.anonymousRoot = filepath.FilePath(anonymousRoot)
    def getHomeDirectory(self, avatarId):
        """
        Return a L{FilePath} representing the home directory of the given
        avatar.  Override this in a subclass.
        @param avatarId: A user identifier returned from a credentials checker.
        @type avatarId: C{str}
        @rtype: L{FilePath}
        """
        raise NotImplementedError(
            "%r did not override getHomeDirectory" % (self.__class__,))
    def requestAvatar(self, avatarId, mind, *interfaces):
        """
        Produce an L{IFTPShell} avatar: an anonymous shell for anonymous
        logins, otherwise an authenticated shell rooted at the user's
        home directory.  Only L{IFTPShell} is supported.
        """
        for requested in interfaces:
            if requested is not IFTPShell:
                continue
            if avatarId is checkers.ANONYMOUS:
                shell = FTPAnonymousShell(self.anonymousRoot)
            else:
                shell = FTPShell(self.getHomeDirectory(avatarId))
            logout = getattr(shell, 'logout', lambda: None)
            return (IFTPShell, shell, logout)
        raise NotImplementedError(
            "Only IFTPShell interface is supported by this realm")
class FTPRealm(BaseFTPRealm):
    """
    @type anonymousRoot: L{twisted.python.filepath.FilePath}
    @ivar anonymousRoot: Root of the filesystem to which anonymous
    users will be granted access.
    @type userHome: L{filepath.FilePath}
    @ivar userHome: Root of the filesystem containing user home directories.
    """
    def __init__(self, anonymousRoot, userHome='/home'):
        """
        @param anonymousRoot: directory served to anonymous users.
        @param userHome: directory whose children are user homes.
        """
        BaseFTPRealm.__init__(self, anonymousRoot)
        self.userHome = filepath.FilePath(userHome)
    def getHomeDirectory(self, avatarId):
        """
        Use C{avatarId} as a single path segment to construct a child of
        C{self.userHome} and return that child.
        """
        return self.userHome.child(avatarId)
class SystemFTPRealm(BaseFTPRealm):
    """
    L{SystemFTPRealm} uses system user account information to decide what the
    home directory for a particular avatarId is.
    This works on POSIX but probably is not reliable on Windows.
    """
    def getHomeDirectory(self, avatarId):
        """
        Return the system-defined home directory of the system user account with
        the name C{avatarId}.
        @raise cred_error.UnauthorizedLogin: if no such account exists.
        """
        path = os.path.expanduser('~' + avatarId)
        # expanduser leaves its argument untouched (still starting with
        # '~') when the account does not exist; treat that as a failed
        # login rather than handing out a bogus path.
        if path.startswith('~'):
            raise cred_error.UnauthorizedLogin()
        return filepath.FilePath(path)
# --- FTP CLIENT -------------------------------------------------------------
####
# And now for the client...
# Notes:
# * Reference: http://cr.yp.to/ftp.html
# * FIXME: Does not support pipelining (which is not supported by all
# servers anyway). This isn't a functionality limitation, just a
# small performance issue.
# * Only has a rudimentary understanding of FTP response codes (although
# the full response is passed to the caller if they so choose).
# * Assumes that USER and PASS should always be sent
# * Always sets TYPE I (binary mode)
# * Doesn't understand any of the weird, obscure TELNET stuff (\377...)
# * FIXME: Doesn't share any code with the FTPServer
class ConnectionLost(FTPError):
    """
    The FTP connection was lost.
    """
    pass
class CommandFailed(FTPError):
    """
    The server replied with a failure response to a command.
    """
    pass
class BadResponse(FTPError):
    """
    The server's response could not be understood.
    """
    pass
class UnexpectedResponse(FTPError):
    """
    The server sent a response that was not expected at this point.
    """
    pass
class UnexpectedData(FTPError):
    """
    Data arrived on a connection where none was expected.
    """
    pass
class FTPCommand:
    """
    A single queued FTP client command: its wire text, a Deferred for the
    server's response, and queueing/visibility flags.
    """
    def __init__(self, text=None, public=0):
        self.text = text
        self.deferred = defer.Deferred()
        # Whether the command may be sent (1 here; callers may clear it).
        self.ready = 1
        # Whether self.deferred was handed out to external code; only
        # public commands get their Deferred errbacked on failure.
        self.public = public
        self.transferDeferred = None
    def fail(self, failure):
        """
        Errback the command's Deferred with C{failure}, but only if the
        Deferred is public (visible to external callers).
        """
        if self.public:
            self.deferred.errback(failure)
class ProtocolWrapper(protocol.Protocol):
    """
    Wraps another protocol, forwarding all events to it, and fires a
    Deferred when the connection is lost (i.e. the transfer has finished).
    """
    def __init__(self, original, deferred):
        self.original = original
        self.deferred = deferred
    def makeConnection(self, transport):
        self.original.makeConnection(transport)
    def dataReceived(self, data):
        self.original.dataReceived(data)
    def connectionLost(self, reason):
        self.original.connectionLost(reason)
        # Signal that transfer has completed
        self.deferred.callback(None)
class SenderProtocol(protocol.Protocol):
    """
    Send-only data-connection protocol used for uploads (STOR): exposes an
    IFinishableConsumer so callers can write data and then call finish().
    """
    implements(interfaces.IFinishableConsumer)
    def __init__(self):
        # Fired upon connection
        self.connectedDeferred = defer.Deferred()
        # Fired upon disconnection
        self.deferred = defer.Deferred()
    #Protocol stuff
    def dataReceived(self, data):
        # This connection is upload-only; any inbound data is a protocol
        # violation.
        raise UnexpectedData(
            "Received data from the server on a "
            "send-only data-connection"
        )
    def makeConnection(self, transport):
        protocol.Protocol.makeConnection(self, transport)
        self.connectedDeferred.callback(self)
    def connectionLost(self, reason):
        # A clean close means the upload finished; anything else is an error.
        if reason.check(error.ConnectionDone):
            self.deferred.callback('connection done')
        else:
            self.deferred.errback(reason)
    #IFinishableConsumer stuff
    def write(self, data):
        self.transport.write(data)
    def registerProducer(self, producer, streaming):
        """
        Register the given producer with our transport.
        """
        self.transport.registerProducer(producer, streaming)
    def unregisterProducer(self):
        """
        Unregister the previously registered producer.
        """
        self.transport.unregisterProducer()
    def finish(self):
        """Close the data connection, indicating the upload is complete."""
        self.transport.loseConnection()
def decodeHostPort(line):
    """
    Decode an FTP response specifying a host and port (the six
    comma-separated numbers of a PASV-style reply).

    @return: a 2-tuple of (host, port).
    @raise ValueError: if a number is outside 0-255 or the line does not
        contain exactly six numbers.
    """
    # Keep only digits, commas and spaces, then parse the numeric fields.
    cleaned = re.sub('[^0-9, ]', '', line)
    octets = [int(field.strip()) for field in cleaned.split(',')]
    for value in octets:
        if value < 0 or value > 255:
            raise ValueError("Out of range", line, value)
    a, b, c, d, e, f = octets
    return ("%s.%s.%s.%s" % (a, b, c, d), (int(e) << 8) + int(f))
def encodeHostPort(host, port):
    """
    Encode a host and port as the six comma-separated numbers used by the
    FTP PORT command (inverse of L{decodeHostPort}).
    """
    fields = host.split('.')
    fields.append(str(port >> 8))
    fields.append(str(port % 256))
    return ','.join(fields)
def _unwrapFirstError(failure):
    # DeferredLists created with fireOnOneErrback wrap the real failure in
    # a FirstError; extract and return the underlying sub-failure so
    # callers see the original error.
    failure.trap(defer.FirstError)
    return failure.value.subFailure
class FTPDataPortFactory(protocol.ServerFactory):
    """Factory for data connections that use the PORT command
    (i.e. "active" transfers)
    """
    noisy = 0
    def buildProtocol(self, addr):
        """
        Hand out the pre-built protocol and stop listening — exactly one
        inbound data connection is expected per PORT command.
        """
        # This is a bit hackish -- we already have a Protocol instance,
        # so just return it instead of making a new one
        # FIXME: Reject connections from the wrong address/port
        #        (potential security problem)
        self.protocol.factory = self
        self.port.loseConnection()
        return self.protocol
class FTPClientBasic(basic.LineReceiver):
    """
    Foundations of an FTP client.

    Maintains a FIFO queue of L{FTPCommand} objects and matches each
    (possibly multi-line) server response to the command at the head of
    the queue.
    """
    debug = False
    def __init__(self):
        # Commands waiting to be sent, in order.
        self.actionQueue = []
        # The server's welcome banner, once received.
        self.greeting = None
        # Deferred awaiting the response currently in flight; the very
        # first response from the server is the greeting.
        self.nextDeferred = defer.Deferred().addCallback(self._cb_greeting)
        self.nextDeferred.addErrback(self.fail)
        # Accumulates the lines of the response in progress.
        self.response = []
        self._failed = 0
    def fail(self, error):
        """
        Give an error to any queued deferreds.
        """
        self._fail(error)
    def _fail(self, error):
        """
        Errback all queued deferreds.
        """
        if self._failed:
            # We're recursing; bail out here for simplicity
            return error
        self._failed = 1
        if self.nextDeferred:
            try:
                self.nextDeferred.errback(failure.Failure(ConnectionLost('FTP connection lost', error)))
            except defer.AlreadyCalledError:
                pass
        for ftpCommand in self.actionQueue:
            ftpCommand.fail(failure.Failure(ConnectionLost('FTP connection lost', error)))
        return error
    def _cb_greeting(self, greeting):
        # Store the server's welcome message for later inspection.
        self.greeting = greeting
    def sendLine(self, line):
        """
        (Private) Sends a line, unless line is None.
        """
        if line is None:
            return
        basic.LineReceiver.sendLine(self, line)
    def sendNextCommand(self):
        """
        (Private) Processes the next command in the queue.
        """
        ftpCommand = self.popCommandQueue()
        if ftpCommand is None:
            self.nextDeferred = None
            return
        if not ftpCommand.ready:
            # The command isn't fully prepared yet (e.g. a PORT command
            # still waiting for its listening socket); requeue and retry.
            self.actionQueue.insert(0, ftpCommand)
            reactor.callLater(1.0, self.sendNextCommand)
            self.nextDeferred = None
            return
        # FIXME: this if block doesn't belong in FTPClientBasic, it belongs in
        # FTPClient.
        if ftpCommand.text == 'PORT':
            self.generatePortCommand(ftpCommand)
        if self.debug:
            log.msg('<-- %s' % ftpCommand.text)
        self.nextDeferred = ftpCommand.deferred
        self.sendLine(ftpCommand.text)
    def queueCommand(self, ftpCommand):
        """
        Add an FTPCommand object to the queue.
        If it's the only thing in the queue, and we are connected and we aren't
        waiting for a response of an earlier command, the command will be sent
        immediately.
        @param ftpCommand: an L{FTPCommand}
        """
        self.actionQueue.append(ftpCommand)
        if (len(self.actionQueue) == 1 and self.transport is not None and
            self.nextDeferred is None):
            self.sendNextCommand()
    def queueStringCommand(self, command, public=1):
        """
        Queues a string to be issued as an FTP command
        @param command: string of an FTP command to queue
        @param public: a flag intended for internal use by FTPClient.  Don't
            change it unless you know what you're doing.
        @return: a L{Deferred} that will be called when the response to the
            command has been received.
        """
        ftpCommand = FTPCommand(command, public)
        self.queueCommand(ftpCommand)
        return ftpCommand.deferred
    def popCommandQueue(self):
        """
        Return the front element of the command queue, or None if empty.
        """
        if self.actionQueue:
            return self.actionQueue.pop(0)
        else:
            return None
    def queueLogin(self, username, password):
        """
        Login: send the username, send the password.
        If the password is C{None}, the PASS command won't be sent.  Also, if
        the response to the USER command has a response code of 230 (User
        logged in), then PASS won't be sent either.
        """
        # Prepare the USER command
        deferreds = []
        userDeferred = self.queueStringCommand('USER ' + username, public=0)
        deferreds.append(userDeferred)
        # Prepare the PASS command (if a password is given)
        if password is not None:
            passwordCmd = FTPCommand('PASS ' + password, public=0)
            self.queueCommand(passwordCmd)
            deferreds.append(passwordCmd.deferred)
            # Avoid sending PASS if the response to USER is 230.
            # (ref: http://cr.yp.to/ftp/user.html#user)
            # BUGFIX: this callback is now attached only when a PASS command
            # was actually queued.  Previously it was attached
            # unconditionally, so a 230 reply to USER with password=None
            # raised a NameError on the undefined passwordCmd.
            def cancelPasswordIfNotNeeded(response):
                if response[0].startswith('230'):
                    # No password needed!
                    self.actionQueue.remove(passwordCmd)
                return response
            userDeferred.addCallback(cancelPasswordIfNotNeeded)
        # Error handling.
        for deferred in deferreds:
            # If something goes wrong, call fail
            deferred.addErrback(self.fail)
            # But also swallow the error, so we don't cause spurious errors
            deferred.addErrback(lambda x: None)
    def lineReceived(self, line):
        """
        (Private) Parses the response messages from the FTP server.
        """
        # Add this line to the current response
        if self.debug:
            log.msg('--> %s' % line)
        self.response.append(line)
        # Bail out if this isn't the last line of a response
        # The last line of response starts with 3 digits followed by a space
        codeIsValid = re.match(r'\d{3} ', line)
        if not codeIsValid:
            return
        code = line[0:3]
        # Ignore marks (1xx preliminary replies)
        if code[0] == '1':
            return
        # Check that we were expecting a response
        if self.nextDeferred is None:
            self.fail(UnexpectedResponse(self.response))
            return
        # Reset the response
        response = self.response
        self.response = []
        # Look for a success or error code, and call the appropriate callback
        if code[0] in ('2', '3'):
            # Success
            self.nextDeferred.callback(response)
        elif code[0] in ('4', '5'):
            # Failure
            self.nextDeferred.errback(failure.Failure(CommandFailed(response)))
        else:
            # This shouldn't happen unless something screwed up.
            log.msg('Server sent invalid response code %s' % (code,))
            self.nextDeferred.errback(failure.Failure(BadResponse(response)))
        # Run the next command
        self.sendNextCommand()
    def connectionLost(self, reason):
        self._fail(reason)
class _PassiveConnectionFactory(protocol.ClientFactory):
    """
    Client factory for passive (PASV) data connections: hands out a
    pre-built protocol instance and errbacks its deferred on connect
    failure.
    """
    noisy = False
    def __init__(self, protoInstance):
        self.protoInstance = protoInstance
    def buildProtocol(self, ignored):
        self.protoInstance.factory = self
        return self.protoInstance
    def clientConnectionFailed(self, connector, reason):
        # Surface the connect failure through the protocol's deferred so
        # the transfer's DeferredList sees it.
        e = FTPError('Connection Failed', reason)
        self.protoInstance.deferred.errback(e)
class FTPClient(FTPClientBasic):
    """
    L{FTPClient} is a client implementation of the FTP protocol which
    exposes FTP commands as methods which return L{Deferred}s.
    Each command method returns a L{Deferred} which is called back when a
    successful response code (2xx or 3xx) is received from the server or
    which is error backed if an error response code (4xx or 5xx) is received
    from the server or if a protocol violation occurs. If an error response
    code is received, the L{Deferred} fires with a L{Failure} wrapping a
    L{CommandFailed} instance. The L{CommandFailed} instance is created
    with a list of the response lines received from the server.
    See U{RFC 959<http://www.ietf.org/rfc/rfc959.txt>} for error code
    definitions.
    Both active and passive transfers are supported.
    @ivar passive: See description in __init__.
    """
    # Hook point so tests can substitute a different TCP connector for
    # passive data connections.
    connectFactory = reactor.connectTCP
    def __init__(self, username='anonymous',
                 password='twisted@twistedmatrix.com',
                 passive=1):
        """
        Constructor.
        I will login as soon as I receive the welcome message from the server.
        @param username: FTP username
        @param password: FTP password
        @param passive: flag that controls if I use active or passive data
            connections.  You can also change this after construction by
            assigning to C{self.passive}.
        """
        FTPClientBasic.__init__(self)
        self.queueLogin(username, password)
        self.passive = passive
    def fail(self, error):
        """
        Disconnect, and also give an error to any queued deferreds.
        """
        self.transport.loseConnection()
        self._fail(error)
    def receiveFromConnection(self, commands, protocol):
        """
        Retrieves a file or listing generated by the given command,
        feeding it to the given protocol.
        @param commands: list of strings of FTP commands to execute then
            receive the results of (e.g. C{LIST}, C{RETR})
        @param protocol: A L{Protocol} B{instance} e.g. an
            L{FTPFileListProtocol}, or something that can be adapted to one.
            Typically this will be an L{IConsumer} implementation.
        @return: L{Deferred}.
        """
        protocol = interfaces.IProtocol(protocol)
        wrapper = ProtocolWrapper(protocol, defer.Deferred())
        return self._openDataConnection(commands, wrapper)
    def queueLogin(self, username, password):
        """
        Login: send the username, send the password, and
        set retrieval mode to binary
        """
        FTPClientBasic.queueLogin(self, username, password)
        d = self.queueStringCommand('TYPE I', public=0)
        # If something goes wrong, call fail
        d.addErrback(self.fail)
        # But also swallow the error, so we don't cause spurious errors
        d.addErrback(lambda x: None)
    def sendToConnection(self, commands):
        """
        Open a data connection for sending (e.g. for STOR).
        @return: A tuple of two L{Deferred}s:
            - L{Deferred} L{IFinishableConsumer}. You must call
              the C{finish} method on the IFinishableConsumer when the file
              is completely transferred.
            - L{Deferred} list of control-connection responses.
        """
        s = SenderProtocol()
        r = self._openDataConnection(commands, s)
        return (s.connectedDeferred, r)
    def _openDataConnection(self, commands, protocol):
        """
        Set up a data connection (passive or active, per C{self.passive}),
        queue C{commands}, and return a DeferredList that fires when both
        the control-connection responses and the data transfer complete.
        """
        cmds = [FTPCommand(command, public=1) for command in commands]
        cmdsDeferred = defer.DeferredList([cmd.deferred for cmd in cmds],
                                          fireOnOneErrback=True, consumeErrors=True)
        cmdsDeferred.addErrback(_unwrapFirstError)
        if self.passive:
            # Hack: use a mutable object to sneak a variable out of the
            # scope of doPassive
            _mutable = [None]
            def doPassive(response):
                """Connect to the port specified in the response to PASV"""
                host, port = decodeHostPort(response[-1][4:])
                f = _PassiveConnectionFactory(protocol)
                _mutable[0] = self.connectFactory(host, port, f)
            pasvCmd = FTPCommand('PASV')
            self.queueCommand(pasvCmd)
            pasvCmd.deferred.addCallback(doPassive).addErrback(self.fail)
            results = [cmdsDeferred, pasvCmd.deferred, protocol.deferred]
            d = defer.DeferredList(results, fireOnOneErrback=True, consumeErrors=True)
            d.addErrback(_unwrapFirstError)
            # Ensure the connection is always closed
            def close(x, m=_mutable):
                m[0] and m[0].disconnect()
                return x
            d.addBoth(close)
        else:
            # We just place a marker command in the queue, and will fill in
            # the host and port numbers later (see generatePortCommand)
            portCmd = FTPCommand('PORT')
            # Ok, now we jump through a few hoops here.
            # This is the problem: a transfer is not to be trusted as complete
            # until we get both the "226 Transfer complete" message on the
            # control connection, and the data socket is closed.  Thus, we use
            # a DeferredList to make sure we only fire the callback at the
            # right time.
            portCmd.transferDeferred = protocol.deferred
            portCmd.protocol = protocol
            portCmd.deferred.addErrback(portCmd.transferDeferred.errback)
            self.queueCommand(portCmd)
            # Create dummy functions for the next callback to call.
            # These will also be replaced with real functions in
            # generatePortCommand.
            portCmd.loseConnection = lambda result: result
            portCmd.fail = lambda error: error
            # Ensure that the connection always gets closed
            cmdsDeferred.addErrback(lambda e, pc=portCmd: pc.fail(e) or e)
            results = [cmdsDeferred, portCmd.deferred, portCmd.transferDeferred]
            d = defer.DeferredList(results, fireOnOneErrback=True, consumeErrors=True)
            d.addErrback(_unwrapFirstError)
        for cmd in cmds:
            self.queueCommand(cmd)
        return d
    def generatePortCommand(self, portCmd):
        """
        (Private) Generates the text of a given PORT command.
        """
        # The problem is that we don't create the listening port until we need
        # it for various reasons, and so we have to muck about to figure out
        # what interface and port it's listening on, and then finally we can
        # create the text of the PORT command to send to the FTP server.
        # FIXME: This method is far too ugly.
        # FIXME: The best solution is probably to only create the data port
        #        once per FTPClient, and just recycle it for each new download.
        #        This should be ok, because we don't pipeline commands.
        # Start listening on a port
        factory = FTPDataPortFactory()
        factory.protocol = portCmd.protocol
        listener = reactor.listenTCP(0, factory)
        factory.port = listener
        # Ensure we close the listening port if something goes wrong
        def listenerFail(error, listener=listener):
            if listener.connected:
                listener.loseConnection()
            return error
        portCmd.fail = listenerFail
        # Construct crufty FTP magic numbers that represent host & port
        host = self.transport.getHost().host
        port = listener.getHost().port
        portCmd.text = 'PORT ' + encodeHostPort(host, port)
    def escapePath(self, path):
        """
        Returns a FTP escaped path (replace newlines with nulls).
        """
        # Escape newline characters
        return path.replace('\n', '\0')
    def retrieveFile(self, path, protocol, offset=0):
        """
        Retrieve a file from the given path
        This method issues the 'RETR' FTP command.
        The file is fed into the given Protocol instance. The data connection
        will be passive if self.passive is set.
        @param path: path to file that you wish to receive.
        @param protocol: a L{Protocol} instance.
        @param offset: offset to start downloading from
        @return: L{Deferred}
        """
        cmds = ['RETR ' + self.escapePath(path)]
        if offset:
            # REST must precede RETR to resume from an offset.
            cmds.insert(0, ('REST ' + str(offset)))
        return self.receiveFromConnection(cmds, protocol)
    retr = retrieveFile
    def storeFile(self, path, offset=0):
        """
        Store a file at the given path.
        This method issues the 'STOR' FTP command.
        @param offset: offset to start uploading from (sent via REST).
        @return: A tuple of two L{Deferred}s:
            - L{Deferred} L{IFinishableConsumer}. You must call
              the C{finish} method on the IFinishableConsumer when the file
              is completely transferred.
            - L{Deferred} list of control-connection responses.
        """
        cmds = ['STOR ' + self.escapePath(path)]
        if offset:
            cmds.insert(0, ('REST ' + str(offset)))
        return self.sendToConnection(cmds)
    stor = storeFile
    def rename(self, pathFrom, pathTo):
        """
        Rename a file.
        This method issues the I{RNFR}/I{RNTO} command sequence to rename
        C{pathFrom} to C{pathTo}.
        @param pathFrom: the absolute path to the file to be renamed
        @type pathFrom: C{str}
        @param pathTo: the absolute path to rename the file to.
        @type pathTo: C{str}
        @return: A L{Deferred} which fires when the rename operation has
            succeeded or failed.  If it succeeds, the L{Deferred} is called
            back with a two-tuple of lists.  The first list contains the
            responses to the I{RNFR} command.  The second list contains the
            responses to the I{RNTO} command.  If either I{RNFR} or I{RNTO}
            fails, the L{Deferred} is errbacked with L{CommandFailed} or
            L{BadResponse}.
        @rtype: L{Deferred}
        @since: 8.2
        """
        renameFrom = self.queueStringCommand('RNFR ' + self.escapePath(pathFrom))
        renameTo = self.queueStringCommand('RNTO ' + self.escapePath(pathTo))
        fromResponse = []
        # Use a separate Deferred for the ultimate result so that Deferred
        # chaining can't interfere with its result.
        result = defer.Deferred()
        # Bundle up all the responses
        result.addCallback(lambda toResponse: (fromResponse, toResponse))
        def ebFrom(failure):
            # Make sure the RNTO doesn't run if the RNFR failed.
            self.popCommandQueue()
            result.errback(failure)
        # Save the RNFR response to pass to the result Deferred later
        renameFrom.addCallbacks(fromResponse.extend, ebFrom)
        # Hook up the RNTO to the result Deferred as well
        renameTo.chainDeferred(result)
        return result
    def list(self, path, protocol):
        """
        Retrieve a file listing into the given protocol instance.
        This method issues the 'LIST' FTP command.
        @param path: path to get a file listing for.
        @param protocol: a L{Protocol} instance, probably a
            L{FTPFileListProtocol} instance.  It can cope with most common
            file listing formats.
        @return: L{Deferred}
        """
        if path is None:
            path = ''
        return self.receiveFromConnection(['LIST ' + self.escapePath(path)], protocol)
    def nlst(self, path, protocol):
        """
        Retrieve a short file listing into the given protocol instance.
        This method issues the 'NLST' FTP command.
        NLST (should) return a list of filenames, one per line.
        @param path: path to get short file listing for.
        @param protocol: a L{Protocol} instance.
        """
        if path is None:
            path = ''
        return self.receiveFromConnection(['NLST ' + self.escapePath(path)], protocol)
    def cwd(self, path):
        """
        Issues the CWD (Change Working Directory) command. It's also
        available as changeDirectory, which parses the result.
        @return: a L{Deferred} that will be called when done.
        """
        return self.queueStringCommand('CWD ' + self.escapePath(path))
    def changeDirectory(self, path):
        """
        Change the directory on the server and parse the result to determine
        if it was successful or not.
        @type path: C{str}
        @param path: The path to which to change.
        @return: a L{Deferred} which will be called back when the directory
            change has succeeded or errbacked if an error occurs.
        """
        warnings.warn(
            "FTPClient.changeDirectory is deprecated in Twisted 8.2 and "
            "newer.  Use FTPClient.cwd instead.",
            category=DeprecationWarning,
            stacklevel=2)
        def cbResult(result):
            # 250 is the only success code for CWD.
            if result[-1][:3] != '250':
                return failure.Failure(CommandFailed(result))
            return True
        return self.cwd(path).addCallback(cbResult)
    def makeDirectory(self, path):
        """
        Make a directory
        This method issues the MKD command.
        @param path: The path to the directory to create.
        @type path: C{str}
        @return: A L{Deferred} which fires when the server responds.  If the
            directory is created, the L{Deferred} is called back with the
            server response.  If the server response indicates the directory
            was not created, the L{Deferred} is errbacked with a L{Failure}
            wrapping L{CommandFailed} or L{BadResponse}.
        @rtype: L{Deferred}
        @since: 8.2
        """
        return self.queueStringCommand('MKD ' + self.escapePath(path))
    def removeFile(self, path):
        """
        Delete a file on the server.
        L{removeFile} issues a I{DELE} command to the server to remove the
        indicated file.  Note that this command cannot remove a directory.
        @param path: The path to the file to delete.  May be relative to the
            current dir.
        @type path: C{str}
        @return: A L{Deferred} which fires when the server responds.  On error,
            it is errbacked with either L{CommandFailed} or L{BadResponse}.  On
            success, it is called back with a list of response lines.
        @rtype: L{Deferred}
        @since: 8.2
        """
        return self.queueStringCommand('DELE ' + self.escapePath(path))
    def cdup(self):
        """
        Issues the CDUP (Change Directory UP) command.
        @return: a L{Deferred} that will be called when done.
        """
        return self.queueStringCommand('CDUP')
    def pwd(self):
        """
        Issues the PWD (Print Working Directory) command.
        The L{getDirectory} does the same job but automatically parses the
        result.
        @return: a L{Deferred} that will be called when done.  It is up to the
            caller to interpret the response, but the L{parsePWDResponse}
            method in this module should work.
        """
        return self.queueStringCommand('PWD')
    def getDirectory(self):
        """
        Returns the current remote directory.
        @return: a L{Deferred} that will be called back with a C{str} giving
            the remote directory or which will errback with L{CommandFailed}
            if an error response is returned.
        """
        def cbParse(result):
            try:
                # The only valid code is 257
                if int(result[0].split(' ', 1)[0]) != 257:
                    raise ValueError
            except (IndexError, ValueError):
                return failure.Failure(CommandFailed(result))
            path = parsePWDResponse(result[0])
            if path is None:
                return failure.Failure(CommandFailed(result))
            return path
        return self.pwd().addCallback(cbParse)
    def quit(self):
        """
        Issues the I{QUIT} command.
        @return: A L{Deferred} that fires when the server acknowledges the
            I{QUIT} command.  The transport should not be disconnected until
            this L{Deferred} fires.
        """
        return self.queueStringCommand('QUIT')
class FTPFileListProtocol(basic.LineReceiver):
    """Parser for standard FTP file listings
    This is the evil required to match::
        -rw-r--r--   1 root     other        531 Jan 29 03:26 README
    If you need different evil for a wacky FTP server, you can
    override either C{fileLinePattern} or C{parseDirectoryLine()}.
    It populates the instance attribute self.files, which is a list containing
    dicts with the following keys (examples from the above line):
        - filetype: e.g. 'd' for directories, or '-' for an ordinary file
        - perms:    e.g. 'rw-r--r--'
        - nlinks:   e.g. 1
        - owner:    e.g. 'root'
        - group:    e.g. 'other'
        - size:     e.g. 531
        - date:     e.g. 'Jan 29 03:26'
        - filename: e.g. 'README'
        - linktarget: e.g. 'some/file'
    Note that the 'date' value will be formatted differently depending on the
    date.  Check U{http://cr.yp.to/ftp.html} if you really want to try to parse
    it.
    @ivar files: list of dicts describing the files in this listing
    """
    # Matches one ls-style listing line; named groups map directly to the
    # keys documented above.  Escaped spaces ('\ ') in filenames are
    # handled, and an optional ' -> target' suffix captures symlinks.
    fileLinePattern = re.compile(
        r'^(?P<filetype>.)(?P<perms>.{9})\s+(?P<nlinks>\d*)\s*'
        r'(?P<owner>\S+)\s+(?P<group>\S+)\s+(?P<size>\d+)\s+'
        r'(?P<date>...\s+\d+\s+[\d:]+)\s+(?P<filename>([^ ]|\\ )*?)'
        r'( -> (?P<linktarget>[^\r]*))?\r?$'
    )
    # Listings are newline-delimited (not the default \r\n).
    delimiter = '\n'
    def __init__(self):
        self.files = []
    def lineReceived(self, line):
        d = self.parseDirectoryLine(line)
        if d is None:
            self.unknownLine(line)
        else:
            self.addFile(d)
    def parseDirectoryLine(self, line):
        """Return a dictionary of fields, or None if line cannot be parsed.
        @param line: line of text expected to contain a directory entry
        @type line: str
        @return: dict
        """
        match = self.fileLinePattern.match(line)
        if match is None:
            return None
        else:
            d = match.groupdict()
            # Un-escape spaces and convert numeric fields.
            d['filename'] = d['filename'].replace(r'\ ', ' ')
            d['nlinks'] = int(d['nlinks'])
            d['size'] = int(d['size'])
            if d['linktarget']:
                d['linktarget'] = d['linktarget'].replace(r'\ ', ' ')
            return d
    def addFile(self, info):
        """Append file information dictionary to the list of known files.
        Subclasses can override or extend this method to handle file
        information differently without affecting the parsing of data
        from the server.
        @param info: dictionary containing the parsed representation
                     of the file information
        @type info: dict
        """
        self.files.append(info)
    def unknownLine(self, line):
        """Deal with received lines which could not be parsed as file
        information.
        Subclasses can override this to perform any special processing
        needed.
        @param line: unparsable line as received
        @type line: str
        """
        pass
def parsePWDResponse(response):
    """
    Return the path from a response to a PWD command.
    Responses typically look like::
        257 "/home/andrew" is current directory.
    For this example, C{'/home/andrew'} is returned.  If no quoted path can
    be found, C{None} is returned.
    """
    found = re.search('"(.*)"', response)
    return found.groups()[0] if found else None
|
eunchong/build
|
third_party/twisted_10_2/twisted/protocols/ftp.py
|
Python
|
bsd-3-clause
| 93,283
|
from pycp2k.inputsection import InputSection
class _each140(InputSection):
    """
    Generated EACH input section: one attribute per CP2K iteration-level
    keyword, all defaulting to None (i.e. not written to the input file).
    """
    def __init__(self):
        InputSection.__init__(self)
        self._name = "EACH"
        # (attribute name, CP2K keyword) pairs; attributes and the
        # _keywords mapping are built from this single table.
        pairs = [
            ('Just_energy', 'JUST_ENERGY'),
            ('Powell_opt', 'POWELL_OPT'),
            ('Qs_scf', 'QS_SCF'),
            ('Xas_scf', 'XAS_SCF'),
            ('Md', 'MD'),
            ('Pint', 'PINT'),
            ('Metadynamics', 'METADYNAMICS'),
            ('Geo_opt', 'GEO_OPT'),
            ('Rot_opt', 'ROT_OPT'),
            ('Cell_opt', 'CELL_OPT'),
            ('Band', 'BAND'),
            ('Ep_lin_solver', 'EP_LIN_SOLVER'),
            ('Spline_find_coeffs', 'SPLINE_FIND_COEFFS'),
            ('Replica_eval', 'REPLICA_EVAL'),
            ('Bsse', 'BSSE'),
            ('Shell_opt', 'SHELL_OPT'),
            ('Tddft_scf', 'TDDFT_SCF'),
        ]
        self._keywords = {}
        for attr, keyword in pairs:
            setattr(self, attr, None)
            self._keywords[attr] = keyword
|
SINGROUP/pycp2k
|
pycp2k/classes/_each140.py
|
Python
|
lgpl-3.0
| 1,114
|
# -*- coding: utf-8 -*-
import os
import time
import struct
import json
import requests
from requests import exceptions
from werkzeug.urls import url_fix
from collections import OrderedDict
from pysteamkit.protobuf import steammessages_clientserver_pb2
from pysteamkit.steam_base import EMsg, EResult
from pysteamkit.steam3.client import SteamClient
from pysteamkit.steam3 import msg_base
from pysteamkit.util import Util
from CSGOproto import csgo_base, gcsdk_gcmessages_pb2, cstrike15_gcmessages_pb2
from gevent import sleep, Timeout
# Steam wallet currency table: currency code -> (Steam currency id, display
# symbol prefixed/used when formatting prices).  Insertion order is
# preserved via OrderedDict.
CURRENCY = OrderedDict([("USD", (1, u"$")),
                        ("GBP", (2, u"£")),
                        ("EUR", (3, u"€")),
                        ("CHF", (4, u"CHF")),
                        ("RUB", (5, u"pуб")),
                        ("BRL", (7, u"R$")),
                        ("JPY", (8, u"¥")),
                        ("SEK", (9, u"kr")),
                        ("IDR", (10, u"Rp")),
                        ("MYR", (11, u"RM")),
                        ("PHP", (12, u"P")),
                        ("SGD", (13, u"S$")),
                        ("THB", (14, u"฿")),
                        ("KRW", (16, u"₩")),
                        ("TRY", (17, u"TL")),
                        ("MXN", (19, u"Mex$")),
                        ("CAD", (20, u"CDN$")),
                        ("NZD", (22, u"NZ$")),
                        ("CNY", (23, u"¥")),
                        ("INR", (24, u"₹")),
                        ("CLP", (25, u"CLP$")),
                        ("PEN", (26, u"S")),
                        ("COP", (27, u"COL$")),
                        ("ZAR", (28, u"R")),
                        ("HKD", (29, u"HK$")),
                        ("TWD", (30, u"NT$")),
                        ("SRD", (31, u"SR")),
                        ("AED", (32, u"AED"))])
# Module-level logging verbosity read from settings.txt ("logging=<int>").
# Per logEvent and the handle_message/gcSend checks below: 0 disables
# logging, >0 enables it, 2 adds per-message tracing.
# NOTE(review): this integer shadows the stdlib `logging` module name;
# renaming would require touching every reader in this file, so it is
# only flagged here.
logging = 0
try:
    with open('settings.txt', 'r') as settings:
        for line in settings.readlines():
            if line.startswith('logging='):
                logging = int(line.replace('logging=', ''))
except IOError:
    # No settings file: fall back to logging enabled.
    logging = 1
def logEvent(text, forced=False):
    """
    Append a timestamped line to log.txt when logging is enabled, or
    unconditionally when C{forced} is true.
    """
    if logging <= 0 and not forced:
        return
    t = time.strftime(">%H:%M:%S")
    with open('log.txt', 'a') as logfile:
        logfile.write('%s: %s\n' % (t, text))
class SteamClientHandler(object):
    """
    Callback handler given to L{SteamClient}: performs login (including
    Steam Guard auth codes and sentry files) and automatic reconnection.
    """
    def __init__(self, messageHandler):
        # messageHandler: the owning User object; holds username/password
        # and the SteamClient instance.
        self.messageHandler = messageHandler
        self.auth_code = None
        self.two_factor_code = None
        self.firstlogin = False
    def try_initialize_connection(self, client):
        """
        Attempt to log in on a fresh connection.
        @return: True on success, otherwise the EResult code from logon.
        """
        logEvent('Trying to initialize...')
        if not self.get_sentry_file(self.messageHandler.username):
            self.firstlogin = True
        logon_result = self.messageHandler.client.login(self.messageHandler.username, self.messageHandler.password,
                                                        auth_code=self.auth_code, two_factor_code=self.two_factor_code)
        logEvent('Logon result ' + str(logon_result.eresult))
        if logon_result.eresult == EResult.AccountLogonDenied:
            client.disconnect()
            # Steam Guard enabled, Steam sent authentication code to email
        if logon_result.eresult != EResult.OK:
            return logon_result.eresult
        return True
    def get_sentry_file(self, username):
        """Return the cached sentry file bytes, or None if none exists."""
        filename = 'sentry_%s.bin' % (username,)
        if not os.path.exists(filename):
            return None
        with open(filename, 'rb') as f:
            return f.read()
    def store_sentry_file(self, username, sentryfile):
        """Persist the sentry file Steam sends after a first login."""
        # Sentry files are only stored for email-code accounts here, not
        # for two-factor (mobile authenticator) logins.
        if not self.two_factor_code:
            filename = 'sentry_%s.bin' % (username,)
            with open(filename, 'wb') as f:
                f.write(sentryfile)
    def handle_message(self, emsg, msg):
        # Verbose tracing only at logging level 2.
        if logging == 2:
            logEvent('Received message ' + str(Util.get_msg(emsg)))
            pass
    def handle_disconnected(self, client, user_reason):
        """
        Reconnect with a simple linear backoff unless the disconnect was
        user-initiated.  Returns True if reconnection succeeded.
        """
        if not user_reason:
            logEvent('Disconnected!')
            for x in range(5):
                time.sleep(x + 1)
                if client.initialize():
                    return True
        return False
class SteamGC(object):
    """
    Thin wrapper around a SteamClient that sends and decodes Game
    Coordinator (GC) messages for one app.
    """
    def __init__(self, client, appid):
        self.client = client
        self.appid = appid
    def gcSend(self, inputmsg):
        """
        Wrap a GC protobuf message in a ClientToGC envelope and send it
        over the Steam connection.
        """
        Type = inputmsg.header.emsg
        if logging == 2:
            logEvent('gcSend ' + str(Util.get_msg(Type)))
        message = msg_base.ProtobufMessage(steammessages_clientserver_pb2.CMsgGCClient, EMsg.ClientToGC)
        message.header.routing_appid = self.appid
        message.body.appid = self.appid
        # Protobuf-encoded GC message types carry the high bit set.
        message.body.msgtype = Type|0x80000000 if Util.is_proto(Type) else Type
        message.body.payload = inputmsg.serialize()
        self.client.connection.send_message(message)
    def gcFrom(self, data, protobufType):
        """
        Parse the payload of a GC envelope into C{protobufType} and return
        the decoded message body.
        """
        if logging == 2:
            logEvent('gcFrom ' + str(protobufType))
        message = msg_base.ProtobufMessage(protobufType)
        message.parse(data)
        return message.body
class CSGO(object):
    """
    Minimal CS:GO Game Coordinator session: registers GC message types,
    launches the game session, performs the hello handshake and requests
    item (econ/skin) preview data.
    """
    def __init__(self, gc=None):
        # CS:GO's Steam app id.
        self.appid = 730
        self.gc = gc
        self.gc.client.register_listener(self)
        # NOTE(review): this first registration uses
        # GCConnectionStatus_HAVE_SESSION as the message id for the
        # EconPreviewDataBlockRequest type — the request is actually sent
        # via gcSend below, so this mapping looks wrong/unused; confirm
        # against csgo_base before changing.
        self.gc.client.register_message(csgo_base.GCConnectionStatus.GCConnectionStatus_HAVE_SESSION,
                                        msg_base.ProtobufMessage,
                                        cstrike15_gcmessages_pb2.CMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockRequest)
        self.gc.client.register_message(csgo_base.ECSGOCMsg.k_EMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockResponse,
                                        msg_base.ProtobufMessage,
                                        cstrike15_gcmessages_pb2.CMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockResponse)
        self.gc.client.register_message(csgo_base.EGCBaseClientMsg.k_EMsgGCClientWelcome, msg_base.ProtobufMessage,
                                        gcsdk_gcmessages_pb2.CMsgClientWelcome)
        self.gc.client.register_message(EMsg.ClientPlayingSessionState, msg_base.ProtobufMessage,
                                        steammessages_clientserver_pb2.CMsgClientPlayingSessionState)
        self.gc.client.register_message(EMsg.ClientToGC, msg_base.ProtobufMessage,
                                        steammessages_clientserver_pb2.CMsgGCClient)
        self.gc.client.register_message(EMsg.ClientFromGC, msg_base.ProtobufMessage,
                                        steammessages_clientserver_pb2.CMsgGCClient)
    def handle_message(self, emsg, msg):
        emsg = Util.get_msg(emsg)
        if emsg == EMsg.ClientLoggedOff:
            # Game already started on account
            self.exit()
    def sendClientHello(self):
        """Send the GC hello and wait for the GC's reply envelope."""
        message = msg_base.ProtobufMessage(gcsdk_gcmessages_pb2.CMsgClientHello,
                                           csgo_base.EGCBaseClientMsg.k_EMsgGCClientHello)
        self.gc.gcSend(message)
        response = self.gc.client.wait_for_message(EMsg.ClientFromGC)
        return response
    def launch(self):
        """
        Tell Steam we are playing CS:GO and perform the GC handshake.
        @return: True if the GC answered with ClientWelcome, else False.
        """
        message = msg_base.ProtobufMessage(steammessages_clientserver_pb2.CMsgClientGamesPlayed, EMsg.ClientGamesPlayed)
        message.body.games_played.add(game_id=self.appid)
        self.gc.client.connection.send_message(message)
        # Give the GC a moment to come up before the hello.
        time.sleep(3)
        try:
            response = self.sendClientHello()
            if Util.get_msg(response.body.msgtype) == csgo_base.EGCBaseClientMsg.k_EMsgGCClientWelcome:
                logEvent('Launch successful!')
                return True
            else:
                logEvent('Tried to launch, but got ' + str(Util.get_msg(response.body.msgtype)))
                return False
        except Exception:
            logEvent('Not connected, client hello failed to respond. (This is normal on first attempt)')
            return False
    def requestEconData(self, param_a, param_d, param_s=0, param_m=0):
        """
        Request item preview (wear/seed/stickers) data from the GC.
        Exactly one of param_s / param_m should be non-zero, matching the
        S/M component of an inspect link.
        @return: the decoded econ data message, or an error string.
        """
        message = msg_base.ProtobufMessage(
            cstrike15_gcmessages_pb2.CMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockRequest,
            csgo_base.ECSGOCMsg.k_EMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockRequest)
        message.body.param_s = param_s  # SteamID
        message.body.param_a = param_a  # AssetID
        message.body.param_d = param_d
        message.body.param_m = param_m  # MarketID
        self.gc.gcSend(message)
        response = self.gc.client.wait_for_message(EMsg.ClientFromGC, timeout=5.0)
        if response == 'Timed Out':
            return 'Steam servers did not respond, your time delay is probably too small.'
        elif Util.get_msg(
                response.body.msgtype) == csgo_base.ECSGOCMsg.k_EMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockResponse:
            econData = self.gc.gcFrom(response.body.payload,
                                      cstrike15_gcmessages_pb2.CMsgGCCStrike15_v2_Client2GCEconPreviewDataBlockResponse)
            return econData
        else:
            return 'Response was not of type EconPreviewDataBlockResponse'
    def exit(self):
        """Tell Steam we stopped playing (empty games_played list)."""
        message = msg_base.ProtobufMessage(steammessages_clientserver_pb2.CMsgClientGamesPlayed, EMsg.ClientGamesPlayed)
        self.gc.client.connection.send_message(message)
class User(object):
    """
    Top-level session object tying together the SteamClient, the GC
    wrapper and the CSGO helper for one account.
    """
    def __init__(self):
        # Yield once so gevent can schedule before networking starts.
        sleep(0)
        self.username = None
        self.password = None
        self.client = SteamClient(SteamClientHandler(self))
        self.gc = SteamGC(self.client, 730)
        self.csgo = CSGO(self.gc)
    def login(self, username=None, password=None, authcode=None, two_factor_code=None):
        """
        Store credentials / Steam Guard codes and start the connection.
        @return: whatever SteamClient.initialize() returns.
        """
        if username:
            self.username = username
        if password:
            self.password = password
        if authcode:
            self.client.callback.auth_code = authcode
        if two_factor_code:
            self.client.callback.two_factor_code = two_factor_code
        return self.client.initialize()
    def setState(self, state):
        """Set the account's persona state (online/offline/etc.)."""
        message = msg_base.ProtobufMessage(steammessages_clientserver_pb2.CMsgClientChangeStatus, EMsg.ClientChangeStatus)
        message.body.persona_state = state
        self.client.connection.send_message(message)
    def disconnect(self):
        """Leave the game session and close the Steam connection."""
        self.csgo.exit()
        self.client.disconnect()
def getfloat(paintwear):
    """Reinterpret the 32-bit integer ``paintwear`` field as an IEEE-754 float.

    The game coordinator returns a skin's wear as the raw bit pattern of a
    32-bit float stored in an integer field; packing the integer and unpacking
    the same four bytes as a float recovers the wear value.

    Accepts the value either as a signed int (as the original code required)
    or as an unsigned 32-bit int (as protobuf uint32 fields deliver it) —
    both spellings of the same bit pattern yield the same float.

    Args:
        paintwear: 32-bit integer bit pattern of the wear float.

    Returns:
        float: the decoded wear value.
    """
    # Mask to 32 bits so unsigned values >= 2**31 (which the old 'i' format
    # rejected with struct.error) and their negative signed twins both map to
    # the identical byte pattern.
    buf = struct.pack('<I', paintwear & 0xFFFFFFFF)
    return struct.unpack('<f', buf)[0]
def getMarketItems(url, count, currency, start=0):
    """Fetch listings from a Steam Community Market page via its JSON endpoint.

    Args:
        url: Market listings page URL (scheme optional; http:// is prepended).
        count: Number of listings to request.
        currency: Key into the CURRENCY table; its numeric code is sent to Steam.
        start: Zero-based index of the first listing to request.

    Returns:
        On success: (OrderedDict mapping assetID -> [marketID, inspect link,
        formatted price string], number of already-sold listings skipped).
        On failure: (error message string, None).
    """
    if not url.startswith('http://') and not url.startswith('https://'):
        url = 'http://' + url
    url = url_fix(url)
    curr = CURRENCY[currency][0]
    # FIX: the previous revision's query string was mangled to '...¤cy='
    # (an HTML-entity artefact of '&curren' + 'cy'); the /render/ endpoint
    # expects the parameter name 'currency'.
    urlextender = '/render/?query=&start=%s&count=%s&currency=%s' % (start, count, curr)
    try:
        request = requests.get(url + urlextender)
    except requests.ConnectionError:
        return 'Could not connect. Check URL and make sure you can connect to the internet.', None
    except exceptions.InvalidURL:
        return 'URL is invalid, please check your market URL.', None
    if request.status_code == 404:
        return 'Could not connect to Steam. Retry in a few minutes and check URL.', None
    if len(request.text) < 1000:
        return 'Response from Steam contains no skin data, URL is probably invalid.', None
    if request.url != url + urlextender:
        return 'Page redirected to %s, so no skins were found. Check your market URL.' % request.url, None
    # The page embeds listing data as a JS object literal; slice out the value
    # of the "listinginfo" key (it is followed by the ,"assets": key).
    data = request.text.split('"listinginfo":')[1].split(',"assets":')[0]
    try:
        data = json.loads(data, object_pairs_hook=OrderedDict)
    except ValueError:
        return 'Response from Steam contains no skin data, URL is probably invalid.', None
    # assetID => [marketID, inspect link, formatted price]
    datadic = OrderedDict()
    soldcount = 0
    for marketID in data:
        try:
            # Prices are integer cents; combine price and fee, then insert the
            # decimal point (e.g. 1234 -> '12.34', 7 -> '0.07').
            price = int(data[marketID]['converted_price']) + int(data[marketID]['converted_fee'])
            padded = "%03d" % (price,)
            price = padded[0:-2] + '.' + padded[-2:]
        except KeyError:
            # Sold listings carry no price fields; count them and skip.
            price = 'SOLD'
            soldcount += 1
            continue  # Delete this line to keep SOLD ITEMS in the result
        link = data[marketID]['asset']['market_actions'][0]['link']
        assetID = data[marketID]['asset']['id']
        datadic[assetID] = [marketID, link.replace('%assetid%', assetID).replace('%listingid%', marketID), price]
    return datadic, soldcount
|
adamb70/CSGO-Market-Float-Finder
|
FloatGetter.py
|
Python
|
mit
| 12,393
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Nicolas Bessi, Guewen Baconnier
# Copyright 2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import netsvc
import logging
from openerp.osv.orm import TransientModel, fields
logger = logging.getLogger('credit.control.line.mailing')
class CreditCommunication(TransientModel):
    """Transient shell model aggregating credit control lines per partner and
    policy level, used as the base record for the email template and the
    summary report.

    (Original note: with the OpenERP 7 approach a browse record exists even
    when not saved.)
    """
    _name = "credit.control.communication"
    _description = "credit control communication"
    _rec_name = 'partner_id'
    # partner addressed, dunning level reached, aggregated credit lines,
    # plus the responsible company and user
    _columns = {'partner_id': fields.many2one('res.partner', 'Partner', required=True),
                'current_policy_level': fields.many2one('credit.control.policy.level',
                                                        'Level', required=True),
                'credit_control_line_ids': fields.many2many('credit.control.line',
                                                            rel='comm_credit_rel',
                                                            string='Credit Lines'),
                'company_id': fields.many2one('res.company', 'Company',
                                              required=True),
                'user_id': fields.many2one('res.users', 'User')}
    _defaults = {'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(
                     cr, uid, 'credit.control.policy', context=c),
                 'user_id': lambda s, cr, uid, c: uid}

    def get_email(self, cr, uid, com_id, context=None):
        """Return a valid email for customer (taken from the invoice contact)."""
        if isinstance(com_id, list):
            assert len(com_id) == 1, "get_email only support one id as parameter"
            com_id = com_id[0]
        form = self.browse(cr, uid, com_id, context=context)
        # NOTE(review): get_contact_address is declared below with
        # (cr, uid, com_id) parameters, yet it is invoked here on the browse
        # record without them -- confirm this call works on the targeted
        # OpenERP version.
        contact = form.get_contact_address()
        return contact.email

    def get_contact_address(self, cr, uid, com_id, context=None):
        """Return the partner's invoice address, falling back to the default
        address when no invoice address is configured."""
        pmod = self.pool['res.partner']
        if isinstance(com_id, list):
            com_id = com_id[0]
        form = self.browse(cr, uid, com_id, context=context)
        part = form.partner_id
        add_ids = part.address_get(adr_pref=['invoice']) or {}
        add_id = add_ids.get('invoice', add_ids.get('default', False))
        return pmod.browse(cr, uid, add_id, context)

    def _get_credit_lines(self, cr, uid, line_ids, partner_id, level_id, context=None):
        """Return credit lines related to a partner and a policy level,
        restricted to the candidate ``line_ids``."""
        cr_line_obj = self.pool.get('credit.control.line')
        cr_l_ids = cr_line_obj.search(cr,
                                      uid,
                                      [('id', 'in', line_ids),
                                       ('partner_id', '=', partner_id),
                                       ('policy_level_id', '=', level_id)],
                                      context=context)
        return cr_l_ids

    def _generate_comm_from_credit_line_ids(self, cr, uid, line_ids, context=None):
        """Aggregate credit control lines by partner, level, and currency.

        One communication record is created per aggregation; the created
        browse records are returned.
        """
        if not line_ids:
            return []
        comms = []
        # Distinct (partner, level, currency) triples for the given lines,
        # ordered by dunning level then currency.
        sql = ("SELECT distinct partner_id, policy_level_id, "
               " credit_control_line.currency_id, credit_control_policy_level.level"
               " FROM credit_control_line JOIN credit_control_policy_level "
               " ON (credit_control_line.policy_level_id = credit_control_policy_level.id)"
               " WHERE credit_control_line.id in %s"
               " ORDER by credit_control_policy_level.level, credit_control_line.currency_id")
        cr.execute(sql, (tuple(line_ids),))
        res = cr.dictfetchall()
        for level_assoc in res:
            data = {}
            # (6, 0, ids) replaces the m2m relation with the matching lines
            data['credit_control_line_ids'] = \
                [(6, 0, self._get_credit_lines(cr, uid, line_ids,
                                               level_assoc['partner_id'],
                                               level_assoc['policy_level_id'],
                                               context=context))]
            data['partner_id'] = level_assoc['partner_id']
            data['current_policy_level'] = level_assoc['policy_level_id']
            comm_id = self.create(cr, uid, data, context=context)
            comms.append(self.browse(cr, uid, comm_id, context=context))
        return comms

    def _generate_emails(self, cr, uid, comms, context=None):
        """Generate one email per communication using the template attached to
        its policy level; returns the created mail.mail ids."""
        cr_line_obj = self.pool.get('credit.control.line')
        email_temp_obj = self.pool.get('email.template')
        email_message_obj = self.pool.get('mail.mail')
        att_obj = self.pool.get('ir.attachment')
        email_ids = []
        # An email missing any of these fields cannot be sent.
        essential_fields = ['subject',
                            'body_html',
                            'email_from',
                            'email_to']
        for comm in comms:
            # we want to use a local cr in order to send the maximum
            # of email
            template = comm.current_policy_level.email_template_id.id
            email_values = {}
            cl_ids = [cl.id for cl in comm.credit_control_line_ids]
            email_values = email_temp_obj.generate_email(cr, uid,
                                                         template,
                                                         comm.id,
                                                         context=context)
            email_values['body_html'] = email_values['body']
            email_values['type'] = 'email'
            email_id = email_message_obj.create(cr, uid, email_values, context=context)
            state = 'sent'
            # The mail will not be sent; it will sit in the pool in an error
            # state. We still create it, link it to the credit control lines,
            # and put those in an `email_error` state so we note that there is
            # a problem with the email.
            if any(not email_values.get(field) for field in essential_fields):
                state = 'email_error'
            cr_line_obj.write(
                cr, uid, cl_ids,
                {'mail_message_id': email_id,
                 'state': state},
                context=context)
            # Re-create the template's attachments against the mail record.
            att_ids = []
            for att in email_values.get('attachments', []):
                attach_fname = att[0]
                attach_datas = att[1]
                data_attach = {
                    'name': attach_fname,
                    'datas': attach_datas,
                    'datas_fname': attach_fname,
                    'res_model': 'mail.mail',
                    'res_id': email_id,
                    'type': 'binary',
                }
                att_ids.append(att_obj.create(cr, uid, data_attach, context=context))
            email_message_obj.write(cr, uid, [email_id],
                                    {'attachment_ids': [(6, 0, att_ids)]},
                                    context=context)
            email_ids.append(email_id)
        return email_ids

    def _generate_report(self, cr, uid, comms, context=None):
        """Render the credit control summary report (mako template of the
        related policy) for the given communications and return its content."""
        service = netsvc.LocalService('report.credit_control_summary')
        ids = [x.id for x in comms]
        result, format = service.create(cr, uid, ids, {}, {})
        return result

    def _mark_credit_line_as_sent(self, cr, uid, comms, context=None):
        """Set every credit line of the given communications to state 'sent'
        and return the ids of the lines written."""
        line_ids = []
        for comm in comms:
            line_ids += [x.id for x in comm.credit_control_line_ids]
        l_obj = self.pool.get('credit.control.line')
        l_obj.write(cr, uid, line_ids, {'state': 'sent'}, context=context)
        return line_ids
|
yvaucher/account-financial-tools
|
__unported__/account_credit_control/wizard/credit_control_communication.py
|
Python
|
agpl-3.0
| 8,761
|
#!/usr/bin/env python
# Copyright (C) 2011-2019 Alexandre Gramfort
# <alexandre.gramfort@inria.fr>
import os
import os.path as op
from setuptools import setup
# get the version (don't import mne here, so dependencies are not needed)
version = None
with open(op.join('mne', '_version.py'), 'r') as fid:
    # Scan for the "__version__ = '...'" assignment and strip the quotes.
    for line in (line.strip() for line in fid):
        if line.startswith('__version__'):
            version = line.split('=')[1].strip().strip('\'')
            break
if version is None:
    raise RuntimeError('Could not determine version')

descr = """MNE python project for MEG and EEG data analysis."""

# Distribution metadata consumed by the setup() call below.
DISTNAME = 'mne'
DESCRIPTION = descr
MAINTAINER = 'Alexandre Gramfort'
MAINTAINER_EMAIL = 'alexandre.gramfort@inria.fr'
URL = 'https://mne.tools/dev/'
LICENSE = 'BSD (3-clause)'
DOWNLOAD_URL = 'http://github.com/mne-tools/mne-python'
VERSION = version
def package_tree(pkgroot):
    """Return every sub-package of *pkgroot* as a sorted list of dotted names."""
    # Adapted from VisPy: a directory is a package iff it holds __init__.py.
    base = op.dirname(__file__)
    packages = []
    for dirpath, _dirnames, filenames in os.walk(op.join(base, pkgroot)):
        if '__init__.py' in filenames:
            packages.append(op.relpath(dirpath, base).replace(op.sep, '.'))
    return sorted(packages)
if __name__ == "__main__":
    # distutils caches the sdist file list in MANIFEST; remove a stale copy so
    # the contents are recomputed from MANIFEST.in.
    if op.exists('MANIFEST'):
        os.remove('MANIFEST')

    # The README doubles as the long description shown on PyPI.
    with open('README.rst', 'r') as fid:
        long_description = fid.read()

    setup(name=DISTNAME,
          maintainer=MAINTAINER,
          include_package_data=True,
          maintainer_email=MAINTAINER_EMAIL,
          description=DESCRIPTION,
          license=LICENSE,
          url=URL,
          version=VERSION,
          download_url=DOWNLOAD_URL,
          long_description=long_description,
          long_description_content_type='text/x-rst',
          zip_safe=False,  # the package can run out of an .egg file
          classifiers=['Intended Audience :: Science/Research',
                       'Intended Audience :: Developers',
                       'License :: OSI Approved',
                       'Programming Language :: Python',
                       'Topic :: Software Development',
                       'Topic :: Scientific/Engineering',
                       'Operating System :: Microsoft :: Windows',
                       'Operating System :: POSIX',
                       'Operating System :: Unix',
                       'Operating System :: MacOS',
                       'Programming Language :: Python :: 3',
                       ],
          platforms='any',
          python_requires='>=3.6',
          install_requires=['numpy>=1.11.3', 'scipy>=0.17.1'],
          packages=package_tree('mne'),
          # Non-Python data files shipped inside the installed package.
          package_data={'mne': [
              op.join('data', '*.sel'),
              op.join('data', 'icos.fif.gz'),
              op.join('data', 'coil_def*.dat'),
              op.join('data', 'helmets', '*.fif.gz'),
              op.join('data', 'FreeSurferColorLUT.txt'),
              op.join('data', 'image', '*gif'),
              op.join('data', 'image', '*lout'),
              op.join('data', 'fsaverage', '*.fif'),
              op.join('channels', 'data', 'layouts', '*.lout'),
              op.join('channels', 'data', 'layouts', '*.lay'),
              op.join('channels', 'data', 'montages', '*.sfp'),
              op.join('channels', 'data', 'montages', '*.txt'),
              op.join('channels', 'data', 'montages', '*.elc'),
              op.join('channels', 'data', 'neighbors', '*.mat'),
              op.join('datasets', 'sleep_physionet', 'SHA1SUMS'),
              op.join('gui', 'help', '*.json'),
              op.join('html', '*.js'),
              op.join('html', '*.css'),
              op.join('icons', '*.svg'),
              op.join('io', 'artemis123', 'resources', '*.csv'),
              op.join('io', 'edf', 'gdf_encodes.txt')
          ]},
          # Console entry point: the `mne` command dispatches into mne.commands.
          entry_points={'console_scripts': [
              'mne = mne.commands.utils:main',
          ]})
|
Teekuningas/mne-python
|
setup.py
|
Python
|
bsd-3-clause
| 3,898
|
# -*- coding: utf-8 -*-
#!/usr/bin/python2.7
#description :This file creates a plot: Number of contributers and their cummulated percentage of contributions. This is calculated for object-creations and -edits ordered (DESC) by contributions
#author :Christopher Barron @ http://giscience.uni-hd.de/
#date :19.01.2013
#version :0.1
#usage :python pyscript.py
#==============================================================================
import psycopg2
from pylab import *
import matplotlib

# import db connection parameters
import db_conn_para as db

###
### Connect to database with psycopg2. Add arguments from parser to the connection-string
###

try:
    conn_string="dbname= %s user= %s host= %s password= %s" %(db.g_my_dbname, db.g_my_username, db.g_my_hostname, db.g_my_dbpassword)
    print "Connecting to database\n->%s" % (conn_string)
    # Establish the connection to the database via psycopg2
    conn = psycopg2.connect(conn_string)
    print "Connection to database was established succesfully"
except:
    # NOTE(review): bare except hides the real cause; consider catching
    # psycopg2.Error and printing/logging the exception details.
    print "Connection to database failed"

###
### Execute SQL query
###

# New cursor method for sql
cur = conn.cursor()

# Execute SQL query. For more than one row use three '"'
try:
    cur.execute("""
-- (1) created objects
-- Add up the percentage of created objects of every user beginning with the user with the most edits. Then user with 2nd most, ...
-- Start with first row of the partition (UNBOUNDED PRECEDING) and end with the current row (CURRENT ROW)
--
SELECT user_name, perc_accumul::float FROM
(SELECT user_name,
	created::int,
	perc_total,
	SUM(perc_total)
	OVER (ORDER BY perc_total DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS perc_accumul
FROM
-- Total amount of created objects per user and the percentage of all created within the database
	(SELECT
	DISTINCT(user_name) AS user_name,
	COUNT(id) AS created,
	COUNT(id) * 100.0 / (SELECT COUNT(id) FROM hist_plp WHERE version = 1 AND minor = 0) AS perc_total
	FROM
	hist_plp
	WHERE
	version = 1 AND minor = 0
	GROUP BY
	user_name) AS foo
) AS foo2;
	""")
    # Getting a list of tuples from the database-cursor (cur)
    data_tuples = []
    for row in cur:
        data_tuples.append(row)
except:
    # NOTE(review): bare except -- a failed query leaves data_tuples undefined
    # and the script crashes further down; consider re-raising.
    print "Query could not be executed"

# Datatypes of the returning data: column 1 (user_name) --> string, column 2 (perc_accumul) --> double
datatypes = [('col1', 'string'),('col2', 'double')]

# Data-tuple and datatype
data = np.array(data_tuples, dtype=datatypes)

# Accumulated creation percentages come from 'col2'
col1 = data['col1']
col2 = data['col2']

###
### Execute SQL query
###

# New cursor method for sql
cur2 = conn.cursor()

# Execute SQL query. For more than one row use three '"'
try:
    cur2.execute("""
-- (2) edited objects
-- Add up the percentage of edited objects of every user beginning with the user with the most edits. Then user with 2nd most, ...
-- Start with first row of the partition (UNBOUNDED PRECEDING) and end with the current row (CURRENT ROW)
--
SELECT user_name, perc_accumul::float FROM
(SELECT user_name,
	edits,
	perc_total,
	SUM(perc_total) OVER (ORDER BY perc_total DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS perc_accumul
FROM
-- Total amount of edited objects per user and the percentage of all edits within the database
	(SELECT
	DISTINCT(user_name) AS user_name,
	COUNT(id) AS edits,
	COUNT(id) * 100.0 / (SELECT COUNT(id) FROM hist_plp WHERE NOT (version = 1 AND minor = 0)) AS perc_total
	FROM
	hist_plp
	WHERE
	NOT (version = 1 AND minor = 0)
	GROUP BY
	user_name) AS foo
) AS foo2;
	""")
    # Getting a list of tuples from the database-cursor (cur2)
    data_tuples = []
    for row in cur2:
        data_tuples.append(row)
except:
    print "Query could not be executed"

# Datatypes of the returning data: column 1 (user_name) --> string, column 2 (perc_accumul) --> double
datatypes = [('col3', 'string'),('col4', 'double')]

# Data-tuple and datatype
data = np.array(data_tuples, dtype=datatypes)

# Accumulated edit percentages come from 'col4'
col3 = data['col3']
col4 = data['col4']

fig, ax = plt.subplots()
# set figure size
fig.set_size_inches(12,8)

###
### Plot (Line-Chart)
###

# Plot Line-Chart
plt.plot(col2, color = '#2dd700', linewidth=2, label = 'Created Features')
plt.plot(col4, color = '#FF6700', linewidth=2, label = 'Feature-Edits')

###
### START calculate tresholds
###

# Number of users who together account for <= 98% of all feature edits.
cur.execute("""
SELECT count(user_name) FROM
(SELECT user_name,
	edits,
	perc_total,
	SUM(perc_total) OVER (ORDER BY perc_total DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS perc_accumul
FROM
-- Total amount of edited objects per user and the percentage of all edits within the database
	(SELECT
	DISTINCT(user_name) AS user_name,
	COUNT(id) AS edits,
	COUNT(id) * 100.0 / (SELECT COUNT(id) FROM hist_plp WHERE NOT (version = 1 AND minor = 0)) AS perc_total
	FROM
	hist_plp
	WHERE
	NOT (version = 1 AND minor = 0)
	GROUP BY
	user_name) AS foo
) AS foo2
WHERE perc_accumul <= 98
;
""")
global tresh_edit
tresh_edit = cur.fetchone()[0]

# Number of users who together account for <= 98% of all feature creations.
cur.execute("""
SELECT count(user_name) FROM
(SELECT user_name,
	created::int,
	perc_total,
	SUM(perc_total)
	OVER (ORDER BY perc_total DESC ROWS BETWEEN UNBOUNDED PRECEDING AND CURRENT ROW) AS perc_accumul
FROM
-- Total amount of created objects per user and the percentage of all created within the database
	(SELECT
	DISTINCT(user_name) AS user_name,
	COUNT(id) AS created,
	COUNT(id) * 100.0 / (SELECT COUNT(id) FROM hist_plp WHERE version = 1 AND minor = 0) AS perc_total
	FROM
	hist_plp
	WHERE
	version = 1 AND minor = 0
	GROUP BY
	user_name) AS foo
) AS foo2
WHERE perc_accumul <= 98
;
""")
global tresh_created
tresh_created = cur.fetchone()[0]

###
### END calculate tresholds
###

# draw a default vline at x= that spans the yrange for created features <= 98%
l = plt.axvline(x=tresh_created, linestyle='dashed', color='#2dd700', label = '98% Threshold (Feature-Creations)')

# draw a default hline at y=1 that spans the xrange for feature-edits <= 98%
l = plt.axvline(x=tresh_edit, linestyle='dashed', color = '#FF6700', label = '98% Threshold (Feature-Edits)')

# Title of the pie chart
plt.title('Contributer-Distribution of created OSM-Features and OSM-Feature-Edits')

# Label x and y axis
ax.set_xlabel('Number of Contributers')
ax.set_ylabel('Accumulated Number of created OSM-Features and OSM-Features-Edits [%]')

# Place a gray dashed grid behind the thicks (only for y-axis)
ax.yaxis.grid(color='gray', linestyle='dashed')

# Set this grid behind the thicks
ax.set_axisbelow(True)

# Legend
plt.legend(loc = 7)

# Save plot to *.jpeg-file
savefig('pics/c7_user_created_edited_98.jpeg')
plt.clf()
|
zehpunktbarron/iOSMAnalyzer
|
scripts/c7_user_created_edited_98.py
|
Python
|
gpl-3.0
| 6,716
|
# User-Agent string the Raspberry Pi client sends to the web service.
RPI_USER_AGENT = "Raspberry Pi WS"
# host:port of the web service the Pi connects to.
SITE_SERVER_ADDRESS = "127.0.0.1:8000"
# User-Agent string the site side identifies itself with.
SITE_USER_AGENT = "Raspberry Site"
# Shared secret used to HMAC-sign messages between Pi and site.
# NOTE(review): placeholder value -- replace before deployment and keep real
# secrets out of version control.
HMAC_TOKEN = "super secret token"
|
blaisejarrett/PiIO.WS
|
rpi_ws/settings.py
|
Python
|
gpl-3.0
| 143
|
#! /usr/bin/python
#
# train_model.py
# used to create a training model and store it in data.inl
#
#
# Number of training cases fed to the C++ trainer in one run.
BATCH_SIZE = 100

from subprocess import call
import json
import string

# Rebuild the C++ trainer from source (removing any stale binary first).
# NOTE(review): shell=True is acceptable here with fixed command strings, but
# never interpolate untrusted input into these commands.
call( "rm train ; g++ -o train train_model.cpp -std=c++0x" , shell=True)

# training_data.json maps a case id to its metadata, including the label text.
o = json.loads(open("../training_data.json","r").read())

# Build an "<image path> <label> ..." argument string for the first
# BATCH_SIZE cases.
params = ""
count = 0
for case in o:
    params += "../training_sample/" + case + ".jpg "
    params += o[case]["text"] + " "
    count += 1
    if count >= BATCH_SIZE: break;

# Pipe the argument list into ./train and capture the resulting model in data.inl.
call( "echo '" + params + "' | ./train > data.inl", shell=True )
#call( "./build_decaptcher", shell=True )
|
theshark08/howtobreakacaptcha01
|
src/train_model.py
|
Python
|
mit
| 592
|
# django-openid-auth - OpenID integration for django.contrib.auth
#
# Copyright (C) 2007 Simon Willison
# Copyright (C) 2008-2010 Canonical Ltd.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from django.contrib.auth.models import User
from django.db import models
class Nonce(models.Model):
    """OpenID nonce, stored so authentication responses cannot be replayed."""
    server_url = models.CharField(max_length=2047)  # endpoint the nonce belongs to
    timestamp = models.IntegerField()               # issue time (epoch seconds)
    salt = models.CharField(max_length=40)

    def __unicode__(self):
        return u"Nonce: %s, %s" % (self.server_url, self.salt)
class Association(models.Model):
    """Shared-secret association established with an OpenID server."""
    server_url = models.TextField(max_length=2047)
    handle = models.CharField(max_length=255)       # opaque handle chosen by the server
    secret = models.TextField(max_length=255) # Stored base64 encoded
    issued = models.IntegerField()                  # issue time (epoch seconds)
    lifetime = models.IntegerField()                # validity period in seconds
    assoc_type = models.TextField(max_length=64)    # association type string

    def __unicode__(self):
        return u"Association: %s, %s" % (self.server_url, self.handle)
class UserOpenID(models.Model):
    """Maps a Django user to one of their OpenID identity URLs."""
    user = models.ForeignKey(User)
    claimed_id = models.TextField(max_length=255, unique=True)  # canonical identity URL
    display_id = models.TextField(max_length=2047)              # display form of the identity URL
|
nekohayo/snowy
|
lib/django_openid_auth/models.py
|
Python
|
agpl-3.0
| 2,355
|
import os
import sys
from pprint import pprint
from resources.terrain import TerrainFile
if __name__ == "__main__":
    # Expect exactly one argument: the terrain file to parse and dump.
    if len(sys.argv) != 2:
        print("Usage: t.py <filename>")
        print(sys.argv)
        sys.exit()
    # Resolve the argument relative to this script's own directory.
    file_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             sys.argv[1])
    with open(file_path, 'rb') as f:
        t = TerrainFile(f.read())
    # Dump the parsed structures for inspection.
    pprint(t.boundary)
    print(len(t.positions))
    pprint(t.positions)
    #print_hex_view(t.extracted)
|
joetsoi/moonstone
|
python/terrain.py
|
Python
|
agpl-3.0
| 533
|
"""Cache"""
# standard library
import logging
from datetime import datetime, timedelta
from typing import Callable, Optional
# third-party
import arrow
from requests import Session
# first-party
from tcex.api.tc.v2.datastore.datastore import DataStore
from tcex.utils import Utils
# get tcex logger
logger = logging.getLogger('tcex')
class Cache:
    """TcEx Cache Class.

    A TTL cache layered on top of the ThreatConnect DataStore: each record is
    stored together with a write timestamp and treated as stale once
    ``ttl_seconds`` have elapsed.

    Args:
        session: A requests.Session instance with auth configured for the ThreatConnect API.
        domain: A value of “system”, “organization”, or “local”.
        data_type: A free form type name for the data.
        ttl_seconds: Number of seconds the cache is valid.
        mapping: Elasticsearch mappings data.
    """

    def __init__(
        self,
        session: Session,
        domain: str,
        data_type: str,
        ttl_seconds: Optional[int] = None,
        mapping: Optional[dict] = None,
    ):
        """Initialize class properties."""
        # properties
        self.ds = DataStore(session, domain, data_type, mapping)
        self.log = logger
        self.ttl_seconds: Optional[int] = ttl_seconds
        self.utils = Utils()

        # Keys of the stored document envelope ({date, data}).
        # Warranty void if any of these are changed. Don't touch.
        self._cache_data_key: str = 'cache-data'
        self._cache_date_key: str = 'cache-date'

    def add(self, rid: str, data: dict, raise_on_error: Optional[bool] = True) -> dict:
        """Write cache data to the data store.

        **Example Response**

        .. code-block:: json

            {
                "_index": "$local.usr5_pytest",
                "_type": "pytest",
                "_id": "cache-one",
                "_version": 10,
                "result": "updated",
                "_shards": {
                    "total": 2,
                    "successful": 1,
                    "failed": 0
                },
                "_seq_no": 10,
                "_primary_term": 1
            }

        Args:
            rid: The record identifier.
            data: The record data.
            raise_on_error: If True and not r.ok this method will raise a RunTimeError.

        Returns:
            dict : The response dict
        """
        # NOTE(review): the ``data`` parameter is rebound here to the envelope
        # that wraps the caller's payload with a write timestamp.
        data: dict = {
            self._cache_date_key: datetime.utcnow().isoformat(),
            self._cache_data_key: data,
        }
        return self.ds.post(rid, data, raise_on_error)

    def delete(self, rid: str, raise_on_error: Optional[bool] = True) -> dict:
        """Delete cache data from the data store.

        **Example Response**

        .. code-block:: json

            {
                "_index": "$local.usr5_pytest",
                "_type": "pytest",
                "_id": "cache-delete",
                "_version": 2,
                "result": "deleted",
                "_shards": {
                    "total": 2,
                    "successful": 1,
                    "failed": 0
                },
                "_seq_no": 34,
                "_primary_term": 1
            }

        Args:
            rid: The record identifier.
            raise_on_error: If True and not r.ok this method will raise a RunTimeError.

        Returns:
            dict : The response dict.
        """
        return self.ds.delete(rid, raise_on_error)

    def get(
        self,
        rid: str,
        data_callback: Optional[Callable[[str], dict]] = None,
        raise_on_error: Optional[bool] = True,
    ) -> dict:
        """Get cached data from the data store.

        When the record is missing or expired and ``data_callback`` is
        provided, fresh data is fetched via the callback and written back to
        the cache.

        **Example Response**

        .. code-block:: json

            {
                "cache-date": "2020-07-31T11:44:53.851116",
                "cache-data": {
                    "results": "cached"
                }
            }

        Args:
            rid: The record identifier.
            data_callback: A method that will return the data.
            raise_on_error: If True and not r.ok this method will raise a RunTimeError.

        Returns:
            dict: The cached data.
        """
        cache_data = None
        ds_data: dict = self.ds.get(rid, raise_on_error=False)

        if ds_data is None:
            # default the response when TC API doesn't return a value
            ds_data = {'found': False}

        # NOTE(review): this check is always True -- ds_data was defaulted to a
        # dict just above; the guard is redundant but harmless.
        if ds_data is not None:
            expired = False
            if ds_data.get('found') is True:
                cache_data: dict = ds_data.get('_source', {})
                cache_date: str = cache_data.get(self._cache_date_key)
                if self._is_cache_expired(cache_date):
                    cache_data = None
                    expired = True
                    self.log.debug(f'Cached data is expired for ({rid}).')

            if expired or ds_data.get('found') is False:
                # when cache is expired or does not exist use callback to get data if possible
                if callable(data_callback):
                    # cache_data = self._encode_data(data_callback(rid))
                    cache_data: Optional[dict] = data_callback(rid)
                    self.log.debug(f'Using callback data for ({rid}).')
                    if cache_data:
                        cache_data = self.update(
                            rid, cache_data, raise_on_error
                        )  # update the cache data
            else:
                self.log.debug(f'Using cached data for ({rid}).')
        return cache_data

    def update(self, rid: str, data: dict, raise_on_error: Optional[bool] = True) -> dict:
        """Write updated cache data to the DataStore.

        **Example Response**

        .. code-block:: json

            {
                "cache-date": "2020-07-31T11:44:53.851116",
                "cache-data": {
                    "one": 1
                }
            }

        Args:
            rid: The record identifier.
            data: The record data.
            raise_on_error: If True and not r.ok this method will raise a RunTimeError.

        Returns:
            dict : The cached data.
        """
        cache_date = datetime.utcnow().isoformat()
        # cache_data = self._encode_data(data)
        data = {self._cache_date_key: cache_date, self._cache_data_key: data}
        self.ds.put(rid, data, raise_on_error)
        return data

    def _is_cache_expired(self, cached_date: str) -> bool:
        """Return True if the provided cache data is expired.

        Args:
            cached_date: The cache date value.

        Returns:
            bool: True if cache data is expired.
        """
        if self.ttl_seconds is None or self.ttl_seconds == 0:
            return True  # if ttl_is 0 or None, all cached data is always invalid.

        # convert the stored time expression to a datetime
        # object (support for different tcex version)
        cached_datetime = self.utils.any_to_datetime(cached_date).datetime

        # calculate the cache expiration time by adding the ttl seconds to the cached time
        cache_expires = cached_datetime + timedelta(seconds=self.ttl_seconds)

        # if cache expires is less than "now" then return True/expired
        return cache_expires < arrow.get(datetime.utcnow())
|
ThreatConnect-Inc/tcex
|
tcex/api/tc/v2/datastore/cache.py
|
Python
|
apache-2.0
| 7,077
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2015-2016 Simone Donadello
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#pylint: disable-msg=E1101
import bit as lib_bit
class FpgaCommand(object):
    """Base class for FPGA commands.

    A command is a sequence of bit fields; the first field is always the
    command opcode, given as a hex string such as "0x02".
    """
    def __init__(self, cmd):
        cmd = str(cmd)
        if not cmd.startswith("0x"):
            # Malformed opcode: report it but continue (CommandBits may still fail).
            print "ERROR: wrong call to command \"%s\" with string \"%s\""%(str(type(self)), cmd)
        self.bits = [lib_bit.CommandBits(hex_bits=cmd)]

    def get_hex(self):
        """Return the whole command as one concatenated hex string."""
        hex_str = ""
        for bit in self.bits:
            hex_str += bit.hex_bits
        return hex_str

    def get_bin(self):
        """Return the whole command as one concatenated binary string."""
        bin_str = ""
        for bit in self.bits:
            bin_str += bit.bin_bits
        return bin_str
class LoadCommand(FpgaCommand):
    """Opcode 0x02: load one program word (memory/command/time/address/data)."""
    def __init__(self, memory, command, time, address, data):
        super(LoadCommand, self).__init__("0x02")
        # Field order defines the on-wire layout of the command word.
        self.bits.append(lib_bit.MemoryLoadBits(int_bits=int(memory)))
        self.bits.append(lib_bit.CommandLoadBits(int_bits=int(command)))
        self.bits.append(lib_bit.TimeLoadBits(int_bits=int(time)))
        self.bits.append(lib_bit.AddressLoadBits(int_bits=int(address)))
        self.bits.append(lib_bit.DataLoadBits(int_bits=int(data)))
class LoadDoneCommand(FpgaCommand):
    """Opcode 0x04: signal that program loading is complete."""
    def __init__(self):
        super(LoadDoneCommand, self).__init__("0x04")


class RunCommand(FpgaCommand):
    """Opcode 0x0B: start execution."""
    def __init__(self):
        super(RunCommand, self).__init__("0x0B")


class StopCommand(FpgaCommand):
    """Opcode 0x0A: stop execution."""
    def __init__(self):
        super(StopCommand, self).__init__("0x0A")


class StatusCommand(FpgaCommand):
    """Opcode 0x08: query the FPGA status."""
    def __init__(self):
        super(StatusCommand, self).__init__("0x08")


class ExtTriggerOnCommand(FpgaCommand):
    """Opcode 0x07: enable the external trigger."""
    def __init__(self):
        super(ExtTriggerOnCommand, self).__init__("0x07")


class ExtTriggerOffCommand(FpgaCommand):
    """Opcode 0x06: disable the external trigger."""
    def __init__(self):
        super(ExtTriggerOffCommand, self).__init__("0x06")
|
simondona/exp-control-bec-tn
|
libraries/command.py
|
Python
|
gpl-3.0
| 2,495
|
# Copyright (c) 2016-2018, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
"""Mempool handling."""
import asyncio
import itertools
import time
from abc import ABC, abstractmethod
from asyncio import Lock, sleep
from collections import defaultdict
import attr
from torba.server.hash import hash_to_hex_str, hex_str_to_hash
from torba.server.util import class_logger, chunks
from torba.server.db import UTXO
@attr.s(slots=True)
class MemPoolTx:
    """In-memory record of a single mempool transaction."""
    prevouts = attr.ib()   # (tx_hash, index) pairs this tx spends
    # A pair is a (hashX, value) tuple
    in_pairs = attr.ib()
    out_pairs = attr.ib()
    fee = attr.ib()        # transaction fee
    size = attr.ib()       # serialized transaction size
@attr.s(slots=True)
class MemPoolTxSummary:
    """Per-transaction summary exposed to callers of the mempool."""
    hash = attr.ib()
    fee = attr.ib()
    has_unconfirmed_inputs = attr.ib()  # True if any input spends a mempool output
class MemPoolAPI(ABC):
    """A concrete instance of this class is passed to the MemPool object
    and used by it to query DB and blockchain state."""

    @abstractmethod
    async def height(self):
        """Query bitcoind for its height."""

    @abstractmethod
    def cached_height(self):
        """Return the height of bitcoind the last time it was queried,
        for any reason, without actually querying it.
        """

    @abstractmethod
    async def mempool_hashes(self):
        """Query bitcoind for the hashes of all transactions in its
        mempool, returned as a list."""

    @abstractmethod
    async def raw_transactions(self, hex_hashes):
        """Query bitcoind for the serialized raw transactions with the given
        hashes.  Missing transactions are returned as None.

        hex_hashes is an iterable of hexadecimal hash strings."""

    @abstractmethod
    async def lookup_utxos(self, prevouts):
        """Return a list of (hashX, value) pairs, one for each prevout if
        unspent, otherwise return None if spent or not found.

        prevouts - an iterable of (hash, index) pairs
        """

    @abstractmethod
    async def on_mempool(self, touched, height):
        """Called each time the mempool is synchronized.  touched is a set of
        hashXs touched since the previous call.  height is the
        daemon's height at the time the mempool was obtained."""
class MemPool:
    """Representation of the daemon's mempool.

    coin - a coin class from coins.py
    api - an object implementing MemPoolAPI

    Updated regularly in caught-up state.  Goal is to enable efficient
    response to the calls in the external interface.  To that end we
    maintain the following maps:

       txs: tx_hash -> MemPoolTx
       hashXs: hashX -> set of all hashes of txs touching the hashX
    """
    def __init__(self, coin, api, refresh_secs=5.0, log_status_secs=120.0):
        assert isinstance(api, MemPoolAPI)
        self.coin = coin
        self.api = api
        self.logger = class_logger(__name__, self.__class__.__name__)
        self.txs = {}
        self.hashXs = defaultdict(set)  # None can be a key
        self.cached_compact_histogram = []
        self.refresh_secs = refresh_secs
        self.log_status_secs = log_status_secs
        # Prevents mempool refreshes during fee histogram calculation
        self.lock = Lock()
    async def _logging(self, synchronized_event):
        """Print regular logs of mempool stats."""
        self.logger.info('beginning processing of daemon mempool. '
                         'This can take some time...')
        start = time.time()
        await synchronized_event.wait()
        elapsed = time.time() - start
        self.logger.info(f'synced in {elapsed:.2f}s')
        while True:
            self.logger.info(f'{len(self.txs):,d} txs '
                             f'touching {len(self.hashXs):,d} addresses')
            await sleep(self.log_status_secs)
            await synchronized_event.wait()
    async def _refresh_histogram(self, synchronized_event):
        """Periodically rebuild the compact fee histogram once synced."""
        while True:
            await synchronized_event.wait()
            async with self.lock:
                # Threaded as can be expensive
                await asyncio.get_event_loop().run_in_executor(None, self._update_histogram, 100_000)
            await sleep(self.coin.MEMPOOL_HISTOGRAM_REFRESH_SECS)
    def _update_histogram(self, bin_size):
        """Recompute self.cached_compact_histogram from self.txs.

        bin_size - minimum cumulative vsize (in bytes) per compact bin.
        Runs in an executor thread; holds no locks itself (the caller
        holds self.lock to keep self.txs stable during iteration).
        """
        # Build a histogram by fee rate
        histogram = defaultdict(int)
        for tx in self.txs.values():
            histogram[tx.fee // tx.size] += tx.size
        # Now compact it.  For efficiency, get_fees returns a
        # compact histogram with variable bin size.  The compact
        # histogram is an array of (fee_rate, vsize) values.
        # vsize_n is the cumulative virtual size of mempool
        # transactions with a fee rate in the interval
        # [rate_(n-1), rate_n)], and rate_(n-1) > rate_n.
        # Intervals are chosen to create tranches containing at
        # least 100kb of transactions
        compact = []
        cum_size = 0
        r = 0  # ?
        for fee_rate, size in sorted(histogram.items(), reverse=True):
            cum_size += size
            if cum_size + r > bin_size:
                compact.append((fee_rate, cum_size))
                r += cum_size - bin_size
                cum_size = 0
                # Grow the bin threshold so lower fee-rate tranches
                # cover progressively larger amounts of vsize.
                bin_size *= 1.1
        self.logger.info(f'compact fee histogram: {compact}')
        self.cached_compact_histogram = compact
    def _accept_transactions(self, tx_map, utxo_map, touched):
        """Accept transactions in tx_map to the mempool if all their inputs
        can be found in the existing mempool or a utxo_map from the
        DB.

        Returns an (unprocessed tx_map, unspent utxo_map) pair.
        """
        hashXs = self.hashXs
        txs = self.txs
        deferred = {}
        unspent = set(utxo_map)
        # Try to find all prevouts so we can accept the TX
        for hash, tx in tx_map.items():
            in_pairs = []
            try:
                for prevout in tx.prevouts:
                    utxo = utxo_map.get(prevout)
                    if not utxo:
                        prev_hash, prev_index = prevout
                        # Raises KeyError if prev_hash is not in txs
                        utxo = txs[prev_hash].out_pairs[prev_index]
                    in_pairs.append(utxo)
            except KeyError:
                # A parent is itself still unaccepted; retry this tx on
                # the next pass once its parent has been accepted.
                deferred[hash] = tx
                continue
            # Spend the prevouts
            unspent.difference_update(tx.prevouts)
            # Save the in_pairs, compute the fee and accept the TX
            tx.in_pairs = tuple(in_pairs)
            # Avoid negative fees if dealing with generation-like transactions
            # because some in_parts would be missing
            tx.fee = max(0, (sum(v for _, v in tx.in_pairs) -
                             sum(v for _, v in tx.out_pairs)))
            txs[hash] = tx
            for hashX, value in itertools.chain(tx.in_pairs, tx.out_pairs):
                touched.add(hashX)
                hashXs[hashX].add(hash)
        return deferred, {prevout: utxo_map[prevout] for prevout in unspent}
    async def _refresh_hashes(self, synchronized_event):
        """Refresh our view of the daemon's mempool."""
        while True:
            height = self.api.cached_height()
            hex_hashes = await self.api.mempool_hashes()
            if height != await self.api.height():
                # Daemon height moved while fetching; hashes may be stale.
                continue
            hashes = set(hex_str_to_hash(hh) for hh in hex_hashes)
            async with self.lock:
                touched = await self._process_mempool(hashes)
            synchronized_event.set()
            synchronized_event.clear()
            await self.api.on_mempool(touched, height)
            await sleep(self.refresh_secs)
    async def _process_mempool(self, all_hashes):
        """Bring self.txs / self.hashXs in line with all_hashes.

        Returns the set of hashXs touched by additions or removals.
        """
        # Re-sync with the new set of hashes
        txs = self.txs
        hashXs = self.hashXs
        touched = set()
        # First handle txs that have disappeared
        for tx_hash in set(txs).difference(all_hashes):
            tx = txs.pop(tx_hash)
            tx_hashXs = set(hashX for hashX, value in tx.in_pairs)
            tx_hashXs.update(hashX for hashX, value in tx.out_pairs)
            for hashX in tx_hashXs:
                hashXs[hashX].remove(tx_hash)
                if not hashXs[hashX]:
                    del hashXs[hashX]
            touched.update(tx_hashXs)
        # Process new transactions
        new_hashes = list(all_hashes.difference(txs))
        if new_hashes:
            fetches = []
            for hashes in chunks(new_hashes, 200):
                fetches.append(self._fetch_and_accept(hashes, all_hashes, touched))
            tx_map = {}
            utxo_map = {}
            for fetch in asyncio.as_completed(fetches):
                deferred, unspent = await fetch
                tx_map.update(deferred)
                utxo_map.update(unspent)
            prior_count = 0
            # FIXME: this is not particularly efficient
            while tx_map and len(tx_map) != prior_count:
                prior_count = len(tx_map)
                tx_map, utxo_map = self._accept_transactions(tx_map, utxo_map,
                                                             touched)
            if tx_map:
                self.logger.info(f'{len(tx_map)} txs dropped')
        return touched
    async def _fetch_and_accept(self, hashes, all_hashes, touched):
        """Fetch a list of mempool transactions."""
        hex_hashes_iter = (hash_to_hex_str(hash) for hash in hashes)
        raw_txs = await self.api.raw_transactions(hex_hashes_iter)
        def deserialize_txs():    # This function is pure
            to_hashX = self.coin.hashX_from_script
            deserializer = self.coin.DESERIALIZER
            txs = {}
            for hash, raw_tx in zip(hashes, raw_txs):
                # The daemon may have evicted the tx from its
                # mempool or it may have gotten in a block
                if not raw_tx:
                    continue
                tx, tx_size = deserializer(raw_tx).read_tx_and_vsize()
                # Convert the inputs and outputs into (hashX, value) pairs
                # Drop generation-like inputs from MemPoolTx.prevouts
                txin_pairs = tuple((txin.prev_hash, txin.prev_idx)
                                   for txin in tx.inputs
                                   if not txin.is_generation())
                txout_pairs = tuple((to_hashX(txout.pk_script), txout.value)
                                    for txout in tx.outputs)
                txs[hash] = MemPoolTx(txin_pairs, None, txout_pairs,
                                      0, tx_size)
            return txs
        # Thread this potentially slow operation so as not to block
        tx_map = await asyncio.get_event_loop().run_in_executor(None, deserialize_txs)
        # Determine all prevouts not in the mempool, and fetch the
        # UTXO information from the database.  Failed prevout lookups
        # return None - concurrent database updates happen - which is
        # relied upon by _accept_transactions. Ignore prevouts that are
        # generation-like.
        prevouts = tuple(prevout for tx in tx_map.values()
                         for prevout in tx.prevouts
                         if prevout[0] not in all_hashes)
        utxos = await self.api.lookup_utxos(prevouts)
        utxo_map = {prevout: utxo for prevout, utxo in zip(prevouts, utxos)}
        return self._accept_transactions(tx_map, utxo_map, touched)
    #
    # External interface
    #
    async def keep_synchronized(self, synchronized_event):
        """Keep the mempool synchronized with the daemon."""
        await asyncio.wait([
            self._refresh_hashes(synchronized_event),
            self._refresh_histogram(synchronized_event),
            self._logging(synchronized_event)
        ])
    async def balance_delta(self, hashX):
        """Return the unconfirmed amount in the mempool for hashX.

        Can be positive or negative.
        """
        value = 0
        if hashX in self.hashXs:
            for hash in self.hashXs[hashX]:
                tx = self.txs[hash]
                value -= sum(v for h168, v in tx.in_pairs if h168 == hashX)
                value += sum(v for h168, v in tx.out_pairs if h168 == hashX)
        return value
    async def compact_fee_histogram(self):
        """Return a compact fee histogram of the current mempool."""
        return self.cached_compact_histogram
    async def potential_spends(self, hashX):
        """Return a set of (prev_hash, prev_idx) pairs from mempool
        transactions that touch hashX.

        None, some or all of these may be spends of the hashX, but all
        actual spends of it (in the DB or mempool) will be included.
        """
        result = set()
        for tx_hash in self.hashXs.get(hashX, ()):
            tx = self.txs[tx_hash]
            result.update(tx.prevouts)
        return result
    async def transaction_summaries(self, hashX):
        """Return a list of MemPoolTxSummary objects for the hashX."""
        result = []
        for tx_hash in self.hashXs.get(hashX, ()):
            tx = self.txs[tx_hash]
            has_ui = any(hash in self.txs for hash, idx in tx.prevouts)
            result.append(MemPoolTxSummary(tx_hash, tx.fee, has_ui))
        return result
    async def unordered_UTXOs(self, hashX):
        """Return an unordered list of UTXO named tuples from mempool
        transactions that pay to hashX.

        This does not consider if any other mempool transactions spend
        the outputs.
        """
        utxos = []
        for tx_hash in self.hashXs.get(hashX, ()):
            tx = self.txs.get(tx_hash)
            # BUGFIX: .get() can return None; previously tx.out_pairs
            # would then raise AttributeError.  Skip instead - sibling
            # accessors rely on the hashXs/txs invariant and index
            # directly, so a miss here is at worst a transient
            # inconsistency during refresh.
            if tx is None:
                continue
            for pos, (hX, value) in enumerate(tx.out_pairs):
                if hX == hashX:
                    utxos.append(UTXO(-1, pos, tx_hash, 0, value))
        return utxos
|
lbryio/lbry
|
torba/torba/server/mempool.py
|
Python
|
mit
| 13,901
|
import unittest
from etk.extractors.spacy_ner_extractor import SpacyNerExtractor
class TestSpacyNerExtractor(unittest.TestCase):
    def test_spacy_ner_extractor(self) -> None:
        """Run the spaCy NER extractor over a paragraph and check each
        extraction's value, provenance offsets and tag against the
        expected records."""
        attr_types = ['PERSON', 'ORG', 'GPE']
        extractor = SpacyNerExtractor(extractor_name='spacy_ner_extractor')
        text = 'Napoléon Bonaparte was a French statesman and military leader who rose to prominence during the French Revolution and led several successful campaigns during the French Revolutionary Wars. As Napoleon, he was Emperor of the French from 1804 until 1814, and again briefly in 1815 during the Hundred Days. Napoleon dominated European and global affairs for more than a decade while leading France against a series of coalitions in the Napoleonic Wars. He won most of these wars and the vast majority of his battles, building a large empire that ruled over continental Europe before its final collapse in 1815. He is considered one of the greatest commanders in history, and his wars and campaigns are studied at military schools worldwide. Napoleon\'s political and cultural legacy has endured as one of the most celebrated and controversial leaders in human history.'
        results = extractor.extract(text, get_attr=attr_types)
        extracted = [
            {
                'value': extraction.value,
                'start_char': extraction.provenance['start_char'],
                'end_char': extraction.provenance['end_char'],
                'start_token': extraction.provenance['start_token'],
                'end_token': extraction.provenance['end_token'],
                'tag': extraction.tag
            }
            for extraction in results
        ]
        expected = [{'value': 'Napoléon Bonaparte', 'start_char': 0, 'end_char': 18, 'start_token': 0, 'end_token': 2,
                     'tag': 'PERSON'},
                    {'value': 'Napoleon', 'start_char': 192, 'end_char': 200, 'start_token': 29, 'end_token': 30,
                     'tag': 'ORG'},
                    {'value': 'Napoleon', 'start_char': 304, 'end_char': 312, 'start_token': 52, 'end_token': 53,
                     'tag': 'ORG'},
                    {'value': 'France', 'start_char': 388, 'end_char': 394, 'start_token': 65, 'end_token': 66,
                     'tag': 'GPE'},
                    {'value': 'Napoleon', 'start_char': 738, 'end_char': 746, 'start_token': 129, 'end_token': 130,
                     'tag': 'ORG'}]
        # Compare element-by-element (indexing expected, like the original,
        # so an extra extraction still surfaces as an error).
        for index in range(len(extracted)):
            self.assertEqual(extracted[index], expected[index])
# Allow running this test module directly with `python <file>`.
if __name__ == '__main__':
    unittest.main()
|
usc-isi-i2/etk
|
etk/unit_tests/test_spacy_ner_extractor.py
|
Python
|
mit
| 2,668
|
# This file contains default config info for downloading and installing dictionaries
# The template placeholders are filled by setup.py.
# Local path for dictionaries
default_dict_path = r'$path'
# path for the metadata file
# NOTE(review): `ur'...'` is Python 2-only syntax (raw unicode literal);
# this file targets the 2.x tree and will not parse under Python 3.
default_dict_info_path = ur'$path'
# URL of the repository for dictionaries
default_repository = u'$repo'
# Country and language codes: These will be appended to the default_repos path
# to form each language's download location.
languages = set(('af_ZA', 'an_ES', 'ar', 'be_BY', 'bg_BG',
        'bn_BD', 'br_FR', 'ca', 'cs_CZ', 'da_DK', 'de',
        'el_GR', 'en', 'es_ES', 'et_EE', 'fr_FR', 'gd_GB', 'gl',
        'gu_IN', 'he_IL', 'hi_IN', 'hr_HR', 'hu_HU', 'it_IT', 'ku_TR',
        'lt_LT', 'lv_LV', 'ne_NP', 'nl_NL', 'no', 'oc_FR', 'pl_PL',
        'prj', 'pt_BR', 'pt_PT', 'ro', 'ru_RU', 'si_LK', 'sk_SK',
        'sl_SI', 'sr', 'sv_SE', 'sw_TZ',
        'te_IN', 'th_TH', 'uk_UA', 'zu_ZA'))
|
GraceJonn123/github-resources
|
PyHyphen-2.0.5/2.x/config.py
|
Python
|
mit
| 831
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Accesses the google.logging.v2 LoggingServiceV2 API."""
import functools
import pkg_resources
import warnings
from google.oauth2 import service_account
import google.api_core.gapic_v1.client_info
import google.api_core.gapic_v1.config
import google.api_core.gapic_v1.method
import google.api_core.grpc_helpers
import google.api_core.page_iterator
import google.api_core.path_template
import grpc
from google.api import monitored_resource_pb2
from google.cloud.logging_v2.gapic import enums
from google.cloud.logging_v2.gapic import logging_service_v2_client_config
from google.cloud.logging_v2.gapic.transports import logging_service_v2_grpc_transport
from google.cloud.logging_v2.proto import log_entry_pb2
from google.cloud.logging_v2.proto import logging_pb2
from google.cloud.logging_v2.proto import logging_pb2_grpc
from google.protobuf import empty_pb2
# Version string of the installed `google-cloud-logging` distribution;
# attached to the ClientInfo below and reported in request user-agents.
_GAPIC_LIBRARY_VERSION = pkg_resources.get_distribution(
    'google-cloud-logging', ).version
class LoggingServiceV2Client(object):
    """Service for ingesting and querying logs."""
    # Default gRPC endpoint (host:port) used when no transport/channel
    # is supplied to the constructor.
    SERVICE_ADDRESS = 'logging.googleapis.com:443'
    """The default address of the service."""
    # The name of the interface for this client. This is the key used to
    # find the method configuration in the client_config dictionary.
    _INTERFACE_NAME = 'google.logging.v2.LoggingServiceV2'
@classmethod
def from_service_account_file(cls, filename, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
LoggingServiceV2Client: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(
filename)
kwargs['credentials'] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@classmethod
def log_path(cls, project, log):
"""Return a fully-qualified log string."""
return google.api_core.path_template.expand(
'projects/{project}/logs/{log}',
project=project,
log=log,
)
@classmethod
def project_path(cls, project):
"""Return a fully-qualified project string."""
return google.api_core.path_template.expand(
'projects/{project}',
project=project,
)
    def __init__(self,
                 transport=None,
                 channel=None,
                 credentials=None,
                 client_config=None,
                 client_info=None):
        """Constructor.

        Args:
            transport (Union[~.LoggingServiceV2GrpcTransport,
                    Callable[[~.Credentials, type], ~.LoggingServiceV2GrpcTransport]): A transport
                instance, responsible for actually making the API calls.
                The default transport uses the gRPC protocol.
                This argument may also be a callable which returns a
                transport instance. Callables will be sent the credentials
                as the first argument and the default transport class as
                the second argument.
            channel (grpc.Channel): DEPRECATED. A ``Channel`` instance
                through which to make calls. This argument is mutually exclusive
                with ``credentials``; providing both will raise an exception.
            credentials (google.auth.credentials.Credentials): The
                authorization credentials to attach to requests. These
                credentials identify this application to the service. If none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
                This argument is mutually exclusive with providing a
                transport instance to ``transport``; doing so will raise
                an exception.
            client_config (dict): DEPRECATED. A dictionary of call options for
                each method. If not specified, the default configuration is used.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Raise deprecation warnings for things we want to go away.
        if client_config is not None:
            warnings.warn(
                'The `client_config` argument is deprecated.',
                PendingDeprecationWarning,
                stacklevel=2)
        else:
            # Fall back to the generated per-method retry/timeout defaults.
            client_config = logging_service_v2_client_config.config
        if channel:
            warnings.warn(
                'The `channel` argument is deprecated; use '
                '`transport` instead.',
                PendingDeprecationWarning,
                stacklevel=2)
        # Instantiate the transport.
        # The transport is responsible for handling serialization and
        # deserialization and actually sending data to the service.
        if transport:
            if callable(transport):
                # Transport factory: pass credentials and the default class.
                self.transport = transport(
                    credentials=credentials,
                    default_class=logging_service_v2_grpc_transport.
                    LoggingServiceV2GrpcTransport,
                )
            else:
                if credentials:
                    raise ValueError(
                        'Received both a transport instance and '
                        'credentials; these are mutually exclusive.')
                self.transport = transport
        else:
            # No transport supplied: build the default gRPC transport
            # against SERVICE_ADDRESS (honouring a legacy `channel`).
            self.transport = logging_service_v2_grpc_transport.LoggingServiceV2GrpcTransport(
                address=self.SERVICE_ADDRESS,
                channel=channel,
                credentials=credentials,
            )
        if client_info is None:
            client_info = google.api_core.gapic_v1.client_info.ClientInfo(
                gapic_version=_GAPIC_LIBRARY_VERSION, )
        else:
            client_info.gapic_version = _GAPIC_LIBRARY_VERSION
        self._client_info = client_info
        # Parse out the default settings for retry and timeout for each RPC
        # from the client configuration.
        # (Ordinarily, these are the defaults specified in the `*_config.py`
        # file next to this one.)
        self._method_configs = google.api_core.gapic_v1.config.parse_method_configs(
            client_config['interfaces'][self._INTERFACE_NAME], )
        # Save a dictionary of cached API call functions.
        # These are the actual callables which invoke the proper
        # transport methods, wrapped with `wrap_method` to add retry,
        # timeout, and the like.
        self._inner_api_calls = {}
# Service calls
def delete_log(self,
log_name,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Deletes all the log entries in a log.
The log reappears if it receives new entries.
Log entries written shortly before the delete operation might not be
deleted.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.LoggingServiceV2Client()
>>>
>>> log_name = client.log_path('[PROJECT]', '[LOG]')
>>>
>>> client.delete_log(log_name)
Args:
log_name (str): Required. The resource name of the log to delete:
::
"projects/[PROJECT_ID]/logs/[LOG_ID]"
"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
"folders/[FOLDER_ID]/logs/[LOG_ID]"
``[LOG_ID]`` must be URL-encoded. For example,
``"projects/my-project-id/logs/syslog"``,
``"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"``.
For more information about log names, see ``LogEntry``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'delete_log' not in self._inner_api_calls:
self._inner_api_calls[
'delete_log'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.delete_log,
default_retry=self._method_configs['DeleteLog'].retry,
default_timeout=self._method_configs['DeleteLog'].timeout,
client_info=self._client_info,
)
request = logging_pb2.DeleteLogRequest(log_name=log_name, )
self._inner_api_calls['delete_log'](
request, retry=retry, timeout=timeout, metadata=metadata)
def write_log_entries(self,
entries,
log_name=None,
resource=None,
labels=None,
partial_success=None,
dry_run=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Writes log entries to Logging. This API method is the
only way to send log entries to Logging. This method
is used, directly or indirectly, by the Logging agent
(fluentd) and all logging libraries configured to use Logging.
A single request may contain log entries for a maximum of 1000
different resources (projects, organizations, billing accounts or
folders)
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.LoggingServiceV2Client()
>>>
>>> # TODO: Initialize `entries`:
>>> entries = []
>>>
>>> response = client.write_log_entries(entries)
Args:
entries (list[Union[dict, ~google.cloud.logging_v2.types.LogEntry]]): Required. The log entries to send to Logging. The order of log entries
in this list does not matter. Values supplied in this method's
``log_name``, ``resource``, and ``labels`` fields are copied into those
log entries in this list that do not include values for their
corresponding fields. For more information, see the ``LogEntry`` type.
If the ``timestamp`` or ``insert_id`` fields are missing in log entries,
then this method supplies the current time or a unique identifier,
respectively. The supplied values are chosen so that, among the log
entries that did not supply their own values, the entries earlier in the
list will sort before the entries later in the list. See the
``entries.list`` method.
Log entries with timestamps that are more than the `logs retention
period <https://cloud.google.com/logging/quota-policy>`__ in the past or
more than 24 hours in the future will not be available when calling
``entries.list``. However, those log entries can still be exported with
`LogSinks <https://cloud.google.com/logging/docs/api/tasks/exporting-logs>`__.
To improve throughput and to avoid exceeding the `quota
limit <https://cloud.google.com/logging/quota-policy>`__ for calls to
``entries.write``, you should try to include several log entries in this
list, rather than calling this method for each individual log entry.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.logging_v2.types.LogEntry`
log_name (str): Optional. A default log resource name that is assigned to all log
entries in ``entries`` that do not specify a value for ``log_name``:
::
"projects/[PROJECT_ID]/logs/[LOG_ID]"
"organizations/[ORGANIZATION_ID]/logs/[LOG_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]/logs/[LOG_ID]"
"folders/[FOLDER_ID]/logs/[LOG_ID]"
``[LOG_ID]`` must be URL-encoded. For example:
::
"projects/my-project-id/logs/syslog"
"organizations/1234567890/logs/cloudresourcemanager.googleapis.com%2Factivity"
The permission logging.logEntries.create is needed on each project,
organization, billing account, or folder that is receiving new log
entries, whether the resource is specified in logName or in an
individual log entry.
resource (Union[dict, ~google.cloud.logging_v2.types.MonitoredResource]): Optional. A default monitored resource object that is assigned to all
log entries in ``entries`` that do not specify a value for ``resource``.
Example:
::
{ "type": "gce_instance",
"labels": {
"zone": "us-central1-a", "instance_id": "00000000000000000000" }}
See ``LogEntry``.
If a dict is provided, it must be of the same form as the protobuf
message :class:`~google.cloud.logging_v2.types.MonitoredResource`
labels (dict[str -> str]): Optional. Default labels that are added to the ``labels`` field of all
log entries in ``entries``. If a log entry already has a label with the
same key as a label in this parameter, then the log entry's label is not
changed. See ``LogEntry``.
partial_success (bool): Optional. Whether valid entries should be written even if some other
entries fail due to INVALID\_ARGUMENT or PERMISSION\_DENIED errors. If
any entry is not written, then the response status is the error
associated with one of the failed entries and the response includes
error details keyed by the entries' zero-based index in the
``entries.write`` method.
dry_run (bool): Optional. If true, the request should expect normal response, but the
entries won't be persisted nor exported. Useful for checking whether the
logging API endpoints are working properly before sending valuable data.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.logging_v2.types.WriteLogEntriesResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'write_log_entries' not in self._inner_api_calls:
self._inner_api_calls[
'write_log_entries'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.write_log_entries,
default_retry=self._method_configs['WriteLogEntries'].
retry,
default_timeout=self._method_configs['WriteLogEntries'].
timeout,
client_info=self._client_info,
)
request = logging_pb2.WriteLogEntriesRequest(
entries=entries,
log_name=log_name,
resource=resource,
labels=labels,
partial_success=partial_success,
dry_run=dry_run,
)
return self._inner_api_calls['write_log_entries'](
request, retry=retry, timeout=timeout, metadata=metadata)
def list_log_entries(self,
resource_names,
project_ids=None,
filter_=None,
order_by=None,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Lists log entries. Use this method to retrieve log entries from Logging.
For ways to export log entries, see `Exporting
Logs <https://cloud.google.com/logging/docs/export>`__.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.LoggingServiceV2Client()
>>>
>>> # TODO: Initialize `resource_names`:
>>> resource_names = []
>>>
>>> # Iterate over all results
>>> for element in client.list_log_entries(resource_names):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_log_entries(resource_names).pages:
... for element in page:
... # process element
... pass
Args:
resource_names (list[str]): Required. Names of one or more parent resources from which to retrieve
log entries:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]"
Projects listed in the ``project_ids`` field are added to this list.
project_ids (list[str]): Deprecated. Use ``resource_names`` instead. One or more project
identifiers or project numbers from which to retrieve log entries.
Example: ``"my-project-1A"``. If present, these project identifiers are
converted to resource name format and added to the list of resources in
``resource_names``.
filter_ (str): Optional. A filter that chooses which log entries to return. See
`Advanced Logs
Filters <https://cloud.google.com/logging/docs/view/advanced_filters>`__.
Only log entries that match the filter are returned. An empty filter
matches all log entries in the resources listed in ``resource_names``.
Referencing a parent resource that is not listed in ``resource_names``
will cause the filter to return no results. The maximum length of the
filter is 20000 characters.
order_by (str): Optional. How the results should be sorted. Presently, the only
permitted values are ``"timestamp asc"`` (default) and
``"timestamp desc"``. The first option returns entries in order of
increasing values of ``LogEntry.timestamp`` (oldest first), and the
second option returns entries in order of decreasing timestamps (newest
first). Entries with equal timestamps are returned in order of their
``insert_id`` values.
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.logging_v2.types.LogEntry` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_log_entries' not in self._inner_api_calls:
self._inner_api_calls[
'list_log_entries'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_log_entries,
default_retry=self._method_configs['ListLogEntries'].retry,
default_timeout=self._method_configs['ListLogEntries'].
timeout,
client_info=self._client_info,
)
request = logging_pb2.ListLogEntriesRequest(
resource_names=resource_names,
project_ids=project_ids,
filter=filter_,
order_by=order_by,
page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls['list_log_entries'],
retry=retry,
timeout=timeout,
metadata=metadata),
request=request,
items_field='entries',
request_token_field='page_token',
response_token_field='next_page_token',
)
return iterator
def list_monitored_resource_descriptors(
self,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Lists the descriptors for monitored resource types used by Logging.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.LoggingServiceV2Client()
>>>
>>> # Iterate over all results
>>> for element in client.list_monitored_resource_descriptors():
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_monitored_resource_descriptors().pages:
... for element in page:
... # process element
... pass
Args:
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`~google.cloud.logging_v2.types.MonitoredResourceDescriptor` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_monitored_resource_descriptors' not in self._inner_api_calls:
self._inner_api_calls[
'list_monitored_resource_descriptors'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_monitored_resource_descriptors,
default_retry=self.
_method_configs['ListMonitoredResourceDescriptors'].retry,
default_timeout=self._method_configs[
'ListMonitoredResourceDescriptors'].timeout,
client_info=self._client_info,
)
request = logging_pb2.ListMonitoredResourceDescriptorsRequest(
page_size=page_size, )
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls['list_monitored_resource_descriptors'],
retry=retry,
timeout=timeout,
metadata=metadata),
request=request,
items_field='resource_descriptors',
request_token_field='page_token',
response_token_field='next_page_token',
)
return iterator
def list_logs(self,
parent,
page_size=None,
retry=google.api_core.gapic_v1.method.DEFAULT,
timeout=google.api_core.gapic_v1.method.DEFAULT,
metadata=None):
"""
Lists the logs in projects, organizations, folders, or billing accounts.
Only logs that have entries are listed.
Example:
>>> from google.cloud import logging_v2
>>>
>>> client = logging_v2.LoggingServiceV2Client()
>>>
>>> parent = client.project_path('[PROJECT]')
>>>
>>> # Iterate over all results
>>> for element in client.list_logs(parent):
... # process element
... pass
>>>
>>>
>>> # Alternatively:
>>>
>>> # Iterate over results one page at a time
>>> for page in client.list_logs(parent).pages:
... for element in page:
... # process element
... pass
Args:
parent (str): Required. The resource name that owns the logs:
::
"projects/[PROJECT_ID]"
"organizations/[ORGANIZATION_ID]"
"billingAccounts/[BILLING_ACCOUNT_ID]"
"folders/[FOLDER_ID]"
page_size (int): The maximum number of resources contained in the
underlying API response. If page streaming is performed per-
resource, this parameter does not affect the return value. If page
streaming is performed per-page, this determines the maximum number
of resources in a page.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.gax.PageIterator` instance. By default, this
is an iterable of :class:`str` instances.
This object can also be configured to iterate over the pages
of the response through the `options` parameter.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid.
"""
# Wrap the transport method to add retry and timeout logic.
if 'list_logs' not in self._inner_api_calls:
self._inner_api_calls[
'list_logs'] = google.api_core.gapic_v1.method.wrap_method(
self.transport.list_logs,
default_retry=self._method_configs['ListLogs'].retry,
default_timeout=self._method_configs['ListLogs'].timeout,
client_info=self._client_info,
)
request = logging_pb2.ListLogsRequest(
parent=parent,
page_size=page_size,
)
iterator = google.api_core.page_iterator.GRPCIterator(
client=None,
method=functools.partial(
self._inner_api_calls['list_logs'],
retry=retry,
timeout=timeout,
metadata=metadata),
request=request,
items_field='log_names',
request_token_field='page_token',
response_token_field='next_page_token',
)
return iterator
|
jonparrott/google-cloud-python
|
logging/google/cloud/logging_v2/gapic/logging_service_v2_client.py
|
Python
|
apache-2.0
| 32,657
|
# Django settings for test_remote_project project.
# NOTE(review): test/demo settings -- DEBUG is on and SECRET_KEY is committed
# to the repository, so this module must never be used in production as-is.
import os.path
# NOTE(review): posixpath appears unused in this module -- confirm before removing.
import posixpath
# Parent directory of the package that contains this settings file.
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
# Extra directories searched by loaddata and the test runner for fixtures.
FIXTURE_DIRS = [
    os.path.join(PROJECT_ROOT, 'fixtures'),
]
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'db.sqlite',                      # Or path to database file if using sqlite3.
        'USER': '',                      # Not used with sqlite3.
        'PASSWORD': '',                  # Not used with sqlite3.
        'HOST': '',                      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',                      # Set to empty string for default. Not used with sqlite3.
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
    os.path.join(PROJECT_ROOT, 'static'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# NOTE(review): acceptable only because this is a throwaway test project.
SECRET_KEY = 'mzdvd*#0=$g(-!v_vj_7^(=zrh3klia(u&cqd3nr7p^khh^ui#'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'test_remote_project.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'test_remote_project.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    os.path.join(PROJECT_ROOT, 'templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    # Third-party apps and the local test apps exercised by this project:
    'cities_light',
    'djangorestframework',
    'south',
    'autocomplete_light',
    'remote_autocomplete',
    'remote_autocomplete_inline',
    'navigation_autocomplete',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        },
        # Console handler so request and cities_light debug output is visible
        # while running the test project.
        'console':{
            'level':'DEBUG',
            'class':'logging.StreamHandler',
        },
    },
    'loggers': {
        'django.request': {
            'handlers':['console'],
            'propagate': True,
            'level':'DEBUG',
        },
        'cities_light': {
            'handlers':['console'],
            'propagate': True,
            'level':'DEBUG',
        },
    }
}
|
dsanders11/django-autocomplete-light
|
test_remote_project/test_remote_project/settings.py
|
Python
|
mit
| 5,785
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
pubmed.py -- tools for identifying pubmed papers, retrieving citation data for pubmed papers, and loading those
papers into VIVO
"""
import logging
import httplib  # Python 2 only; this module was renamed http.client in Python 3
__author__ = "Michael Conlon"
__copyright__ = "Copyright (c) 2016 Michael Conlon"
__license__ = "New BSD license"
__version__ = "0.2"
# Establish logging
logger = logging.getLogger(__name__)
# Catalyst service access
# Host and path of the Harvard Catalyst GETPMIDs author-disambiguation service.
HOST = "profiles.catalyst.harvard.edu"
API_URL = "/services/GETPMIDs/default.asp"
def get_person_catalyst_pmids(uri, query_parms):
    """
    Given a person uri, collect the attributes needed to call the catalyst
    service and return the pmids the service finds for that person.

    :param uri: the uri of a person in VIVO
    :param query_parms: VIVO SPARQL endpoint parameters passed to vivo_query
    :return: list of pmids returned by the catalyst service

    NOTE(review): this function looks unfinished -- the SPARQL query has no
    predicate/object pattern for <uri>, and only the first name is pulled
    from the result. Verify against the intended VIVO data model.
    """
    from pump.vivopump import vivo_query
    query = """
    SELECT ?first ?middle ?last ?email ?affiliation
    WHERE {
        <{}>
    }
    """
    # Bug fix: the previous code used query.format(uri), which raises
    # ValueError because the query text contains literal SPARQL braces.
    # Use replace(), the same idiom as get_person_vivo_pmids below.
    query = query.replace('{}', uri)
    a = vivo_query(query, query_parms)
    first = a['results']['bindings'][0]['first']['value']
    middle = None
    last = None
    # Use an empty list (not None) so get_catalyst_pmids_xml can iterate the
    # email list without raising TypeError.
    emails = []
    affiliations = None
    return get_catalyst_pmids(first, middle, last, emails, affiliations)
def get_person_vivo_pmids(uri, query_parms):
    """
    Query VIVO for a person's publications that carry a pmid.

    :param uri: the uri of a person in VIVO
    :param query_parms: VIVO SPARQL endpoint parameters passed to vivo_query
    :return: dictionary keyed by pmid; each value is the uri of the pub
    """
    from pump.vivopump import vivo_query
    query = """SELECT (MAX(?paper_uri) AS ?puri) ?pmid
    WHERE {
        <{}> vivo:relatedBy ?a .
        ?a a vivo:Authorship .
        ?a vivo:relates ?paper_uri .
        ?paper_uri a bibo:AcademicArticle .
        ?paper_uri bibo:pmid ?pmid .
    }
    GROUP BY ?pmid
    """
    # Substitute the person uri into the query (replace, not format, because
    # the query text contains literal SPARQL braces).
    response = vivo_query(query.replace('{}', uri), query_parms)
    bindings = response['results']['bindings']
    return dict((row['pmid']['value'], row['puri']['value']) for row in bindings)
def get_catalyst_pmids(first, middle, last, email, affiliation=None):
    """
    Call the Harvard Catalyst service for an author and return the PMIDs the
    service believes are likely works of that author.

    :param first: author first name
    :param middle: author middle name
    :param last: author last name
    :param email: author email(s) as a list
    :param affiliation: author affiliation as a list
    :return: list of pmid strings extracted from the service's XML response
    """
    from xml.dom.minidom import parseString  # tools for handling XML in python
    xml_text = get_catalyst_pmids_xml(first, middle, last, email, affiliation)
    # Parse the raw XML reply and pull out the text of every <PMID> element.
    document = parseString(xml_text)
    pmids = []
    for node in document.getElementsByTagName('PMID'):
        pmids.append(node.childNodes[0].data)
    return pmids
def get_catalyst_pmids_xml(first, middle, last, email, affiliation=None):
    """
    Given author name parts (first, middle and last), email(s) and optional affiliation(s),
    return the PMIDs of papers that are likely to be the works of the author. The Harvard
    Catalyst GETPMIDS service is called.

    :param first: author first name
    :param middle: author middle name
    :param last: author last name
    :param email: iterable of author email addresses
    :param affiliation: optional iterable of author affiliations
    :return: raw XML response body returned by the catalyst service
    """
    # XML request template; the four {} placeholders are filled via
    # str.format below (safe here: the template contains no literal braces).
    # NOTE(review): the template starts with a newline/indentation before the
    # XML declaration, which is technically invalid XML -- the service
    # apparently tolerates it; confirm before changing.
    request = """
    <?xml version="1.0"?>
    <FindPMIDs>
        <Name>
            <First>{}</First>
            <Middle>{}</Middle>
            <Last>{}</Last>
            <Suffix/>
        </Name>
        <EmailList>
            {}
        </EmailList>
        <AffiliationList>
            {}
        </AffiliationList>
        <LocalDuplicateNames>1</LocalDuplicateNames>
        <RequireFirstName>false</RequireFirstName>
        <MatchThreshold>0.98</MatchThreshold>
    </FindPMIDs>"""
    if affiliation is None:
        affiliation = []
    # Wrap each email/affiliation in its XML element and splice the joined
    # strings into the template.
    email_string = ''.join(['<Email>' + em + '</Email>' for em in email])
    affil_string = ''.join(['<Affiliation>' + aff + '</Affiliation>' for aff in affiliation])
    request = request.format(first, middle, last, email_string, affil_string)
    # httplib.HTTP is the legacy Python 2 HTTP interface; the calls below are
    # order-dependent (request line, headers, endheaders, body, reply).
    webservice = httplib.HTTP(HOST)
    webservice.putrequest("POST", API_URL)
    webservice.putheader("Host", HOST)
    webservice.putheader("User-Agent", "Python post")
    webservice.putheader("Content-type", "text/xml; charset=\"UTF-8\"")
    webservice.putheader("Content-length", "%d" % len(request))
    webservice.endheaders()
    webservice.send(request)
    statuscode, statusmessage, header = webservice.getreply()
    result = webservice.getfile().read()
    logger.debug(u"Request {}\n\tStatus Code {} Message {} Header {}\n\tResult {}".format(request, statuscode,
                                                                                          statusmessage, header,
                                                                                          result))
    return result
def get_pubmed_entrez(pmid):
"""
Given a PubMed ID, return the current the paper metadata from PubMed as an Entrez result set
"""
from Bio import Entrez
import time
Entrez.email = 'mconlon@ufl.edu'
# Get record(s) from Entrez. Retry if Entrez does not respond
start = 2.0
retries = 10
count = 0
while True:
try:
handle = Entrez.efetch(db="pubmed", id=pmid, retmode="xml")
records = Entrez.parse(handle)
break
except:
count += 1
if count > retries:
return {}
sleep_seconds = start**count
print "<!-- Failed Entrez query. Count = " + str(count)+ \
" Will sleep now for " + str(sleep_seconds)+ \
" seconds and retry -->"
time.sleep(sleep_seconds) # increase the wait time with each retry
return records
def get_pubmed_paper(pmid):
"""
Given an Entrez structure, return a simplified struture with attributes useful for VIVO
:param pmid:
:return: paper
"""
from json import dumps
from datetime import date
month_number = {'Jan': 1, 'Feb': 2, 'Mar': 3, 'Apr': 4, 'May': 6, 'Jun': 6, 'Jul': 7, 'Aug': 8, 'Sep': 9,
'Oct': 10, 'Nov': 11, 'Dec': 12}
paper = {}
grants_cited = []
keyword_list = []
# Find the desired attributes in the record structures returned by Entrez
for record in get_pubmed_entrez(pmid):
print "Entrez record:", dumps(record, indent=4)
article_id_list = record['PubmedData']['ArticleIdList']
for article_id in article_id_list:
attributes = article_id.attributes
if 'IdType' in attributes:
if attributes['IdType'] == 'pmc':
paper["pmcid"] = str(article_id)
paper['full_text_uri'] = "http://www.ncbi.nlm.nih.gov/pmc/articles/" + \
paper['pmcid'].upper()+ "/pdf"
if attributes['IdType'] == 'mid':
paper["nihmsid"] = str(article_id)
if attributes['IdType'] == 'pubmed':
paper["pmid"] = str(article_id)
if attributes['IdType'] == 'doi':
paper["doi"] = str(article_id)
try:
paper['abstract'] = \
str(record['MedlineCitation']['Article']['Abstract']['AbstractText'][0])
except KeyError:
pass
try:
paper['title'] = \
record['MedlineCitation']['Article']['ArticleTitle']
except KeyError:
pass
try:
pages = record['MedlineCitation']['Article']['Pagination']['MedlinePgn']
pages_list = pages.split('-')
try:
start = pages_list[0]
try:
istart = int(start)
except:
istart = -1
except:
start = ""
istart = -1
try:
end = pages_list[1]
if end.find(';') > 0:
end = end[:end.find(';')]
except:
end = ""
if start != "" and istart > -1 and end != "":
if int(start) > int(end):
if int(end) > 99:
end = str(int(start) - (int(start) % 1000) + int(end))
elif int(end) > 9:
end = str(int(start) - (int(start) % 100) + int(end))
else:
end = str(int(start) - (int(start) % 10) + int(end))
paper['page_start'] = start
paper['page_end'] = end
except KeyError:
pass
try:
paper['issn'] = \
record['MedlineCitation']['Article']['Journal']['ISSN']
except KeyError:
pass
try:
paper['volume'] = \
record['MedlineCitation']['Article']['Journal']['JournalIssue']['Volume']
except KeyError:
pass
try:
paper['issue'] = \
record['MedlineCitation']['Article']['Journal']['JournalIssue']['Issue']
except KeyError:
pass
try:
month = month_number[record['MedlineCitation']['Article']['Journal']['JournalIssue']['PubDate']['Month']]
day = int(record['MedlineCitation']['Article']['Journal']['JournalIssue']['PubDate']['Day'])
year = int(record['MedlineCitation']['Article']['Journal']['JournalIssue']['PubDate']['Year'])
paper['pub_date'] = date(year, month, day).isoformat()
except KeyError:
pass
try:
paper['author_list'] = record['MedlineCitation']['Article']['AuthorList']
except KeyError:
pass
try:
keywords = record['MedlineCitation']['MeshHeadingList']
for keyword in keywords:
keyword_list.append(str(keyword['DescriptorName']))
paper['keyword_list'] = keyword_list
except KeyError:
pass
try:
grants = record['MedlineCitation']['Article']['GrantList']
for grant in grants:
grants_cited.append(grant['GrantID'])
paper['grants_cited'] = grants_cited
except KeyError:
pass
return paper
|
mconlon17/vivo-pump
|
pubmed/pubmed.py
|
Python
|
bsd-2-clause
| 10,551
|
"""
Django settings for voting_framework project.
Generated by 'django-admin startproject' using Django 1.8.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '16qomo=nc=93*w^l@+gkj_q2ez9@eywxf5ks0if8%$+2qrh9+g'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
)
ROOT_URLCONF = 'voting_framework.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'voting_framework.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
|
jwebbed/student-voting-framework
|
voting_framework/settings.py
|
Python
|
gpl-3.0
| 2,750
|
import pdb
import numpy as np
import cPickle as pickle  # Python 2 pickle; NOTE(review): appears unused in this module
from utils import black
from utils import loggen
#from astropy.cosmology import FlatLambdaCDM
from astropy.cosmology import Planck15 as cosmo
import astropy.units as u
from lmfit import Parameters, minimize, fit_report
# Physical constants used throughout the SED routines.
L_sun = 3.839e26 # W
c = 299792458.0 # m/s
def find_nearest_index(array_in, value):
    """
    For each target in ``value``, return the index of the entry of
    ``array_in`` that is numerically closest to it.

    :param array_in: 1-d array to search
    :param value: sequence of target values (must support len())
    :return: integer index array, one index per target value
    """
    n_targets = len(value)
    # Broadcast targets as a column so each row holds |array_in - target|.
    targets = np.reshape(value, (n_targets, 1))
    return np.abs(array_in - targets).argmin(axis=1)
def sed(p, nu_in, T, betain, alphain):
    '''
    Evaluate the graybody + Wien-side power-law SED model on a frequency grid.

    :param p: lmfit Parameters object; only the 'Ain' entry (amplitude per
        galaxy) is read here
    :param nu_in: frequencies [Hz] at which to evaluate the model
    :param T: dust temperature(s)
    :param betain: graybody emissivity index (per galaxy)
    :param alphain: power-law index replacing the Wien side (per galaxy)
    :return: model SED array, shape (n_galaxies, len(nu_in)) -- presumably in
        Jy (see docstrings of the callers); TODO confirm units
    '''
    v = p.valuesdict()
    A0= v['Ain']
    A=np.asarray(A0)
    #T = v['Tin']
    #betain = v['betain']
    #alphain = v['alphain']
    ng = np.size(A)
    ns = len(nu_in)  # NOTE(review): unused in this function
    # Power-law normalization chosen so the graybody and the power law match
    # (value and first derivative) at the stitch frequency nu_cut.  The
    # numeric literals appear to be mantissas of h (6.626e-34), k_B (1.38e-23)
    # and c (2.998e8), with the powers of ten collected in ``expo``.
    base = 2.0 * (6.626)**(-2.0 - betain - alphain) * (1.38)**(3. + betain + alphain) / (2.99792458)**2.0
    expo = 34.0 * (2.0 + betain + alphain) - 23.0 * (3.0 + betain + alphain) - 16.0 + 26.0
    K = base * 10.0**expo
    w_num = A * K * (T * (3.0 + betain + alphain))**(3.0 + betain + alphain)
    w_den = (np.exp(3.0 + betain + alphain) - 1.0)
    w_div = w_num/w_den
    # Stitch frequency; 0.208367e11 is ~ k_B/h in Hz/K.
    nu_cut = (3.0 + betain + alphain) * 0.208367e11 * T
    # Graybody below nu_cut, power law above; reshape to (ng, 1) so per-galaxy
    # parameters broadcast against the frequency grid.
    graybody = np.reshape(A,(ng,1)) * nu_in**np.reshape(betain,(ng,1)) * black(nu_in, T) / 1000.0
    powerlaw = np.reshape(w_div,(ng,1)) * nu_in**np.reshape(-1.0 * alphain,(ng,1))
    graybody[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]=powerlaw[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]
    return graybody
def sed_direct(A, nu_in, T, betain, alphain):
    '''
    Same graybody + power-law SED model as sed(), but taking the amplitude A
    directly instead of unpacking it from an lmfit Parameters object.
    betain/alphain are used as given (scalars broadcast over all galaxies).

    :param A: amplitude(s), one per galaxy
    :param nu_in: frequencies [Hz] at which to evaluate the model
    :param T: dust temperature(s)
    :param betain: graybody emissivity index
    :param alphain: Wien-side power-law index
    :return: model SED array, shape (n_galaxies, len(nu_in))
    '''
    ng = np.size(A)
    ns = len(nu_in)  # NOTE(review): unused in this function
    # Power-law normalization matching the graybody at nu_cut (see sed()).
    base = 2.0 * (6.626)**(-2.0 - betain - alphain) * (1.38)**(3. + betain + alphain) / (2.99792458)**2.0
    expo = 34.0 * (2.0 + betain + alphain) - 23.0 * (3.0 + betain + alphain) - 16.0 + 26.0
    K = base * 10.0**expo
    w_num = A * K * (T * (3.0 + betain + alphain))**(3.0 + betain + alphain)
    w_den = (np.exp(3.0 + betain + alphain) - 1.0)
    w_div = w_num/w_den
    # Stitch frequency; 0.208367e11 is ~ k_B/h in Hz/K.
    nu_cut = (3.0 + betain + alphain) * 0.208367e11 * T
    #graybody = np.reshape(A,(ng,1)) * nu_in**np.reshape(np.repeat(betain,ng),[ng,1]) * black(nu_in, T) / 1000.0
    #powerlaw = np.reshape(w_div,(ng,1)) * nu_in**np.reshape(np.repeat(alphain,ng),[ng,1])
    graybody = np.reshape(A,(ng,1)) * nu_in**betain * black(nu_in, T) / 1000.0
    powerlaw = np.reshape(w_div,(ng,1)) * nu_in**(-1.0 * alphain)
    graybody[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]=powerlaw[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]
    return graybody
def sedint(p, nu_in, Lir, T, betain, alphain):
    '''
    Residual function for lmfit: integrate the graybody + power-law SED over
    nu_in and return (integral - Lir), flattened, so minimize() can drive the
    amplitude 'Ain' until the model's integrated flux matches Lir.

    :param p: lmfit Parameters object; only the 'Ain' entry is read
    :param nu_in: frequency grid [Hz] over which the SED is integrated
    :param Lir: target integrated flux [Jy x Hz], one value per galaxy
    :param T: dust temperature(s)
    :param betain: graybody emissivity index (per galaxy)
    :param alphain: Wien-side power-law index (per galaxy)
    :return: 1-d residual array: sum(SED * dnu) - Lir
    '''
    v = p.valuesdict()
    A0 = v['Ain']
    A=np.asarray(A0)
    #pdb.set_trace()
    #T = v['Tin']
    #betain = v['betain']
    #alphain = v['alphain']
    #print 'A is ' + str(A)
    ns = len(nu_in)
    #pdb.set_trace()
    ng = np.size(A)
    # Power-law normalization matching the graybody at nu_cut (see sed()).
    base = 2.0 * (6.626)**(-2.0 - betain - alphain) * (1.38)**(3. + betain + alphain) / (2.99792458)**2.0
    expo = 34.0 * (2.0 + betain + alphain) - 23.0 * (3.0 + betain + alphain) - 16.0 + 26.0
    K = base * 10.0**expo
    w_num = A * K * (T * (3.0 + betain + alphain))**(3.0 + betain + alphain)
    w_den = (np.exp(3.0 + betain + alphain) - 1.0)
    w_div = w_num/w_den
    # Stitch frequency; 0.208367e11 is ~ k_B/h in Hz/K.
    nu_cut = (3.0 + betain + alphain) * 0.208367e11 * T
    #nu_cut_ind = find_nearest_index(nu_in,nu_cut)
    graybody = np.reshape(A,(ng,1)) * nu_in**np.reshape(betain,(ng,1)) * black(nu_in, T) / 1000.0
    powerlaw = np.reshape(w_div,(ng,1)) * nu_in**np.reshape(-1.0 * alphain,(ng,1))
    graybody[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]=powerlaw[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]
    #pdb.set_trace()
    # Trapezoid-like integration: per-bin widths, first width duplicated so
    # dnu has the same length as nu_in.
    dnu = nu_in[1:ns] - nu_in[0:ns-1]
    dnu = np.append(dnu[0],dnu)
    return np.ravel([np.sum(graybody * dnu, axis=1) - Lir])
def sedint2(p, nu_in, Lir, ng):
    '''
    Variant of sedint() that reads ALL model parameters (Ain, Tin, betain,
    alphain) from the lmfit Parameters object instead of taking T/beta/alpha
    as arguments.  Returns the integrated-flux residual for lmfit.

    :param p: lmfit Parameters with entries 'Ain', 'Tin', 'betain', 'alphain'
    :param nu_in: frequency grid [Hz] over which the SED is integrated
    :param Lir: target integrated flux [Jy x Hz]
    :param ng: number of galaxies (rows) in the model
    :return: 1-d residual array: sum(SED * dnu) - Lir
    '''
    v = p.valuesdict()
    A = v['Ain']
    T = v['Tin']
    betain = v['betain']
    alphain = v['alphain']
    print 'A is ' + str(A)
    ns = len(nu_in)
    #ng = len(A)
    # Power-law normalization matching the graybody at nu_cut (see sed()).
    base = 2.0 * (6.626)**(-2.0 - betain - alphain) * (1.38)**(3. + betain + alphain) / (2.99792458)**2.0
    expo = 34.0 * (2.0 + betain + alphain) - 23.0 * (3.0 + betain + alphain) - 16.0 + 26.0
    K = base * 10.0**expo
    w_num = A * K * (T * (3.0 + betain + alphain))**(3.0 + betain + alphain)
    w_den = (np.exp(3.0 + betain + alphain) - 1.0)
    w_div = w_num/w_den
    # Stitch frequency; 0.208367e11 is ~ k_B/h in Hz/K.
    nu_cut = (3.0 + betain + alphain) * 0.208367e11 * T
    #nu_cut_ind = find_nearest_index(nu_in,nu_cut)
    graybody = np.reshape(A,(ng,1)) * nu_in**np.reshape(betain,(ng,1)) * black(nu_in, T) / 1000.0
    powerlaw = np.reshape(w_div,(ng,1)) * nu_in**np.reshape(-1.0 * alphain,(ng,1))
    graybody[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]=powerlaw[np.where(nu_in >= np.reshape(nu_cut,(ng,1)))]
    #pdb.set_trace()
    # Per-bin widths, first width duplicated so dnu matches nu_in in length.
    dnu = nu_in[1:ns] - nu_in[0:ns-1]
    dnu = np.append(dnu[0],dnu)
    return np.ravel([np.sum(graybody * dnu, axis=1) - Lir])
def simple_flux_from_greybody(lambdavector, Trf = None, b = None, Lrf = None, zin = None, ngal = None):
    '''
    Return flux densities at any wavelength of interest (in the range 1-10000 micron),
    assuming a galaxy (at given redshift) graybody spectral energy distribution (SED),
    with a power law replacing the Wien part of the spectrum to account for the
    variability of dust temperatures within the galaxy. The two different functional
    forms are stitched together by imposing that the two functions and their first
    derivatives coincide.

    Inputs:
    lambdavector = array of wavelengths of interest [in microns]
    Trf = rest-frame temperature [in K]
    b = spectral index of the graybody emissivity law (see Hildebrand 1985)
    Lrf = rest-frame FIR bolometric luminosity [in L_sun]
    zin = galaxy redshift
    ngal = number of galaxies fit simultaneously

    Returns the model flux densities at the observed frequencies of
    lambdavector.

    AUTHOR:
    Lorenzo Moncelsi [moncelsi@caltech.edu]
    HISTORY:
    20June2012: created in IDL
    November2015: converted to Python
    '''
    nuvector = c * 1.e6 / lambdavector # Hz
    # Dense model frequency grid over which the SED is integrated.
    nsed = 1e4
    lambda_mod = loggen(1e3, 8.0, nsed) # microns
    nu_mod = c * 1.e6/lambda_mod # Hz
    # 4 * pi * D_L^2; units are L_sun/(Jy x Hz)
    conversion = 4.0 * np.pi *(1.0E-13 * cosmo.luminosity_distance(zin) * 3.08568025E22)**2.0 / L_sun
    Lir = Lrf / conversion # Jy x Hz
    Ain = np.zeros(ngal) + 1.0e-36 #good starting parameter
    betain = np.zeros(ngal) + b
    alphain= np.zeros(ngal) + 2.0
    fit_params = Parameters()
    fit_params.add('Ain', value= Ain)
    # THE LM FIT IS HERE.
    # Bug fix: sedint's signature is (p, nu_in, Lir, T, betain, alphain); the
    # previous call passed an extra ``ngal`` argument, shifting every
    # parameter one place to the right and raising a TypeError.  Likewise
    # sed() takes (p, nu_in, T, betain, alphain) and was also handed ngal.
    # Pass the per-galaxy betain/alphain arrays, matching the calling
    # convention of single_simple_flux_from_greybody() below.
    Pfin = minimize(sedint, fit_params, args=(nu_mod, Lir.value, Trf/(1.+zin), betain, alphain))
    # Evaluate the fitted model at the requested observed frequencies.
    flux_mJy = sed(Pfin.params, nuvector, Trf/(1.+zin), betain, alphain)
    return flux_mJy
def single_simple_flux_from_greybody(lambdavector, Trf = None, b = 2.0, Lrf = None, zin = None):
    '''
    Return flux densities at any wavelength of interest (in the range 1-10000 micron),
    assuming a galaxy (at given redshift) graybody spectral energy distribution (SED),
    with a power law replacing the Wien part of the spectrum to account for the
    variability of dust temperatures within the galaxy. The two different functional
    forms are stitched together by imposing that the two functions and their first
    derivatives coincide.

    Inputs:
    lambdavector = array of wavelengths of interest [in microns]
    Trf = rest-frame temperature [in K]
    b = spectral index of the graybody emissivity law [default = 2; see Hildebrand 1985]
    Lrf = rest-frame FIR bolometric luminosity [in L_sun]
    zin = galaxy redshift

    Returns the model flux densities at the observed frequencies of
    lambdavector for a single galaxy.

    AUTHOR:
    Lorenzo Moncelsi [moncelsi@caltech.edu]
    HISTORY:
    20June2012: created in IDL
    November2015: converted to Python
    '''
    nuvector = c * 1.e6 / lambdavector # Hz
    # Dense model frequency grid over which the SED is integrated.
    nsed = 1e4
    lambda_mod = loggen(1e3, 8.0, nsed) # microns
    nu_mod = c * 1.e6/lambda_mod # Hz
    # 4 * pi * D_L^2; units are L_sun/(Jy x Hz)
    conversion = 4.0 * np.pi *(1.0E-13 * cosmo.luminosity_distance(zin) * 3.08568025E22)**2.0 / L_sun
    Lir = Lrf / conversion # Jy x Hz
    Ain = 1.0e-36 #good starting parameter
    alphain= 2.0
    # (Removed unused locals ``nwv`` and ``betain``; ``b`` is passed through
    # to sedint/sed directly.)
    fit_params = Parameters()
    fit_params.add('Ain', value= Ain)
    # THE LM FIT IS HERE: solve for the amplitude that reproduces Lir.
    Pfin = minimize(sedint, fit_params, args=(nu_mod,Lir.value,Trf/(1.+zin),b,alphain))
    # Evaluate the fitted model at the requested observed frequencies.
    flux_mJy=sed(Pfin.params,nuvector,Trf/(1.+zin),b,alphain)
    return flux_mJy
def single_simple_rest_frame_flux_from_greybody(lambdavector, Trf = None, b = 2.0, Lrf = None, zin = None):
    '''
    Return flux densities at the rest-frame wavelength of interest (in the range 1-10000 micron),
    assuming a galaxy (at given redshift) graybody spectral energy distribution (SED),
    with a power law replacing the Wien part of the spectrum to account for the
    variability of dust temperatures within the galaxy. The two different functional
    forms are stitched together by imposing that the two functions and their first
    derivatives coincide. The code contains the nitty-gritty details explicitly.
    Cosmology assumed: H0=70.5, Omega_M=0.274, Omega_L=0.726 (Hinshaw et al. 2009)
    Inputs:
    b = spectral index of the emissivity law for the graybody [default = 2; see Hildebrand 1985]
    Trf = rest-frame temperature [in K; default = 20K]
    Lrf = rest-frame FIR bolometric luminosity [in L_sun; default = 10^10]
    zin = galaxy redshift [default = 0.001]
    lambdavector = array of wavelengths of interest [in microns; e.g. (24, 70, 160, 250, 350, 500)]
    Returns: rest-frame flux densities at `lambdavector` [mJy, as produced by `sed`].
    AUTHOR:
    Lorenzo Moncelsi [moncelsi@caltech.edu]
    HISTORY:
    20June2012: created in IDL
    November2015: converted to Python
    '''
    # Apply the documented defaults; previously a call without these keyword
    # arguments crashed on None arithmetic.
    if Trf is None:
        Trf = 20.0
    if Lrf is None:
        Lrf = 1e10
    if zin is None:
        zin = 1e-3

    # Rest-frame frequencies: redshift the observed-frame frequency grid.
    nuvector = (c * 1.e6 / lambdavector) / (1. + zin)  # Hz

    nsed = 1e4
    lambda_mod = loggen(1e3, 8.0, nsed)  # microns
    nu_mod = c * 1.e6 / lambda_mod  # Hz

    # 4 * pi * D_L^2 -- units are L_sun/(Jy x Hz)
    conversion = 4.0 * np.pi * (1.0E-13 * cosmo.luminosity_distance(zin) * 3.08568025E22)**2.0 / L_sun
    Lir = Lrf / conversion  # Jy x Hz

    Ain = 1.0e-36  # good starting parameter for the amplitude fit
    alphain = 2.0  # power-law index replacing the Wien side of the spectrum

    fit_params = Parameters()
    fit_params.add('Ain', value=Ain)

    # Least-squares fit of the SED amplitude so its integral matches Lir.
    Pfin = minimize(sedint, fit_params, args=(nu_mod, Lir.value, Trf / (1. + zin), b, alphain))
    flux_mJy = sed(Pfin.params, nuvector, Trf / (1. + zin), b, alphain)
    return flux_mJy
def amplitude_of_best_fit_greybody(Trf = None, b = 2.0, Lrf = None, zin = None):
    '''
    Same as single_simple_flux_from_greybody, but returns only the best-fit
    SED amplitude, intended for building an amplitude lookup table.

    Inputs (defaults mirror single_simple_flux_from_greybody):
    b = graybody emissivity index [default 2]
    Trf = rest-frame temperature [K; default 20]
    Lrf = rest-frame FIR bolometric luminosity [L_sun; default 10^10]
    zin = galaxy redshift [default 0.001]
    Returns: the fitted 'Ain' amplitude (float).
    '''
    # Apply the documented defaults; previously a call without these keyword
    # arguments crashed on None arithmetic.
    if Trf is None:
        Trf = 20.0
    if Lrf is None:
        Lrf = 1e10
    if zin is None:
        zin = 1e-3

    nsed = 1e4
    lambda_mod = loggen(1e3, 8.0, nsed)  # microns
    nu_mod = c * 1.e6 / lambda_mod  # Hz

    # 4 * pi * D_L^2 -- units are L_sun/(Jy x Hz)
    conversion = 4.0 * np.pi * (1.0E-13 * cosmo.luminosity_distance(zin) * 3.08568025E22)**2.0 / L_sun
    Lir = Lrf / conversion  # Jy x Hz

    Ain = 1.0e-36  # good starting parameter for the amplitude fit
    alphain = 2.0  # power-law index replacing the Wien side of the spectrum

    fit_params = Parameters()
    fit_params.add('Ain', value=Ain)

    # Least-squares fit of the SED amplitude so its integral matches Lir.
    Pfin = minimize(sedint, fit_params, args=(nu_mod, Lir.value, Trf / (1. + zin), b, alphain))
    return Pfin.params['Ain'].value
def invert_sed_neural_net(lam, Trf, Lrf, zin, wpath = '/data/pickles/simstack/ann_function_fits/', wfile = 'SED_amplitude_weights_from_neural_network_logistic_100layers_N8000.p'):
    '''Predict greybody fluxes at wavelength(s) `lam` [microns] from a
    pre-trained neural-network regression of the SED amplitude.

    Trf, Lrf, zin are arrays of rest-frame temperature, luminosity and
    redshift forming the network's input features.

    SECURITY NOTE: pickle.load executes arbitrary code on load; only point
    wpath/wfile at trusted weight files.
    '''
    # Use a context manager so the weights file handle is always closed
    # (the previous bare open() leaked it).
    with open(wpath + wfile, "rb") as weights_file:
        reg = pickle.load(weights_file)
    nuvector = c * 1.e6 / lam
    # Features stacked column-wise: one row per (Trf, Lrf, zin) sample.
    rearrange_x = np.transpose(np.array([Trf, Lrf, zin]))
    # The network predicts log10 of the (1e40-rescaled) amplitude.
    predicted_amplitude = 1e-40 * 10**reg.predict(rearrange_x)
    fluxes = sed_direct(predicted_amplitude, np.array([nuvector]), Trf/(1.+zin), betain=2.0, alphain=2.0)
    return fluxes
|
marcoviero/Utils
|
invert_sed.py
|
Python
|
mit
| 12,970
|
# This file is part of Checkmate.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright 2015 Ozge Lule(ozge.lule@ceng.metu.edu.tr),
# Esref Ozturk(esref.ozturk@ceng.metu.edu.tr)
from socket import *
from Test import Test
# Scripted Black pawn advance used to drive the server through a short game.
moves = [('Black', 'a7 a6'), ('Black', 'a6 a5'), ('Black', 'a5 a4')]

test = Test()

# Connect as Black and play the scripted moves; close the socket even if a
# send fails part-way through (the original leaked the connection).
s2 = socket(AF_INET, SOCK_STREAM)
try:
    s2.connect(("0.0.0.0", 20000))

    gameid = 1
    test.send(s2, '{"op":"connect" , "color":"Black","gameid":"%d"}' % gameid)
    test.send(s2, '{"op":"play","params":["nextmove","%s","%s"]}' % moves[0])
    test.send(s2, '{"op":"play","params":["nextmove","%s","%s"]}' % moves[1])
    test.send(s2, '{"op":"play","params":["nextmove","%s","%s"]}' % moves[2])
    test.send(s2, '{"op":"kill"}')
finally:
    s2.close()
|
esrefozturk/checkmate
|
CheckmateServerTests/test1-2.py
|
Python
|
gpl-3.0
| 1,343
|
import logging
from .Parameter import Parameter
from .Statement import Statement, InsertStatement, SelectStatement
# NOTE(review): `print_pos` is not an attribute of the stdlib logging.Logger —
# presumably the project patches the Logger class elsewhere before this module
# is imported; confirm, otherwise this line raises AttributeError at import.
logger = logging.getLogger('Procedure').print_pos
class Procedure(object):
    """A stored procedure under construction: a name, a READ/WRITE mode,
    a set of named parameters and an ordered list of statements."""

    READ = 'READ'
    WRITE = 'WRITE'

    def __init__(self, name):
        self.name = name
        self.mode = None            # READ, WRITE, or None until a mode is chosen
        self.parameters = {}        # parameter name -> Parameter
        self.statements = []        # statements in the order they were added
        self._used_parameters = set()

    @property
    def is_read(self):
        """True when the procedure is in READ mode."""
        return self.mode == self.READ

    @property
    def is_write(self):
        """True when the procedure is in WRITE mode."""
        return self.mode == self.WRITE

    def set_mode_to_read(self):
        self.mode = self.READ

    def set_mode_to_write(self):
        self.mode = self.WRITE

    def add_parameter(self, parameter: Parameter):
        """Register `parameter`, reporting a duplicate name as an error."""
        if self.parameters.get(parameter.name):
            logger.error('Procedure `%s` already has a parameter `%s`', self.name, parameter.name)
        else:
            self.parameters[parameter.name] = parameter

    def add_statement(self, statement: Statement):
        """Append `statement`, reporting mode mismatches as errors.

        Errors are only reported; the statement is still recorded so that
        all problems can be collected in one pass.
        """
        if isinstance(statement, InsertStatement) and self.is_read:
            logger.error('Insert statement only for procedure write mode')
        elif isinstance(statement, SelectStatement) and self.is_write:
            logger.error('Select statement only for procedure read mode')
        self.statements.append(statement)

    def __str__(self):
        return '<{}>'.format(self.name)
|
bmstu-iu9/mill-db
|
pymilldb/context/Procedure.py
|
Python
|
mit
| 1,431
|
from django.conf.urls import patterns, include, url
from matches import urls
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Legacy (pre-1.10) Django URL configuration: every request is delegated to
# the `matches` app's urlconf. `patterns('', ...)` was removed in Django 1.10;
# this file targets the old API.
urlpatterns = patterns('',
    # Examples:
    url(r'^', include(urls)),
    # url(r'^Dota2Stats/', include('Dota2Stats.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
|
mmbob/Dota2Stats
|
Dota2Stats/urls.py
|
Python
|
mit
| 568
|
""""""
import os
import abc
import sys
import logging
# environment variable for configuration
CONFIG_ENV_VAR = "CONFIG"

LOGGER_NAME = "logger_for_template"  # CHOOSE YOUR OWN NAME FOR YOUR APP


def configure_logging(log_level):
    """Configure the application logger to emit to stdout with timestamp,
    process and source-path information.

    Safe to call repeatedly: existing handlers are removed first, so the
    logger always ends up with exactly one stream handler at `log_level`.
    """
    logger = logging.getLogger(LOGGER_NAME)
    logger.setLevel(log_level)
    # Iterate over a copy: removing from the live `handlers` list while
    # iterating it skips every other handler (the original bug), which could
    # leave stale handlers attached and duplicate log lines.
    for h in list(logger.handlers):
        logger.removeHandler(h)
    fmt = '%(asctime)s.%(msecs)d p%(process)s {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s'
    datefmt = '%Y-%m-%d %H:%M:%S'
    ch = logging.StreamHandler(sys.stdout)
    ch.setLevel(log_level)
    formatter = logging.Formatter(fmt, datefmt=datefmt)
    ch.setFormatter(formatter)
    logger.addHandler(ch)
class ConfBasic(abc.ABC):
    """Abstract base class holding behaviour shared by every configuration."""

    name = "conf_basic_ABC"

    def __init__(self):
        # Every configuration switches logging on as soon as it is built.
        configure_logging(logging.DEBUG)

    def __repr__(self):
        """A configuration is represented simply by its name."""
        return self.name
class ConfDev(ConfBasic):
    """Configuration for the development scenario."""

    name = "dev"

    def __init__(self, overrides):
        """:param overrides: dict that may carry an 'a_parameter' override."""
        super().__init__()
        # Explicit None check so that falsy overrides (0, "", False) are
        # honoured; the previous `overrides.get(...) or 42` silently replaced
        # any falsy override with the default.
        value = overrides.get('a_parameter')
        self.a_parameter = 42 if value is None else value
class ConfTest(ConfBasic):
    """Example 2nd configuration, a test scenario."""
    name = "test"
    def __init__(self, overrides):
        # The test scenario deliberately ignores `overrides` and pins the
        # parameter to a fixed value.
        super().__init__()
        self.a_parameter = 99
# Registry of the configurations `get` can instantiate, matched by `name`.
available_configurations = [ConfDev, ConfTest]


def get(configuration=None, overrides=None):
    """Return an instantiated configuration matched by name.

    :param configuration: name of the configuration to build; when None the
        CONFIG environment variable is consulted instead.
    :param overrides: optional dict of parameter overrides handed to the
        configuration's constructor.
    :raises ValueError: if no configuration matches.
    """
    # Avoid the mutable-default-argument trap: a shared `{}` default would
    # leak state across calls if any configuration ever mutated it.
    if overrides is None:
        overrides = {}
    if configuration is None:
        configuration = os.getenv(CONFIG_ENV_VAR)
    # look through the available configurations, find the
    # match and instantiate it
    for conf_cls in available_configurations:
        if conf_cls.name == configuration:
            return conf_cls(overrides)
    configuration_names = [c.name for c in available_configurations]
    print("No configuration matches to '{}', you must pass in a configuration from {}".format(configuration, configuration_names))
    raise ValueError("No matching configuration")
|
ianozsvald/python_template_with_config
|
python_template_with_config/config.py
|
Python
|
mit
| 2,420
|
"""
Mouse events.
How it works
------------
The renderer has a 2 dimensional grid of mouse event handlers.
(`prompt_toolkit.layout.MouseHandlers`.) When the layout is rendered, the
`Window` class will make sure that this grid will also be filled with
callbacks. For vt100 terminals, mouse events are received through stdin, just
like any other key press. There is a handler among the key bindings that
catches these events and forwards them to such a mouse event handler. It passes
through the `Window` class where the coordinates are translated from absolute
coordinates to coordinates relative to the user control, and there
`UIControl.mouse_handler` is called.
"""
from __future__ import unicode_literals
# Names exported by `from prompt_toolkit.mouse_events import *`.
__all__ = (
    'MouseEventType',
    'MouseEvent'
)
class MouseEventType:
    # String constants identifying the kind of event carried by a
    # `MouseEvent` (button press/release and scroll wheel directions).
    MOUSE_UP = 'MOUSE_UP'
    MOUSE_DOWN = 'MOUSE_DOWN'
    SCROLL_UP = 'SCROLL_UP'
    SCROLL_DOWN = 'SCROLL_DOWN'
MouseEventTypes = MouseEventType # Deprecated: plural for backwards compatibility.
class MouseEvent(object):
    """
    Mouse event, delivered to `UIControl.mouse_handler`.

    :param position: `Point` instance (coordinates relative to the control).
    :param event_type: one of the `MouseEventType` constants.
    """
    def __init__(self, position, event_type):
        self.position = position
        self.event_type = event_type

    def __repr__(self):
        return 'MouseEvent({!r}, {!r})'.format(self.position, self.event_type)
|
unnikrishnankgs/va
|
venv/lib/python3.5/site-packages/prompt_toolkit/mouse_events.py
|
Python
|
bsd-2-clause
| 1,380
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core.util import sdk_no_wait
from azure.mgmt.loganalytics.models import Cluster, ClusterSku, KeyVaultProperties, Identity, ClusterPatch
def create_log_analytics_cluster(client, resource_group_name, cluster_name, sku_capacity,
                                 sku_name='CapacityReservation', identity_type='SystemAssigned',
                                 location=None, tags=None, no_wait=False):
    """Create (or begin creating) a Log Analytics cluster.

    Assembles the Cluster model from the CLI arguments and hands it to the
    SDK's begin_create_or_update long-running operation; with no_wait=True
    the operation is not polled.
    """
    cluster_instance = Cluster(
        location=location,
        tags=tags,
        sku=ClusterSku(capacity=sku_capacity, name=sku_name),
        identity=Identity(type=identity_type),
    )
    return sdk_no_wait(no_wait, client.begin_create_or_update,
                       resource_group_name, cluster_name, cluster_instance)
def update_log_analytics_cluster(client, resource_group_name, cluster_name,
                                 key_vault_uri=None, key_name=None, key_version=None,
                                 sku_capacity=None, tags=None, no_wait=False):
    """Patch selected properties of a Log Analytics cluster.

    Only the properties that were explicitly supplied are attached to the
    ClusterPatch; everything else is left untouched on the service side.
    """
    patch = ClusterPatch()
    # Key-vault settings are all-or-nothing: attach them only when the full
    # (uri, key name, key version) triple was supplied.
    have_key_vault = (key_vault_uri is not None
                      and key_name is not None
                      and key_version is not None)
    if have_key_vault:
        patch.key_vault_properties = KeyVaultProperties(
            key_vault_uri=key_vault_uri, key_name=key_name, key_version=key_version)
    if sku_capacity is not None:
        patch.sku = ClusterSku(capacity=sku_capacity, name='CapacityReservation')
    if tags is not None:
        patch.tags = tags
    return sdk_no_wait(no_wait, client.begin_update, resource_group_name, cluster_name, patch)
def list_log_analytics_clusters(client, resource_group_name=None):
    """List Log Analytics clusters, scoped to a resource group when one is
    given (a falsy group name lists across the whole subscription)."""
    if not resource_group_name:
        return client.list()
    return client.list_by_resource_group(resource_group_name)
|
yugangw-msft/azure-cli
|
src/azure-cli/azure/cli/command_modules/monitor/operations/log_analytics_cluster.py
|
Python
|
mit
| 2,202
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urllib_request,
compat_urlparse,
)
from ..utils import (
clean_html,
int_or_none,
parse_iso8601,
unescapeHTML,
)
class BlipTVIE(InfoExtractor):
    """Extractor for blip.tv video pages, lookup-id players and api.swf embeds."""

    _VALID_URL = r'https?://(?:\w+\.)?blip\.tv/(?:(?:.+-|rss/flash/)(?P<id>\d+)|((?:play/|api\.swf#)(?P<lookup_id>[\da-zA-Z+_]+)))'

    _TESTS = [
        {
            'url': 'http://blip.tv/cbr/cbr-exclusive-gotham-city-imposters-bats-vs-jokerz-short-3-5796352',
            'md5': 'c6934ad0b6acf2bd920720ec888eb812',
            'info_dict': {
                'id': '5779306',
                'ext': 'mov',
                'title': 'CBR EXCLUSIVE: "Gotham City Imposters" Bats VS Jokerz Short 3',
                'description': 'md5:9bc31f227219cde65e47eeec8d2dc596',
                'timestamp': 1323138843,
                'upload_date': '20111206',
                'uploader': 'cbr',
                'uploader_id': '679425',
                'duration': 81,
            }
        },
        {
            # https://github.com/rg3/youtube-dl/pull/2274
            'note': 'Video with subtitles',
            'url': 'http://blip.tv/play/h6Uag5OEVgI.html',
            'md5': '309f9d25b820b086ca163ffac8031806',
            'info_dict': {
                'id': '6586561',
                'ext': 'mp4',
                'title': 'Red vs. Blue Season 11 Episode 1',
                'description': 'One-Zero-One',
                'timestamp': 1371261608,
                'upload_date': '20130615',
                'uploader': 'redvsblue',
                'uploader_id': '792887',
                'duration': 279,
            }
        },
        {
            # https://bugzilla.redhat.com/show_bug.cgi?id=967465
            'url': 'http://a.blip.tv/api.swf#h6Uag5KbVwI',
            'md5': '314e87b1ebe7a48fcbfdd51b791ce5a6',
            'info_dict': {
                'id': '6573122',
                'ext': 'mov',
                'upload_date': '20130520',
                'description': 'Two hapless space marines argue over what to do when they realize they have an astronomically huge problem on their hands.',
                'title': 'Red vs. Blue Season 11 Trailer',
                'timestamp': 1369029609,
                'uploader': 'redvsblue',
                'uploader_id': '792887',
            }
        },
        {
            'url': 'http://blip.tv/play/gbk766dkj4Yn',
            'md5': 'fe0a33f022d49399a241e84a8ea8b8e3',
            'info_dict': {
                'id': '1749452',
                'ext': 'mp4',
                'upload_date': '20090208',
                'description': 'Witness the first appearance of the Nostalgia Critic character, as Doug reviews the movie Transformers.',
                'title': 'Nostalgia Critic: Transformers',
                'timestamp': 1234068723,
                'uploader': 'NostalgiaCritic',
                'uploader_id': '246467',
            }
        },
        {
            # https://github.com/rg3/youtube-dl/pull/4404
            'note': 'Audio only',
            'url': 'http://blip.tv/hilarios-productions/weekly-manga-recap-kingdom-7119982',
            'md5': '76c0a56f24e769ceaab21fbb6416a351',
            'info_dict': {
                'id': '7103299',
                'ext': 'flv',
                'title': 'Weekly Manga Recap: Kingdom',
                # BUGFIX: this literal contained unescaped apostrophes inside
                # single quotes ("he's", "it's"), which is a syntax error;
                # double quotes preserve the text verbatim.
                'description': "And then Shin breaks the enemy line, and he's all like HWAH! And then he slices a guy and it's all like FWASHING! And... it's really hard to describe the best parts of this series without breaking down into sound effects, okay?",
                'timestamp': 1417660321,
                'upload_date': '20141204',
                'uploader': 'The Rollo T',
                'uploader_id': '407429',
                'duration': 7251,
                'vcodec': 'none',
            }
        },
    ]

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        lookup_id = mobj.group('lookup_id')

        # See https://github.com/rg3/youtube-dl/issues/857 and
        # https://github.com/rg3/youtube-dl/issues/4197
        if lookup_id:
            # Resolve the lookup id to a numeric video id via the redirect
            # URL's `file` query parameter.
            urlh = self._request_webpage(
                'http://blip.tv/play/%s' % lookup_id, lookup_id, 'Resolving lookup id')
            url = compat_urlparse.urlparse(urlh.geturl())
            qs = compat_urlparse.parse_qs(url.query)
            mobj = re.match(self._VALID_URL, qs['file'][0])

        video_id = mobj.group('id')

        rss = self._download_xml('http://blip.tv/rss/flash/%s' % video_id, video_id, 'Downloading video RSS')

        # Helpers producing namespaced XML tag names for the RSS document.
        def blip(s):
            return '{http://blip.tv/dtd/blip/1.0}%s' % s

        def media(s):
            return '{http://search.yahoo.com/mrss/}%s' % s

        def itunes(s):
            return '{http://www.itunes.com/dtds/podcast-1.0.dtd}%s' % s

        item = rss.find('channel/item')

        video_id = item.find(blip('item_id')).text
        title = item.find('./title').text
        description = clean_html(compat_str(item.find(blip('puredescription')).text))
        timestamp = parse_iso8601(item.find(blip('datestamp')).text)
        uploader = item.find(blip('user')).text
        uploader_id = item.find(blip('userid')).text
        duration = int(item.find(blip('runtime')).text)
        media_thumbnail = item.find(media('thumbnail'))
        thumbnail = media_thumbnail.get('url') if media_thumbnail is not None else item.find(itunes('image')).text
        categories = [category.text for category in item.findall('category')]

        formats = []
        subtitles_urls = {}

        media_group = item.find(media('group'))
        for media_content in media_group.findall(media('content')):
            url = media_content.get('url')
            role = media_content.get(blip('role'))
            # Each media URL must be resolved to its real location first.
            msg = self._download_webpage(
                url + '?showplayer=20140425131715&referrer=http://blip.tv&mask=7&skin=flashvars&view=url',
                video_id, 'Resolving URL for %s' % role)
            real_url = compat_urlparse.parse_qs(msg.strip())['message'][0]

            media_type = media_content.get('type')
            if media_type == 'text/srt' or url.endswith('.srt'):
                LANGS = {
                    'english': 'en',
                }
                lang = role.rpartition('-')[-1].strip().lower()
                langcode = LANGS.get(lang, lang)
                # The unresolved URL is stored; _get_subtitles fetches it
                # directly with a custom User-Agent.
                subtitles_urls[langcode] = url
            elif media_type.startswith('video/'):
                formats.append({
                    'url': real_url,
                    'format_id': role,
                    'format_note': media_type,
                    'vcodec': media_content.get(blip('vcodec')) or 'none',
                    'acodec': media_content.get(blip('acodec')),
                    'filesize': media_content.get('filesize'),
                    'width': int_or_none(media_content.get('width')),
                    'height': int_or_none(media_content.get('height')),
                })
        self._check_formats(formats, video_id)
        self._sort_formats(formats)

        subtitles = self.extract_subtitles(video_id, subtitles_urls)

        return {
            'id': video_id,
            'title': title,
            'description': description,
            'timestamp': timestamp,
            'uploader': uploader,
            'uploader_id': uploader_id,
            'duration': duration,
            'thumbnail': thumbnail,
            'categories': categories,
            'formats': formats,
            'subtitles': subtitles,
        }

    def _get_subtitles(self, video_id, subtitles_urls):
        subtitles = {}
        for lang, url in subtitles_urls.items():
            # For some weird reason, blip.tv serves a video instead of subtitles
            # when we request with a common UA
            req = compat_urllib_request.Request(url)
            req.add_header('User-Agent', 'youtube-dl')
            subtitles[lang] = [{
                # The extension is 'srt' but it's actually an 'ass' file
                'ext': 'ass',
                'data': self._download_webpage(req, None, note=False),
            }]
        return subtitles
class BlipTVUserIE(InfoExtractor):
    """Extractor for a blip.tv user page: returns a playlist of all the
    user's videos, collected via paginated Ajax calls."""
    _VALID_URL = r'(?:(?:https?://(?:\w+\.)?blip\.tv/)|bliptvuser:)(?!api\.swf)([^/]+)/*$'
    # Number of video ids the Ajax endpoint returns per page.
    _PAGE_SIZE = 12
    IE_NAME = 'blip.tv:user'
    _TEST = {
        'url': 'http://blip.tv/actone',
        'info_dict': {
            'id': 'actone',
            'title': 'Act One: The Series',
        },
        'playlist_count': 5,
    }
    def _real_extract(self, url):
        """Collect all video URLs for the user and return a playlist result."""
        mobj = re.match(self._VALID_URL, url)
        username = mobj.group(1)
        page_base = 'http://m.blip.tv/pr/show_get_full_episode_list?users_id=%s&lite=0&esi=1'
        page = self._download_webpage(url, username, 'Downloading user page')
        # The numeric user id is embedded in the page markup.
        mobj = re.search(r'data-users-id="([^"]+)"', page)
        page_base = page_base % mobj.group(1)
        title = self._og_search_title(page)
        # Download video ids using BlipTV Ajax calls. Result size per
        # query is limited (currently to 12 videos) so we need to query
        # page by page until there are no video ids - it means we got
        # all of them.
        video_ids = []
        pagenum = 1
        while True:
            url = page_base + "&page=" + str(pagenum)
            page = self._download_webpage(
                url, username, 'Downloading video ids from page %d' % pagenum)
            # Extract video identifiers
            ids_in_page = []
            for mobj in re.finditer(r'href="/([^"]+)"', page):
                if mobj.group(1) not in ids_in_page:
                    ids_in_page.append(unescapeHTML(mobj.group(1)))
            video_ids.extend(ids_in_page)
            # A little optimization - if current page is not
            # "full", ie. does not contain PAGE_SIZE video ids then
            # we can assume that this page is the last one - there
            # are no more ids on further pages - no need to query
            # again.
            if len(ids_in_page) < self._PAGE_SIZE:
                break
            pagenum += 1
        urls = ['http://blip.tv/%s' % video_id for video_id in video_ids]
        url_entries = [self.url_result(vurl, 'BlipTV') for vurl in urls]
        return self.playlist_result(
            url_entries, playlist_title=title, playlist_id=username)
|
Nikoli/youtube-dl
|
youtube_dl/extractor/bliptv.py
|
Python
|
unlicense
| 10,506
|
from unittest import TestCase
import numpy as np
from numpy.linalg import inv
from tcontrol.statespace import *
from tcontrol.transferfunction import *
from ..model_conversion import *
from ..exception import WrongNumberOfArguments
from ..discretization import c2d
from .tools.test_utility import assert_ss_equal
class TestStateSpace(TestCase):
    """Unit tests for the StateSpace model, its operators and conversions."""
    def setUp(self):
        # Reference system: equivalent to the transfer function
        # 4 / (s^2 + 0.5 s + 4), built both as tf and as StateSpace.
        self.A = np.array([[0, 1], [-4, -0.5]])
        self.B = np.array([[0.], [1.]])
        self.C = np.array([[4., 0.]])
        self.D = np.array([0.])
        self.tf_ = tf([4], [1, 0.5, 4])
        self.ss_ = StateSpace(self.A, self.B, self.C, self.D)
    def test___init__(self):
        # Scalar D should be accepted and broadcast; mismatched matrix
        # dimensions must raise ValueError.
        ss_ = StateSpace(self.A, self.B, self.C, self.D)
        if isinstance(ss_.A, np.matrix):
            self.assertTrue(ss_.A is self.A)
        self.assertEqual(StateSpace(self.A, self.B, self.C, 0),
                         StateSpace(self.A, self.B, self.C, self.D))
        self.assertRaises(ValueError, StateSpace, self.A, self.C, self.B, 0)
        self.assertRaises(ValueError, StateSpace, self.A, self.C, 0, self.B)
    def test___str__(self):
        pass
    def test___add__(self):
        # Adding two systems should produce the block-diagonal parallel form.
        ss_1 = self.ss_ + self.ss_
        A = [[0, 1, 0, 0], [-4, -.5, 0, 0], [0, 0, 0, 1], [0, 0, -4, -.5]]
        B = [[0], [1], [0], [1]]
        C = [4, 0, 4, 0]
        ss_2 = StateSpace(A, B, C, 0)
        self.assertEqual(ss_1, ss_2)
    def test___mul__(self):
        # Smoke test: multiplying a system by a scalar must not raise.
        print(ss([[2, 3], [1, 0]], [[0], [1]], [[1, 0]], 0) * 1)
    def test_parallel(self):
        # Smoke test of the variadic parallel connection.
        print(self.ss_.parallel(self.ss_, self.ss_))
    def test_feedback(self):
        # Random 3-state, 2-in/2-out plant and controller; expected B and D of
        # the closed loop were computed externally (fixture matrices).
        A = [[0.814723686393179, 0.913375856139019, 0.278498218867048],
             [0.905791937075619, 0.632359246225410, 0.546881519204984],
             [0.126986816293506, 0.0975404049994095, 0.957506835434298]]
        B = [[0.964888535199277, 0.957166948242946],
             [0.157613081677548, 0.485375648722841],
             [0.970592781760616, 0.800280468888800]]
        C = [[0.141886338627215, 0.915735525189067, 0.959492426392903],
             [0.421761282626275, 0.792207329559554, 0.655740699156587]]
        D = [[0.0357116785741896, 0.933993247757551],
             [0.849129305868777, 0.678735154857774]]
        s1 = ss(A, B, C, D)
        A_ = [[0.757740130578333, 0.655477890177557, 0.0318328463774207],
              [0.743132468124916, 0.171186687811562, 0.276922984960890],
              [0.392227019534168, 0.706046088019609, 0.0461713906311539]]
        B_ = [[0.0971317812358475, 0.317099480060861],
              [0.823457828327293, 0.950222048838355],
              [0.694828622975817, 0.0344460805029088]]
        C_ = [[0.438744359656398, 0.765516788149002, 0.186872604554379],
              [0.381558457093008, 0.795199901137063, 0.489764395788231]]
        D_ = [[0.445586200710900, 0.709364830858073],
              [0.646313010111265, 0.754686681982361]]
        s2 = ss(A_, B_, C_, D_)
        sys_ = s1.feedback(s2)
        ans_b = np.array([[0.497199047022938, 0.241439722567989],
                          [0.00397672393087681, 0.227709022266958],
                          [0.538296661406946, 0.149857420314700],
                          [0.132223192796285, 0.0885007994366726],
                          [0.293713307787898, 0.543698319397011],
                          [-0.117388364568095, 0.367561733989439]])
        ans_d = np.array([[-0.192541214208884, 0.523103714748680],
                          [0.475954939644619, 0.118861134193000]])
        # Element-wise tolerance check against the reference matrices.
        self.assertTrue(np.all(np.less_equal(np.abs(sys_.B - ans_b), 1e-6)))
        self.assertTrue(np.all(np.less_equal(np.abs(sys_.D - ans_d), 1e-6)))
    def test_gc_gain(self):
        # NOTE(review): method name looks like a typo for `test_dc_gain`
        # (it exercises the `dc_gain` property) — confirm before renaming.
        # An integrator has infinite DC gain.
        self.assertEqual(ss([[0, 1], [0, -1]], [[0], [1]], [2, 1], 0).dc_gain, float('inf'))
    def test_pole(self):
        # Poles must be preserved by the ss -> tf conversion.
        self.assertTrue(np.array_equal(self.ss_.pole(), ss2tf(self.ss_).pole()))
    def test_controllability(self):
        pass
    def test_is_controllable(self):
        self.assertTrue(self.ss_.is_controllable)
    def test_observability(self):
        # Observability matrix of the reference system is diag(4, 4).
        self.assertTrue(np.array_equal(self.ss_.obsv_mat(), [[4, 0], [0, 4]]))
    def test_is_observable(self):
        self.assertTrue(self.ss_.is_observable)
    def test_dual_system(self):
        # The dual system transposes A and swaps B and C.
        _ = StateSpace.dual_system(self.ss_)
        self.assertTrue(np.all(np.equal(_.A.T, self.ss_.A)))
        self.assertTrue(np.all(np.equal(_.C.T, self.ss_.B)))
        self.assertTrue(np.all(np.equal(_.B.T, self.ss_.C)))
    def test_ss(self):
        # ss() accepts (A, B, C, D) or a transfer function; wrong arity raises.
        self.assertEqual(ss(self.A, self.B, self.C, self.D), self.ss_)
        self.assertRaises(WrongNumberOfArguments, ss, self.A, self.B, self.C, self.D,
                          self.B)
        self.assertEqual(ss(self.tf_), self.ss_)
    def test_tf2ss(self):
        # test continuous time
        ss_ = tf2ss(self.tf_)
        assert_ss_equal(ss_, self.ss_)
        self.assertRaises(TypeError, tf2ss, ss_)
        # test discrete time
        sys_ = tf([1], [1, 1])
        d_sys_ = c2d(sys_, 1, 'Tustin')
        d_ss_ = tf2ss(d_sys_)
        # Expected discretized matrices for 1/(s+1) under Tustin at T=1.
        error = np.abs(d_ss_.A - 1 / 3)
        self.assertTrue(np.all(np.less_equal(error, 1e-6)))
        error = np.abs(d_ss_.B - 1)
        self.assertTrue(np.all(np.less_equal(error, 1e-6)))
        error = np.abs(d_ss_.C - 4 / 9)
        self.assertTrue(np.all(np.less_equal(error, 1e-6)))
        error = np.abs(d_ss_.D - 1 / 3)
        self.assertTrue(np.all(np.less_equal(error, 1e-6)))
    def test_place(self):
        # Pole placement should return the expected feedback gains.
        sys_ = ss([[0, 0, 0], [1, -6, 0], [0, 1, -12]], [[1], [0], [0]], [1, 0, 2], [0])
        self.assertTrue(np.allclose(sys_.place([-2, -1 + 1j, -1 - 1j]), [-14, 186, -1220]))
|
DaivdZhang/tinyControl
|
tcontrol/tests/test_statespace.py
|
Python
|
bsd-3-clause
| 5,656
|
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2005, 2006, 2007, 2008, 2010, 2011, 2013 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
invenio.ext.email.errors
------------------------
Contains standard error messages for email extension.
"""
class EmailError(Exception):
    """A generic email error carrying a human-readable message."""

    def __init__(self, message):
        """Store the error *message* for later display."""
        self.message = message

    def __str__(self):
        """Render the stored message via its repr (quotes included)."""
        return repr(self.message)
|
crepererum/invenio
|
invenio/ext/email/errors.py
|
Python
|
gpl-2.0
| 1,187
|
from .deviceaction import *
from .uiobjectaction import *
from .adbfunction import *
from .device import *
from .devicemanage import *
from .frameworkkey import *
from .flow import Flow
from .configparser import *
from .testtemplate import *
from .otherdatastruct import *
from .cmdlineparser import *
from .xmlfile import *
from .logdatatojson import *
from .create_xlsx import *
# NOTE(review): the two triple-quoted blocks below are inert module-level
# string literals (evaluated and discarded), effectively commented-out alias
# tables kept for reference — confirm whether they can be deleted.
'''
################################################
# Flow Engine Framework
################################################
Flow = flow.Flow
################################################
# Flow Engine Framework Key
################################################
NOT = frameworkkey.NOT
FOR = frameworkkey.FOR
SWITCH = frameworkkey.SWITCH
################################################
# Device Management
################################################
#SwitchSDevice = devicemanage.SwitchSDevice
#SwitchMDevice = devicemanage.SwitchMDevice
################################################
# Device Key
################################################
Home = deviceaction.Home
Back = deviceaction.Back
Point = deviceaction.Point
Enter = deviceaction.Enter
Recent = deviceaction.Recent
Power = deviceaction.Power
Menu = deviceaction.Menu
Search = deviceaction.Search
'''
################################################
# ADB Function
################################################
#OpenAPP = adbfunction.OpenAPP
#IsInCall = adbfunction.IsInCall
#InCallStay = adbfunction.InCallStay
#IsRinging = adbfunction.IsRinging
# Singleton-style instances: the class names are deliberately rebound to
# instances so callers can use e.g. `yaya.CallAnswer` directly.
# NOTE(review): after this the CallAnswer/EndCall *classes* are no longer
# reachable from this module — confirm that is intended.
CallAnswer = CallAnswer()
EndCall = EndCall()
'''
################################################
# UIObject Action
################################################
#IsGone = uiobjectaction.Gone
#IsExists = uiobjectaction.Exists
Click = uiobjectaction.Click
LongClick = uiobjectaction.LongClick
Input = uiobjectaction.Input
Vert = uiobjectaction.Vert
Swipe = uiobjectaction.Swipe
Drag = uiobjectaction.Drag
Gesture = uiobjectaction.Gesture
################################################
# Test Template
################################################
TestTemplate = testtemplate.TestTemplate
Ready = testtemplate.Ready
################################################
# Config Parser
################################################
Get = configparser.get
Stci = configparser.stci
'''
|
libyoung/yaya-engine
|
yaya/__init__.py
|
Python
|
mit
| 2,298
|
#!/usr/bin/python
# Author: Zion Orent <zorent@ics.com>
# Copyright (c) 2015 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_grovewfs as upmGrovewfs
def main():
    """Continuously read and print flow data from a Grove Water Flow Sensor.

    Requires the upm pyupm_grovewfs bindings and the sensor wired to
    digital pin D2; runs until interrupted with Ctrl-C.
    """
    # Instantiate a Grove Water Flow Sensor on digital pin D2
    myWaterFlow = upmGrovewfs.GroveWFS(2)

    ## Exit handlers ##
    # This stops python from printing a stacktrace when you hit control-C
    def SIGINTHandler(signum, frame):
        raise SystemExit

    # This function lets you run code on exit,
    # including functions from myWaterFlow
    def exitHandler():
        myWaterFlow.stopFlowCounter()
        print("Exiting")
        sys.exit(0)

    # Register exit handlers
    atexit.register(exitHandler)
    signal.signal(signal.SIGINT, SIGINTHandler)

    # set the flow counter to 0 and start counting
    myWaterFlow.clearFlowCounter()
    myWaterFlow.startFlowCounter()

    while (1):
        # we grab these (millis and flowCount) just for display
        # purposes in this example
        millis = myWaterFlow.getMillis()
        flowCount = myWaterFlow.flowCounter()
        fr = myWaterFlow.flowRate()

        # output milliseconds passed, flow count, and computed flow rate
        outputStr = "Millis: {0} Flow Count: {1} Flow Rate: {2} LPM".format(
        millis, flowCount, fr)
        print(outputStr)

        # Poll every two seconds.
        time.sleep(2)

if __name__ == '__main__':
    main()
|
sasmita/upm
|
examples/python/grovewfs.py
|
Python
|
mit
| 2,489
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Canonical
#
# Authors:
# Didier Roche
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; version 3.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Framework with another category module without any framework"""
import umake.frameworks
class ACategory(umake.frameworks.BaseCategory):
    # Test fixture: a second category carrying two frameworks, used to
    # exercise duplicated-framework detection.
    def __init__(self):
        super().__init__(name="Category A", description="Other category A description")
class FrameworkC(umake.frameworks.BaseFramework):
    # Minimal concrete framework: setup/remove simply delegate to the base.
    def __init__(self, **kwargs):
        super().__init__(name="Framework C", description="Description for framework C",
                         **kwargs)

    def setup(self, install_path=None, auto_accept_license=False):
        super().setup()

    def remove(self):
        super().remove()
class FrameworkD(umake.frameworks.BaseFramework):
    """Test framework D: setup() and remove() only chain to the base class."""
    def __init__(self, **kwargs):
        super().__init__(name="Framework D", description="Description for framework D",
                         **kwargs)
    def setup(self, install_path=None, auto_accept_license=False):
        super().setup()
    def remove(self):
        super().remove()
|
ubuntu/ubuntu-make
|
tests/data/duplicatedframeworks/samecategory.py
|
Python
|
gpl-3.0
| 1,683
|
# encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski (kyle@lahnakoski.com)
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import gzip
import zipfile
from tempfile import TemporaryFile
import boto
from boto.s3.connection import Location
from bs4 import BeautifulSoup
from mo_dots import wrap, Null, coalesce, unwrap, Data
from mo_future import text_type, StringIO
from mo_kwargs import override
from mo_logs import Log, Except
from mo_logs.strings import utf82unicode, unicode2utf8
from mo_logs.url import value2url_param
from mo_times.dates import Date
from mo_times.timer import Timer
from pyLibrary import convert
from pyLibrary.env import http
from pyLibrary.env.big_data import safe_size, MAX_STRING_SIZE, LazyLines, ibytes2ilines, scompressed2ibytes
# Effective "no limit" cap when listing keys (see Bucket.metas).
TOO_MANY_KEYS = 1000 * 1000 * 1000
READ_ERROR = "S3 read error"
# 100 MiB; not referenced in this chunk — presumably a size guard elsewhere (TODO confirm)
MAX_FILE_SIZE = 100 * 1024 * 1024
# Keys are digit runs separated by '.' or ':' (e.g. "12.34:56")
VALID_KEY = r"\d+([.:]\d+)*"
KEY_IS_WRONG_FORMAT = "key {{key}} in bucket {{bucket}} is of the wrong format"
class File(object):
    """
    Convenience handle pairing a Bucket with a single key.
    Every operation simply delegates to the owning bucket.
    """
    def __init__(self, bucket, key):
        self.bucket = bucket  # owning Bucket instance
        self.key = key  # undecorated key (no .json/.gz extension)
    def read(self):
        # whole contents as text (bucket handles unzip/decoding)
        return self.bucket.read(self.key)
    def read_lines(self):
        return self.bucket.read_lines(self.key)
    def write(self, value):
        self.bucket.write(self.key, value)
    def write_lines(self, lines):
        self.bucket.write_lines(self.key, lines)
    @property
    def meta(self):
        # NOTE(review): Bucket defines no meta(); this resolves through
        # Bucket.__getattr__ to the underlying boto bucket — confirm intent
        return self.bucket.meta(self.key)
    def delete(self):
        return self.bucket.delete_key(self.key)
class Connection(object):
    """
    Thin context-manager wrapper around a boto S3 connection.
    Connects region-less by default; uses connect_to_region when a
    region is configured.
    """
    @override
    def __init__(
        self,
        aws_access_key_id=None,  # CREDENTIAL
        aws_secret_access_key=None,  # CREDENTIAL
        region=None,  # NAME OF AWS REGION, REQUIRED FOR SOME BUCKETS
        kwargs=None
    ):
        self.settings = kwargs
        try:
            if not kwargs.region:
                self.connection = boto.connect_s3(
                    aws_access_key_id=unwrap(self.settings.aws_access_key_id),
                    aws_secret_access_key=unwrap(self.settings.aws_secret_access_key)
                )
            else:
                self.connection = boto.s3.connect_to_region(
                    self.settings.region,
                    aws_access_key_id=unwrap(self.settings.aws_access_key_id),
                    aws_secret_access_key=unwrap(self.settings.aws_secret_access_key)
                )
        except Exception as e:
            # Log.error raises; construction fails loudly on bad credentials
            Log.error("Problem connecting to S3", e)
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.connection:
            self.connection.close()
    def get_bucket(self, name):
        """Return a SkeletonBucket wrapping the named (unvalidated) boto bucket."""
        output = SkeletonBucket()
        output.bucket = self.connection.get_bucket(name, validate=False)
        return output
class Bucket(object):
    """
    STORE JSON, OR CR-DELIMITED LIST OF JSON, IN S3
    THIS CLASS MANAGES THE ".json" EXTENSION, AND ".gz"
    (ZIP/UNZIP) SHOULD THE FILE BE BIG ENOUGH TO
    JUSTIFY IT
    ALL KEYS ARE DIGITS, SEPARATED BY DOT (.) COLON (:)
    """
    @override
    def __init__(
        self,
        bucket,  # NAME OF THE BUCKET
        aws_access_key_id=None,  # CREDENTIAL
        aws_secret_access_key=None,  # CREDENTIAL
        region=None,  # NAME OF AWS REGION, REQUIRED FOR SOME BUCKETS
        public=False,
        debug=False,
        kwargs=None
    ):
        self.settings = kwargs
        self.connection = None
        self.bucket = None
        # separator skeleton ('.'/':' characters only) every key must match;
        # None disables verification (see _verify_key_format)
        self.key_format = _scrub_key(kwargs.key_format)
        try:
            self.connection = Connection(kwargs).connection
            self.bucket = self.connection.get_bucket(self.settings.bucket, validate=False)
        except Exception as e:
            Log.error("Problem connecting to {{bucket}}", bucket=self.settings.bucket, cause=e)
    def __enter__(self):
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        if self.connection:
            self.connection.close()
    def __getattr__(self, item):
        # anything not defined here is delegated to the underlying boto bucket
        return getattr(self.bucket, item)
    def get_key(self, key, must_exist=True):
        """Return a File handle for key; optionally verify the key exists first."""
        if must_exist:
            meta = self.get_meta(key)
            if not meta:
                Log.error("Key {{key}} does not exist in bucket {{bucket}}", key=key, bucket=self.bucket.name)
            key = strip_extension(meta.key)
        return File(self, key)
    def delete_key(self, key):
        # self._verify_key_format(key) DO NOT VERIFY, DELETE BAD KEYS ANYWAY!!
        try:
            # resolve the undecorated key to the full (extensioned) S3 key first
            full_key = self.get_meta(key, conforming=False)
            if full_key == None:
                return
            self.bucket.delete_key(full_key)
        except Exception as e:
            self.get_meta(key, conforming=False)
            raise e
    def delete_keys(self, keys):
        self.bucket.delete_keys(keys)
    def get_meta(self, key, conforming=True):
        """
        RETURN METADATA ON FILE IN BUCKET
        :param key: KEY, OR PREFIX OF KEY
        :param conforming: TEST IF THE KEY CONFORMS TO REQUIRED PATTERN
        :return: METADATA, IF UNIQUE, ELSE ERROR
        """
        try:
            metas = list(self.bucket.list(prefix=key))
            metas = wrap([m for m in metas if m.name.find(".json") != -1])
            perfect = Null
            favorite = Null
            too_many = False
            error = None
            for m in metas:
                try:
                    simple = strip_extension(m.key)
                    if conforming:
                        self._verify_key_format(simple)
                    if simple == key:
                        # exact match wins and clears any earlier ambiguity
                        perfect = m
                        too_many = False
                    if simple.startswith(key + ".") or simple.startswith(key + ":"):
                        # multiple prefix matches without an exact match is ambiguous
                        if favorite and not perfect:
                            too_many = True
                        favorite = m
                except Exception as e:
                    error = e
            if too_many:
                Log.error(
                    "multiple keys in {{bucket}} with prefix={{prefix|quote}}: {{list}}",
                    bucket=self.name,
                    prefix=key,
                    list=[k.name for k in metas]
                )
            if not perfect and error:
                Log.error("Problem with key request", error)
            return coalesce(perfect, favorite)
        except Exception as e:
            Log.error(READ_ERROR+" can not read {{key}} from {{bucket}}", key=key, bucket=self.bucket.name, cause=e)
    def keys(self, prefix=None, delimiter=None):
        """
        :param prefix: NOT A STRING PREFIX, RATHER PATH ID PREFIX (MUST MATCH TO NEXT "." OR ":")
        :param delimiter: TO GET Prefix OBJECTS, RATHER THAN WHOLE KEYS
        :return: SET OF KEYS IN BUCKET, OR
        """
        if delimiter:
            # WE REALLY DO NOT GET KEYS, BUT RATHER Prefix OBJECTS
            # AT LEAST THEY ARE UNIQUE
            candidates = [k.name.rstrip(delimiter) for k in self.bucket.list(prefix=prefix, delimiter=delimiter)]
        else:
            candidates = [strip_extension(k.key) for k in self.bucket.list(prefix=prefix)]
        if prefix == None:
            return set(c for c in candidates if c != "0.json")
        else:
            # keep only whole-path-segment matches (prefix itself, or prefix followed by a separator)
            return set(k for k in candidates if k == prefix or k.startswith(prefix + ".") or k.startswith(prefix + ":"))
    def metas(self, prefix=None, limit=None, delimiter=None):
        """
        RETURN THE METADATA DESCRIPTORS FOR EACH KEY
        """
        limit = coalesce(limit, TOO_MANY_KEYS)
        keys = self.bucket.list(prefix=prefix, delimiter=delimiter)
        prefix_len = len(prefix)
        output = []
        # only keys that end at, or continue past, the prefix on a separator
        for i, k in enumerate(k for k in keys if len(k.key) == prefix_len or k.key[prefix_len] in [".", ":"]):
            output.append({
                "key": strip_extension(k.key),
                "etag": convert.quote2string(k.etag),
                "expiry_date": Date(k.expiry_date),
                "last_modified": Date(k.last_modified)
            })
            if i >= limit:
                break
        return wrap(output)
    def read(self, key):
        """Return the full (decompressed, utf8-decoded) contents stored at key."""
        source = self.get_meta(key)
        try:
            json = safe_size(source)
        except Exception as e:
            Log.error(READ_ERROR, e)
        if json == None:
            return None
        if source.key.endswith(".zip"):
            json = _unzip(json)
        elif source.key.endswith(".gz"):
            json = convert.zip2bytes(json)
        return utf82unicode(json)
    def read_bytes(self, key):
        source = self.get_meta(key)
        return safe_size(source)
    def read_lines(self, key):
        """Return the contents at key as lines; lazy for large/compressed files."""
        source = self.get_meta(key)
        if source is None:
            Log.error("{{key}} does not exist", key=key)
        if source.size < MAX_STRING_SIZE:
            if source.key.endswith(".gz"):
                return LazyLines(ibytes2ilines(scompressed2ibytes(source)))
            else:
                return utf82unicode(source.read()).split("\n")
        if source.key.endswith(".gz"):
            return LazyLines(ibytes2ilines(scompressed2ibytes(source)))
        else:
            return LazyLines(source)
    def write(self, key, value, disable_zip=False):
        """
        Store value at key, adding the .json (and .json.gz when zipped)
        extension; deletes the sibling extension so only one copy remains.
        """
        if key.endswith(".json") or key.endswith(".zip"):
            Log.error("Expecting a pure key")
        try:
            if hasattr(value, "read"):
                # file-like payload
                # NOTE(review): len(value) on a file-like object only works when
                # the object also implements __len__ — confirm callers' types
                if disable_zip:
                    storage = self.bucket.new_key(key + ".json")
                    string_length = len(value)
                else:
                    storage = self.bucket.new_key(key + ".json.gz")
                    string_length = len(value)
                    value = convert.bytes2zip(value)
                file_length = len(value)
                Log.note("Sending contents with length {{file_length|comma}} (from string with length {{string_length|comma}})", file_length= file_length, string_length=string_length)
                value.seek(0)
                storage.set_contents_from_file(value)
                if self.settings.public:
                    storage.set_acl('public-read')
                return
            if len(value) > 20 * 1000 and not disable_zip:
                # large enough to justify gzip; remove the uncompressed sibling
                self.bucket.delete_key(key + ".json")
                self.bucket.delete_key(key + ".json.gz")
                if isinstance(value, str):
                    value = convert.bytes2zip(value)
                    key += ".json.gz"
                else:
                    value = convert.bytes2zip(unicode2utf8(value))
                    key += ".json.gz"
            else:
                self.bucket.delete_key(key + ".json.gz")
                # NOTE(review): both branches are identical here — likely a
                # leftover from a py2 str/unicode distinction; confirm
                if isinstance(value, str):
                    key += ".json"
                else:
                    key += ".json"
            storage = self.bucket.new_key(key)
            storage.set_contents_from_string(value)
            if self.settings.public:
                storage.set_acl('public-read')
        except Exception as e:
            Log.error(
                "Problem writing {{bytes}} bytes to {{key}} in {{bucket}}",
                key=key,
                bucket=self.bucket.name,
                bytes=len(value),
                cause=e
            )
    def write_lines(self, key, lines):
        """Gzip the given lines into key + '.json.gz'; retries the upload up to 3 times."""
        self._verify_key_format(key)
        storage = self.bucket.new_key(key + ".json.gz")
        buff = TemporaryFile()
        archive = gzip.GzipFile(fileobj=buff, mode='w')
        count = 0
        for l in lines:
            if hasattr(l, "__iter__"):
                # nested iterable of lines
                for ll in l:
                    archive.write(ll.encode("utf8"))
                    archive.write(b"\n")
                    count += 1
            else:
                archive.write(l.encode("utf8"))
                archive.write(b"\n")
                count += 1
        archive.close()
        file_length = buff.tell()
        retry = 3
        while retry:
            try:
                with Timer("Sending {{count}} lines in {{file_length|comma}} bytes", {"file_length": file_length, "count": count}, silent=not self.settings.debug):
                    buff.seek(0)
                    storage.set_contents_from_file(buff)
                break
            except Exception as e:
                e = Except.wrap(e)
                retry -= 1
                # permission and disk errors will not get better; fail fast
                if retry == 0 or 'Access Denied' in e or "No space left on device" in e:
                    Log.error("could not push data to s3", cause=e)
                else:
                    Log.warning("could not push data to s3", cause=e)
        if self.settings.public:
            storage.set_acl('public-read')
        return
    @property
    def name(self):
        # configured bucket name
        return self.settings.bucket
    def _verify_key_format(self, key):
        """Raise (via Log.error) when key's '.'/':' skeleton differs from the configured format."""
        if self.key_format == None:
            return
        if self.key_format != _scrub_key(key):
            Log.error(
                KEY_IS_WRONG_FORMAT,
                key=key,
                bucket=self.bucket.name
            )
class SkeletonBucket(Bucket):
    """
    LET CALLER WORRY ABOUT SETTING PROPERTIES
    """
    def __init__(self):
        # Deliberately bypasses Bucket.__init__ (no settings, no connection);
        # Connection.get_bucket() fills in .bucket afterwards.
        object.__init__(self)
        self.connection = None
        self.bucket = None
        self.key_format = None
# Bucket-listing XML field name -> converter applied to its text value
# (consumed by PublicBucket.list when parsing <Contents> elements).
content_keys={
    "key": text_type,
    "lastmodified": Date,
    "etag": text_type,
    "size": int,
    "storageclass": text_type
}
class PublicBucket(object):
    """
    USE THE https PUBLIC API TO INTERACT WITH A BUCKET
    MAYBE boto CAN DO THIS, BUT NO DOCS FOUND
    """
    @override
    def __init__(self, url, kwargs=None):
        self.url = url
    def list(self, prefix=None, marker=None, delimiter=None):
        """Generate wrapped metadata dicts for every key, paging via the marker."""
        # https://s3.amazonaws.com/net-mozaws-stage-fx-test-activedata?marker=jenkins-go-bouncer.prod-3019/py27.log
        # <ListBucketResult>
        #     <Name>net-mozaws-stage-fx-test-activedata</Name>
        #     <Prefix/>
        #     <Marker>jenkins-go-bouncer.prod-3019/py27.log</Marker>
        #     <MaxKeys>1000</MaxKeys>
        #     <IsTruncated>true</IsTruncated>
        #     <Contents>
        #         <Key>jenkins-go-bouncer.prod-3020/py27.log</Key>
        #         <LastModified>2017-03-05T07:02:20.000Z</LastModified>
        #         <ETag>"69dcb19e91eb3eec51e1b659801523d6"</ETag>
        #         <Size>10037</Size>
        #         <StorageClass>STANDARD</StorageClass>
        state = Data()
        state.prefix =prefix
        state.delimiter = delimiter
        state.marker = marker
        state.get_more = True
        def more():
            # fetch the next page; advance marker to the last key returned
            xml = http.get(self.url + "?" + value2url_param(state)).content
            data = BeautifulSoup(xml, 'xml')
            state.get_more = data.find("istruncated").contents[0] == "true"
            contents = data.findAll("contents")
            state.marker = contents[-1].find("key").contents[0]
            return [{k: t(d.find(k).contents[0]) for k, t in content_keys.items()} for d in contents]
        while state.get_more:
            content = more()
            for c in content:
                yield wrap(c)
    def read_lines(self, key):
        url = self.url + "/" + key
        return http.get(url).all_lines
def strip_extension(key):
    """Return *key* truncated at the first ".json" occurrence; unchanged when absent."""
    head, _, _ = key.partition(".json")
    return head
def _unzip(compressed):
    """Return the decompressed contents of the first member of an in-memory zip archive."""
    archive = zipfile.ZipFile(StringIO(compressed), mode='r')
    return archive.read(archive.namelist()[0])
def _scrub_key(key):
"""
RETURN JUST THE :. CHARACTERS
"""
if key == None:
return None
output = []
for c in key:
if c in [":", "."]:
output.append(c)
return "".join(output)
def key_prefix(key):
    """Return the leading integer segment of a dot/colon-separated key."""
    leading = key.split(":", 1)[0]
    return int(leading.split(".", 1)[0])
|
klahnakoski/Bugzilla-ETL
|
vendor/pyLibrary/aws/s3.py
|
Python
|
mpl-2.0
| 16,082
|
#coding: utf-8
__author__ = 'zheng'
import re
import tornado.gen
from tornado.httpclient import AsyncHTTPClient
from BeautifulSoup import BeautifulSoup
from chat.handler import BaseHandler
class TumblrHandler(BaseHandler):
    """Fetch tumblr.com asynchronously and write back the src URL of its first <img> tag."""
    @tornado.gen.coroutine
    def get(self):
        http_client = AsyncHTTPClient()
        http_response = yield http_client.fetch("https://www.tumblr.com/")
        content = http_response.body
        soup = BeautifulSoup(content)
        # NOTE(review): raises IndexError when the page has no <img> — confirm acceptable
        img = soup.findAll('img')[0]
        # extract the src attribute from the serialized tag text
        pattern=re.compile(r"""<img\s.*?\s?src\s*=\s*['|"]?([^\s'"]+).*?>""",re.I)
        m = pattern.findall(str(img))
        self.write(m[0])
|
yunlzheng/chat
|
chat/handler/tumblr.py
|
Python
|
mit
| 664
|
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
import re
import sys
import traceback
from datetime import datetime
import test_data as td
import common
import text
import router
"""
Sequence output copies the tailing end of transfers to a second display line.
Define how many characters to show.
"""
SEQUENCE_TRANSFER_SIZE = 50
def colorize_bg(what):
    """Identity placeholder until the real background colorizer is wired in."""
    # TODO: use the real colorize_bg
    return what
def proton_split(line):
    """
    Split a log line into fields.
    * allow commas and spaces in quoted strings.
    * split on ', ' and on ' '.
      strip trailing commas between fields.
    * quoted fields must have both quotes
    :param line: proton frame trace text
    :return: list of field strings
    :raises ValueError: when the line holds an unmatched double quote
    """
    result = []
    indqs = False          # currently inside a double-quoted run
    pending_comma = False  # a comma was seen; decide later if it was a separator
    res = ""
    for c in line:
        if c == '\"':
            if pending_comma:
                res += ','
                pending_comma = False
            indqs = not indqs
            res += c
        elif c == ',':
            # a second consecutive comma means the earlier one was literal text
            if pending_comma:
                res += c
            pending_comma = True
        elif c == ' ':
            if indqs:
                # spaces (and any held comma) inside quotes are literal
                if pending_comma:
                    res += ','
                    pending_comma = False
                res += c
            else:
                if res != '':
                    # ', ' or ' ' ends the field; a trailing comma is dropped
                    if pending_comma:
                        pending_comma = False
                    result.append(res)
                    res = ''
        else:
            res += c
    if res != '':
        result.append(str(res))
    if indqs:
        # BUGFIX: the '%' interpolation used to be inside the string literal
        # (ValueError("...: %s", line)), so the line was never formatted in
        raise ValueError("SPLIT ODD QUOTES: %s" % line)
    # print ("SPLIT: line: %s" % line)
    # print ("SPLIT: flds: %s" % result)
    return result
class LogLineData:
    """Mutable record holding every fact extracted from one parsed log line."""
    def direction_is_in(self):
        """True when this frame was received (matches text.direction_in())."""
        return self.direction == text.direction_in()
    def direction_is_out(self):
        """True when this frame was sent (matches text.direction_out())."""
        return self.direction == text.direction_out()
    def __init__(self):
        self.web_show_str = ""
        self.sdorg_str = ""  # text for sequence diagram source
        self.name = ""
        self.conn_num = ""  # source router's undecorated conn num
        self.conn_id = ""  # decorated routerPrefixLetter'instanceNumber-conn_num
        self.conn_peer = ""  # display name of peer in seen in Open 'A - routerId.Test'
        self.channel = ""  # undecorated number - '0'
        self.direction = ""  # '<-' IN, or '->' OUT, or '--'
        self.described_type = DescribedType()  # DescribedType object
        self.handle = ""  # undecorated number - '1'
        self.delivery_id = ""  # "0"
        self.delivery_tag = ""  # "00:00:00:00"
        self.remote = ""  # undecorated number - '2'
        self.channel_handle = ""  # decorated - '[0,0]'
        self.channel_remote = ""  # decorated - '[1,2]'
        self.flow_deliverycnt = ""  # undecorated number - '50'
        self.flow_linkcredit = ""  # undecorated number - '100'
        self.flow_cnt_credit = ""  # decorated - '(50,100)'
        self.flow_drain = False
        self.transfer_id = ""
        self.role = ""
        self.is_receiver = False
        self.source = ""
        self.target = ""
        self.first = ""  # undecorated number - '10'
        self.last = ""  # undecorated number - '20'
        self.settled = ""  # Disposition or Transfer settled field from log line
        self.disposition_state = "?absent?"
        self.snd_settle_mode = ""  # Attach
        self.rcv_settle_mode = ""  # Attach
        self.transfer = False
        self.transfer_data = ""  # protonized transfer data value
        self.transfer_bare = ""  # bare message from transfer_data
        self.transfer_hdr_annos = ""  # header and annotation sections
        self.transfer_size = ""  # size declared by number in parenthesis
        self.transfer_short_name = ""
        self.transfer_settled = False
        self.transfer_presettled = False
        self.transfer_more = False
        self.transfer_resume = False
        self.transfer_aborted = False
        self.transfer_exhausted_credit = False
        self.link_short_name = ""
        self.link_short_name_popup = ""
        self.is_policy_trace = False  # line is POLICY (trace)
        self.is_server_info = False  # line is SERVER (info)
        self.is_router_ls = False  # line is ROUTER_LS (info)
        self.is_scraper = False  # line is SCRAPER (any-level)
        self.fid = ""  # Log line (frame) id as used in javascript code
        self.amqp_error = False
        self.link_class = "client"  # attach sees: normal, router, router-data (, management?)
        self.disposition_display = ""
        self.final_disposition = None
        self.no_parent_link = False
class DescribedType:
    """
    Given a line like:
        @typename(00) [key1=val1, ...]
    Extract the typename and create a map of the key-val pairs
    May recursively find embedded described types
    """
    @staticmethod
    def is_dtype_name(name):
        """
        Return true if the name is a pn_trace described type name
        :param name:
        :return:
        """
        return (name.startswith('@') and
                '(' in name and
                name.endswith(')'))
    @staticmethod
    def get_key_and_val(kvp):
        """Split 'key=value' at the first '=' and return (key, value)."""
        eqi = kvp.find('=')
        return kvp[:eqi], kvp[eqi + 1:]
    @staticmethod
    def dtype_name(name):
        """Return the type name out of '@name(number)'; raise on malformed input."""
        if not DescribedType.is_dtype_name(name):
            raise ValueError("Name '%s' is not a described type name" % name)
        return name[1: name.find('(')]
    @staticmethod
    def dtype_number(name):
        """Return the numeric code out of '@name(number)'; raise on malformed input."""
        if not DescribedType.is_dtype_name(name):
            raise ValueError("Name '%s' is not a described type name" % name)
        return int(name[name.find('(') + 1: -1])
    def __init__(self):
        self.dict = {}
        self.dtype_name = "unparsed"
        self.dtype_number = 0
    def __repr__(self):
        return self._representation()
    def _representation(self):
        return "DescribedType %s( %d ) : %s" % (self.dtype_name, self.dtype_number, self.dict)
    def add_field_to_dict(self, f_text, expected_key=None):
        """Parse one 'key=value' field (optionally checking the key) into self.dict."""
        if '=' not in f_text:
            raise ValueError("Field does not contain equal sign '%s'" % self.line)
        if expected_key is not None and not f_text.startswith(expected_key):
            raise ValueError("Transfer field %s not in order from line: %s" % (expected_key, self.line))
        key, val = DescribedType.get_key_and_val(f_text)
        if val.endswith(','):
            val = val[:-1]
        self.dict[key] = val
    def process_transfer_tail_key(self):
        """Peel one known trailing key=value off the end of self.line; True when one was consumed."""
        keys = ["batchable", "aborted", "resume", "state", "rcv-settle-mode", "more", "settled", "message-format"]
        for key in keys:
            idx = self.line.rfind(key)
            if idx != -1:
                field = self.line[idx:]
                self.add_field_to_dict(field, key)
                self.line = self.line[:idx].strip()
                return True
        return False
    def parseTransfer(self):
        """
        Figure out the described type fields for the transfer.
        Transfers are handled specially with the ill-formatted binary delivery-tag field
        :return:
        """
        # strip leading '[' and trailing ']'
        if not (self.line.startswith('[') and self.line.endswith(']')):
            raise ValueError("Described type not delimited with square brackets: '%s'" % self.line)
        self.line = self.line[1:]
        self.line = self.line[:-1]
        # process fields from head
        fHandle = self.line.split()[0]
        self.add_field_to_dict(fHandle)
        self.line = self.line[(len(fHandle) + 1):]
        try:
            fDelId = self.line.split()[0]
            self.add_field_to_dict(fDelId)
            self.line = self.line[(len(fDelId) + 1):]
            # process fields from tail
            while len(self.line) > 0 and self.process_transfer_tail_key():
                pass
            # the remainder, no matter how unlikely, must be the delivery-tag
            self.add_field_to_dict(self.line, "delivery-tag")
        except:
            # delivery-id and delivery-tag are optional in subsequent transfers
            # when more=true.
            pass
    def parse_dtype_line(self, _dtype, _line):
        """
        Figure out the fields for the described type.
        The line format is:
        Transfers are handled specially with the ill-formatted binary delivery-tag field
        Note other performatives with ill-formatted binary data might get rejected. We
        only struggle figuring out the delivery-tag because it happens so often.
        :param _dtype: @describedtypename(num)
        :param _line: [key=val [, key=val]...]
        :return:
        """
        self.dtype = _dtype
        self.line = str(_line)
        # NOTE: these instance attributes shadow the staticmethods of the same name
        self.dtype_name = DescribedType.dtype_name(self.dtype)
        self.dtype_number = DescribedType.dtype_number(self.dtype)
        # Process transfers separately..
        # Transfer perfomatives will not call parse recursively while others might
        if self.dtype_name == "transfer":
            self.parseTransfer()
            return
        # strip leading '[' and trailing ']'
        if not (self.line.startswith('[') and self.line.endswith(']')):
            raise ValueError("Described type not delimited with square brackets: '%s'" % _line)
        self.line = self.line[1:]
        self.line = self.line[:-1]
        # process fields
        fields = proton_split(self.line)
        while len(fields) > 0 and len(fields[0]) > 0:
            if '=' not in fields[0]:
                raise ValueError("Field does not contain equal sign '%s'" % fields[0])
            key, val = DescribedType.get_key_and_val(fields[0])
            del fields[0]
            if DescribedType.is_dtype_name(val):
                # recursing to process subtype
                # pull subtype's data out of fields. The fields list belongs to parent.
                subfields = []
                if fields[0] == "[]":
                    # degenerate case of empty subtype closing parent type
                    # @disposition .. state=@accepted(36) []]
                    subfields.append("[]")
                    del fields[0]
                else:
                    # While extracting this type's fields, include nested described types
                    # and PN_SYMBOL data enclosed in brackets. Current type ends when close
                    # bracket seen and nest level is zero.
                    nest = 0
                    while len(fields) > 0:
                        if "=@" in fields[0] and "]" not in fields[0] and "=@:" not in fields[0]:
                            nest += 1
                        if nest == 0:
                            if fields[0].endswith('],'):
                                subfields.append(fields[0][:-2])
                                subfields.append(']')
                                del fields[0]
                                break
                            if fields[0].endswith(']'):
                                subfields.append(fields[0][:-1])
                                subfields.append(']')
                                del fields[0]
                                break
                        elif fields[0].endswith('],') or fields[0].endswith(']'):
                            nest -= 1
                        if fields[0].endswith(']]'):
                            subfields.append(fields[0])
                            del fields[0]
                            break
                        subfields.append(fields[0])
                        del fields[0]
                subtype = DescribedType()
                subtype.parse_dtype_line(val, ' '.join(subfields))
                self.dict[key] = subtype
            elif val.startswith("@PN_SYMBOL"):
                # symbols may end in first field or some later field
                while not val.endswith(']'):
                    val += fields[0]
                    del fields[0]
                self.dict[key] = val
            elif val.startswith('{'):
                # handle some embedded map: properties={:product=\"qpid-dispatch-router\", :version=\"1.3.0-SNAPSHOT\"}
                # pull subtype's data out of fields. The fields list belongs to parent.
                submap = {}
                fields.insert(0, val)
                skey, sval = DescribedType.get_key_and_val(fields[0][1:])
                submap[skey] = sval
                del fields[0]
                while len(fields) > 0:
                    if fields[0].endswith('},'):
                        skey, sval = DescribedType.get_key_and_val(fields[0][:-2])
                        submap[skey] = sval
                        del fields[0]
                        break
                    if fields[0].endswith('}'):
                        skey, sval = DescribedType.get_key_and_val(fields[0][:-1])
                        submap[skey] = sval
                        del fields[0]
                        break
                    skey, sval = DescribedType.get_key_and_val(fields[0])
                    submap[skey] = sval
                    del fields[0]
                self.dict[key] = submap
            else:
                self.dict[key] = val
class ParsedLogLine(object):
"""
Grind through the log line and record some facts about it.
* Constructor returns Null if the log line is to be ignored
* Constructor args:
** log_index 0 for 'A', 1 for 'B'
** routerInstance which instance in log file
** lineno line number
** line the log line
** common common block object
"""
server_trace_key = "SERVER (trace) ["
protocol_trace_key = "PROTOCOL (trace) ["
server_info_key = "SERVER (info) ["
policy_trace_key = "POLICY (trace) ["
router_ls_key = "ROUTER_LS (info)"
transfer_key = "@transfer(20)"
proton_frame_key = "FRAME: "
def sender_settle_mode_of(self, value):
if value == "0":
return "unsettled(0)"
elif value == "1":
return "settled(1)"
elif value == "2":
return "mixed(2)" # default
else:
return "unknown(%s) % value"
def receiver_settle_mode_of(self, value):
if value == "0":
return "first(0)"
elif value == "1":
return "second(1)"
else:
return "unknown(%s) % value"
def resdict_value(self, resdict, key, if_absent):
return resdict[key] if key in resdict else if_absent
def highlighted(self, name, value, color):
result = ""
if value:
result = "<span style=\"background-color:%s\">%s</span>" % (color, name)
return result
def unhighlighted(self, name, value):
return name if value else ""
def extract_facts(self):
perf = self.data.described_type.dtype_number
res = self.data
resdict = self.data.described_type.dict
res.sdorg_str = "HELP I'M A ROCK - Unknown performative: %s" % perf
# the performatives
# Note: res.channel is already populated
if perf == 0x10:
# Performative: open [0] always channel 0
res.name = "open"
res.channel = "0"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
res.sdorg_str = "OPEN"
if res.direction == text.direction_in():
res.conn_peer = self.resdict_value(resdict, "container-id", "unknown")
res.web_show_str += (" (peer: %s)" % res.conn_peer)
res.sdorg_str += (" (peer: %s)" % res.conn_peer)
elif perf == 0x11:
# Performative: begin [channel,remoteChannel]
# TODO: This has a bug where the local and remote channel numbers are confused.
# Usually they are the same. See if anyone notices!
# res.channel
res.name = "begin"
res.remote = self.resdict_value(resdict, "remote-channel", "None)")
res.channel_remote = "[%s,%s]" % (res.channel, res.remote)
res.web_show_str = "<strong>%s</strong> %s" % (res.name, res.channel_remote)
res.sdorg_str = "BEGIN %s" % (res.channel_remote)
elif perf == 0x12:
# Performative: attach [channel,handle] role name (source: src, target: tgt)
res.name = "attach"
res.handle = resdict["handle"]
res.role = "receiver" if resdict["role"] == "true" else "sender"
res.is_receiver = res.role == "receiver"
# translated names handled later
name = self.resdict_value(resdict, "name", "None")
self.shorteners.short_link_names.register(name, self)
tmpsrc = self.resdict_value(resdict, "source", None)
tmptgt = self.resdict_value(resdict, "target", None)
res.snd_settle_mode = self.sender_settle_mode_of(
resdict["snd-settle-mode"]) if "snd-settle-mode" in resdict else "mixed"
res.rcv_settle_mode = self.receiver_settle_mode_of(
resdict["rcv-settle-mode"]) if "rcv-settle-mode" in resdict else "first"
caps = ""
if tmpsrc is not None:
res.source = self.resdict_value(tmpsrc.dict, "address", "none")
caps = self.resdict_value(tmpsrc.dict, "capabilities", "")
else:
res.source = "none"
if tmptgt is not None:
res.target = self.resdict_value(tmptgt.dict, "address", "none")
if caps == "":
caps = self.resdict_value(tmptgt.dict, "capabilities", "")
else:
res.target = "none"
res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
if 'qd.router-data' in caps:
res.link_class = 'router-data'
elif 'qd.router' in caps:
res.link_class = 'router'
"""
TODO:
res.source = short_endp_names.translate(res.source)
res.target = short_endp_names.translate(res.target)
res.snd_settle_mode = extract_name(tmpssm)
res.rcv_settle_mode = extract_name(tmprsm)
"""
# show_str handled in post_extract
elif perf == 0x13:
# Performative: flow [channel,handle]
res.name = "flow"
res.handle = resdict["handle"]
res.flow_deliverycnt = self.resdict_value(resdict, "delivery-count", "0")
res.flow_linkcredit = self.resdict_value(resdict, "link-credit", "0")
res.flow_drain = resdict.get("drain", "") == "true"
res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
res.flow_cnt_credit = "(%s,%s)" % (res.flow_deliverycnt, res.flow_linkcredit)
res.web_show_str = "<strong>%s</strong> %s (%s,%s) %s" % (
res.name, colorize_bg(res.channel_handle), res.flow_deliverycnt, res.flow_linkcredit,
self.highlighted("drain", res.flow_drain, common.color_of("drain")))
res.sdorg_str = "FLOW %s (%s,%s)" % (res.channel_handle, res.flow_deliverycnt, res.flow_linkcredit) # TODO: Add drain
elif perf == 0x14:
# Performative: transfer [channel,handle] (id)
res.name = "transfer"
res.transfer = True
res.handle = resdict["handle"]
res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
res.delivery_id = self.resdict_value(resdict, "delivery-id", "none")
res.delivery_tag = self.resdict_value(resdict, "delivery-tag", "none")
res.settled = self.resdict_value(resdict, "settled", "false")
res.transfer_settled = resdict.get("settled", "") == "true"
res.transfer_more = resdict.get("more", "") == "true"
res.transfer_resume = resdict.get("resume", "") == "true"
res.transfer_aborted = resdict.get("aborted", "") == "true"
# translated names handled in post_extract
self.shorteners.short_data_names.register(res.transfer_bare, self)
elif perf == 0x15:
# Performative: disposition [channel] (role first-last)
res.name = "disposition"
res.role = "receiver" if resdict["role"] == "true" else "sender"
res.is_receiver = res.role == "receiver"
res.first = self.resdict_value(resdict, "first", "0")
res.last = self.resdict_value(resdict, "last", res.first)
res.settled = self.resdict_value(resdict, "settled", "false")
state = resdict.get("state")
if state is not None:
res.disposition_state = state.dtype_name
### colorize_dispositions_not_accepted(proto, res, global_vars, count_anomalies)
res.web_show_str = ("<strong>%s</strong> [%s] (%s %s-%s settled=%s state=%s)" %
(res.name, res.channel, res.role, res.first, res.last, res.settled, res.disposition_state))
res.sdorg_str = ("%s [%s] (%s %s-%s settled=%s state=%s)" %
(res.name, res.channel, res.role, res.first, res.last, res.settled, res.disposition_state))
elif perf == 0x16:
# Performative: detach [channel, handle]
res.name = "detach"
res.handle = resdict["handle"]
### TODO: colorize_performative_error(proto, res, global_vars, count_anomalies)
res.channel_handle = "[%s,%s]" % (res.channel, res.handle)
res.web_show_str = "<strong>%s</strong> %s" % (res.name, colorize_bg(res.channel_handle))
res.sdorg_str = "DETACH %s" % (res.channel_handle)
elif perf == 0x17:
# Performative: end [channel]
res.name = "end"
### TODO: colorize_performative_error(proto, res, global_vars, count_anomalies)
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
res.sdorg_str = "END [%s]" % (res.channel)
elif perf == 0x18:
# Performative: close [0] always channel 0
res.channel = "0"
res.name = "close"
### colorize_performative_error(proto, res, global_vars, count_anomalies)
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
res.sdorg_str = "CLOSE [%s]" % (res.channel)
elif perf == 0x1d:
# transport:definitions error
res.name = "error"
descr = self.resdict_value(resdict, "description", "none")
res.web_show_str = "<strong>%s</strong> [%s] %s" % (res.name, res.channel, descr)
elif perf == 0x23:
# messaging:delivery-state received
res.name = "received"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x24:
# messaging:delivery-state accepted
res.name = "accepted"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x25:
# messaging:delivery-state rejected
res.name = "rejected"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x26:
# messaging:delivery-state released
res.name = "released"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x27:
# messaging:delivery-state modified
res.name = "modified"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x28:
# messaging:addressing source
res.name = "source"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x29:
# messaging:addressing target
res.name = "target"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x2b:
# messaging:addressing delete-on-close
res.name = "delete-on-close"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x2c:
# messaging:addressing delete-on-no-links
res.name = "delete-on-no-links"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x2d:
# messaging:addressing delete-on-no-messages
res.name = "delete-on-no-messages"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x2e:
# messaging:addressing delete-on-no-links-or-messages
res.name = "delete-on-no-links-or-messages"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x30:
# transactions:coordination coordinator
res.name = "coordinator"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x31:
# transactions:coordination declare
res.name = "declare"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x32:
# transactions:coordination discharge
res.name = "discharge"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x33:
# transactions:coordination declared
res.name = "declared"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x34:
# transactions:coordination transactional-state
res.name = "transactional-state"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x40:
# security:sasl sasl-mechanisms
res.name = "sasl-mechanisms"
mechs = self.resdict_value(resdict, "sasl-server-mechanisms", "none")
res.web_show_str = "<strong>%s</strong> [%s] %s" % (res.name, res.channel, mechs)
elif perf == 0x41:
# security:sasl sasl-init
res.name = "sasl-init"
mech = self.resdict_value(resdict, "mechanism", "none")
res.web_show_str = "<strong>%s</strong> [%s] %s" % (res.name, res.channel, mech)
elif perf == 0x42:
# security:sasl sasl-challenge
res.name = "sasl-challenge"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x43:
# security:sasl sasl-response
res.name = "sasl-response"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x44:
# security:sasl sasl-outcome
res.name = "sasl-outcome"
code = self.resdict_value(resdict, "code", "none")
res.web_show_str = "<strong>%s</strong> [%s] code=%s" % (res.name, res.channel, code)
elif perf == 0x70:
# messaging:message-format header
res.name = "header"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x71:
# messaging:message-format delivery-annotations
res.name = "delivery-annotations"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x72:
# messaging:message-format message-annotations
res.name = "message-annotations"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x73:
# messaging:message-format properties
res.name = "properties"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x74:
# messaging:message-format application-properties
res.name = "application-properties"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x75:
# messaging:message-format data
res.name = "data"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x76:
# messaging:message-format amqp-sequence
res.name = "amqp-sequence"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x77:
# messaging:message-format amqp-value
res.name = "amqp-value"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
elif perf == 0x78:
# messaging:message-format footer
res.name = "footer"
res.web_show_str = "<strong>%s</strong> [%s]" % (res.name, res.channel)
else:
res.web_show_str = "HELP I'M A ROCK - Unknown performative: %s" % perf
if "error" in resdict:
res.amqp_error = True
dct = resdict["error"].dict
condi = dct["condition"]
descr = dct["description"] if "description" in dct else ""
res.web_show_str += (" <span style=\"background-color:%s\">error</span> "
"%s %s" % (common.color_of("errors"), condi, descr))
def post_extract_names(self):
    """Second-pass display-string construction for attach and transfer frames.

    Runs after the link/data names have been registered with the shorteners
    so the web (html) and sequence-diagram strings can embed the shortened
    names.  Only attach (0x12) and transfer (0x14) performatives are
    reprocessed here; every other frame keeps the strings built earlier.
    """
    perf = self.data.described_type.dtype_number  # AMQP performative code
    res = self.data
    resdict = self.data.described_type.dict
    if perf == 0x12:
        # Performative: attach [channel,handle] role name (source: src, target: tgt)
        name = self.resdict_value(resdict, "name", "None")
        # popup variant carries hover markup for html; plain variant is for text output
        res.link_short_name_popup = self.shorteners.short_link_names.translate(name, True, customer=self)
        res.link_short_name = self.shorteners.short_link_names.translate(name, False)
        res.web_show_str = ("<strong>%s</strong> %s %s %s (source: %s, target: %s, class: %s)" %
                            (res.name, colorize_bg(res.channel_handle), res.role, res.link_short_name_popup,
                             res.source, res.target, res.link_class))
        res.sdorg_str = ("%s %s %s %s (src: %s, tgt: %s)" %
                         (res.name, res.channel_handle, res.role, res.link_short_name,
                          res.source, res.target))
    elif perf == 0x14:
        # Performative: transfer [channel,handle] (id)
        self.transfer_short_name = self.shorteners.short_data_names.translate(res.transfer_bare, customer=self)
        # anchor link down to this transfer's data dump section
        showdat = "<a href=\"#%s_dump\">%s</a>" % (self.transfer_short_name, self.transfer_short_name)
        spl = common.strings_of_proton_log(res.transfer_bare)
        # drop proton's truncation marker so only payload text remains
        to_strip = "\"... (truncated)"
        if spl.endswith(to_strip):
            spl = spl[:-len(to_strip)]
        res.web_show_str = "<strong>%s</strong> %s (%s) %s %s %s %s %s %s - %s bytes" % (
            res.name, colorize_bg(res.channel_handle), res.delivery_id,
            self.highlighted("settled", res.transfer_settled, common.color_of("presettled")),
            self.highlighted("more", res.transfer_more, common.color_of("more")),
            self.highlighted("resume", res.transfer_resume, common.color_of("aborted")),
            self.highlighted("aborted", res.transfer_aborted, common.color_of("aborted")),
            showdat, common.html_escape(spl[-SEQUENCE_TRANSFER_SIZE:]),
            res.transfer_size)
        res.sdorg_str = "%s %s (%s) %s (%s%s%s%s)\\n%s" % (
            res.name, res.channel_handle, res.delivery_id, res.transfer_size,
            self.unhighlighted(" settled", res.transfer_settled),
            self.unhighlighted(" more", res.transfer_more),
            self.unhighlighted(" resume", res.transfer_resume),
            self.unhighlighted(" aborted", res.transfer_aborted),
            spl[-SEQUENCE_TRANSFER_SIZE:])
def adverbl_link_to(self):
    """Return an html anchor linking to this line's main adverbl data display."""
    # link text looks like 'A0_100': router letter + instance + line number
    link_text = "%s%d_%s" % (common.log_letter_of(self.index), self.instance, str(self.lineno))
    return "<a href=\"#%s\">%s</a>" % (self.fid, link_text)
def __init__(self, _log_index, _instance, _lineno, _line, _comn, _router, opaque=None):
    """
    Process a naked qpid-dispatch log line
    A log line looks like this:
      2018-07-20 10:58:40.179187 -0400 SERVER (trace) [2]:0 -> @begin(17) [next-outgoing-id=0, incoming-window=2147483647, outgoing-window=2147483647] (/home/chug/git/qpid-dispatch/src/server.c:106)
    The process is:
     1. If the line ends with a filename:fileline then strip that away
     2. Peel off the leading time of day and put that into data.datetime.
        Lines with no datetime are presumed start-of-epoch.
     3. Find (SERVER) or (POLICY). If absent then raise to reject message.
     4. If connection number in square brackets '[2]' is missing then raise.
     5. Extract connection number; save in data.conn_num
     6. Create decorated data.conn_id "A0_2"
     7. Extract data.channel if present. Raise if malformed.
     8. Create a web_show_str for lines that may not parse any further. Like policy lines.
     9. Extract the direction arrows
    The log line is now reduced to a described type:
      @describedtypename(num) [key=val [, key=val ...]]
      except for transfers that have the funky transfer data at end.
    :param _log_index: The router prefix index 0 for A, 1 for B, ...
    :param _instance: The router instance
    :param _lineno: line number within the log file
    :param _comn: Common state shared across the whole scraper run
    :param _router: Router object this line belongs to
    :raises ValueError: when the line is not parseable or is outside the
        configured time-of-day window.
    """
    # a verbatim module's lines are passed through untouched for display
    verbatim_module = None
    if len(_comn.verbatim_include_list) > 0:
        for modx in _comn.verbatim_include_list:
            if _comn.module_key_in_line(modx, _line):
                verbatim_module = modx
                break
    # reject lines this parser does not understand
    if not (_comn.module_key_in_line(self.server_trace_key, _line) or
            _comn.module_key_in_line(self.protocol_trace_key, _line) or
            (_comn.module_key_in_line(self.policy_trace_key, _line) and "lookup_user:" in _line) or  # open (not begin, attach)
            _comn.module_key_in_line(self.server_info_key, _line) or
            _comn.module_key_in_line(self.router_ls_key, _line) or
            verbatim_module is not None):
        raise ValueError("Line is not a candidate for parsing")
    self.index = _log_index  # router prefix 0 for A, 1 for B
    self.instance = _instance  # router instance in log file
    self.lineno = _lineno  # log line number
    self.comn = _comn
    self.router = _router
    self.opaque = opaque
    self.prefixi = common.log_letter_of(self.index) + str(self.instance)  # prefix+instance A0
    self.fid = "f_" + self.prefixi + "_" + str(self.lineno)  # frame id A0_100
    self.shorteners = _comn.shorteners  # name shorteners
    self.line = _line  # working line chopped, trimmed
    self.data = LogLineData()  # parsed line fact store
    # strip optional trailing file:line field
    self.line = self.line.rstrip()
    hasFileLine = False
    if self.line.endswith(')'):
        idxOP = self.line.rfind('(')
        idxColon = self.line.rfind(':')
        if idxOP != -1 and idxColon != -1:
            if idxColon > idxOP:
                # only treat '(...:NNN)' as a file:line suffix if NNN is numeric
                lNumStr = self.line[(idxColon + 1): (-1)]
                try:
                    lnum = int(lNumStr)
                    hasFileLine = True
                except:
                    pass
        if hasFileLine:
            self.line = self.line[:self.line.rfind('(')].rstrip()
    # Handle optional timestamp
    # This whole project is brain dead without a timestamp. Just sayin'.
    self.datetime = None
    try:
        self.datetime = datetime.strptime(self.line[:26], '%Y-%m-%d %H:%M:%S.%f')
    except:
        # old routers flub the timestamp and don't print leading zero in uS time
        # 2018-11-18 11:31:08.269 should be 2018-11-18 11:31:08.000269
        # NOTE(review): if the line has no '.' at all, parts[1] below raises
        # IndexError, which propagates out of __init__ — TODO confirm intended.
        td = self.line[:26]
        parts = td.split('.')
        us = parts[1]
        parts_us = us.split(' ')
        if len(parts_us[0]) < 6:
            parts_us[0] = '0' * (6 - len(parts_us[0])) + parts_us[0]
        parts[1] = ' '.join(parts_us)
        td = '.'.join(parts)
        try:
            self.datetime = datetime.strptime(td[:26], '%Y-%m-%d %H:%M:%S.%f')
        except:
            # unparseable timestamp: presumed start-of-epoch
            self.datetime = datetime(1970, 1, 1)
    # Apply time-of-day filters
    if self.datetime is not None:
        if self.comn.args.time_start is not None:
            if self.datetime < self.comn.args.time_start:
                raise ValueError("Line too early outside time-of-day limits")
        if self.comn.args.time_end is not None:
            if self.datetime > self.comn.args.time_end:
                raise ValueError("Line too late outside time-of-day limits")
    # Pull out scraper literal logs
    if verbatim_module is not None:
        sti = self.line.find(verbatim_module)
        if sti > 0:
            # strip datetime and show literal string
            sti += len(verbatim_module)
            self.data.is_scraper = True
            self.data.web_show_str = ("<strong>%s</strong> %s" % (verbatim_module, common.html_escape(self.line[sti:])))
            stcp = self.line[sti:].find(')')  # close paren after log level
            if stcp < 0:
                stcp = 0
            self.data.sdorg_str = self.line[sti + stcp + 1:]
            return
        else:
            assert False  # verbatim module was found only moments ago...
    # extract connection number: find which module key matched, consume
    # everything through that key, and flag the line type accordingly
    sti = self.line.find(self.server_trace_key)
    if sti < 0:
        sti = self.line.find(self.protocol_trace_key)
        if sti < 0:
            sti = self.line.find(self.policy_trace_key)
            if sti < 0:
                sti = self.line.find(self.server_info_key)
                if sti < 0:
                    sti = self.line.find(self.router_ls_key)
                    if sti < 0:
                        raise ValueError("Log keyword/level not found in line %s" % (self.line))
                    else:
                        self.line = self.line[sti + len(self.router_ls_key):]
                        self.data.is_router_ls = True
                        # this has no relationship to AMQP log lines
                        return
                else:
                    self.line = self.line[sti + len(self.server_info_key):]
                    self.data.is_server_info = True
            else:
                self.line = self.line[sti + len(self.policy_trace_key):]
                self.data.is_policy_trace = True
        else:
            self.line = self.line[sti + len(self.protocol_trace_key):]
    else:
        self.line = self.line[sti + len(self.server_trace_key):]
    # connection number is the '[N]' field; reject lines without ']'
    ste = self.line.find(']')
    if ste < 0:
        print("Failed to parse line ", _lineno, " : ", _line)
        traceback.print_exc()
        raise ValueError("'%s' not found in line %s" % ("]", self.line))
    self.data.conn_num = self.line[:ste]
    if self.data.conn_num.startswith("C"):
        self.data.conn_num = self.data.conn_num[1:]
    self.line = self.line[ste + 1:]
    # create decorated connection id
    self.data.conn_id = self.prefixi + "_" + self.data.conn_num
    # get the session (channel) number
    if self.line.startswith(':'):
        self.line = self.line[1:]
    if self.line.startswith(self.proton_frame_key):
        self.line = self.line[len(self.proton_frame_key):]
    sti = self.line.find(' ')
    if sti < 0:
        raise ValueError("space not found after channel number at head of line %s" % (self.line))
    if sti > 0:
        self.data.channel = self.line[:sti]
    self.line = self.line[sti + 1:]
    self.line = self.line.lstrip()
    # cover for traces that don't get any better
    self.data.web_show_str = ("<strong>%s</strong>" % self.line)
    # policy lines have no direction and described type fields
    if self.data.is_policy_trace or self.data.is_server_info:
        return
    # direction arrows '->' or '<-'
    if self.line.startswith('<') or self.line.startswith('-'):
        self.data.direction = self.line[:2]
        self.line = self.line[3:]
        self.data.web_show_str = ("<strong>%s</strong>" % self.line)
    # The log line is now reduced to a described type:
    #   @describedtypename(num) [key=val [, key=val ...]]
    # extract descriptor name
    dname = self.line.split()[0]
    self.line = self.line[(len(dname) + 1):]
    # Dispose of the transfer data
    if dname == self.transfer_key:
        # Look for the '] (NNN) "' that separates the described type fields
        # from the '(size) "data"'. Stick the required '(size) data' into
        # data.transfer_data and delete it from the line.
        rz = re.compile(r'\] \(\d+\) \"').search(self.line)
        # aborted transfers may or may not have size/data in the log line
        if rz is not None and len(rz.regs) > 0:
            splitSt, splitTo = rz.regs[0]
            self.data.transfer_size = self.line[splitSt + 3: splitTo - 3]
            self.data.transfer_data = self.line[splitTo - 1:]  # discard (NNN) size field
            self.line = self.line[: splitSt + 1]
            # try to isolate the bare message: the AMQP header/annotations end
            # where the escaped '\x00Ss' message-section marker begins
            sti = self.data.transfer_data.find(r"\x00Ss")
            if sti > 0:
                self.data.transfer_hdr_annos = self.data.transfer_data[:sti]
                self.data.transfer_bare = self.data.transfer_data[sti:]
            else:
                self.data.transfer_hdr_annos = ''
                self.data.transfer_bare = self.data.transfer_data
        else:
            self.data.transfer_size = "0"
            self.data.transfer_data = "(none)"
    if DescribedType.is_dtype_name(dname):
        self.data.described_type.parse_dtype_line(dname, self.line)
    # data fron incoming line is now parsed out into facts in .data
    # Now cook the data to get useful displays
    self.extract_facts()
def parse_log_file(fn, log_index, comn):
    """
    Given a file name, return an array of Routers that hold the parsed lines.
    Lines that don't parse are identified on stderr and then discarded.
    :param fn: file name
    :param log_index: router id 0 for 'A', 1 for 'B', ...
    :param comn: common data
    :return: list of Routers
    """
    instance = 0
    lineno = 0
    search_for_in_progress = True
    rtrs = []
    rtr = None
    key1 = "SERVER (trace) ["  # AMQP traffic
    key2 = "SERVER (info) Container Name:"  # Normal 'router is starting' restart discovery line
    key3 = "ROUTER_LS (info)"  # a log line placed in separate pool of lines
    keys = [key1, key3]
    key4 = "ROUTER (info) Version:"  # router version line
    key5 = "ROUTER (info) Router started in "  # router mode
    with open(fn, 'r') as infile:
        for line in infile:
            if search_for_in_progress:
                # What if the log file has no record of the router starting?
                # This is an in_progress router and it is a pre-existing router instance
                # and not one found by restart discovery.
                # Any key or AMQP line indicates a router in-progress
                if any(s in line for s in keys) or ("[" in line and "]" in line):
                    assert rtr is None
                    rtr = router.Router(fn, log_index, instance)
                    rtrs.append(rtr)
                    search_for_in_progress = False
                    rtr.restart_rec = router.RestartRecord(rtr, line, lineno + 1)
            lineno += 1
            # check whether any verbatim module is named in this line
            verbatim_module = None
            if len(comn.verbatim_include_list) > 0:
                for modx in comn.verbatim_include_list:
                    if comn.module_key_in_line(modx, line):
                        verbatim_module = modx
                        break
            if key2 in line:
                # This line closes the current router, if any, and opens a new one
                if rtr is not None:
                    instance += 1
                rtr = router.Router(fn, log_index, instance)
                rtrs.append(rtr)
                rtr.restart_rec = router.RestartRecord(rtr, line, lineno)
                search_for_in_progress = False
                rtr.container_name = line[(line.find(key2) + len(key2)):].strip().split()[0]
            elif key3 in line:
                # ROUTER_LS lines go into the router's separate router_ls pool
                pl = None
                try:
                    pl = ParsedLogLine(log_index, instance, lineno, line, comn, rtr)
                except ValueError as ve:
                    # ValueError means 'not a parseable line': drop it silently
                    pass
                except Exception as e:
                    # t, v, tb = sys.exc_info()
                    if hasattr(e, 'message'):
                        traceback.print_exc()
                        sys.stderr.write("Failed to parse file '%s', line %d : %s. Analysis continuing...\n" % (fn, lineno, e.message))
                    else:
                        traceback.print_exc()
                        sys.stderr.write("Failed to parse file '%s', line %d : %s. Analysis continuing...\n" % (fn, lineno, e))
                if pl is not None:
                    if pl.data.is_router_ls:
                        rtr.router_ls.append(pl)
            elif key4 in line:
                rtr.version = line[(line.find(key4) + len(key4)):].strip().split()[0]
            elif key5 in line:
                rtr.mode = line[(line.find(key5) + len(key5)):].strip().split()[0].lower()
            elif verbatim_module is not None:
                pl = ParsedLogLine(log_index, instance, lineno, line, comn, rtr)
                rtr.lines.append(pl)
            elif "[" in line and "]" in line:
                # candidate AMQP traffic line
                try:
                    do_this = True if not hasattr(comn.args, 'skip_all_data') else not comn.args.skip_all_data
                    if not do_this:
                        # not indexing data. maybe do this line anyway
                        do_this = not any(s in line for s in [' @transfer', ' @disposition', ' @flow', 'EMPTY FRAME'])
                    if do_this:
                        pl = ParsedLogLine(log_index, instance, lineno, line, comn, rtr)
                        if pl is not None:
                            rtr.lines.append(pl)
                    else:
                        comn.data_skipped += 1
                except ValueError as ve:
                    # ValueError means 'not a parseable line': drop it silently
                    pass
                except Exception as e:
                    # t, v, tb = sys.exc_info()
                    if hasattr(e, 'message'):
                        traceback.print_exc()
                        sys.stderr.write("Failed to parse file '%s', line %d : %s. Analysis continuing...\n" % (fn, lineno, e.message))
                    else:
                        traceback.print_exc()
                        sys.stderr.write("Failed to parse file '%s', line %d : %s. Analysis continuing...\n" % (fn, lineno, e))
                # raise t, v, tb
            else:
                # ignore this log line
                pass
    return rtrs
if __name__ == "__main__":
    # Self-test driver: exercises the splitter and the parser against the
    # canned test data module and a two-instance sample log file.
    class dummy_args():
        # minimal stand-in for the argparse namespace the parser expects
        skip_all_data = False
        skip_detail = False
        skip_msg_progress = False
        split = False
        time_start = None
        time_end = None

    print("Line-by-line split test")
    try:
        for line in td.TestData().data():
            if "transfer" not in line:
                print(proton_split(line))
            else:
                pass  # splitter does not split transfers
        pass
    except:
        traceback.print_exc(file=sys.stdout)
        pass

    print("Canned data parse test")
    data = td.TestData().data()
    log_index = 0  # from file for router A
    instance = 0  # all from router instance 0
    comn = common.Common()
    comn.args = dummy_args()
    try:
        for i in range(len(data)):
            temp = ParsedLogLine(log_index, instance, i, data[i], comn, None)
            print(temp.datetime, temp.data.conn_id, temp.data.direction, temp.data.web_show_str)
        pass
    except:
        traceback.print_exc(file=sys.stdout)
        pass
    comn.shorteners.short_data_names.sort_customers()

    print("Read two-instance file test")
    comn2 = common.Common()
    comn2.args = dummy_args()
    routers = parse_log_file('test_data/A-two-instances.log', 0, comn2)
    if len(routers) != 2:
        print("ERROR: Expected two router instances in log file")
    # timestamps before, inside, and after the two instances' lifetimes
    t_b4_0 = datetime.strptime('2018-10-15 10:57:32.151673', '%Y-%m-%d %H:%M:%S.%f')
    t_in_0 = datetime.strptime('2018-10-15 10:57:32.338183', '%Y-%m-%d %H:%M:%S.%f')
    t_in_1 = datetime.strptime('2018-10-15 10:59:07.584498', '%Y-%m-%d %H:%M:%S.%f')
    t_af_1 = datetime.strptime('2019-10-15 10:59:07.584498', '%Y-%m-%d %H:%M:%S.%f')
    rtr, idx = router.which_router_tod(routers, t_b4_0)
    assert rtr is routers[0] and idx == 0
    rtr, idx = router.which_router_tod(routers, t_in_0)
    assert rtr is routers[0] and idx == 0
    rtr, idx = router.which_router_tod(routers, t_in_1)
    assert rtr is routers[1] and idx == 1
    rtr, idx = router.which_router_tod(routers, t_af_1)
    assert rtr is routers[1] and idx == 1
    pass
|
mgoulish/qpid-dispatch
|
tools/scraper/parser.py
|
Python
|
apache-2.0
| 51,059
|
import httplib
from pyamf import AMF0, AMF3
from pyamf import remoting
from pyamf.remoting.client import RemotingService
# Maximum rendition frame height (pixels) that play() will select.
height = 1080
def build_amf_request(const, playerID, videoPlayer, publisherID):
    """Build an AMF3 remoting envelope for Brightcove's findMediaById call."""
    envelope = remoting.Envelope(amfVersion=3)
    request = remoting.Request(
        target="com.brightcove.player.runtime.PlayerMediaFacade.findMediaById",
        body=[const, playerID, videoPlayer, publisherID],
        envelope=envelope)
    envelope.bodies.append(("/1", request))
    return envelope
def get_clip_info(const, playerID, videoPlayer, publisherID, playerKey):
    """POST the AMF request to Brightcove and return the decoded response body."""
    envelope = build_amf_request(const, playerID, videoPlayer, publisherID)
    payload = str(remoting.encode(envelope).read())
    connection = httplib.HTTPConnection("c.brightcove.com")
    connection.request("POST",
                       "/services/messagebroker/amf?playerKey=" + playerKey,
                       payload,
                       {'content-type': 'application/x-amf'})
    raw = connection.getresponse().read()
    return remoting.decode(raw).bodies[0][1].body
def play(const, playerID, videoPlayer, publisherID, playerKey):
    """Select the tallest rendition whose frame height fits the global `height`.

    Returns a two-element list [stream name, stream URL].  When no rendition
    fits under the limit, the full-length FLV URL is returned instead.
    """
    rtmpdata = get_clip_info(const, playerID, videoPlayer, publisherID, playerKey)
    # fall back to the full-length URL if nothing qualifies below
    streamUrl = rtmpdata['FLVFullLengthURL']
    for rendition in sorted(rtmpdata['renditions'], key=lambda item: item['frameHeight']):
        # ascending order: the last qualifying entry is the tallest that fits
        if rendition['frameHeight'] <= height:
            streamUrl = rendition['defaultURL']
    streamName = "" + rtmpdata['displayName']
    return [streamName, streamUrl]
|
aplicatii-romanesti/allinclusive-kodi-pi
|
.kodi/addons/plugin.video.kidsplace/brightcovePlayer.py
|
Python
|
apache-2.0
| 1,587
|
#!/usr/bin/env python
# Copyright 2005-2009,2011 Joe Wreschnig
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
import glob
import os
import shutil
import sys
import subprocess
import tarfile
from imp import reload
from distutils.core import setup, Command
from distutils import dir_util
from distutils.command.clean import clean as distutils_clean
from distutils.command.sdist import sdist
class clean(distutils_clean):
    """Extended distutils `clean` that also scrubs generated files from the
    source tree: bytecode, editor backups, MANIFEST, and build output dirs."""

    def run(self):
        # In addition to what the normal clean run does, remove pyc
        # and pyo and backup files from the source tree.
        distutils_clean.run(self)

        def should_remove(filename):
            # bytecode (.pyc/.pyo), editor backups (name~) and emacs
            # autosave files (#name#) are all disposable
            return (filename.lower().endswith((".pyc", ".pyo")) or
                    filename.endswith("~") or
                    (filename.startswith("#") and filename.endswith("#")))

        for pathname, dirs, files in os.walk(os.path.dirname(__file__)):
            for filename in files:
                if should_remove(filename):
                    try:
                        os.unlink(os.path.join(pathname, filename))
                    except EnvironmentError as err:
                        # best-effort: report and keep cleaning
                        print(str(err))
        try:
            os.unlink("MANIFEST")
        except OSError:
            pass
        for base in ["coverage", "build", "dist"]:
            path = os.path.join(os.path.dirname(__file__), base)
            if os.path.isdir(path):
                shutil.rmtree(path)
class distcheck(sdist):
    """sdist variant that additionally verifies the tarball is complete and
    that the unpacked archive can be tested, built, and installed."""

    def _check_manifest(self):
        """Assert that every hg-tracked file made it into the archive."""
        assert self.get_archive_files()
        # make sure MANIFEST.in includes all tracked files
        if subprocess.call(["hg", "status"],
                           stdout=subprocess.PIPE,
                           stderr=subprocess.PIPE) == 0:
            # contains the packaged files after run() is finished
            included_files = self.filelist.files
            assert included_files
            process = subprocess.Popen(["hg", "locate"],
                                       stdout=subprocess.PIPE)
            out, err = process.communicate()
            assert process.returncode == 0
            # BUG FIX: Popen.communicate() returns bytes on Python 3, so the
            # names must be decoded before comparing with the (str) packaged
            # file list; otherwise remove() raises and the set check misfires.
            tracked_files = out.decode("utf-8").splitlines()
            for ignore in [".hgignore", ".hgtags"]:
                tracked_files.remove(ignore)
            assert not set(tracked_files) - set(included_files), \
                "Not all tracked files included in tarball, update MANIFEST.in"

    def _check_dist(self):
        """Unpack the archive into dist/distcheck and smoke-test it."""
        assert self.get_archive_files()
        distcheck_dir = os.path.join(self.dist_dir, "distcheck")
        if os.path.exists(distcheck_dir):
            dir_util.remove_tree(distcheck_dir)
        self.mkpath(distcheck_dir)
        archive = self.get_archive_files()[0]
        tfile = tarfile.open(archive, "r:gz")
        tfile.extractall(distcheck_dir)
        tfile.close()
        name = self.distribution.get_fullname()
        extract_dir = os.path.join(distcheck_dir, name)
        old_pwd = os.getcwd()
        os.chdir(extract_dir)
        self.spawn([sys.executable, "setup.py", "test"])
        self.spawn([sys.executable, "setup.py", "build"])
        self.spawn([sys.executable, "setup.py", "build_sphinx"])
        self.spawn([sys.executable, "setup.py", "install",
                    "--prefix", "../prefix", "--record", "../log.txt"])
        # force a predictable locale for the quick re-test
        os.environ["LC_ALL"] = "C"
        self.spawn([sys.executable, "setup.py", "test", "--quick"])
        os.chdir(old_pwd)

    def run(self):
        sdist.run(self)
        self._check_manifest()
        self._check_dist()
class build_sphinx(Command):
    """Distutils command that builds the HTML docs with sphinx-build."""

    description = "build sphinx documentation"
    user_options = [
        ("build-dir=", "d", "build directory"),
    ]

    def initialize_options(self):
        self.build_dir = None

    def finalize_options(self):
        # default to "build" when the option was not supplied
        self.build_dir = self.build_dir or "build"

    def run(self):
        source_dir = "docs"
        output_dir = os.path.join(self.build_dir, "sphinx")
        self.spawn(["sphinx-build", "-b", "html", "-n", source_dir, output_dir])
class test_cmd(Command):
    """Distutils command that runs the project's unit-test suite."""

    description = "run automated tests"
    user_options = [
        ("to-run=", None, "list of tests to run (default all)"),
        ("quick", None, "don't run slow mmap-failing tests"),
    ]

    def initialize_options(self):
        self.to_run = []
        self.quick = False

    def finalize_options(self):
        # command line gives a comma-separated string; split it into a list
        if self.to_run:
            self.to_run = self.to_run.split(",")

    def run(self):
        import tests
        count, failures = tests.unit(self.to_run, self.quick)
        if not failures:
            print("All tests passed")
        else:
            print("%d out of %d failed" % (failures, count))
            raise SystemExit("Test failures are listed above.")
class coverage_cmd(Command):
    """Run the test suite under the stdlib `trace` module and enforce the
    project's line-coverage band via the generated .cover files."""

    description = "generate test coverage data"
    user_options = [
        ("quick", None, "don't run slow mmap-failing tests"),
    ]

    def initialize_options(self):
        self.quick = None

    def finalize_options(self):
        self.quick = bool(self.quick)

    def run(self):
        import trace
        tracer = trace.Trace(
            count=True, trace=False,
            ignoredirs=[sys.prefix, sys.exec_prefix])

        def run_tests():
            # reload so the traced run also counts module top-level lines
            import mutagen
            import mutagen._util
            reload(mutagen._util)
            reload(mutagen)
            cmd = self.reinitialize_command("test")
            cmd.quick = self.quick
            cmd.ensure_finalized()
            cmd.run()

        tracer.runfunc(run_tests)
        results = tracer.results()
        coverage = os.path.join(os.path.dirname(__file__), "coverage")
        results.write_results(show_missing=True, coverdir=coverage)
        # drop coverage files for everything that is not part of mutagen
        for match in glob.glob(os.path.join(coverage, "[!m]*.cover")):
            os.unlink(match)
        try:
            os.unlink(os.path.join(coverage, "..setup.cover"))
        except OSError:
            pass
        total_lines = 0
        bad_lines = 0
        for filename in glob.glob(os.path.join(coverage, "*.cover")):
            # BUG FIX: mode "rU" was removed in Python 3.11 (raises
            # ValueError); plain "r" already gives universal newlines on
            # Python 3.  A context manager also closes the leaked handle.
            with open(filename, "r") as cover_file:
                lines = cover_file.readlines()
            total_lines += len(lines)
            # ">>>>>>" marks an unexecuted line; ignore structural lines
            bad_lines += len(
                [line for line in lines if
                 (line.startswith(">>>>>>") and
                  "finally:" not in line and '"""' not in line)])
        pct = 100.0 * (total_lines - bad_lines) / float(total_lines)
        print("Coverage data written to %s (%d/%d, %0.2f%%)" % (
            coverage, total_lines - bad_lines, total_lines, pct))
        if pct < 98.66:
            raise SystemExit(
                "Coverage percentage went down; write more tests.")
        if pct > 98.7:
            raise SystemExit("Coverage percentage went up; change setup.py.")
# Install the man pages only on POSIX systems; Windows has no man hierarchy.
if os.name == "posix":
    data_files = [('share/man/man1', glob.glob("man/*.1"))]
else:
    data_files = []
if __name__ == "__main__":
    from mutagen import version_string

    # wire the custom commands defined above into distutils
    cmd_classes = {
        "clean": clean,
        "test": test_cmd,
        "coverage": coverage_cmd,
        "distcheck": distcheck,
        "build_sphinx": build_sphinx,
    }
    setup(cmdclass=cmd_classes,
          name="mutagenx", version=version_string,
          url="https://github.com/LordSputnik/mutagen",
          description="read and write audio tags for many formats in Python 3",
          author="Ben Ockmore",
          author_email="ben.sput@gmail.com",
          license="GNU GPL v2",
          classifiers=[
              'Development Status :: 4 - Beta',
              'Intended Audience :: Developers',
              'License :: OSI Approved :: GNU General Public License v2 (GPLv2)',
              'Operating System :: OS Independent',
              'Programming Language :: Python',
              'Programming Language :: Python :: 2',
              'Programming Language :: Python :: 2.7',
              'Programming Language :: Python :: 3.3',
              'Programming Language :: Python :: 3.4',
              'Topic :: Multimedia :: Sound/Audio'
          ],
          packages=["mutagen"],
          data_files=data_files,
          scripts=[os.path.join("tools", name) for name in [
              "mid3iconv",
              "mid3v2",
              "moggsplit",
              "mutagen-inspect",
              "mutagen-pony",
          ]],
          long_description="""\
A fork of the mutagen package, modified to support Python 3.3+. I
take no credit for the original mutagen - the copyright for that is
owned by the original developers. This package isn't currently
compatible with Python 2.x, but I am working with the mutagen
developers to make these two projects converge. Once this happens, I'll
close this project and start working on improving mutagen itself.
From the original package description:
\"Mutagen is a Python module to handle audio metadata. It supports ASF,
FLAC, M4A, Monkey's Audio, MP3, Musepack, Ogg FLAC, Ogg Speex, Ogg
Theora, Ogg Vorbis, True Audio, WavPack and OptimFROG audio files. All
versions of ID3v2 are supported, and all standard ID3v2.4 frames are
parsed. It can read Xing headers to accurately calculate the bitrate
and length of MP3s. ID3 and APEv2 tags can be edited regardless of
audio format. It can also manipulate Ogg streams on an individual
packet/page level.\"
"""
          )
|
LordSputnik/mutagen
|
setup.py
|
Python
|
gpl-2.0
| 9,430
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.