repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
Metaswitch/calico-neutron | neutron/plugins/cisco/common/cisco_credentials_v2.py | 50 | 2067 | # Copyright 2012 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.plugins.cisco.common import cisco_constants as const
from neutron.plugins.cisco.common import cisco_exceptions as cexc
from neutron.plugins.cisco.common import config
from neutron.plugins.cisco.db import network_db_v2 as cdb
class Store(object):
    """Credential store backed by the Cisco credentials DB table."""

    @staticmethod
    def initialize():
        """Seed the credential table from the configured device dictionary."""
        device_dict = config.get_device_dictionary()
        for dev_id, dev_ip, dev_key in device_dict:
            if dev_key != const.USERNAME:
                continue
            username = device_dict[dev_id, dev_ip, const.USERNAME]
            password = device_dict[dev_id, dev_ip, const.PASSWORD]
            try:
                cdb.add_credential(dev_ip, username, password, dev_id)
            except cexc.CredentialAlreadyExists:
                # Quietly ignored: it only happens if this module is
                # loaded more than once, in which case the credentials
                # are already populated.
                pass

    @staticmethod
    def get_username(cred_name):
        """Return the username stored under cred_name."""
        return cdb.get_credential_name(cred_name)[const.CREDENTIAL_USERNAME]

    @staticmethod
    def get_password(cred_name):
        """Return the password stored under cred_name."""
        return cdb.get_credential_name(cred_name)[const.CREDENTIAL_PASSWORD]
| apache-2.0 |
hlzz/dotfiles | graphics/VTK-7.0.0/ThirdParty/Twisted/twisted/lore/texi.py | 2 | 3224 | # -*- test-case-name: twisted.lore.test.test_texi -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from cStringIO import StringIO
import os, re
from twisted.web import domhelpers
import latex, tree
# Runs of whitespace collapse to a single space in texinfo output.
spaceRe = re.compile(r'\s+')


def texiEscape(text):
    """Return *text* with every whitespace run collapsed to one space.

    Bug fix: the original called ``spaceRe.sub(text, ' ')`` — ``re.sub``
    takes (replacement, string), so the input was returned unchanged
    instead of being normalized.
    """
    return spaceRe.sub(' ', text)
# Texinfo entity table: start from the LaTeX entity map and override the
# entries that have a texinfo-specific spelling.
entities = latex.entities.copy()
entities['copy'] = '@copyright{}'
class TexiSpitter(latex.BaseLatexSpitter):
    # Visitor that emits GNU Texinfo markup while walking a lore DOM tree.
    # Depth offset for header elements (h2 maps to this level).
    baseLevel = 1

    def writeNodeData(self, node):
        # Emit the node's text through our writer, texinfo-escaped.
        latex.getLatexText(node, self.writer, texiEscape, entities)

    def visitNode_title(self, node):
        # The title becomes a @node plus @section, then a @menu listing
        # every header found under the document root.
        self.writer('@node ')
        self.visitNodeDefault(node)
        self.writer('\n')
        self.writer('@section ')
        self.visitNodeDefault(node)
        self.writer('\n')
        headers = tree.getHeaders(domhelpers.getParents(node)[-1])
        if not headers:
            return
        self.writer('@menu\n')
        for header in headers:
            self.writer('* %s::\n' % domhelpers.getNodeText(header))
        self.writer('@end menu\n')

    def visitNode_pre(self, node):
        """
        Writes a I{verbatim} block when it encounters a I{pre} element.

        @param node: The element to process.
        @type node: L{xml.dom.minidom.Element}
        """
        self.writer('@verbatim\n')
        buf = StringIO()
        latex.getLatexText(node, buf.write, entities=entities)
        self.writer(tree._removeLeadingTrailingBlankLines(buf.getvalue()))
        self.writer('@end verbatim\n')

    def visitNode_code(self, node):
        # Inline code is wrapped in @code{...}.
        fout = StringIO()
        latex.getLatexText(node, fout.write, texiEscape, entities)
        self.writer('@code{'+fout.getvalue()+'}')

    def visitNodeHeader(self, node):
        # hN elements: emit a @node entry plus an @(sub)*section whose depth
        # is derived from the digit in the tag name.
        self.writer('\n\n@node ')
        self.visitNodeDefault(node)
        self.writer('\n')
        level = (int(node.tagName[1])-2)+self.baseLevel
        self.writer('\n\n@'+level*'sub'+'section ')
        self.visitNodeDefault(node)
        self.writer('\n')

    def visitNode_a_listing(self, node):
        # Inline the referenced source file as a verbatim block.
        fileName = os.path.join(self.currDir, node.getAttribute('href'))
        self.writer('@verbatim\n')
        self.writer(open(fileName).read())
        self.writer('@end verbatim')
        # Write a caption for this source listing

    def visitNode_a_href(self, node):
        self.visitNodeDefault(node)

    def visitNode_a_name(self, node):
        self.visitNodeDefault(node)

    visitNode_h2 = visitNode_h3 = visitNode_h4 = visitNodeHeader

    # Static markup fragments consumed by BaseLatexSpitter's generic
    # start_/end_ element handling.
    start_dl = '@itemize\n'
    end_dl = '@end itemize\n'
    start_ul = '@itemize\n'
    end_ul = '@end itemize\n'

    start_ol = '@enumerate\n'
    end_ol = '@end enumerate\n'

    start_li = '@item\n'
    end_li = '\n'

    start_dt = '@item\n'
    end_dt = ': '
    end_dd = '\n'

    start_p = '\n\n'

    start_strong = start_em = '@emph{'
    end_strong = end_em = '}'

    start_q = "``"
    end_q = "''"

    start_span_footnote = '@footnote{'
    end_span_footnote = '}'

    start_div_note = '@quotation\n@strong{Note:}'
    end_div_note = '@end quotation\n'

    start_th = '@strong{'
    end_th = '}'
| bsd-3-clause |
varunnaganathan/django | django/contrib/gis/db/backends/mysql/schema.py | 138 | 3046 | import logging
from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.mysql.schema import DatabaseSchemaEditor
from django.db.utils import OperationalError
logger = logging.getLogger('django.contrib.gis')


class MySQLGISSchemaEditor(DatabaseSchemaEditor):
    """Schema editor adding MySQL spatial-index support for GeometryFields."""

    sql_add_spatial_index = 'CREATE SPATIAL INDEX %(index)s ON %(table)s(%(column)s)'
    sql_drop_spatial_index = 'DROP INDEX %(index)s ON %(table)s'

    def __init__(self, *args, **kwargs):
        super(MySQLGISSchemaEditor, self).__init__(*args, **kwargs)
        # Spatial-index statements queued here and run after the table exists.
        self.geometry_sql = []

    def skip_default(self, field):
        return (
            super(MySQLGISSchemaEditor, self).skip_default(field) or
            # Geometry fields are stored as BLOB/TEXT and can't have defaults.
            isinstance(field, GeometryField)
        )

    def column_sql(self, model, field, include_default=False):
        column_sql = super(MySQLGISSchemaEditor, self).column_sql(model, field, include_default)
        # MySQL doesn't support spatial indexes on NULL columns
        if isinstance(field, GeometryField) and field.spatial_index and not field.null:
            qn = self.connection.ops.quote_name
            db_table = model._meta.db_table
            # Queue the index; executed later by create_spatial_indexes().
            self.geometry_sql.append(
                self.sql_add_spatial_index % {
                    'index': qn(self._create_spatial_index_name(model, field)),
                    'table': qn(db_table),
                    'column': qn(field.column),
                }
            )
        return column_sql

    def create_model(self, model):
        super(MySQLGISSchemaEditor, self).create_model(model)
        self.create_spatial_indexes()

    def add_field(self, model, field):
        super(MySQLGISSchemaEditor, self).add_field(model, field)
        self.create_spatial_indexes()

    def remove_field(self, model, field):
        if isinstance(field, GeometryField) and field.spatial_index:
            qn = self.connection.ops.quote_name
            sql = self.sql_drop_spatial_index % {
                'index': qn(self._create_spatial_index_name(model, field)),
                'table': qn(model._meta.db_table),
            }
            try:
                self.execute(sql)
            except OperationalError:
                # Best effort: the storage engine may not support the index.
                logger.error(
                    "Couldn't remove spatial index: %s (may be expected "
                    "if your storage engine doesn't support them).", sql
                )
        super(MySQLGISSchemaEditor, self).remove_field(model, field)

    def _create_spatial_index_name(self, model, field):
        # Index naming convention: <table>_<column>_id.
        return '%s_%s_id' % (model._meta.db_table, field.column)

    def create_spatial_indexes(self):
        # Flush the queued CREATE SPATIAL INDEX statements, then reset.
        for sql in self.geometry_sql:
            try:
                self.execute(sql)
            except OperationalError:
                logger.error(
                    "Cannot create SPATIAL INDEX %s. Only MyISAM and (as of "
                    "MySQL 5.7.5) InnoDB support them.", sql
                )
        self.geometry_sql = []
| bsd-3-clause |
Plain-Andy-legacy/android_external_chromium_org | chrome/common/extensions/docs/server2/content_provider_test.py | 77 | 7815 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from cStringIO import StringIO
import json
import unittest
from zipfile import ZipFile
from compiled_file_system import CompiledFileSystem
from content_provider import ContentProvider
from file_system import FileNotFoundError
from object_store_creator import ObjectStoreCreator
from path_canonicalizer import PathCanonicalizer
from test_file_system import TestFileSystem
from third_party.motemplate import Motemplate
_REDIRECTS_JSON = json.dumps({
'oldfile.html': 'storage.html',
'index.html': 'https://developers.google.com/chrome',
})
_MARKDOWN_CONTENT = (
('# Header 1 #', u'<h1 id="header-1">Header 1</h1>'),
('1. Foo\n', u'<ol>\n<li>Foo</li>\n</ol>'),
('\n',
'<p><img alt="alt text" src="/path/img.jpg" title="Title" /></p>'),
('* Unordered item 1', u'<ul>\n<li>Unordered item 1</li>\n</ul>')
)
# Test file system data which exercises many different mimetypes.
_TEST_DATA = {
  'dir': {
    'a.txt': 'a.txt content',
    'b.txt': 'b.txt content',
    'c': {
      'd.txt': 'd.txt content',
    },
  },
  'dir2': {
    'dir3': {
      'a.txt': 'a.txt content',
      'b.txt': 'b.txt content',
      'c': {
        'd.txt': 'd.txt content',
      },
    },
  },
  'dir4': {
    'index.html': 'index.html content 1'
  },
  'dir5': {
    'index.html': 'index.html content 2'
  },
  'dir6': {
    'notindex.html': 'notindex.html content'
  },
  'dir7': {
    # Markdown index, served when the directory itself is requested.
    'index.md': '\n'.join(text[0] for text in _MARKDOWN_CONTENT)
  },
  'dir.txt': 'dir.txt content',
  'dir5.html': 'dir5.html content',
  'img.png': 'img.png content',
  'index.html': 'index.html content',
  'read.txt': 'read.txt content',
  'redirects.json': _REDIRECTS_JSON,
  'noextension': 'noextension content',
  'run.js': 'run.js content',
  'site.css': 'site.css content',
  'storage.html': 'storage.html content',
  'markdown.md': '\n'.join(text[0] for text in _MARKDOWN_CONTENT)
}
class ContentProviderUnittest(unittest.TestCase):
  # Exercises ContentProvider against an in-memory TestFileSystem.

  def setUp(self):
    self._test_file_system = TestFileSystem(_TEST_DATA)
    self._content_provider = self._CreateContentProvider()

  def _CreateContentProvider(self, supports_zip=False):
    object_store_creator = ObjectStoreCreator.ForTest()
    return ContentProvider(
        'foo',
        CompiledFileSystem.Factory(object_store_creator),
        self._test_file_system,
        object_store_creator,
        default_extensions=('.html', '.md'),
        # TODO(kalman): Test supports_templates=False.
        supports_templates=True,
        supports_zip=supports_zip)

  def _assertContent(self, content, content_type, content_and_type):
    # Assert type so that str is differentiated from unicode.
    self.assertEqual(type(content), type(content_and_type.content))
    self.assertEqual(content, content_and_type.content)
    self.assertEqual(content_type, content_and_type.content_type)

  def _assertTemplateContent(self, content, path, version):
    # Templates come back as Motemplate objects; compare their source text.
    content_and_type = self._content_provider.GetContentAndType(path).Get()
    self.assertEqual(Motemplate, type(content_and_type.content))
    content_and_type.content = content_and_type.content.source
    self._assertContent(content, 'text/html', content_and_type)
    self.assertEqual(version, self._content_provider.GetVersion(path).Get())

  def _assertMarkdownContent(self, content, path, version):
    content_and_type = self._content_provider.GetContentAndType(path).Get()
    content_and_type.content = content_and_type.content.source
    self._assertContent(content, 'text/html', content_and_type)
    self.assertEqual(version, self._content_provider.GetVersion(path).Get())

  def testPlainText(self):
    self._assertContent(
        u'a.txt content', 'text/plain',
        self._content_provider.GetContentAndType('dir/a.txt').Get())
    self._assertContent(
        u'd.txt content', 'text/plain',
        self._content_provider.GetContentAndType('dir/c/d.txt').Get())
    self._assertContent(
        u'read.txt content', 'text/plain',
        self._content_provider.GetContentAndType('read.txt').Get())
    self._assertContent(
        unicode(_REDIRECTS_JSON, 'utf-8'), 'application/json',
        self._content_provider.GetContentAndType('redirects.json').Get())
    self._assertContent(
        u'run.js content', 'application/javascript',
        self._content_provider.GetContentAndType('run.js').Get())
    self._assertContent(
        u'site.css content', 'text/css',
        self._content_provider.GetContentAndType('site.css').Get())

  def testTemplate(self):
    self._assertTemplateContent(u'storage.html content', 'storage.html', '0')
    self._test_file_system.IncrementStat('storage.html')
    self._assertTemplateContent(u'storage.html content', 'storage.html', '1')

  def testImage(self):
    # Binary content stays str (not unicode).
    self._assertContent(
        'img.png content', 'image/png',
        self._content_provider.GetContentAndType('img.png').Get())

  def testZipTopLevel(self):
    zip_content_provider = self._CreateContentProvider(supports_zip=True)
    content_and_type = zip_content_provider.GetContentAndType('dir.zip').Get()
    zipfile = ZipFile(StringIO(content_and_type.content))
    content_and_type.content = zipfile.namelist()
    self._assertContent(
        ['dir/a.txt', 'dir/b.txt', 'dir/c/d.txt'], 'application/zip',
        content_and_type)

  def testZip2ndLevel(self):
    zip_content_provider = self._CreateContentProvider(supports_zip=True)
    content_and_type = zip_content_provider.GetContentAndType(
        'dir2/dir3.zip').Get()
    zipfile = ZipFile(StringIO(content_and_type.content))
    content_and_type.content = zipfile.namelist()
    self._assertContent(
        ['dir3/a.txt', 'dir3/b.txt', 'dir3/c/d.txt'], 'application/zip',
        content_and_type)

  def testCanonicalZipPaths(self):
    # Without supports_zip the path is canonicalized as a file.
    self.assertEqual(
        'dir.txt',
        self._content_provider.GetCanonicalPath('dir.zip'))
    self.assertEqual(
        'dir.txt',
        self._content_provider.GetCanonicalPath('diR.zip'))
    # With supports_zip the path is canonicalized as the zip file which
    # corresponds to the canonical directory.
    zip_content_provider = self._CreateContentProvider(supports_zip=True)
    self.assertEqual(
        'dir.zip',
        zip_content_provider.GetCanonicalPath('dir.zip'))
    self.assertEqual(
        'dir.zip',
        zip_content_provider.GetCanonicalPath('diR.zip'))

  def testMarkdown(self):
    expected_content = '\n'.join(text[1] for text in _MARKDOWN_CONTENT)
    self._assertMarkdownContent(expected_content, 'markdown', '0')
    self._test_file_system.IncrementStat('markdown.md')
    self._assertMarkdownContent(expected_content, 'markdown', '1')

  def testNotFound(self):
    self.assertRaises(
        FileNotFoundError,
        self._content_provider.GetContentAndType('oops').Get)

  def testIndexRedirect(self):
    self._assertTemplateContent(u'index.html content', '', '0')
    self._assertTemplateContent(u'index.html content 1', 'dir4', '0')
    self._assertTemplateContent(u'dir5.html content', 'dir5', '0')
    self._assertMarkdownContent(
        '\n'.join(text[1] for text in _MARKDOWN_CONTENT),
        'dir7',
        '0')
    self._assertContent(
        'noextension content', 'text/plain',
        self._content_provider.GetContentAndType('noextension').Get())
    self.assertRaises(
        FileNotFoundError,
        self._content_provider.GetContentAndType('dir6').Get)

  def testRefresh(self):
    # Not entirely sure what to test here, but get some code coverage.
    self._content_provider.Refresh().Get()
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
| bsd-3-clause |
luogangyi/bcec-nova | nova/api/openstack/extensions.py | 8 | 16604 | # Copyright 2011 OpenStack Foundation
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
import functools
import os
import six
import webob.dec
import webob.exc
import nova.api.openstack
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova import exception
from nova.openstack.common.gettextutils import _
from nova.openstack.common import importutils
from nova.openstack.common import log as logging
import nova.policy
LOG = logging.getLogger(__name__)
class ExtensionDescriptor(object):
    """Base class that defines the contract for extensions.

    Note that you don't have to derive from this class to have a valid
    extension; it is purely a convenience.
    """

    # The name of the extension, e.g., 'Fox In Socks'
    name = None

    # The alias for the extension, e.g., 'FOXNSOX'
    alias = None

    # Description comes from the docstring for the class

    # The XML namespace for the extension, e.g.,
    # 'http://www.fox.in.socks/api/ext/pie/v1.0'
    namespace = None

    # The timestamp when the extension was last updated, e.g.,
    # '2011-01-22T13:25:27-06:00'
    updated = None

    def __init__(self, ext_mgr):
        """Register extension with the extension manager."""

        ext_mgr.register(self)
        self.ext_mgr = ext_mgr

    def get_resources(self):
        """List of extensions.ResourceExtension extension objects.

        Resources define new nouns, and are accessible through URLs.
        """
        resources = []
        return resources

    def get_controller_extensions(self):
        """List of extensions.ControllerExtension extension objects.

        Controller extensions are used to extend existing controllers.
        """
        controller_exts = []
        return controller_exts

    @classmethod
    def nsmap(cls):
        """Synthesize a namespace map from extension."""

        # Start with a base nsmap (module-level ext_nsmap, defined below;
        # resolved at call time so the forward reference is safe)
        nsmap = ext_nsmap.copy()

        # Add the namespace for the extension
        nsmap[cls.alias] = cls.namespace

        return nsmap

    @classmethod
    def xmlname(cls, name):
        """Synthesize element and attribute names."""

        return '{%s}%s' % (cls.namespace, name)
def make_ext(elem):
    """Declare the attributes and children of an <extension> template element.

    NOTE(review): the one-argument set() calls look like the
    xmlutil.TemplateElement API (attribute selected from serialized data),
    not ElementTree's two-argument Element.set() -- confirm against xmlutil.
    """
    elem.set('name')
    elem.set('namespace')
    elem.set('alias')
    elem.set('updated')

    desc = xmlutil.SubTemplateElement(elem, 'description')
    desc.text = 'description'

    xmlutil.make_links(elem, 'links')


# Base namespace map shared by the extension XML templates.
ext_nsmap = {None: xmlutil.XMLNS_COMMON_V10, 'atom': xmlutil.XMLNS_ATOM}
class ExtensionTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for a single extension."""

    def construct(self):
        root = xmlutil.TemplateElement('extension', selector='extension')
        make_ext(root)
        return xmlutil.MasterTemplate(root, 1, nsmap=ext_nsmap)
class ExtensionsTemplate(xmlutil.TemplateBuilder):
    """XML serialization template for a list of extensions."""

    def construct(self):
        root = xmlutil.TemplateElement('extensions')
        elem = xmlutil.SubTemplateElement(root, 'extension',
                                          selector='extensions')
        make_ext(elem)
        return xmlutil.MasterTemplate(root, 1, nsmap=ext_nsmap)
class ExtensionsController(wsgi.Resource):
    """WSGI resource that exposes the loaded extensions over the API."""

    def __init__(self, extension_manager):
        self.extension_manager = extension_manager
        super(ExtensionsController, self).__init__(None)

    def _translate(self, ext):
        # Serialize one extension to the dict shape used by index/show.
        ext_data = {}
        ext_data['name'] = ext.name
        ext_data['alias'] = ext.alias
        ext_data['description'] = ext.__doc__
        ext_data['namespace'] = ext.namespace
        ext_data['updated'] = ext.updated
        ext_data['links'] = []  # TODO(dprince): implement extension links
        return ext_data

    @wsgi.serializers(xml=ExtensionsTemplate)
    def index(self, req):
        extensions = []
        for ext in self.extension_manager.sorted_extensions():
            extensions.append(self._translate(ext))
        return dict(extensions=extensions)

    @wsgi.serializers(xml=ExtensionTemplate)
    def show(self, req, id):
        try:
            # NOTE(dprince): the extensions alias is used as the 'id' for show
            ext = self.extension_manager.extensions[id]
        except KeyError:
            raise webob.exc.HTTPNotFound()

        return dict(extension=self._translate(ext))

    def delete(self, req, id):
        # Extensions cannot be removed through the API.
        raise webob.exc.HTTPNotFound()

    def create(self, req, body):
        # Extensions cannot be added through the API.
        raise webob.exc.HTTPNotFound()
class ExtensionManager(object):
    """Load extensions from the configured extension path.

    See nova/tests/api/openstack/volume/extensions/foxinsocks.py or an
    example extension implementation.
    """
    # NOTE(review): relies on attributes (self.extensions,
    # self.sorted_ext_list, self.cls_list) that are not initialized here --
    # presumably set by subclasses; confirm before direct instantiation.

    def sorted_extensions(self):
        # Lazily (re)build the alias-sorted cache; register() invalidates it.
        if self.sorted_ext_list is None:
            self.sorted_ext_list = sorted(self.extensions.iteritems())

        for _alias, ext in self.sorted_ext_list:
            yield ext

    def is_loaded(self, alias):
        return alias in self.extensions

    def register(self, ext):
        # Do nothing if the extension doesn't check out
        if not self._check_extension(ext):
            return

        alias = ext.alias
        LOG.audit(_('Loaded extension: %s'), alias)

        if alias in self.extensions:
            raise exception.NovaException("Found duplicate extension: %s"
                                          % alias)
        self.extensions[alias] = ext
        self.sorted_ext_list = None

    def get_resources(self):
        """Returns a list of ResourceExtension objects."""

        resources = []
        resources.append(ResourceExtension('extensions',
                                           ExtensionsController(self)))

        for ext in self.sorted_extensions():
            try:
                resources.extend(ext.get_resources())
            except AttributeError:
                # NOTE(dprince): Extension aren't required to have resource
                # extensions
                pass
        return resources

    def get_controller_extensions(self):
        """Returns a list of ControllerExtension objects."""
        controller_exts = []
        for ext in self.sorted_extensions():
            try:
                get_ext_method = ext.get_controller_extensions
            except AttributeError:
                # NOTE(Vek): Extensions aren't required to have
                # controller extensions
                continue
            controller_exts.extend(get_ext_method())
        return controller_exts

    def _check_extension(self, extension):
        """Checks for required methods in extension objects."""
        try:
            LOG.debug(_('Ext name: %s'), extension.name)
            LOG.debug(_('Ext alias: %s'), extension.alias)
            LOG.debug(_('Ext description: %s'),
                      ' '.join(extension.__doc__.strip().split()))
            LOG.debug(_('Ext namespace: %s'), extension.namespace)
            LOG.debug(_('Ext updated: %s'), extension.updated)
        except AttributeError as ex:
            LOG.exception(_("Exception loading extension: %s"), unicode(ex))
            return False

        return True

    def load_extension(self, ext_factory):
        """Execute an extension factory.

        Loads an extension. The 'ext_factory' is the name of a
        callable that will be imported and called with one
        argument--the extension manager. The factory callable is
        expected to call the register() method at least once.
        """

        LOG.debug(_("Loading extension %s"), ext_factory)

        if isinstance(ext_factory, six.string_types):
            # Load the factory
            factory = importutils.import_class(ext_factory)
        else:
            factory = ext_factory

        # Call it
        LOG.debug(_("Calling extension factory %s"), ext_factory)
        factory(self)

    def _load_extensions(self):
        """Load extensions specified on the command line."""

        extensions = list(self.cls_list)

        for ext_factory in extensions:
            try:
                self.load_extension(ext_factory)
            except Exception as exc:
                # Best effort: a broken extension must not break API startup.
                LOG.warn(_('Failed to load extension %(ext_factory)s: '
                           '%(exc)s'),
                         {'ext_factory': ext_factory, 'exc': exc})
class ControllerExtension(object):
    """Extend core controllers of nova OpenStack API.

    Provide a way to extend existing nova OpenStack API core
    controllers.
    """

    def __init__(self, extension, collection, controller):
        # Plain value object: remember which extension contributes which
        # controller to which collection.
        self.extension = extension
        self.collection = collection
        self.controller = controller
class ResourceExtension(object):
    """Add top level resources to the OpenStack API in nova."""

    def __init__(self, collection, controller=None, parent=None,
                 collection_actions=None, member_actions=None,
                 custom_routes_fn=None, inherits=None, member_name=None):
        # Default the action maps to fresh empty dicts (never share a
        # mutable default between instances).
        self.collection = collection
        self.controller = controller
        self.parent = parent
        self.collection_actions = collection_actions or {}
        self.member_actions = member_actions or {}
        self.custom_routes_fn = custom_routes_fn
        self.inherits = inherits
        self.member_name = member_name
def load_standard_extensions(ext_mgr, logger, path, package, ext_list=None):
    """Registers all standard API extensions.

    Walks the directory tree under path[0], loading every .py module as an
    extension class (CamelCased from the filename), and delegating to a
    package-level ``extension`` callable for subpackages that define one.
    Failures are logged and skipped so one bad extension cannot break
    loading of the rest.
    """

    # Walk through all the modules in our directory...
    our_dir = path[0]
    for dirpath, dirnames, filenames in os.walk(our_dir):
        # Compute the relative package name from the dirpath
        relpath = os.path.relpath(dirpath, our_dir)
        if relpath == '.':
            relpkg = ''
        else:
            relpkg = '.%s' % '.'.join(relpath.split(os.sep))

        # Now, consider each file in turn, only considering .py files
        for fname in filenames:
            root, ext = os.path.splitext(fname)

            # Skip __init__ and anything that's not .py
            if ext != '.py' or root == '__init__':
                continue

            # Try loading it
            classname = "%s%s" % (root[0].upper(), root[1:])
            classpath = ("%s%s.%s.%s" %
                         (package, relpkg, root, classname))

            if ext_list is not None and classname not in ext_list:
                logger.debug("Skipping extension: %s" % classpath)
                continue

            try:
                ext_mgr.load_extension(classpath)
            except Exception as exc:
                logger.warn(_('Failed to load extension %(classpath)s: '
                              '%(exc)s'),
                            {'classpath': classpath, 'exc': exc})

        # Now, let's consider any subdirectories we may have...
        subdirs = []
        for dname in dirnames:
            # Skip it if it does not have __init__.py
            if not os.path.exists(os.path.join(dirpath, dname, '__init__.py')):
                continue

            # If it has extension(), delegate...
            ext_name = "%s%s.%s.extension" % (package, relpkg, dname)
            try:
                ext = importutils.import_class(ext_name)
            except ImportError:
                # extension() doesn't exist on it, so we'll explore
                # the directory for ourselves
                subdirs.append(dname)
            else:
                try:
                    ext(ext_mgr)
                except Exception as exc:
                    logger.warn(_('Failed to load extension %(ext_name)s:'
                                  '%(exc)s'),
                                {'ext_name': ext_name, 'exc': exc})

        # Update the list of directories we'll explore...
        # (in-place so os.walk only descends into qualifying packages)
        dirnames[:] = subdirs
def core_authorizer(api_name, extension_name):
    """Build an authorizer enforcing '<api_name>:<extension_name>[:action]'."""

    def authorize(context, target=None, action=None):
        # Default target: the caller's own project/user.
        if target is None:
            target = {'project_id': context.project_id,
                      'user_id': context.user_id}
        act = '%s:%s' % (api_name, extension_name)
        if action is not None:
            act = '%s:%s' % (act, action)
        nova.policy.enforce(context, act, target)

    return authorize
def extension_authorizer(api_name, extension_name):
    """Authorizer scoped under the '<api_name>_extension' policy namespace."""
    namespaced = '%s_extension' % api_name
    return core_authorizer(namespaced, extension_name)
def soft_extension_authorizer(api_name, extension_name):
    """Like extension_authorizer, but returns True/False instead of raising."""
    hard_authorize = extension_authorizer(api_name, extension_name)

    def authorize(context, action=None):
        try:
            hard_authorize(context, action=action)
        except exception.NotAuthorized:
            return False
        return True

    return authorize
def check_compute_policy(context, action, target, scope='compute'):
    """Enforce the policy rule '<scope>:<action>' against target."""
    nova.policy.enforce(context, '%s:%s' % (scope, action), target)
@six.add_metaclass(abc.ABCMeta)
class V3APIExtensionBase(object):
    """Abstract base class for all V3 API extensions.

    All V3 API extensions must derive from this class and implement
    the abstract methods get_resources and get_controller_extensions
    even if they just return an empty list. The extensions must also
    define the abstract properties.
    """

    def __init__(self, extension_info):
        self.extension_info = extension_info

    @abc.abstractmethod
    def get_resources(self):
        """Return a list of resources extensions.

        The extensions should return a list of ResourceExtension
        objects. This list may be empty.
        """
        pass

    @abc.abstractmethod
    def get_controller_extensions(self):
        """Return a list of controller extensions.

        The extensions should return a list of ControllerExtension
        objects. This list may be empty.
        """
        pass

    # NOTE(review): abc.abstractproperty is deprecated on Python 3
    # (use @property + @abc.abstractmethod); kept for this py2-era codebase.
    @abc.abstractproperty
    def name(self):
        """Name of the extension."""
        pass

    @abc.abstractproperty
    def alias(self):
        """Alias for the extension."""
        pass

    @abc.abstractproperty
    def version(self):
        """Version of the extension."""
        pass
def expected_errors(errors):
    """Decorator for v3 API methods which specifies expected exceptions.

    Specify which exceptions may occur when an API method is called. If an
    unexpected exception occurs then return a 500 instead and ask the user
    of the API to file a bug report.

    :param errors: a single HTTP status code or an iterable of codes that
                   the wrapped method is allowed to raise.
    """
    def decorator(f):
        @functools.wraps(f)
        def wrapped(*args, **kwargs):
            try:
                return f(*args, **kwargs)
            except Exception as exc:
                if isinstance(exc, webob.exc.WSGIHTTPException):
                    # Normalize a single expected code to a tuple.
                    if isinstance(errors, int):
                        t_errors = (errors,)
                    else:
                        t_errors = errors
                    if exc.code in t_errors:
                        raise
                elif isinstance(exc, exception.PolicyNotAuthorized):
                    # Note(cyeoh): Special case to handle
                    # PolicyNotAuthorized exceptions so every
                    # extension method does not need to wrap authorize
                    # calls. ResourceExceptionHandler silently
                    # converts NotAuthorized to HTTPForbidden
                    raise
                elif isinstance(exc, exception.ValidationError):
                    # Note(oomichi): Handle a validation error, which
                    # happens due to invalid API parameters, as an
                    # expected error.
                    raise

                # Anything else is a bug: surface it as a 500 with a
                # request to report it.
                LOG.exception(_("Unexpected exception in API method"))
                msg = _('Unexpected API Error. Please report this at '
                        'http://bugs.launchpad.net/nova/ and attach the Nova '
                        'API log if possible.\n%s') % type(exc)
                raise webob.exc.HTTPInternalServerError(explanation=msg)

        return wrapped

    return decorator
| apache-2.0 |
mrgloom/mir_eval | tests/test_chord.py | 1 | 15820 | """
Unit tests for mir_eval.chord
"""
import mir_eval
import numpy as np
import nose.tools
import warnings
import glob
import json
# Absolute tolerance for floating-point score comparisons.
A_TOL = 1e-12

# Path to the fixture files
REF_GLOB = 'data/chord/ref*.lab'
EST_GLOB = 'data/chord/est*.lab'
SCORES_GLOB = 'data/chord/output*.json'
def __check_valid(function, parameters, result):
    '''Assert that calling function(*parameters) produces exactly result.'''
    observed = function(*parameters)
    assert observed == result
def __check_exception(function, parameters, exception):
    ''' Makes sure the provided function throws the provided
    exception given the provided input '''
    # Delegates to nose's assert_raises with the unpacked parameters.
    nose.tools.assert_raises(exception, function, *parameters)
def test_pitch_class_to_semitone():
    # Table-driven: pitch-class spelling -> absolute semitone number.
    valid_classes = ['Gbb', 'G', 'G#', 'Cb', 'B#']
    valid_semitones = [5, 7, 8, 11, 0]

    for pitch_class, semitone in zip(valid_classes, valid_semitones):
        yield (__check_valid, mir_eval.chord.pitch_class_to_semitone,
               (pitch_class,), semitone)

    # Malformed spellings must raise InvalidChordException.
    invalid_classes = ['Cab', '#C', 'bG']

    for pitch_class in invalid_classes:
        yield (__check_exception, mir_eval.chord.pitch_class_to_semitone,
               (pitch_class,), mir_eval.chord.InvalidChordException)
def test_scale_degree_to_semitone():
    # Table-driven: scale degree (with accidentals) -> semitone offset.
    valid_degrees = ['b7', '#3', '1', 'b1', '#7', 'bb5']
    valid_semitones = [10, 5, 0, -1, 12, 5]

    for scale_degree, semitone in zip(valid_degrees, valid_semitones):
        yield (__check_valid, mir_eval.chord.scale_degree_to_semitone,
               (scale_degree,), semitone)

    # Accidentals after the digit (or doubled digits) are invalid.
    invalid_degrees = ['7b', '4#', '77']

    for scale_degree in invalid_degrees:
        yield (__check_exception, mir_eval.chord.scale_degree_to_semitone,
               (scale_degree,), mir_eval.chord.InvalidChordException)
def test_validate_chord_label():
    valid_labels = ['C', 'Eb:min/5', 'A#:dim7', 'B:maj(*1,*5)/3', 'A#:sus4']

    # For valid labels, calling the function without an error = pass
    for chord_label in valid_labels:
        yield (mir_eval.chord.validate_chord_label, chord_label)

    # Doubled separators or unbalanced parentheses are invalid.
    invalid_labels = ["C::maj", "C//5", "C((4)", "C5))",
                      "C:maj(*3/3", "Cmaj*3/3)"]

    for chord_label in invalid_labels:
        yield (__check_exception, mir_eval.chord.validate_chord_label,
               (chord_label,), mir_eval.chord.InvalidChordException)
def test_split():
    # Label -> [root, quality, extension set, bass] decomposition.
    labels = ['C', 'B:maj(*1,*3)/5', 'Ab:min/b3', 'N', 'G:(3)']
    splits = [['C', 'maj', set(), '1'],
              ['B', 'maj', set(['*1', '*3']), '5'],
              ['Ab', 'min', set(), 'b3'],
              ['N', '', set(), ''],
              ['G', '', set(['3']), '1']]

    for chord_label, split_chord in zip(labels, splits):
        yield (__check_valid, mir_eval.chord.split,
               (chord_label,), split_chord)
def test_join():
    # Arguments are root, quality, extensions, bass
    splits = [('F#', '', None, ''),
              ('F#', 'hdim7', None, ''),
              ('F#', '', {'*b3', '4'}, ''),
              ('F#', '', None, 'b7'),
              ('F#', '', {'*b3', '4'}, 'b7'),
              ('F#', 'hdim7', None, 'b7'),
              ('F#', 'hdim7', {'*b3', '4'}, 'b7')]
    labels = ['F#', 'F#:hdim7', 'F#:(*b3,4)', 'F#/b7',
              'F#:(*b3,4)/b7', 'F#:hdim7/b7', 'F#:hdim7(*b3,4)/b7']

    # join is the inverse of split: components -> canonical label string.
    for split_chord, chord_label in zip(splits, labels):
        yield (__check_valid, mir_eval.chord.join,
               split_chord, chord_label)
def test_rotate_bitmaps_to_roots():
    # Root-relative interval bitmaps rotated into absolute pitch space.
    def __check_bitmaps(bitmaps, roots, expected_bitmaps):
        ''' Helper function for checking bitmaps_to_roots '''
        ans = mir_eval.chord.rotate_bitmaps_to_roots(bitmaps, roots)
        assert np.all(ans == expected_bitmaps)

    bitmaps = [
        [1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0],
        [1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0],
        [1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0]]
    roots = [0, 5, 11]
    expected_bitmaps = [
        [1, 0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0],
        [1, 0, 0, 0, 0, 1, 0, 0, 0, 1, 0, 0],
        [0, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 1]]

    # The function can operate on many bitmaps/roots at a time
    # but we should only test them one at a time.
    for bitmap, root, expected_bitmap in zip(bitmaps, roots, expected_bitmaps):
        yield (__check_bitmaps, [bitmap], [root], [expected_bitmap])
def test_encode():
    # encode(label) -> (root semitone, 12-d interval bitmap, bass semitone).
    def __check_encode(label, expected_root, expected_intervals,
                      expected_bass):
        ''' Helper function for checking encode '''
        root, intervals, bass = mir_eval.chord.encode(label)
        assert root == expected_root
        assert np.all(intervals == expected_intervals)
        assert bass == expected_bass

    labels = ['B:maj(*1,*3)/5', 'G:dim', 'C:(3)/3']
    expected_roots = [11, 7, 0]
    expected_intervals = [[0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0],
                          [1, 0, 0, 1, 0, 0, 1, 0, 0, 0, 0, 0],
                          [1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0]]
    expected_bass = [7, 0, 4]

    for label, e_root, e_interval, e_bass in zip(labels,
                                                 expected_roots,
                                                 expected_intervals,
                                                 expected_bass):
        yield (__check_encode, label, e_root, e_interval, e_bass)

    # Non-chord bass notes *must* be explicitly named as extensions when
    # STRICT_BASS_INTERVALS == True
    mir_eval.chord.STRICT_BASS_INTERVALS = True
    yield (__check_exception, mir_eval.chord.encode,
           ('G:dim(4)/6',), mir_eval.chord.InvalidChordException)

    # Otherwise, we can cut a little slack.
    mir_eval.chord.STRICT_BASS_INTERVALS = False
    yield (__check_encode, 'G:dim(4)/6', 7,
           [1, 0, 0, 1, 0, 1, 1, 0, 0, 1, 0, 0], 9)
def test_encode_many():

    def __check_encode_many(labels, expected_roots, expected_intervals,
                            expected_basses):
        ''' Does all of the logic for checking encode_many '''
        roots, intervals, basses = mir_eval.chord.encode_many(labels)
        assert np.all(roots == expected_roots)
        assert np.all(intervals == expected_intervals)
        assert np.all(basses == expected_basses)

    # Two duplicated chords, a no-chord, and two duplicated minor chords.
    maj_no_root_third = [0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0]
    silence = [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    minor = [1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0]

    labels = ['B:maj(*1,*3)/5', 'B:maj(*1,*3)/5', 'N', 'C:min', 'C:min']
    roots = [11, 11, -1, 0, 0]
    intervals = [maj_no_root_third, maj_no_root_third, silence, minor, minor]
    basses = [7, 7, -1, 0, 0]
    yield __check_encode_many, labels, roots, intervals, basses
def __check_one_metric(metric, ref_label, est_label, score):
    ''' Checks that a metric function produces score given ref_label and
    est_label '''
    # Metrics take parallel lists of labels; we check one pair at a time,
    # so no interval information is needed here.
    result = metric([ref_label], [est_label])
    assert result == score
def __check_not_comparable(metric, ref_label, est_label):
    ''' Checks that ref_label is not comparable to est_label by metric '''
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # Evaluating an incomparable pair should emit exactly one warning
        # and produce a weighted accuracy of 0.
        comparison = metric([ref_label], [est_label])
        score = mir_eval.chord.weighted_accuracy(comparison, np.array([1]))
        assert len(caught) == 1
        last = caught[-1]
        assert issubclass(last.category, UserWarning)
        assert str(last.message) == ("No reference chords were comparable "
                                     "to estimated chords, returning 0.")
        # And confirm that the metric is 0
        assert np.allclose(score, 0)
def test_thirds():
    # (reference label, estimated label, expected score)
    cases = [
        ('N', 'N', 1.0),
        ('C:maj', 'N', 0.0),
        ('C:maj', 'C:aug', 1.0),
        ('C:maj', 'C:dim', 0.0),
        ('C:min', 'C:dim', 1.0),
        ('C:maj', 'C:sus4', 1.0),
        ('G:min', 'G:sus2', 0.0),
        ('C:maj', 'G:maj', 0.0),
        ('C:min', 'C:hdim7', 1.0),
        ('C:min', 'C:min7', 1.0),
        ('C:maj', 'C:maj6', 1.0),
        ('F:maj', 'F:min6', 0.0),
        ('C:maj', 'C:minmaj7', 0.0),
        ('A:maj', 'A:7', 1.0),
        ('A:maj', 'A:9', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.thirds,
               ref_label, est_label, expected)
def test_thirds_inv():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:maj/5', 'C:sus4/5', 1.0),
        ('G:min', 'G:min/b3', 0.0),
        ('C:maj', 'C:maj/5', 0.0),
        ('C:min/b3', 'C:hdim7/b3', 1.0),
        ('C:min', 'C:dim', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.thirds_inv,
               ref_label, est_label, expected)
def test_triads():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:min', 'C:min7', 1.0),
        ('C:maj', 'C:7', 1.0),
        ('C:maj', 'C:aug', 0.0),
        ('C:min', 'C:dim', 0.0),
        ('C:maj', 'C:sus2', 0.0),
        ('C:maj', 'C:sus4', 0.0),
        ('G:min', 'G:minmaj7', 1.0),
        ('C:maj', 'G:maj', 0.0),
        ('C:min', 'C:hdim7', 0.0),
        ('C:min', 'C:min6', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.triads,
               ref_label, est_label, expected)
def test_triads_inv():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:maj/5', 'C:maj7/5', 1.0),
        ('G:min', 'G:min7/5', 0.0),
        ('C:maj', 'C:7/5', 0.0),
        ('C:min/b3', 'C:min6/b3', 1.0),
        ('C:min/b3', 'C:dim/b3', 0.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.triads_inv,
               ref_label, est_label, expected)
def test_tetrads():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:min', 'C:min7', 0.0),
        ('C:maj', 'C:maj6', 0.0),
        ('C:7', 'C:9', 1.0),
        ('C:maj7', 'C:maj7/5', 1.0),
        ('C:sus2', 'C:sus2/2', 1.0),
        ('C:7/3', 'C:11/b7', 1.0),
        ('G:min', 'G:sus2', 0.0),
        ('C:maj', 'G:maj', 0.0),
        ('C:min', 'C:hdim7', 0.0),
        ('C:min', 'C:minmaj7', 0.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.tetrads,
               ref_label, est_label, expected)
def test_tetrads_inv():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:maj7/5', 'C:maj7/3', 0.0),
        ('G:min', 'G:min/b3', 0.0),
        ('C:7/5', 'C:13/5', 1.0),
        ('C:min/b3', 'C:hdim7/b3', 0.0),
        ('C:min9', 'C:min7', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.tetrads_inv,
               ref_label, est_label, expected)
def test_majmin():
    # (reference label, estimated label, expected score)
    cases = [
        ('N', 'N', 1.0),
        ('C:maj', 'N', 0.0),
        ('C:maj', 'C:aug', 0.0),
        ('C:min', 'C:dim', 0.0),
        ('G:maj7', 'G', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.majmin,
               ref_label, est_label, expected)
    # An augmented reference is outside the maj/min vocabulary.
    yield (__check_not_comparable, mir_eval.chord.majmin, 'C:aug', 'C:maj')
def test_majmin_inv():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:maj/5', 'C:sus4/5', 0.0),
        ('G:min', 'G:min/b3', 0.0),
        ('C:maj/5', 'C:maj/5', 1.0),
        ('C:min7', 'C:min', 1.0),
        ('G:min/b3', 'G:min/b3', 1.0),
        ('C:maj7/5', 'C:maj/5', 1.0),
        ('C:7', 'C:maj', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.majmin_inv,
               ref_label, est_label, expected)
    # References outside the maj/min-with-chord-tone-bass vocabulary.
    non_comparable = [
        ('C:hdim7/b3', 'C:min/b3'),
        ('C:maj/4', 'C:maj/4'),
        ('C:maj/2', 'C:sus2/2'),
    ]
    for ref_label, est_label in non_comparable:
        yield (__check_not_comparable, mir_eval.chord.majmin_inv,
               ref_label, est_label)
def test_sevenths():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:min', 'C:min7', 0.0),
        ('C:maj', 'C:maj6', 0.0),
        ('C:7', 'C:9', 1.0),
        ('C:maj7', 'C:maj7/5', 1.0),
        ('C:7/3', 'C:11/b7', 1.0),
        ('G:min', 'G:sus2', 0.0),
        ('C:maj', 'G:maj', 0.0),
        ('C:7', 'C:maj7', 0.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.sevenths,
               ref_label, est_label, expected)
    # References outside the sevenths vocabulary.
    non_comparable = [('C:sus2', 'C:sus2/2'), ('C:hdim7', 'C:hdim7')]
    for ref_label, est_label in non_comparable:
        yield (__check_not_comparable, mir_eval.chord.sevenths,
               ref_label, est_label)
def test_sevenths_inv():
    # (reference label, estimated label, expected score)
    cases = [
        ('C:maj7/5', 'C:maj7/3', 0.0),
        ('G:min', 'G:min/b3', 0.0),
        ('C:7/5', 'C:13/5', 1.0),
        ('C:min7/b7', 'C:min7/b7', 1.0),
    ]
    for ref_label, est_label, expected in cases:
        yield (__check_one_metric, mir_eval.chord.sevenths_inv,
               ref_label, est_label, expected)
    yield (__check_not_comparable, mir_eval.chord.sevenths_inv, 'C:dim7/b3',
           'C:dim7/b3')
def test_weighted_accuracy():
    with warnings.catch_warnings(record=True) as caught:
        warnings.simplefilter('always')
        # All-zero weights should warn and return a score of 0.
        score = mir_eval.chord.weighted_accuracy(np.array([1, 0, 1]),
                                                 np.array([0, 0, 0]))
        assert len(caught) == 1
        assert issubclass(caught[-1].category, UserWarning)
        assert str(caught[-1].message) == 'No nonzero weights, returning 0'
        # And that the metric is 0
        assert np.allclose(score, 0)

    # len(comparisons) must equal len(weights)
    nose.tools.assert_raises(ValueError, mir_eval.chord.weighted_accuracy,
                             np.array([1, 0, 1]), np.array([1, 1]))
    # Weights must all be positive
    nose.tools.assert_raises(ValueError, mir_eval.chord.weighted_accuracy,
                             np.array([1, 0, 1]), np.array([-1, -1]))

    # Accuracy is 1 when every comparison is True, 0 when every one is False.
    weights = np.array([1, 1, 1])
    all_true = mir_eval.chord.weighted_accuracy(np.array([1, 1, 1]), weights)
    assert np.allclose(all_true, 1)
    all_false = mir_eval.chord.weighted_accuracy(np.array([0, 0, 0]), weights)
    assert np.allclose(all_false, 0)
def __check_score(sco_f, metric, score, expected_score):
    # sco_f and metric are unused here; they only surface in nose's
    # generated test name to make failure messages identifiable.
    assert np.allclose(score, expected_score, atol=A_TOL)
def test_beat_functions():
    # NOTE(review): despite the name, this exercises the chord metrics;
    # the name is kept because nose collects tests by it.
    # Load in all files in the same order.
    ref_files = sorted(glob.glob(REF_GLOB))
    est_files = sorted(glob.glob(EST_GLOB))
    sco_files = sorted(glob.glob(SCORES_GLOB))

    # Regression tests against the stored expected scores.
    for ref_f, est_f, sco_f in zip(ref_files, est_files, sco_files):
        with open(sco_f, 'r') as fhandle:
            expected_scores = json.load(fhandle)
        # Reference annotation and estimator output, as labeled intervals.
        ref_intervals, ref_labels = mir_eval.io.load_labeled_intervals(ref_f)
        est_intervals, est_labels = mir_eval.io.load_labeled_intervals(est_f)
        scores = mir_eval.chord.evaluate(ref_intervals, ref_labels,
                                         est_intervals, est_labels)
        # Passing sco_f/metric through makes nose's messages more useful.
        for metric, value in scores.items():
            yield (__check_score, sco_f, metric, value,
                   expected_scores[metric])
| mit |
dya2/python-for-android | python3-alpha/python3-src/Lib/threading.py | 45 | 36822 | """Thread module emulating a subset of Java's threading model."""
import sys as _sys
import _thread
from time import time as _time, sleep as _sleep
from traceback import format_exc as _format_exc
from collections import deque
from _weakrefset import WeakSet
# Note regarding PEP 8 compliant names
# This threading model was originally inspired by Java, and inherited
# the convention of camelCase function and method names from that
# language. Those original names are not in any imminent danger of
# being deprecated (even for Py3k), so this module provides them as an
# alias for the PEP 8 compliant names
# Note that using the new PEP 8 compliant names facilitates substitution
# with the multiprocessing module, which doesn't provide the old
# Java inspired names.

__all__ = ['active_count', 'Condition', 'current_thread', 'enumerate', 'Event',
           'Lock', 'RLock', 'Semaphore', 'BoundedSemaphore', 'Thread', 'Barrier',
           'Timer', 'setprofile', 'settrace', 'local', 'stack_size']

# Rename some stuff so "from threading import *" is safe
_start_new_thread = _thread.start_new_thread
_allocate_lock = _thread.allocate_lock
_get_ident = _thread.get_ident
ThreadError = _thread.error
try:
    # The C implementation of RLock is optional; fall back to pure Python
    # (_PyRLock, below) when it is missing.
    _CRLock = _thread.RLock
except AttributeError:
    _CRLock = None
TIMEOUT_MAX = _thread.TIMEOUT_MAX
# Drop the low-level module name so it is not re-exported by "import *".
del _thread
# Debug support (adapted from ihooks.py).

_VERBOSE = False

if __debug__:

    class _Verbose(object):
        """Mixin providing optional debug logging via _note()."""

        def __init__(self, verbose=None):
            if verbose is None:
                verbose = _VERBOSE
            self._verbose = verbose

        def _note(self, format, *args):
            # Write a formatted debug line, prefixed with the current
            # thread's name, to stderr — only when verbose is enabled.
            if self._verbose:
                format = format % args
                # Issue #4188: calling current_thread() can incur an infinite
                # recursion if it has to create a DummyThread on the fly.
                ident = _get_ident()
                try:
                    name = _active[ident].name
                except KeyError:
                    name = "<OS thread %d>" % ident
                format = "%s: %s\n" % (name, format)
                _sys.stderr.write(format)

else:
    # Disable this when using "python -O"
    class _Verbose(object):
        """No-op stand-in used when assertions are disabled (python -O)."""

        def __init__(self, verbose=None):
            pass

        def _note(self, *args):
            pass
# Support for profile and trace hooks

_profile_hook = None
_trace_hook = None

def setprofile(func):
    """Set a profile function for all threads started from this module.

    The func will be passed to sys.setprofile() for each thread, before
    its run() method is called.
    """
    global _profile_hook
    _profile_hook = func

def settrace(func):
    """Set a trace function for all threads started from this module.

    The func will be passed to sys.settrace() for each thread, before
    its run() method is called.
    """
    global _trace_hook
    _trace_hook = func
# Synchronization classes

# Lock is simply an alias for the C-level primitive lock allocator.
Lock = _allocate_lock
def RLock(verbose=None, *args, **kwargs):
    """Factory returning a reentrant lock.

    Uses the C implementation when it is available and no verbose
    debugging is requested; otherwise falls back to the pure-Python one.
    """
    if verbose is None:
        verbose = _VERBOSE
    want_python = (__debug__ and verbose) or _CRLock is None
    if want_python:
        return _PyRLock(verbose, *args, **kwargs)
    return _CRLock(*args, **kwargs)
class _RLock(_Verbose):
    """Pure-Python reentrant lock.

    The owning thread may acquire it multiple times; it becomes available
    to other threads only after a matching number of release() calls.
    """

    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self._block = _allocate_lock()  # the underlying non-reentrant lock
        self._owner = None              # ident of the owning thread, or None
        self._count = 0                 # recursion depth of the owner

    def __repr__(self):
        owner = self._owner
        try:
            owner = _active[owner].name
        except KeyError:
            pass
        return "<%s owner=%r count=%d>" % (
            self.__class__.__name__, owner, self._count)

    def acquire(self, blocking=True, timeout=-1):
        """Acquire the lock; a re-acquire by the owner just bumps the count.

        Returns 1 for a recursive acquire, otherwise the boolean result of
        acquiring the underlying lock.
        """
        me = _get_ident()
        if self._owner == me:
            self._count = self._count + 1
            if __debug__:
                self._note("%s.acquire(%s): recursive success", self, blocking)
            return 1
        rc = self._block.acquire(blocking, timeout)
        if rc:
            self._owner = me
            self._count = 1
            if __debug__:
                self._note("%s.acquire(%s): initial success", self, blocking)
        else:
            if __debug__:
                self._note("%s.acquire(%s): failure", self, blocking)
        return rc

    __enter__ = acquire

    def release(self):
        """Undo one acquire(); free the lock when the count reaches zero.

        Raises RuntimeError when called by a non-owning thread.
        """
        if self._owner != _get_ident():
            raise RuntimeError("cannot release un-acquired lock")
        self._count = count = self._count - 1
        if not count:
            self._owner = None
            self._block.release()
            if __debug__:
                self._note("%s.release(): final release", self)
        else:
            if __debug__:
                self._note("%s.release(): non-final release", self)

    def __exit__(self, t, v, tb):
        self.release()

    # Internal methods used by condition variables

    def _acquire_restore(self, state):
        # Re-acquire the lock and restore the saved (count, owner) state.
        self._block.acquire()
        self._count, self._owner = state
        if __debug__:
            self._note("%s._acquire_restore()", self)

    def _release_save(self):
        # Fully release the lock regardless of recursion depth and return
        # the state needed by _acquire_restore() to undo that.
        if __debug__:
            self._note("%s._release_save()", self)
        count = self._count
        self._count = 0
        owner = self._owner
        self._owner = None
        self._block.release()
        return (count, owner)

    def _is_owned(self):
        # True if the calling thread currently owns this lock.
        return self._owner == _get_ident()

_PyRLock = _RLock
def Condition(*args, **kwargs):
    """Factory returning a new condition variable object."""
    return _Condition(*args, **kwargs)
class _Condition(_Verbose):
    """Condition variable built on top of a (possibly shared) lock."""

    def __init__(self, lock=None, verbose=None):
        _Verbose.__init__(self, verbose)
        if lock is None:
            lock = RLock()
        self._lock = lock
        # Export the lock's acquire() and release() methods
        self.acquire = lock.acquire
        self.release = lock.release
        # If the lock defines _release_save() and/or _acquire_restore(),
        # these override the default implementations (which just call
        # release() and acquire() on the lock). Ditto for _is_owned().
        try:
            self._release_save = lock._release_save
        except AttributeError:
            pass
        try:
            self._acquire_restore = lock._acquire_restore
        except AttributeError:
            pass
        try:
            self._is_owned = lock._is_owned
        except AttributeError:
            pass
        self._waiters = []  # one private lock per waiting thread

    def __enter__(self):
        return self._lock.__enter__()

    def __exit__(self, *args):
        return self._lock.__exit__(*args)

    def __repr__(self):
        return "<Condition(%s, %d)>" % (self._lock, len(self._waiters))

    def _release_save(self):
        self._lock.release()           # No state to save

    def _acquire_restore(self, x):
        self._lock.acquire()           # Ignore saved state

    def _is_owned(self):
        # Return True if lock is owned by current_thread.
        # This method is called only if __lock doesn't have _is_owned().
        if self._lock.acquire(0):
            self._lock.release()
            return False
        else:
            return True

    def wait(self, timeout=None):
        """Release the lock, block until notified (or timeout), re-acquire.

        Must be called with the lock held. Returns True unless the given
        timeout expired.
        """
        if not self._is_owned():
            raise RuntimeError("cannot wait on un-acquired lock")
        # Each waiter blocks on its own freshly-acquired private lock;
        # notify() releases it to wake the thread.
        waiter = _allocate_lock()
        waiter.acquire()
        self._waiters.append(waiter)
        saved_state = self._release_save()
        try:    # restore state no matter what (e.g., KeyboardInterrupt)
            if timeout is None:
                waiter.acquire()
                gotit = True
                if __debug__:
                    self._note("%s.wait(): got it", self)
            else:
                if timeout > 0:
                    gotit = waiter.acquire(True, timeout)
                else:
                    gotit = waiter.acquire(False)
                if not gotit:
                    if __debug__:
                        self._note("%s.wait(%s): timed out", self, timeout)
                    try:
                        self._waiters.remove(waiter)
                    except ValueError:
                        pass
                else:
                    if __debug__:
                        self._note("%s.wait(%s): got it", self, timeout)
            return gotit
        finally:
            self._acquire_restore(saved_state)

    def wait_for(self, predicate, timeout=None):
        """Wait until predicate() is true, or until timeout elapses.

        Returns the last value of predicate().
        """
        endtime = None
        waittime = timeout
        result = predicate()
        while not result:
            if waittime is not None:
                if endtime is None:
                    endtime = _time() + waittime
                else:
                    waittime = endtime - _time()
                    if waittime <= 0:
                        if __debug__:
                            self._note("%s.wait_for(%r, %r): Timed out.",
                                       self, predicate, timeout)
                        break
                if __debug__:
                    self._note("%s.wait_for(%r, %r): Waiting with timeout=%s.",
                               self, predicate, timeout, waittime)
            self.wait(waittime)
            result = predicate()
        else:
            if __debug__:
                self._note("%s.wait_for(%r, %r): Success.",
                           self, predicate, timeout)
        return result

    def notify(self, n=1):
        """Wake up to n threads waiting on this condition, in FIFO order."""
        if not self._is_owned():
            raise RuntimeError("cannot notify on un-acquired lock")
        __waiters = self._waiters
        waiters = __waiters[:n]
        if not waiters:
            if __debug__:
                self._note("%s.notify(): no waiters", self)
            return
        self._note("%s.notify(): notifying %d waiter%s", self, n,
                    n!=1 and "s" or "")
        for waiter in waiters:
            waiter.release()
            try:
                __waiters.remove(waiter)
            except ValueError:
                pass

    def notify_all(self):
        """Wake every thread currently waiting on this condition."""
        self.notify(len(self._waiters))

    notifyAll = notify_all
def Semaphore(*args, **kwargs):
    """Factory returning a new semaphore object."""
    return _Semaphore(*args, **kwargs)
class _Semaphore(_Verbose):
    """Counting semaphore.

    After Tim Peters' semaphore class, but not quite the same (no maximum).
    """

    def __init__(self, value=1, verbose=None):
        if value < 0:
            raise ValueError("semaphore initial value must be >= 0")
        _Verbose.__init__(self, verbose)
        self._cond = Condition(Lock())
        self._value = value

    def acquire(self, blocking=True, timeout=None):
        """Decrement the counter, blocking while it is zero.

        Returns True on success, False if non-blocking or the timeout
        expired before the counter became positive.
        """
        if not blocking and timeout is not None:
            raise ValueError("can't specify timeout for non-blocking acquire")
        rc = False
        endtime = None
        self._cond.acquire()
        # The while/else: the else branch (the actual decrement) only runs
        # when the loop exits normally, i.e. when _value is nonzero.
        while self._value == 0:
            if not blocking:
                break
            if __debug__:
                self._note("%s.acquire(%s): blocked waiting, value=%s",
                           self, blocking, self._value)
            if timeout is not None:
                if endtime is None:
                    endtime = _time() + timeout
                else:
                    timeout = endtime - _time()
                    if timeout <= 0:
                        break
            self._cond.wait(timeout)
        else:
            self._value = self._value - 1
            if __debug__:
                self._note("%s.acquire: success, value=%s",
                           self, self._value)
            rc = True
        self._cond.release()
        return rc

    __enter__ = acquire

    def release(self):
        """Increment the counter and wake one waiting thread, if any."""
        self._cond.acquire()
        self._value = self._value + 1
        if __debug__:
            self._note("%s.release: success, value=%s",
                       self, self._value)
        self._cond.notify()
        self._cond.release()

    def __exit__(self, t, v, tb):
        self.release()
def BoundedSemaphore(*args, **kwargs):
    """Factory returning a new bounded semaphore object."""
    return _BoundedSemaphore(*args, **kwargs)
class _BoundedSemaphore(_Semaphore):
    """Semaphore that checks that # releases is <= # acquires.

    Raises ValueError from release() if the counter would exceed its
    initial value.
    """

    def __init__(self, value=1, verbose=None):
        _Semaphore.__init__(self, value, verbose)
        self._initial_value = value

    def release(self):
        """Increment the counter and wake one waiter, enforcing the bound.

        The over-release check, the increment and the notify all happen
        while holding the condition's lock: the previous implementation
        read self._value *before* acquiring the lock, so two concurrent
        release() calls could both pass the check and push the counter
        past its initial value.
        """
        with self._cond:
            if self._value >= self._initial_value:
                raise ValueError("Semaphore released too many times")
            self._value += 1
            self._cond.notify()
def Event(*args, **kwargs):
    """Factory returning a new event object."""
    return _Event(*args, **kwargs)
class _Event(_Verbose):
    """A simple flag that threads can set, clear and wait on.

    After Tim Peters' event class (without is_posted()).
    """

    def __init__(self, verbose=None):
        _Verbose.__init__(self, verbose)
        self._cond = Condition(Lock())
        self._flag = False

    def _reset_internal_locks(self):
        # private! called by Thread._reset_internal_locks by _after_fork()
        self._cond.__init__()

    def is_set(self):
        """Return True if and only if the internal flag is true."""
        return self._flag

    isSet = is_set

    def set(self):
        """Set the internal flag to true and wake all waiting threads."""
        with self._cond:
            self._flag = True
            self._cond.notify_all()

    def clear(self):
        """Reset the internal flag to false."""
        with self._cond:
            self._flag = False

    def wait(self, timeout=None):
        """Block until the flag is true or the timeout elapses.

        Returns the value of the flag on exit.
        """
        with self._cond:
            if not self._flag:
                self._cond.wait(timeout)
            return self._flag
# A barrier class. Inspired in part by the pthread_barrier_* api and
# the CyclicBarrier class from Java. See
# http://sourceware.org/pthreads-win32/manual/pthread_barrier_init.html and
# http://java.sun.com/j2se/1.5.0/docs/api/java/util/concurrent/
# CyclicBarrier.html
# for information.
# We maintain two main states, 'filling' and 'draining' enabling the barrier
# to be cyclic.  Threads are not allowed into it until it has fully drained
# since the previous cycle.  In addition, a 'resetting' state exists which is
# similar to 'draining' except that threads leave with a BrokenBarrierError,
# and a 'broken' state in which all threads get the exception.
class Barrier(_Verbose):
    """
    Barrier.  Useful for synchronizing a fixed number of threads
    at known synchronization points.  Threads block on 'wait()' and are
    released simultaneously once they have all made that call.
    """
    def __init__(self, parties, action=None, timeout=None, verbose=None):
        """
        Create a barrier, initialised to 'parties' threads.
        'action' is a callable which, when supplied, will be called
        by one of the threads after they have all entered the
        barrier and just prior to releasing them all.
        If a 'timeout' is provided, it is used as the default for
        all subsequent 'wait()' calls.
        """
        _Verbose.__init__(self, verbose)
        self._cond = Condition(Lock())
        self._action = action
        self._timeout = timeout
        self._parties = parties
        self._state = 0  # 0 filling, 1 draining, -1 resetting, -2 broken
        self._count = 0  # number of threads currently inside the barrier

    def wait(self, timeout=None):
        """
        Wait for the barrier.  When the specified number of threads have
        started waiting, they are all simultaneously awoken. If an 'action'
        was provided for the barrier, one of the threads will have executed
        that callback prior to returning.
        Returns an individual index number from 0 to 'parties-1'.
        """
        if timeout is None:
            timeout = self._timeout
        with self._cond:
            self._enter() # Block while the barrier drains.
            index = self._count
            self._count += 1
            try:
                if index + 1 == self._parties:
                    # We release the barrier
                    self._release()
                else:
                    # We wait until someone releases us
                    self._wait(timeout)
                return index
            finally:
                self._count -= 1
                # Wake up any threads waiting for barrier to drain.
                self._exit()

    # Block until the barrier is ready for us, or raise an exception
    # if it is broken.
    def _enter(self):
        while self._state in (-1, 1):
            # It is draining or resetting, wait until done
            self._cond.wait()
        #see if the barrier is in a broken state
        if self._state < 0:
            raise BrokenBarrierError
        assert self._state == 0

    # Optionally run the 'action' and release the threads waiting
    # in the barrier.
    def _release(self):
        try:
            if self._action:
                self._action()
            # enter draining state
            self._state = 1
            self._cond.notify_all()
        except:
            #an exception during the _action handler.  Break and reraise
            self._break()
            raise

    # Wait in the barrier until we are released.  Raise an exception
    # if the barrier is reset or broken.
    def _wait(self, timeout):
        if not self._cond.wait_for(lambda : self._state != 0, timeout):
            #timed out.  Break the barrier
            self._break()
            raise BrokenBarrierError
        if self._state < 0:
            raise BrokenBarrierError
        assert self._state == 1

    # If we are the last thread to exit the barrier, signal any threads
    # waiting for the barrier to drain.
    def _exit(self):
        if self._count == 0:
            if self._state in (-1, 1):
                #resetting or draining
                self._state = 0
                self._cond.notify_all()

    def reset(self):
        """
        Reset the barrier to the initial state.
        Any threads currently waiting will get the BrokenBarrier exception
        raised.
        """
        with self._cond:
            if self._count > 0:
                if self._state == 0:
                    #reset the barrier, waking up threads
                    self._state = -1
                elif self._state == -2:
                    #was broken, set it to reset state
                    #which clears when the last thread exits
                    self._state = -1
            else:
                self._state = 0
            self._cond.notify_all()

    def abort(self):
        """
        Place the barrier into a 'broken' state.
        Useful in case of error.  Any currently waiting threads and
        threads attempting to 'wait()' will have BrokenBarrierError
        raised.
        """
        with self._cond:
            self._break()

    def _break(self):
        # An internal error was detected.  The barrier is set to
        # a broken state all parties awakened.
        self._state = -2
        self._cond.notify_all()

    @property
    def parties(self):
        """
        Return the number of threads required to trip the barrier.
        """
        return self._parties

    @property
    def n_waiting(self):
        """
        Return the number of threads that are currently waiting at the barrier.
        """
        # We don't need synchronization here since this is an ephemeral result
        # anyway.  It returns the correct value in the steady state.
        if self._state == 0:
            return self._count
        return 0

    @property
    def broken(self):
        """
        Return True if the barrier is in a broken state
        """
        return self._state == -2
# exception raised by the Barrier class
class BrokenBarrierError(RuntimeError):
    """Raised when a Barrier is broken, reset, or times out while waiting."""
# Helper to generate new thread names
_counter = 0

def _newname(template="Thread-%d"):
    """Return a fresh default thread name, numbered by a module counter."""
    global _counter
    _counter += 1
    return template % _counter
# Active thread administration
_active_limbo_lock = _allocate_lock()
_active = {}    # maps thread id to Thread object
_limbo = {}     # threads that have been start()ed but not yet bootstrapped
# For debug and leak testing
_dangling = WeakSet()
# Main class for threads

class Thread(_Verbose):
    """A class that represents a thread of control.

    This class can be safely subclassed in a limited fashion: override
    __init__() and/or run().
    """

    __initialized = False
    # Need to store a reference to sys.exc_info for printing
    # out exceptions when a thread tries to use a global var. during interp.
    # shutdown and thus raises an exception about trying to perform some
    # operation on/with a NoneType
    __exc_info = _sys.exc_info
    # NOTE(review): the attribute above is name-mangled to _Thread__exc_info,
    # yet _bootstrap_inner below reads self._exc_info — verify this against
    # upstream; as written the fallback path would raise AttributeError.
    # Keep sys.exc_clear too to clear the exception just before
    # allowing .join() to return.
    #XXX __exc_clear = _sys.exc_clear

    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs=None, verbose=None):
        """Initialize the thread; 'target' is invoked by run() with
        'args'/'kwargs'. 'group' is reserved and must be None."""
        assert group is None, "group argument must be None for now"
        _Verbose.__init__(self, verbose)
        if kwargs is None:
            kwargs = {}
        self._target = target
        self._name = str(name or _newname())
        self._args = args
        self._kwargs = kwargs
        self._daemonic = self._set_daemon()
        self._ident = None
        self._started = Event()
        self._stopped = False
        self._block = Condition(Lock())
        self._initialized = True
        # sys.stderr is not stored in the class like
        # sys.exc_info since it can be changed between instances
        self._stderr = _sys.stderr
        _dangling.add(self)

    def _reset_internal_locks(self):
        # private!  Called by _after_fork() to reset our internal locks as
        # they may be in an invalid state leading to a deadlock or crash.
        if hasattr(self, '_block'):  # DummyThread deletes _block
            self._block.__init__()
        self._started._reset_internal_locks()

    def _set_daemon(self):
        # Overridden in _MainThread and _DummyThread
        return current_thread().daemon

    def __repr__(self):
        assert self._initialized, "Thread.__init__() was not called"
        status = "initial"
        if self._started.is_set():
            status = "started"
        if self._stopped:
            status = "stopped"
        if self._daemonic:
            status += " daemon"
        if self._ident is not None:
            status += " %s" % self._ident
        return "<%s(%s, %s)>" % (self.__class__.__name__, self._name, status)

    def start(self):
        """Start the thread's activity; arranges for run() to be invoked
        in a separate thread of control. May be called at most once."""
        if not self._initialized:
            raise RuntimeError("thread.__init__() not called")
        if self._started.is_set():
            raise RuntimeError("threads can only be started once")
        if __debug__:
            self._note("%s.start(): starting thread", self)
        with _active_limbo_lock:
            _limbo[self] = self
        try:
            _start_new_thread(self._bootstrap, ())
        except Exception:
            with _active_limbo_lock:
                del _limbo[self]
            raise
        self._started.wait()

    def run(self):
        """Method representing the thread's activity; invokes the target
        callable, if any. May be overridden in a subclass."""
        try:
            if self._target:
                self._target(*self._args, **self._kwargs)
        finally:
            # Avoid a refcycle if the thread is running a function with
            # an argument that has a member that points to the thread.
            del self._target, self._args, self._kwargs

    def _bootstrap(self):
        # Wrapper around the real bootstrap code that ignores
        # exceptions during interpreter cleanup.  Those typically
        # happen when a daemon thread wakes up at an unfortunate
        # moment, finds the world around it destroyed, and raises some
        # random exception *** while trying to report the exception in
        # _bootstrap_inner() below ***.  Those random exceptions
        # don't help anybody, and they confuse users, so we suppress
        # them.  We suppress them only when it appears that the world
        # indeed has already been destroyed, so that exceptions in
        # _bootstrap_inner() during normal business hours are properly
        # reported.  Also, we only suppress them for daemonic threads;
        # if a non-daemonic encounters this, something else is wrong.
        try:
            self._bootstrap_inner()
        except:
            if self._daemonic and _sys is None:
                return
            raise

    def _set_ident(self):
        self._ident = _get_ident()

    def _bootstrap_inner(self):
        # Runs in the new OS thread: registers the thread, installs
        # trace/profile hooks, invokes run(), and reports any exception.
        try:
            self._set_ident()
            self._started.set()
            with _active_limbo_lock:
                _active[self._ident] = self
                del _limbo[self]
            if __debug__:
                self._note("%s._bootstrap(): thread started", self)

            if _trace_hook:
                self._note("%s._bootstrap(): registering trace hook", self)
                _sys.settrace(_trace_hook)
            if _profile_hook:
                self._note("%s._bootstrap(): registering profile hook", self)
                _sys.setprofile(_profile_hook)

            try:
                self.run()
            except SystemExit:
                if __debug__:
                    self._note("%s._bootstrap(): raised SystemExit", self)
            except:
                if __debug__:
                    self._note("%s._bootstrap(): unhandled exception", self)
                # If sys.stderr is no more (most likely from interpreter
                # shutdown) use self._stderr.  Otherwise still use sys (as in
                # _sys) in case sys.stderr was redefined since the creation of
                # self.
                if _sys:
                    _sys.stderr.write("Exception in thread %s:\n%s\n" %
                                      (self.name, _format_exc()))
                else:
                    # Do the best job possible w/o a huge amt. of code to
                    # approximate a traceback (code ideas from
                    # Lib/traceback.py)
                    exc_type, exc_value, exc_tb = self._exc_info()
                    try:
                        print((
                            "Exception in thread " + self.name +
                            " (most likely raised during interpreter shutdown):"), file=self._stderr)
                        print((
                            "Traceback (most recent call last):"), file=self._stderr)
                        while exc_tb:
                            print((
                                '  File "%s", line %s, in %s' %
                                (exc_tb.tb_frame.f_code.co_filename,
                                    exc_tb.tb_lineno,
                                    exc_tb.tb_frame.f_code.co_name)), file=self._stderr)
                            exc_tb = exc_tb.tb_next
                        print(("%s: %s" % (exc_type, exc_value)), file=self._stderr)
                    # Make sure that exc_tb gets deleted since it is a memory
                    # hog; deleting everything else is just for thoroughness
                    finally:
                        del exc_type, exc_value, exc_tb
            else:
                if __debug__:
                    self._note("%s._bootstrap(): normal return", self)
            finally:
                # Prevent a race in
                # test_threading.test_no_refcycle_through_target when
                # the exception keeps the target alive past when we
                # assert that it's dead.
                #XXX self.__exc_clear()
                pass
        finally:
            with _active_limbo_lock:
                self._stop()
                try:
                    # We don't call self._delete() because it also
                    # grabs _active_limbo_lock.
                    del _active[_get_ident()]
                except:
                    pass

    def _stop(self):
        # Mark the thread stopped and wake any join()ers.
        self._block.acquire()
        self._stopped = True
        self._block.notify_all()
        self._block.release()

    def _delete(self):
        "Remove current thread from the dict of currently running threads."

        # Notes about running with _dummy_thread:
        #
        # Must take care to not raise an exception if _dummy_thread is being
        # used (and thus this module is being used as an instance of
        # dummy_threading).  _dummy_thread.get_ident() always returns -1 since
        # there is only one thread if _dummy_thread is being used.  Thus
        # len(_active) is always <= 1 here, and any Thread instance created
        # overwrites the (if any) thread currently registered in _active.
        #
        # An instance of _MainThread is always created by 'threading'.  This
        # gets overwritten the instant an instance of Thread is created; both
        # threads return -1 from _dummy_thread.get_ident() and thus have the
        # same key in the dict.  So when the _MainThread instance created by
        # 'threading' tries to clean itself up when atexit calls this method
        # it gets a KeyError if another Thread instance was created.
        #
        # This all means that KeyError from trying to delete something from
        # _active if dummy_threading is being used is a red herring.  But
        # since it isn't if dummy_threading is *not* being used then don't
        # hide the exception.

        try:
            with _active_limbo_lock:
                del _active[_get_ident()]
                # There must not be any python code between the previous line
                # and after the lock is released.  Otherwise a tracing function
                # could try to acquire the lock again in the same thread, (in
                # current_thread()), and would block.
        except KeyError:
            if 'dummy_threading' not in _sys.modules:
                raise

    def join(self, timeout=None):
        """Wait until the thread terminates or the (float) timeout elapses.

        Raises RuntimeError if called before start() or on the current
        thread (which would deadlock).
        """
        if not self._initialized:
            raise RuntimeError("Thread.__init__() not called")
        if not self._started.is_set():
            raise RuntimeError("cannot join thread before it is started")
        if self is current_thread():
            raise RuntimeError("cannot join current thread")

        if __debug__:
            if not self._stopped:
                self._note("%s.join(): waiting until thread stops", self)

        self._block.acquire()
        try:
            if timeout is None:
                while not self._stopped:
                    self._block.wait()
                if __debug__:
                    self._note("%s.join(): thread stopped", self)
            else:
                deadline = _time() + timeout
                while not self._stopped:
                    delay = deadline - _time()
                    if delay <= 0:
                        if __debug__:
                            self._note("%s.join(): timed out", self)
                        break
                    self._block.wait(delay)
                else:
                    if __debug__:
                        self._note("%s.join(): thread stopped", self)
        finally:
            self._block.release()

    @property
    def name(self):
        """A string used for identification purposes only."""
        assert self._initialized, "Thread.__init__() not called"
        return self._name

    @name.setter
    def name(self, name):
        assert self._initialized, "Thread.__init__() not called"
        self._name = str(name)

    @property
    def ident(self):
        """Thread identifier, or None if the thread has not been started."""
        assert self._initialized, "Thread.__init__() not called"
        return self._ident

    def is_alive(self):
        """Return True if the thread has been started and not yet stopped."""
        assert self._initialized, "Thread.__init__() not called"
        return self._started.is_set() and not self._stopped

    isAlive = is_alive

    @property
    def daemon(self):
        """A boolean indicating whether this is a daemon thread."""
        assert self._initialized, "Thread.__init__() not called"
        return self._daemonic

    @daemon.setter
    def daemon(self, daemonic):
        if not self._initialized:
            raise RuntimeError("Thread.__init__() not called")
        if self._started.is_set():
            raise RuntimeError("cannot set daemon status of active thread");
        self._daemonic = daemonic

    # Old camelCase API, kept for backward compatibility.

    def isDaemon(self):
        return self.daemon

    def setDaemon(self, daemonic):
        self.daemon = daemonic

    def getName(self):
        return self.name

    def setName(self, name):
        self.name = name
# The timer class was contributed by Itamar Shtull-Trauring
def Timer(*args, **kwargs):
    """Factory for _Timer, kept as a function for API compatibility.

    Arguments are forwarded unchanged to _Timer(interval, function,
    args=..., kwargs=...).
    """
    return _Timer(*args, **kwargs)
class _Timer(Thread):
    """Call a function after a specified number of seconds:

        t = Timer(30.0, f, args=[], kwargs={})
        t.start()
        t.cancel()  # stop the timer's action if it's still waiting
    """

    def __init__(self, interval, function, args=None, kwargs=None):
        """interval: seconds to wait; function: callable to invoke;
        args/kwargs: positional and keyword arguments for the call.

        BUGFIX: the original used mutable defaults (``args=[]``,
        ``kwargs={}``), so every timer created without explicit arguments
        shared one list and one dict; mutating ``t.args`` on one timer
        leaked into all others.  ``None`` sentinels give each instance a
        fresh container (same fix CPython applied in issue #17435).
        """
        Thread.__init__(self)
        self.interval = interval
        self.function = function
        self.args = args if args is not None else []
        self.kwargs = kwargs if kwargs is not None else {}
        self.finished = Event()

    def cancel(self):
        """Stop the timer if it hasn't finished yet."""
        self.finished.set()

    def run(self):
        # Wait for the interval; if cancel() fired meanwhile the flag is
        # already set and the function is skipped.
        self.finished.wait(self.interval)
        if not self.finished.is_set():
            self.function(*self.args, **self.kwargs)
        self.finished.set()
# Special thread class to represent the main thread
# This is garbage collected through an exit handler
class _MainThread(Thread):
    """Thread object representing the interpreter's initial thread.

    A single instance is created at module import time; its _exitfunc is
    installed as threading._shutdown and runs at interpreter exit.
    """

    def __init__(self):
        Thread.__init__(self, name="MainThread")
        # The main thread is already running, so mark it started and
        # register it in the global _active map under its real ident.
        self._started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[self._ident] = self

    def _set_daemon(self):
        # The main thread is never daemonic.
        return False

    def _exitfunc(self):
        # Interpreter shutdown: mark the main thread stopped, then join
        # every remaining non-daemon thread before tearing down.
        self._stop()
        t = _pickSomeNonDaemonThread()
        if t:
            if __debug__:
                self._note("%s: waiting for other threads", self)
            # Re-pick after each join: new non-daemon threads may have
            # been spawned while we were waiting.
            while t:
                t.join()
                t = _pickSomeNonDaemonThread()
        if __debug__:
            self._note("%s: exiting", self)
        self._delete()
def _pickSomeNonDaemonThread():
    # Return an arbitrary live non-daemon thread, or None when none remain.
    # (``enumerate`` here is this module's thread-listing function.)
    candidates = (t for t in enumerate() if not t.daemon and t.is_alive())
    return next(candidates, None)
# Dummy thread class to represent threads not started here.
# These aren't garbage collected when they die, nor can they be waited for.
# If they invoke anything in threading.py that calls current_thread(), they
# leave an entry in the _active dict forever after.
# Their purpose is to return *something* from current_thread().
# They are marked as daemon threads so we won't wait for them
# when we exit (conform previous semantics).
class _DummyThread(Thread):
    """Placeholder Thread for threads not started through this module.

    Instances are created lazily by current_thread() for foreign threads.
    They are daemonic, immortal (their _active entry is never removed)
    and cannot be joined.
    """

    def __init__(self):
        Thread.__init__(self, name=_newname("Dummy-%d"))
        # Thread._block consumes an OS-level locking primitive, which
        # can never be used by a _DummyThread.  Since a _DummyThread
        # instance is immortal, that's bad, so release this resource.
        del self._block
        # The foreign thread is already running: mark it started and
        # register it under its real ident.
        self._started.set()
        self._set_ident()
        with _active_limbo_lock:
            _active[self._ident] = self

    def _set_daemon(self):
        # Daemonic so interpreter shutdown does not wait for it.
        return True

    def join(self, timeout=None):
        # A dummy thread has no run() to wait for.
        assert False, "cannot join a dummy thread"
# Global API functions
def current_thread():
    """Return the Thread object for the caller's thread of control.

    Threads that were not created through this module get a fresh
    _DummyThread registered on first lookup.
    """
    thread = _active.get(_get_ident())
    if thread is not None:
        return thread
    return _DummyThread()

# Deprecated camelCase alias kept for backward compatibility.
currentThread = current_thread
def active_count():
    """Return the number of Thread objects currently alive (started
    threads plus those still in limbo between start() and run())."""
    with _active_limbo_lock:
        total = len(_active)
        total += len(_limbo)
        return total

# Deprecated camelCase alias kept for backward compatibility.
activeCount = active_count
def _enumerate():
    # Same as enumerate(), but without taking the registry lock.
    # Internal use only (e.g. from contexts that already hold the lock).
    threads = list(_active.values())
    threads.extend(_limbo.values())
    return threads
def enumerate():
    """Return a list of all live Thread objects (active + limbo),
    taken as a consistent snapshot under the registry lock."""
    with _active_limbo_lock:
        threads = list(_active.values())
        threads.extend(_limbo.values())
        return threads
from _thread import stack_size
# Create the main thread object,
# and make it available for the interpreter
# (Py_Main) as threading._shutdown.
_shutdown = _MainThread()._exitfunc
# get thread-local implementation, either from the thread
# module, or from the python fallback
try:
    from _thread import _local as local
except ImportError:
    # Pure-Python fallback used when the C implementation is unavailable.
    from _threading_local import local
def _after_fork():
    """Reset module state in the child process after os.fork().

    Only the forking thread survives a fork; every other registered
    thread object refers to a thread that no longer exists in the child.
    """
    # This function is called by Python/ceval.c:PyEval_ReInitThreads which
    # is called from PyOS_AfterFork. Here we cleanup threading module state
    # that should not exist after a fork.
    # Reset _active_limbo_lock, in case we forked while the lock was held
    # by another (non-forked) thread. http://bugs.python.org/issue874900
    global _active_limbo_lock
    _active_limbo_lock = _allocate_lock()
    # fork() only copied the current thread; clear references to others.
    new_active = {}
    current = current_thread()
    with _active_limbo_lock:
        for thread in _active.values():
            if thread is current:
                # There is only one active thread. We reset the ident to
                # its new value since it can have changed.
                ident = _get_ident()
                thread._ident = ident
                # Any condition variables hanging off of the active thread may
                # be in an invalid state, so we reinitialize them.
                thread._reset_internal_locks()
                new_active[ident] = thread
            else:
                # All the others are already stopped.
                # We don't call _Thread__stop() because it tries to acquire
                # thread._Thread__block which could also have been held while
                # we forked.
                thread._stopped = True
        _limbo.clear()
        _active.clear()
        _active.update(new_active)
        # Exactly the surviving thread must remain registered.
        assert len(_active) == 1
| apache-2.0 |
minhphung171093/GreenERP_V8 | openerp/addons/website_hr_recruitment/controllers/main.py | 170 | 5916 | # -*- coding: utf-8 -*-
import base64
from openerp import SUPERUSER_ID
from openerp import http
from openerp.tools.translate import _
from openerp.http import request
from openerp.addons.website.models.website import slug
class website_hr_recruitment(http.Controller):
    """Frontend controllers for the public "Jobs" pages of the website."""

    @http.route([
        '/jobs',
        '/jobs/country/<model("res.country"):country>',
        '/jobs/department/<model("hr.department"):department>',
        '/jobs/country/<model("res.country"):country>/department/<model("hr.department"):department>',
        '/jobs/office/<int:office_id>',
        '/jobs/country/<model("res.country"):country>/office/<int:office_id>',
        '/jobs/department/<model("hr.department"):department>/office/<int:office_id>',
        '/jobs/country/<model("res.country"):country>/department/<model("hr.department"):department>/office/<int:office_id>',
    ], type='http', auth="public", website=True)
    def jobs(self, country=None, department=None, office_id=None, **kwargs):
        """Render the job list, optionally filtered by country, department
        and/or office (office_id is a res.partner id)."""
        env = request.env(context=dict(request.env.context, show_address=True, no_tag_br=True))
        Country = env['res.country']
        Jobs = env['hr.job']
        # List jobs available to current UID
        job_ids = Jobs.search([], order="website_published desc,no_of_recruitment desc").ids
        # Browse jobs as superuser, because address is restricted
        jobs = Jobs.sudo().browse(job_ids)
        # Deduce departments and offices of those jobs
        departments = set(j.department_id for j in jobs if j.department_id)
        offices = set(j.address_id for j in jobs if j.address_id)
        countries = set(o.country_id for o in offices if o.country_id)
        # Default search by user country, guessed from GeoIP data.
        if not (country or department or office_id or kwargs.get('all_countries')):
            # BUGFIX: the session has no 'geoip' entry when GeoIP
            # resolution did not run (e.g. no GeoIP database installed);
            # the original request.session['geoip'] raised KeyError and
            # broke the whole /jobs page.  Fall back gracefully instead.
            country_code = request.session.get('geoip', {}).get('country_code')
            if country_code:
                countries_ = Country.search([('code', '=', country_code)])
                country = countries_[0] if countries_ else None
                if not any(j for j in jobs if j.address_id and j.address_id.country_id == country):
                    country = False
        # Filter the matching one
        if country and not kwargs.get('all_countries'):
            # NOTE(review): with the new-style API j.address_id is an empty
            # recordset when unset, never None, so the "is None" branch is
            # always False and jobs without an address are filtered out.
            # Kept as-is to preserve existing behavior; confirm intent.
            jobs = (j for j in jobs if j.address_id is None or j.address_id.country_id and j.address_id.country_id.id == country.id)
        if department:
            jobs = (j for j in jobs if j.department_id and j.department_id.id == department.id)
        if office_id:
            jobs = (j for j in jobs if j.address_id and j.address_id.id == office_id)
        # Render page
        return request.website.render("website_hr_recruitment.index", {
            'jobs': jobs,
            'countries': countries,
            'departments': departments,
            'offices': offices,
            'country_id': country,
            'department_id': department,
            'office_id': office_id,
        })

    @http.route('/jobs/add', type='http', auth="user", website=True)
    def jobs_add(self, **kwargs):
        """Create an empty job offer and open its page in edit mode."""
        job = request.env['hr.job'].create({
            'name': _('New Job Offer'),
        })
        return request.redirect("/jobs/detail/%s?enable_editor=1" % slug(job))

    @http.route('/jobs/detail/<model("hr.job"):job>', type='http', auth="public", website=True)
    def jobs_detail(self, job, **kwargs):
        """Public detail page of a single job offer."""
        return request.render("website_hr_recruitment.detail", {
            'job': job,
            'main_object': job,
        })

    @http.route('/jobs/apply/<model("hr.job"):job>', type='http', auth="public", website=True)
    def jobs_apply(self, job):
        """Application form; re-populated with the previous input when the
        last submission had validation errors (stored in the session)."""
        error = {}
        default = {}
        if 'website_hr_recruitment_error' in request.session:
            error = request.session.pop('website_hr_recruitment_error')
            default = request.session.pop('website_hr_recruitment_default')
        return request.render("website_hr_recruitment.apply", {
            'job': job,
            'error': error,
            'default': default,
        })

    @http.route('/jobs/thankyou', methods=['POST'], type='http', auth="public", website=True)
    def jobs_thankyou(self, **post):
        """Handle the application form POST: validate required fields,
        create the hr.applicant (with CV attachment) as superuser and
        render the thank-you page."""
        error = {}
        for field_name in ["partner_name", "phone", "email_from"]:
            if not post.get(field_name):
                error[field_name] = 'missing'
        if error:
            request.session['website_hr_recruitment_error'] = error
            ufile = post.pop('ufile')
            if ufile:
                # File inputs cannot be round-tripped through the session;
                # flag them so the template asks the user to pick it again.
                error['ufile'] = 'reset'
            request.session['website_hr_recruitment_default'] = post
            return request.redirect('/jobs/apply/%s' % post.get("job_id"))
        # public user can't create applicants (duh)
        env = request.env(user=SUPERUSER_ID)
        value = {
            'source_id' : env.ref('hr_recruitment.source_website_company').id,
            'name': '%s\'s Application' % post.get('partner_name'),
        }
        for f in ['email_from', 'partner_name', 'description']:
            value[f] = post.get(f)
        for f in ['department_id', 'job_id']:
            value[f] = int(post.get(f) or 0)
        # Retro-compatibility for saas-3. "phone" field should be replace by "partner_phone" in the template in trunk.
        value['partner_phone'] = post.pop('phone', False)
        applicant_id = env['hr.applicant'].create(value).id
        if post['ufile']:
            attachment_value = {
                'name': post['ufile'].filename,
                'res_name': value['partner_name'],
                'res_model': 'hr.applicant',
                'res_id': applicant_id,
                'datas': base64.encodestring(post['ufile'].read()),
                'datas_fname': post['ufile'].filename,
            }
            env['ir.attachment'].create(attachment_value)
        return request.render("website_hr_recruitment.thankyou", {})
# vim :et:
| agpl-3.0 |
Azure/azure-sdk-for-python | sdk/communication/azure-communication-chat/azure/communication/chat/_generated/models/_models_py3.py | 1 | 32164 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import datetime
from typing import Dict, List, Optional, Union
from azure.core.exceptions import HttpResponseError
import msrest.serialization
from ._azure_communication_chat_service_enums import *
class AddChatParticipantsRequest(msrest.serialization.Model):
    """Request body naming the participants to add to a chat thread.

    All required parameters must be populated in order to send to Azure.

    :param participants: Required. Participants to add to a chat thread.
    :type participants: list[~azure.communication.chat.models.ChatParticipant]
    """

    _validation = {
        'participants': {'required': True},
    }

    _attribute_map = {
        'participants': {'key': 'participants', 'type': '[ChatParticipant]'},
    }

    def __init__(self, *, participants: List["ChatParticipant"], **kwargs):
        super().__init__(**kwargs)
        self.participants = participants
class AddChatParticipantsResult(msrest.serialization.Model):
    """Outcome of an add-participants operation.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :ivar invalid_participants: Participants that could not be added.
    :vartype invalid_participants: list[~azure.communication.chat.models.ChatError]
    """

    _validation = {
        'invalid_participants': {'readonly': True},
    }

    _attribute_map = {
        'invalid_participants': {'key': 'invalidParticipants', 'type': '[ChatError]'},
    }

    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        # Read-only; filled in by the service on responses.
        self.invalid_participants = None
class ChatError(msrest.serialization.Model):
    """Error payload used throughout the Communication Services chat API.

    Variables are only populated by the server, and will be ignored when
    sending a request.  All required parameters must be populated in order
    to send to Azure.

    :param code: Required. Machine-readable error code.
    :type code: str
    :param message: Required. Human-readable error message.
    :type message: str
    :ivar target: Target of the error.
    :vartype target: str
    :ivar details: Nested errors that led to this one.
    :vartype details: list[~azure.communication.chat.models.ChatError]
    :ivar inner_error: Wrapped inner error, if any.
    :vartype inner_error: ~azure.communication.chat.models.ChatError
    """

    _validation = {
        'code': {'required': True},
        'message': {'required': True},
        'target': {'readonly': True},
        'details': {'readonly': True},
        'inner_error': {'readonly': True},
    }

    _attribute_map = {
        'code': {'key': 'code', 'type': 'str'},
        'message': {'key': 'message', 'type': 'str'},
        'target': {'key': 'target', 'type': 'str'},
        'details': {'key': 'details', 'type': '[ChatError]'},
        'inner_error': {'key': 'innererror', 'type': 'ChatError'},
    }

    def __init__(self, *, code: str, message: str, **kwargs):
        super().__init__(**kwargs)
        self.code = code
        self.message = message
        # Read-only fields; filled in by the service on responses.
        self.target = None
        self.details = None
        self.inner_error = None
class ChatMessage(msrest.serialization.Model):
    """A single message in a chat thread.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. Server-generated id of the chat message.
    :type id: str
    :param type: Required. One of "text", "html", "topicUpdated",
     "participantAdded", "participantRemoved".
    :type type: str or ~azure.communication.chat.models.ChatMessageType
    :param sequence_id: Required. Position of the message in the conversation.
    :type sequence_id: str
    :param version: Required. Version of the chat message.
    :type version: str
    :param content: Message payload.
    :type content: ~azure.communication.chat.models.ChatMessageContent
    :param sender_display_name: Sender display name (used to populate push
     notifications).
    :type sender_display_name: str
    :param created_on: Required. Arrival timestamp, RFC3339
     (``yyyy-MM-ddTHH:mm:ssZ``).
    :type created_on: ~datetime.datetime
    :param sender_communication_identifier: Sender identity; a union model
     where apart from rawId at most one further property may be set.
    :type sender_communication_identifier:
     ~azure.communication.chat.models.CommunicationIdentifierModel
    :param deleted_on: Deletion timestamp (RFC3339), if deleted.
    :type deleted_on: ~datetime.datetime
    :param edited_on: Last-edit timestamp (RFC3339), if edited.
    :type edited_on: ~datetime.datetime
    :param metadata: Arbitrary string metadata attached to the message.
    :type metadata: dict[str, str]
    """

    _validation = {
        'id': {'required': True},
        'type': {'required': True},
        'sequence_id': {'required': True},
        'version': {'required': True},
        'created_on': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'sequence_id': {'key': 'sequenceId', 'type': 'str'},
        'version': {'key': 'version', 'type': 'str'},
        'content': {'key': 'content', 'type': 'ChatMessageContent'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'deleted_on': {'key': 'deletedOn', 'type': 'iso-8601'},
        'edited_on': {'key': 'editedOn', 'type': 'iso-8601'},
        'metadata': {'key': 'metadata', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        id: str,
        type: Union[str, "ChatMessageType"],
        sequence_id: str,
        version: str,
        created_on: datetime.datetime,
        content: Optional["ChatMessageContent"] = None,
        sender_display_name: Optional[str] = None,
        sender_communication_identifier: Optional["CommunicationIdentifierModel"] = None,
        deleted_on: Optional[datetime.datetime] = None,
        edited_on: Optional[datetime.datetime] = None,
        metadata: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.id = id
        self.type = type
        self.sequence_id = sequence_id
        self.version = version
        self.created_on = created_on
        self.content = content
        self.sender_display_name = sender_display_name
        self.sender_communication_identifier = sender_communication_identifier
        self.deleted_on = deleted_on
        self.edited_on = edited_on
        self.metadata = metadata
class ChatMessageContent(msrest.serialization.Model):
    """Payload of a chat message; which field is set depends on the
    message type.

    :param message: Body for messages of type text or html.
    :type message: str
    :param topic: New topic for messages of type topicUpdated.
    :type topic: str
    :param participants: Affected participants for messages of type
     participantAdded or participantRemoved.
    :type participants: list[~azure.communication.chat.models.ChatParticipant]
    :param initiator_communication_identifier: Identity of the initiator;
     a union model where apart from rawId at most one further property may
     be set.
    :type initiator_communication_identifier:
     ~azure.communication.chat.models.CommunicationIdentifierModel
    """

    _attribute_map = {
        'message': {'key': 'message', 'type': 'str'},
        'topic': {'key': 'topic', 'type': 'str'},
        'participants': {'key': 'participants', 'type': '[ChatParticipant]'},
        'initiator_communication_identifier': {'key': 'initiatorCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
    }

    def __init__(
        self,
        *,
        message: Optional[str] = None,
        topic: Optional[str] = None,
        participants: Optional[List["ChatParticipant"]] = None,
        initiator_communication_identifier: Optional["CommunicationIdentifierModel"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.message = message
        self.topic = topic
        self.participants = participants
        self.initiator_communication_identifier = initiator_communication_identifier
class ChatMessageReadReceipt(msrest.serialization.Model):
    """Records the time a chat message was read by a recipient.

    All required parameters must be populated in order to send to Azure.

    :param sender_communication_identifier: Required. Identity of the
     reader; a union model where apart from rawId at most one further
     property may be set.
    :type sender_communication_identifier:
     ~azure.communication.chat.models.CommunicationIdentifierModel
    :param chat_message_id: Required. Server-generated id of the message
     that was read.
    :type chat_message_id: str
    :param read_on: Required. Read timestamp, RFC3339
     (``yyyy-MM-ddTHH:mm:ssZ``).
    :type read_on: ~datetime.datetime
    """

    _validation = {
        'sender_communication_identifier': {'required': True},
        'chat_message_id': {'required': True},
        'read_on': {'required': True},
    }

    _attribute_map = {
        'sender_communication_identifier': {'key': 'senderCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'chat_message_id': {'key': 'chatMessageId', 'type': 'str'},
        'read_on': {'key': 'readOn', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        sender_communication_identifier: "CommunicationIdentifierModel",
        chat_message_id: str,
        read_on: datetime.datetime,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.sender_communication_identifier = sender_communication_identifier
        self.chat_message_id = chat_message_id
        self.read_on = read_on
class ChatMessageReadReceiptsCollection(msrest.serialization.Model):
    """One page of chat message read receipts.

    Variables are only populated by the server, and will be ignored when
    sending a request.  All required parameters must be populated in order
    to send to Azure.

    :param value: Required. Read receipts on this page.
    :type value: list[~azure.communication.chat.models.ChatMessageReadReceipt]
    :ivar next_link: Link to the next page, set when more receipts exist.
    :vartype next_link: str
    """

    _validation = {
        'value': {'required': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ChatMessageReadReceipt]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: List["ChatMessageReadReceipt"], **kwargs):
        super().__init__(**kwargs)
        self.value = value
        # Read-only; filled in by the service on responses.
        self.next_link = None
class ChatMessagesCollection(msrest.serialization.Model):
    """One page of messages from a chat thread.

    Variables are only populated by the server, and will be ignored when
    sending a request.  All required parameters must be populated in order
    to send to Azure.

    :param value: Required. Messages on this page.
    :type value: list[~azure.communication.chat.models.ChatMessage]
    :ivar next_link: Link to the next page, set when more messages exist.
    :vartype next_link: str
    """

    _validation = {
        'value': {'required': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ChatMessage]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: List["ChatMessage"], **kwargs):
        super().__init__(**kwargs)
        self.value = value
        # Read-only; filled in by the service on responses.
        self.next_link = None
class ChatParticipant(msrest.serialization.Model):
    """A member of a chat thread.

    All required parameters must be populated in order to send to Azure.

    :param communication_identifier: Required. Identity of the
     participant; a union model where apart from rawId at most one further
     property may be set.
    :type communication_identifier: ~azure.communication.chat.models.CommunicationIdentifierModel
    :param display_name: Display name of the participant.
    :type display_name: str
    :param share_history_time: Point in time from which thread history is
     shared with the participant, RFC3339 (``yyyy-MM-ddTHH:mm:ssZ``).
    :type share_history_time: ~datetime.datetime
    """

    _validation = {
        'communication_identifier': {'required': True},
    }

    _attribute_map = {
        'communication_identifier': {'key': 'communicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'display_name': {'key': 'displayName', 'type': 'str'},
        'share_history_time': {'key': 'shareHistoryTime', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        communication_identifier: "CommunicationIdentifierModel",
        display_name: Optional[str] = None,
        share_history_time: Optional[datetime.datetime] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.communication_identifier = communication_identifier
        self.display_name = display_name
        self.share_history_time = share_history_time
class ChatParticipantsCollection(msrest.serialization.Model):
    """One page of participants of a chat thread.

    Variables are only populated by the server, and will be ignored when
    sending a request.  All required parameters must be populated in order
    to send to Azure.

    :param value: Required. Participants on this page.
    :type value: list[~azure.communication.chat.models.ChatParticipant]
    :ivar next_link: Link to the next page, set when more participants exist.
    :vartype next_link: str
    """

    _validation = {
        'value': {'required': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ChatParticipant]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: List["ChatParticipant"], **kwargs):
        super().__init__(**kwargs)
        self.value = value
        # Read-only; filled in by the service on responses.
        self.next_link = None
class ChatThreadItem(msrest.serialization.Model):
    """Summary entry for a chat thread in a listing.

    Variables are only populated by the server, and will be ignored when
    sending a request.  All required parameters must be populated in order
    to send to Azure.

    :param id: Required. Chat thread id.
    :type id: str
    :param topic: Required. Chat thread topic.
    :type topic: str
    :param deleted_on: Deletion timestamp, RFC3339
     (``yyyy-MM-ddTHH:mm:ssZ``), if the thread was deleted.
    :type deleted_on: ~datetime.datetime
    :ivar last_message_received_on: Arrival time of the latest message,
     RFC3339 (``yyyy-MM-ddTHH:mm:ssZ``).
    :vartype last_message_received_on: ~datetime.datetime
    """

    _validation = {
        'id': {'required': True},
        'topic': {'required': True},
        'last_message_received_on': {'readonly': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'topic': {'key': 'topic', 'type': 'str'},
        'deleted_on': {'key': 'deletedOn', 'type': 'iso-8601'},
        'last_message_received_on': {'key': 'lastMessageReceivedOn', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        id: str,
        topic: str,
        deleted_on: Optional[datetime.datetime] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.id = id
        self.topic = topic
        self.deleted_on = deleted_on
        # Read-only; filled in by the service on responses.
        self.last_message_received_on = None
class ChatThreadProperties(msrest.serialization.Model):
    """Full properties of a chat thread.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. Chat thread id.
    :type id: str
    :param topic: Required. Chat thread topic.
    :type topic: str
    :param created_on: Required. Creation timestamp, RFC3339
     (``yyyy-MM-ddTHH:mm:ssZ``).
    :type created_on: ~datetime.datetime
    :param created_by_communication_identifier: Required. Identity of the
     creator; a union model where apart from rawId at most one further
     property may be set.
    :type created_by_communication_identifier:
     ~azure.communication.chat.models.CommunicationIdentifierModel
    :param deleted_on: Deletion timestamp, RFC3339
     (``yyyy-MM-ddTHH:mm:ssZ``), if the thread was deleted.
    :type deleted_on: ~datetime.datetime
    """

    _validation = {
        'id': {'required': True},
        'topic': {'required': True},
        'created_on': {'required': True},
        'created_by_communication_identifier': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'topic': {'key': 'topic', 'type': 'str'},
        'created_on': {'key': 'createdOn', 'type': 'iso-8601'},
        'created_by_communication_identifier': {'key': 'createdByCommunicationIdentifier', 'type': 'CommunicationIdentifierModel'},
        'deleted_on': {'key': 'deletedOn', 'type': 'iso-8601'},
    }

    def __init__(
        self,
        *,
        id: str,
        topic: str,
        created_on: datetime.datetime,
        created_by_communication_identifier: "CommunicationIdentifierModel",
        deleted_on: Optional[datetime.datetime] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.id = id
        self.topic = topic
        self.created_on = created_on
        self.created_by_communication_identifier = created_by_communication_identifier
        self.deleted_on = deleted_on
class ChatThreadsItemCollection(msrest.serialization.Model):
    """One page of chat thread summaries.

    Variables are only populated by the server, and will be ignored when
    sending a request.  All required parameters must be populated in order
    to send to Azure.

    :param value: Required. Thread summaries on this page.
    :type value: list[~azure.communication.chat.models.ChatThreadItem]
    :ivar next_link: Link to the next page, set when more threads exist.
    :vartype next_link: str
    """

    _validation = {
        'value': {'required': True},
        'next_link': {'readonly': True},
    }

    _attribute_map = {
        'value': {'key': 'value', 'type': '[ChatThreadItem]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value: List["ChatThreadItem"], **kwargs):
        super().__init__(**kwargs)
        self.value = value
        # Read-only; filled in by the service on responses.
        self.next_link = None
class CommunicationErrorResponse(msrest.serialization.Model):
    """Top-level error envelope returned by the service.

    All required parameters must be populated in order to send to Azure.

    :param error: Required. The Communication Services error.
    :type error: ~azure.communication.chat.models.ChatError
    """

    _validation = {
        'error': {'required': True},
    }

    _attribute_map = {
        'error': {'key': 'error', 'type': 'ChatError'},
    }

    def __init__(self, *, error: "ChatError", **kwargs):
        super().__init__(**kwargs)
        self.error = error
class CommunicationIdentifierModel(msrest.serialization.Model):
    """Identity of a chat participant (e.g. a phone number or an Azure
    communication user).

    This model must be interpreted as a union: apart from rawId, at most
    one further property may be set.

    :param raw_id: Raw id of the identifier. Optional in requests,
     required in responses.
    :type raw_id: str
    :param communication_user: The communication user.
    :type communication_user: ~azure.communication.chat.models.CommunicationUserIdentifierModel
    :param phone_number: The phone number.
    :type phone_number: ~azure.communication.chat.models.PhoneNumberIdentifierModel
    :param microsoft_teams_user: The Microsoft Teams user.
    :type microsoft_teams_user: ~azure.communication.chat.models.MicrosoftTeamsUserIdentifierModel
    """

    _attribute_map = {
        'raw_id': {'key': 'rawId', 'type': 'str'},
        'communication_user': {'key': 'communicationUser', 'type': 'CommunicationUserIdentifierModel'},
        'phone_number': {'key': 'phoneNumber', 'type': 'PhoneNumberIdentifierModel'},
        'microsoft_teams_user': {'key': 'microsoftTeamsUser', 'type': 'MicrosoftTeamsUserIdentifierModel'},
    }

    def __init__(
        self,
        *,
        raw_id: Optional[str] = None,
        communication_user: Optional["CommunicationUserIdentifierModel"] = None,
        phone_number: Optional["PhoneNumberIdentifierModel"] = None,
        microsoft_teams_user: Optional["MicrosoftTeamsUserIdentifierModel"] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.raw_id = raw_id
        self.communication_user = communication_user
        self.phone_number = phone_number
        self.microsoft_teams_user = microsoft_teams_user
class CommunicationUserIdentifierModel(msrest.serialization.Model):
    """A user created within an Azure Communication Services resource.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. The id of the communication user.
    :type id: str
    """

    _validation = {
        'id': {'required': True},
    }

    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
    }

    def __init__(self, *, id: str, **kwargs):
        super().__init__(**kwargs)
        self.id = id
class CreateChatThreadRequest(msrest.serialization.Model):
    """Request body for creating a chat thread.

    All required parameters must be populated in order to send to Azure.

    :param topic: Required. The chat thread topic.
    :type topic: str
    :param participants: Initial participants of the thread.
    :type participants: list[~azure.communication.chat.models.ChatParticipant]
    """

    _validation = {
        'topic': {'required': True},
    }

    _attribute_map = {
        'topic': {'key': 'topic', 'type': 'str'},
        'participants': {'key': 'participants', 'type': '[ChatParticipant]'},
    }

    def __init__(self, *, topic: str, participants: Optional[List["ChatParticipant"]] = None, **kwargs):
        super().__init__(**kwargs)
        self.topic = topic
        self.participants = participants
class CreateChatThreadResult(msrest.serialization.Model):
    """Outcome of a create-chat-thread operation.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param chat_thread: The created chat thread.
    :type chat_thread: ~azure.communication.chat.models.ChatThreadProperties
    :ivar invalid_participants: Participants that could not be added.
    :vartype invalid_participants: list[~azure.communication.chat.models.ChatError]
    """

    _validation = {
        'invalid_participants': {'readonly': True},
    }

    _attribute_map = {
        'chat_thread': {'key': 'chatThread', 'type': 'ChatThreadProperties'},
        'invalid_participants': {'key': 'invalidParticipants', 'type': '[ChatError]'},
    }

    def __init__(self, *, chat_thread: Optional["ChatThreadProperties"] = None, **kwargs):
        super().__init__(**kwargs)
        self.chat_thread = chat_thread
        # Read-only; filled in by the service on responses.
        self.invalid_participants = None
class MicrosoftTeamsUserIdentifierModel(msrest.serialization.Model):
    """Identifier for a Microsoft Teams user.

    All required parameters must be populated in order to send to Azure.

    :param user_id: Required. The Id of the Microsoft Teams user. If not anonymous, this is the
     AAD object Id of the user.
    :type user_id: str
    :param is_anonymous: True if the Microsoft Teams user is anonymous. By default false if
     missing.
    :type is_anonymous: bool
    :param cloud: The cloud that the Microsoft Teams user belongs to. By default 'public' if
     missing. Possible values include: "public", "dod", "gcch".
    :type cloud: str or ~azure.communication.chat.models.CommunicationCloudEnvironmentModel
    """

    _validation = {'user_id': {'required': True}}

    _attribute_map = {
        'user_id': {'key': 'userId', 'type': 'str'},
        'is_anonymous': {'key': 'isAnonymous', 'type': 'bool'},
        'cloud': {'key': 'cloud', 'type': 'str'},
    }

    def __init__(
        self,
        *,
        user_id: str,
        is_anonymous: Optional[bool] = None,
        cloud: Optional[Union[str, "CommunicationCloudEnvironmentModel"]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.user_id = user_id
        self.is_anonymous = is_anonymous
        self.cloud = cloud
class PhoneNumberIdentifierModel(msrest.serialization.Model):
    """Identifier for a phone number.

    All required parameters must be populated in order to send to Azure.

    :param value: Required. The phone number in E.164 format.
    :type value: str
    """

    _validation = {'value': {'required': True}}

    _attribute_map = {'value': {'key': 'value', 'type': 'str'}}

    def __init__(self, *, value: str, **kwargs):
        super().__init__(**kwargs)
        self.value = value
class SendChatMessageRequest(msrest.serialization.Model):
    """Details of a chat message to send.

    All required parameters must be populated in order to send to Azure.

    :param content: Required. Chat message content.
    :type content: str
    :param sender_display_name: The display name of the chat message sender. This property is
     used to populate sender name for push notifications.
    :type sender_display_name: str
    :param type: The chat message type. Possible values include: "text", "html", "topicUpdated",
     "participantAdded", "participantRemoved".
    :type type: str or ~azure.communication.chat.models.ChatMessageType
    :param metadata: Message metadata.
    :type metadata: dict[str, str]
    """

    _validation = {'content': {'required': True}}

    _attribute_map = {
        'content': {'key': 'content', 'type': 'str'},
        'sender_display_name': {'key': 'senderDisplayName', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'metadata': {'key': 'metadata', 'type': '{str}'},
    }

    def __init__(
        self,
        *,
        content: str,
        sender_display_name: Optional[str] = None,
        type: Optional[Union[str, "ChatMessageType"]] = None,
        metadata: Optional[Dict[str, str]] = None,
        **kwargs
    ):
        super().__init__(**kwargs)
        self.content = content
        self.sender_display_name = sender_display_name
        # NOTE: 'type' shadows the builtin, but the generated wire contract
        # fixes the parameter name.
        self.type = type
        self.metadata = metadata
class SendChatMessageResult(msrest.serialization.Model):
    """Outcome of the send-message operation.

    All required parameters must be populated in order to send to Azure.

    :param id: Required. A server-generated message id.
    :type id: str
    """

    _validation = {'id': {'required': True}}

    _attribute_map = {'id': {'key': 'id', 'type': 'str'}}

    def __init__(self, *, id: str, **kwargs):
        super().__init__(**kwargs)
        self.id = id
class SendReadReceiptRequest(msrest.serialization.Model):
    """Payload for posting a read receipt.

    All required parameters must be populated in order to send to Azure.

    :param chat_message_id: Required. Id of the latest chat message read by the user.
    :type chat_message_id: str
    """

    _validation = {'chat_message_id': {'required': True}}

    _attribute_map = {'chat_message_id': {'key': 'chatMessageId', 'type': 'str'}}

    def __init__(self, *, chat_message_id: str, **kwargs):
        super().__init__(**kwargs)
        self.chat_message_id = chat_message_id
class UpdateChatMessageRequest(msrest.serialization.Model):
    """Payload for updating an existing chat message.

    :param content: Chat message content.
    :type content: str
    :param metadata: Message metadata.
    :type metadata: dict[str, str]
    """

    _attribute_map = {
        'content': {'key': 'content', 'type': 'str'},
        'metadata': {'key': 'metadata', 'type': '{str}'},
    }

    def __init__(self, *, content: Optional[str] = None, metadata: Optional[Dict[str, str]] = None, **kwargs):
        super().__init__(**kwargs)
        self.content = content
        self.metadata = metadata
class UpdateChatThreadRequest(msrest.serialization.Model):
    """Payload for updating an existing chat thread.

    :param topic: Chat thread topic.
    :type topic: str
    """

    _attribute_map = {'topic': {'key': 'topic', 'type': 'str'}}

    def __init__(self, *, topic: Optional[str] = None, **kwargs):
        super().__init__(**kwargs)
        self.topic = topic
| mit |
hsab/UMOG | umog_addon/nodes/geometry/displace.py | 1 | 5180 | from ...base_types import UMOGOutputNode
import bpy
import numpy as np
from mathutils import Vector
class DisplaceNode(bpy.types.Node, UMOGOutputNode):
    """UMOG output node that displaces an object's mesh with a texture.

    Each execution applies a Blender DISPLACE modifier driven by the input
    texture and folds the result into an accumulating shape key, so repeated
    bake steps stack their displacements.
    """
    bl_idname = "umog_DisplaceNode"
    bl_label = "Displace"
    assignedType = "Object"
    # Scratch properties; presumably kept for older saved node trees — TODO
    # confirm whether any are still read elsewhere.
    mesh_name = bpy.props.StringProperty()
    mesh_dupl_name = bpy.props.StringProperty()
    texture_name_temp = bpy.props.StringProperty()
    mesh_name_index = bpy.props.IntProperty()
    # use_subdiv = bpy.props.BoolProperty(default=True)
    mod_midlevel = bpy.props.FloatProperty(min = 0.0, max = 1.0, default = 0.5)
    mod_strength = bpy.props.FloatProperty(default = 1.0)
    def create(self):
        # Inputs: object, vertex group, texture, and the two DISPLACE
        # modifier scalars (midlevel/strength).
        self.newInput(self.assignedType, "Object")
        self.newInput("VertexGroup", "Vertex Group")
        socket = self.newInput("Texture2", "Texture")
        self.newInput("Float", "Midlevel", value = 0.5)
        self.newInput("Float", "Strength", value = 0.1)
        # Outputs pass the object and vertex group through to downstream nodes.
        socket = self.newOutput(self.assignedType, "Output")
        socket.display.refreshableIcon = False
        socket.display.packedIcon = False
        socket = self.newOutput("VertexGroup", "Vertex Group")
        socket.display.refreshableIcon = False
        socket.display.packedIcon = False
    def refresh(self):
        # Keep the vertex-group socket tied to the currently selected object,
        # and mirror both inputs onto the outputs.
        if self.inputs[0].value == '':
            self.inputs[1].value = ''
            self.inputs[1].object = ''
        else:
            self.inputs[1].object = self.inputs[0].value
        self.outputs[0].value = self.inputs[0].value
        self.outputs[0].refresh()
        self.outputs[1].value = self.inputs[1].value
        self.outputs[1].refresh()
    def execute(self, refholder):
        self.inputs[0].setViewObjectMode()
        self.inputs[0].setSelected()
        obj = self.inputs[0].getObject()
        vertexGroup = self.inputs[1].value
        texture = self.inputs[2].getTexture()
        midLevel = self.inputs[3].value
        strength = self.inputs[4].value
        # Is Object and Texture are Linked
        if self.inputs[0].is_linked and self.inputs[2].value != '':
            objData = obj.data
            # objData.calc_normals_split()
            shapeKeys = None
            hasShapes = objData.shape_keys is not None
            if hasShapes:
                # Re-displace along the normals of the last baked shape key so
                # successive displacements follow the deformed surface.
                shapeKeys = objData.shape_keys.key_blocks
                keyNorms = shapeKeys[-1].normals_vertex_get()
                print(shapeKeys[-1].name)
                npNorms = np.asarray(keyNorms, dtype="float")
                npNorms = npNorms.reshape((len(objData.vertices), 3))
                objData.normals_split_custom_set_from_vertices(npNorms)
                objData.use_auto_smooth = True
                shapeKeys[-1].value = 0
            else:
                self.resetNormals(objData)
            oname = "DISPLACE"
            mod = obj.modifiers.new(name = oname, type = 'DISPLACE')
            mod.texture = texture
            mod.mid_level = midLevel
            mod.strength = strength
            if hasShapes:
                mod.direction = 'CUSTOM_NORMAL'
            else:
                mod.direction = 'NORMAL'
            if vertexGroup != '':
                mod.vertex_group = vertexGroup
            # Applying as "SHAPE" converts the modifier result into a new
            # shape key instead of permanently editing the base mesh.
            bpy.ops.object.modifier_apply(modifier = oname, apply_as = "SHAPE")
            if shapeKeys is None:
                shapeKeys = objData.shape_keys.key_blocks
            # Merge the previous accumulated key and the fresh displacement
            # key into a single mixed key, then drop the intermediate.
            soFarShape = shapeKeys[-2]
            soFarShape.value = 1
            dispShape = shapeKeys[-1]
            dispShape.value = 1
            bpy.ops.object.shape_key_add(from_mix = True)
            obj.shape_key_remove(dispShape)
            soFarShape.value = 0
            accumShape = shapeKeys[-1]
            accumShape.value = 1
            bakeCount = self.nodeTree.properties.bakeCount
            accumShape.name = "baked_umog_" + str(bakeCount) + "_displace_" + str(
                bpy.context.scene.frame_current)
            obj.update_from_editmode()
            obj.hasUMOGBaked = True
            obj.bakeCount = bakeCount
            # Record the key per bake so later passes can find this bake's keys.
            if bakeCount not in obj.data.bakedKeys:
                obj.data.bakedKeys[bakeCount] = []
            obj.data.bakedKeys[bakeCount].append(accumShape)
        else:
            print("no texture specified")
    def write_keyframe(self, refholder, frame):
        # Intentionally a no-op; per-vertex keyframing below was abandoned.
        pass
        # obj = bpy.data.objects[self.mesh_name]
        # for vertex in obj.data.vertices:
        # vertex.keyframe_insert(data_path='co', frame=frame)
    def preExecute(self, refholder):
        # Flatten any pre-existing shape keys into the base mesh so the bake
        # starts from a clean slate.
        self.inputs[0].setSelected()
        obj = self.inputs[0].getObject()
        objData = obj.data
        hasShapes = objData.shape_keys is not None
        if hasShapes:
            shapeKeys = objData.shape_keys.key_blocks
            bpy.ops.object.shape_key_add(from_mix = True)
            while len(shapeKeys) > 0:
                obj.active_shape_key_index = 0
                bpy.ops.object.shape_key_remove(all = False)
    def postBake(self, refholder):
        # Clear the custom split normals installed during execute().
        obj = self.inputs[0].getObject()
        self.resetNormals(obj.data)
    def resetNormals(self, objData):
        objData.use_auto_smooth = False
        bpy.ops.mesh.customdata_custom_splitnormals_clear()
| gpl-3.0 |
drpeteb/scipy | scipy/linalg/tests/test_solvers.py | 95 | 9591 | from __future__ import division, print_function, absolute_import
import numpy as np
from numpy.linalg import inv
from numpy.testing import TestCase, rand, run_module_suite, assert_raises, \
assert_equal, assert_almost_equal, assert_array_almost_equal, assert_, \
assert_allclose
from scipy.linalg import solve_sylvester, solve_lyapunov, \
solve_discrete_lyapunov, solve_continuous_are, solve_discrete_are
class TestSolveLyapunov(TestCase):
    # Each entry is an (a, q) pair. Solutions are validated against the
    # defining equation rather than precomputed answers, so cases can mix
    # real/complex inputs freely.
    cases = [
        (np.array([[1, 2], [3, 4]]),
         np.array([[9, 10], [11, 12]])),
        # a, q all complex.
        (np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
         np.array([[2.0-2j, 2.0+2j],[-1.0-1j, 2.0]])),
        # a real; q complex.
        (np.array([[1.0, 2.0], [3.0, 5.0]]),
         np.array([[2.0-2j, 2.0+2j],[-1.0-1j, 2.0]])),
        # a complex; q real.
        (np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
         np.array([[2.0, 2.0],[-1.0, 2.0]])),
        # An example from Kitagawa, 1977
        (np.array([[3, 9, 5, 1, 4], [1, 2, 3, 8, 4], [4, 6, 6, 6, 3],
                   [1, 5, 2, 0, 7], [5, 3, 3, 1, 5]]),
         np.array([[2, 4, 1, 0, 1], [4, 1, 0, 2, 0], [1, 0, 3, 0, 3],
                   [0, 2, 0, 1, 0], [1, 0, 3, 0, 4]])),
        # Companion matrix example. a complex; q real; a.shape[0] = 11
        (np.array([[0.100+0.j, 0.091+0.j, 0.082+0.j, 0.073+0.j, 0.064+0.j,
                    0.055+0.j, 0.046+0.j, 0.037+0.j, 0.028+0.j, 0.019+0.j,
                    0.010+0.j],
                   [1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j, 0.000+0.j,
                    0.000+0.j],
                   [0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j,
                    0.000+0.j, 0.000+0.j, 0.000+0.j, 0.000+0.j, 1.000+0.j,
                    0.000+0.j]]),
         np.eye(11)),
        # https://github.com/scipy/scipy/issues/4176
        (np.matrix([[0, 1], [-1/2, -1]]),
         (np.matrix([0, 3]).T * np.matrix([0, 3]).T.T)),
        # https://github.com/scipy/scipy/issues/4176
        (np.matrix([[0, 1], [-1/2, -1]]),
         (np.array(np.matrix([0, 3]).T * np.matrix([0, 3]).T.T))),
        ]

    def check_continuous_case(self, a, q):
        # Continuous-time Lyapunov equation: a*x + x*a^H = q.
        x = solve_lyapunov(a, q)
        assert_array_almost_equal(np.dot(a, x) + np.dot(x, a.conj().transpose()), q)

    def check_discrete_case(self, a, q, method=None):
        # Discrete-time (Stein) equation: a*x*a^H - x = -q.
        x = solve_discrete_lyapunov(a, q, method=method)
        assert_array_almost_equal(np.dot(np.dot(a, x),a.conj().transpose()) - x, -1.0*q)

    def test_cases(self):
        for case in self.cases:
            self.check_continuous_case(case[0], case[1])
            # Exercise the default solver plus both explicit methods.
            self.check_discrete_case(case[0], case[1])
            self.check_discrete_case(case[0], case[1], method='direct')
            self.check_discrete_case(case[0], case[1], method='bilinear')
class TestSolveContinuousARE(TestCase):
    # Each entry is an (a, b, q, r) quadruple for the continuous-time
    # algebraic Riccati equation; solutions are checked against the
    # defining equation in check_case().
    cases = [
        # An example from Laub, A. J.
        # (http://dspace.mit.edu/bitstream/handle/1721.1/1301/R-0859-05666488.pdf)
        (np.matrix([[0, 1], [0, 0]]),
         np.matrix([[0,], [1,]]),
         np.matrix([[1, 0], [0, 2]]),
         np.matrix([[1,],])),
        # Difficult from a numerical standpoint, again from Laub, A. J.
        (np.matrix([[4, 3], [-9.0/2.0, -7.0/2.0]]),
         np.matrix([[1,], [-1,]]),
         np.matrix([[9, 6], [6, 4]]),
         np.matrix([[1,],])),
        # Complex a; real b, q, r
        (np.matrix([[0, 1-2j], [0, -3j]]),
         np.matrix([[0,], [1,]]),
         np.matrix([[1, 0], [0, 2]]),
         np.matrix([[1,],])),
        # Real a, q, r; complex b
        (np.matrix([[0, 1], [0, -1]]),
         np.matrix([[-2j,], [1j,]]),
         np.matrix([[1, 0], [0, 2]]),
         np.matrix([[1,],])),
        # Real a, b; complex q, r
        (np.matrix([[0, 1], [0, -1]]),
         np.matrix([[1, 2], [1, 3]]),
         np.matrix([[1, -3j], [1-1j, 2]]),
         np.matrix([[-2j, 2], [1j, 3]])),
        ]

    def check_case(self, a, b, q, r):
        """Checks if (A'X + XA - XBR^-1B'X+Q=0) is true"""
        # np.matrix '*' is matrix multiplication here.
        x = solve_continuous_are(a, b, q, r)
        assert_array_almost_equal(
            a.getH()*x + x*a - x*b*inv(r)*b.getH()*x + q, 0.0)

    def test_cases(self):
        for case in self.cases:
            self.check_case(case[0], case[1], case[2], case[3])
class TestSolveDiscreteARE(TestCase):
    # Each entry is an (a, b, q, r) quadruple for the discrete-time
    # algebraic Riccati equation.
    cases = [
        # Difficult from a numerical standpoint, again from Laub, A. J.
        # (http://dspace.mit.edu/bitstream/handle/1721.1/1301/R-0859-05666488.pdf)
        (np.matrix([[4, 3], [-9.0/2.0, -7.0/2.0]]),
         np.matrix([[1,], [-1,]]),
         np.matrix([[9, 6], [6, 4]]),
         np.matrix([[1,],])),
        # Another example from Laub
        (np.matrix([[0.9512, 0], [0, 0.9048]]),
         np.matrix([[4.877, 4.877], [-1.1895, 3.569]]),
         np.matrix([[0.005, 0],[0, 0.02]]),
         np.matrix([[1.0/3.0, 0],[0, 3]])),
        # Complex a; real b, q, r
        (np.matrix([[2, 1-2j], [0, -3j]]),
         np.matrix([[0,], [1,]]),
         np.matrix([[1, 0], [0, 2]]),
         np.matrix([[1,],])),
        # Real a, q, r; complex b
        (np.matrix([[2, 1], [0, -1]]),
         np.matrix([[-2j,], [1j,]]),
         np.matrix([[1, 0], [0, 2]]),
         np.matrix([[1,],])),
        # Real a, b; complex q, r
        (np.matrix([[3, 1], [0, -1]]),
         np.matrix([[1, 2], [1, 3]]),
         np.matrix([[1, -3j], [1-1j, 2]]),
         np.matrix([[-2j, 2], [1j, 3]])),
        ]

    def check_case(self, a, b, q, r):
        """Checks if X = A'XA-(A'XB)(R+B'XB)^-1(B'XA)+Q) is true"""
        # np.matrix '*' is matrix multiplication here.
        x = solve_discrete_are(a, b, q, r)
        assert_array_almost_equal(
            a.getH()*x*a-(a.getH()*x*b)*inv(r+b.getH()*x*b)*(b.getH()*x*a)+q-x, 0.0)

    def test_cases(self):
        for case in self.cases:
            self.check_case(case[0], case[1], case[2], case[3])
class TestSolveSylvester(TestCase):
    # Each entry is an (a, b, c) triple for the Sylvester equation
    # A*X + X*B = C; shapes may be non-square as long as they conform.
    cases = [
        # a, b, c all real.
        (np.array([[1, 2], [0, 4]]),
         np.array([[5, 6], [0, 8]]),
         np.array([[9, 10], [11, 12]])),
        # a, b, c all real, 4x4. a and b have non-trival 2x2 blocks in their
        # quasi-triangular form.
        (np.array([[1.0, 0, 0, 0], [0, 1.0, 2.0, 0.0], [0, 0, 3.0, -4], [0, 0, 2, 5]]),
         np.array([[2.0, 0, 0,1.0], [0, 1.0, 0.0, 0.0], [0, 0, 1.0, -1], [0, 0, 1, 1]]),
         np.array([[1.0, 0, 0, 0], [0, 1.0, 0, 0], [0, 0, 1.0, 0], [0, 0, 0, 1.0]])),
        # a, b, c all complex.
        (np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
         np.array([[-1.0, 2j], [3.0, 4.0]]),
         np.array([[2.0-2j, 2.0+2j],[-1.0-1j, 2.0]])),
        # a and b real; c complex.
        (np.array([[1.0, 2.0], [3.0, 5.0]]),
         np.array([[-1.0, 0], [3.0, 4.0]]),
         np.array([[2.0-2j, 2.0+2j],[-1.0-1j, 2.0]])),
        # a and c complex; b real.
        (np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
         np.array([[-1.0, 0], [3.0, 4.0]]),
         np.array([[2.0-2j, 2.0+2j],[-1.0-1j, 2.0]])),
        # a complex; b and c real.
        (np.array([[1.0+1j, 2.0], [3.0-4.0j, 5.0]]),
         np.array([[-1.0, 0], [3.0, 4.0]]),
         np.array([[2.0, 2.0],[-1.0, 2.0]])),
        # not square matrices, real
        (np.array([[8, 1, 6], [3, 5, 7], [4, 9, 2]]),
         np.array([[2, 3], [4, 5]]),
         np.array([[1, 2], [3, 4], [5, 6]])),
        # not square matrices, complex
        (np.array([[8, 1j, 6+2j], [3, 5, 7], [4, 9, 2]]),
         np.array([[2, 3], [4, 5-1j]]),
         np.array([[1, 2j], [3, 4j], [5j, 6+7j]])),
        ]

    def check_case(self, a, b, c):
        # Verify the defining equation A*X + X*B = C.
        x = solve_sylvester(a, b, c)
        assert_array_almost_equal(np.dot(a, x) + np.dot(x, b), c)

    def test_cases(self):
        for case in self.cases:
            self.check_case(case[0], case[1], case[2])

    def test_trivial(self):
        # 2x1 right-hand side with identity A and scalar B: X = C / 2.
        a = np.array([[1.0, 0.0], [0.0, 1.0]])
        b = np.array([[1.0]])
        c = np.array([2.0, 2.0]).reshape(-1,1)
        x = solve_sylvester(a, b, c)
        assert_array_almost_equal(x, np.array([1.0, 1.0]).reshape(-1,1))
# Allow running this test module directly as a script.
if __name__ == "__main__":
    run_module_suite()
| bsd-3-clause |
jeremiahmarks/sl4a | python/src/Lib/calendar.py | 60 | 22995 | """Calendar printing functions
Note when comparing these calendars to the ones printed by cal(1): By
default, these calendars have Monday as the first day of the week, and
Sunday as the last (the European convention). Use setfirstweekday() to
set the first day of the week (0=Monday, 6=Sunday)."""
import sys
import datetime
import locale as _locale
# Public API of the module; star-imports get exactly these names.
__all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday",
           "firstweekday", "isleap", "leapdays", "weekday", "monthrange",
           "monthcalendar", "prmonth", "month", "prcal", "calendar",
           "timegm", "month_name", "month_abbr", "day_name", "day_abbr"]

# Exception raised for bad input (with string parameter for details)
# Kept as a ValueError alias for backward compatibility; the specific
# subclasses below are what the module actually raises.
error = ValueError
# Exceptions raised for bad input
class IllegalMonthError(ValueError):
    """Raised when a month number falls outside the valid range 1..12."""

    def __init__(self, month):
        # Keep the offending value so callers can inspect it.
        self.month = month

    def __str__(self):
        return "bad month number %r; must be 1-12" % self.month
class IllegalWeekdayError(ValueError):
    """Raised when a weekday number falls outside the valid range 0..6."""

    def __init__(self, weekday):
        # Keep the offending value so callers can inspect it.
        self.weekday = weekday

    def __str__(self):
        return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday
# Constants for months referenced later
January = 1
February = 2

# Number of days per month (except for February in leap years)
# Index 0 is a dummy entry so mdays[month] works with 1-based month numbers.
mdays = [0, 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31]
# This module used to have hard-coded lists of day and month names, as
# English strings. The classes following emulate a read-only version of
# that, but supply localized names. Note that the values are computed
# fresh on each call, in case the user changes locale between calls.
class _localized_month:
_months = [datetime.date(2001, i+1, 1).strftime for i in range(12)]
_months.insert(0, lambda x: "")
def __init__(self, format):
self.format = format
def __getitem__(self, i):
funcs = self._months[i]
if isinstance(i, slice):
return [f(self.format) for f in funcs]
else:
return funcs(self.format)
def __len__(self):
return 13
class _localized_day:
# January 1, 2001, was a Monday.
_days = [datetime.date(2001, 1, i+1).strftime for i in range(7)]
def __init__(self, format):
self.format = format
def __getitem__(self, i):
funcs = self._days[i]
if isinstance(i, slice):
return [f(self.format) for f in funcs]
else:
return funcs(self.format)
def __len__(self):
return 7
# Full and abbreviated names of weekdays
day_name = _localized_day('%A')
day_abbr = _localized_day('%a')

# Full and abbreviated names of months (1-based arrays!!!)
month_name = _localized_month('%B')
month_abbr = _localized_month('%b')

# Constants for weekdays
# These match datetime.date.weekday(): Monday == 0 ... Sunday == 6.
(MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)
def isleap(year):
    """Return True for leap years, False otherwise (Gregorian rules)."""
    if year % 4:
        return False
    return year % 100 != 0 or year % 400 == 0
def leapdays(y1, y2):
    """Return the number of leap years in the half-open range [y1, y2).

    Assumes y1 <= y2.
    """
    # Count leap years up to (and including) each endpoint-minus-one,
    # then subtract.
    a, b = y1 - 1, y2 - 1
    return (b // 4 - a // 4) - (b // 100 - a // 100) + (b // 400 - a // 400)
def weekday(year, month, day):
    """Return the day of the week (0 == Monday ... 6 == Sunday) for the
    given year (1970-...), month (1-12) and day (1-31)."""
    the_date = datetime.date(year, month, day)
    return the_date.weekday()
def monthrange(year, month):
    """Return a pair (weekday of first day, number of days) for the given
    year and month, where weekday is 0 (Monday) .. 6 (Sunday).

    Raises IllegalMonthError when month is outside 1..12.
    """
    if not 1 <= month <= 12:
        raise IllegalMonthError(month)
    first = weekday(year, month, 1)
    ndays = mdays[month]
    if month == February and isleap(year):
        ndays += 1
    return first, ndays
class Calendar(object):
    """
    Base calendar class. This class doesn't do any formatting. It simply
    provides data to subclasses.
    """
    def __init__(self, firstweekday=0):
        self.firstweekday = firstweekday # 0 = Monday, 6 = Sunday
    def getfirstweekday(self):
        # Normalised on read, so any integer assigned to the property maps
        # into 0..6.
        return self._firstweekday % 7
    def setfirstweekday(self, firstweekday):
        self._firstweekday = firstweekday
    firstweekday = property(getfirstweekday, setfirstweekday)
    def iterweekdays(self):
        """
        Return a iterator for one week of weekday numbers starting with the
        configured first one.
        """
        for i in range(self.firstweekday, self.firstweekday + 7):
            yield i%7
    def itermonthdates(self, year, month):
        """
        Return an iterator for one month. The iterator will yield datetime.date
        values and will always iterate through complete weeks, so it will yield
        dates outside the specified month.
        """
        date = datetime.date(year, month, 1)
        # Go back to the beginning of the week
        days = (date.weekday() - self.firstweekday) % 7
        date -= datetime.timedelta(days=days)
        oneday = datetime.timedelta(days=1)
        while True:
            yield date
            date += oneday
            # Stop once we have left the month AND completed the week.
            if date.month != month and date.weekday() == self.firstweekday:
                break
    def itermonthdays2(self, year, month):
        """
        Like itermonthdates(), but will yield (day number, weekday number)
        tuples. For days outside the specified month the day number is 0.
        """
        for date in self.itermonthdates(year, month):
            if date.month != month:
                yield (0, date.weekday())
            else:
                yield (date.day, date.weekday())
    def itermonthdays(self, year, month):
        """
        Like itermonthdates(), but will yield day numbers. For days outside
        the specified month the day number is 0.
        """
        for date in self.itermonthdates(year, month):
            if date.month != month:
                yield 0
            else:
                yield date.day
    def monthdatescalendar(self, year, month):
        """
        Return a matrix (list of lists) representing a month's calendar.
        Each row represents a week; week entries are datetime.date values.
        """
        dates = list(self.itermonthdates(year, month))
        return [ dates[i:i+7] for i in range(0, len(dates), 7) ]
    def monthdays2calendar(self, year, month):
        """
        Return a matrix representing a month's calendar.
        Each row represents a week; week entries are
        (day number, weekday number) tuples. Day numbers outside this month
        are zero.
        """
        days = list(self.itermonthdays2(year, month))
        return [ days[i:i+7] for i in range(0, len(days), 7) ]
    def monthdayscalendar(self, year, month):
        """
        Return a matrix representing a month's calendar.
        Each row represents a week; days outside this month are zero.
        """
        days = list(self.itermonthdays(year, month))
        return [ days[i:i+7] for i in range(0, len(days), 7) ]
    def yeardatescalendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting. The return
        value is a list of month rows. Each month row contains upto width months.
        Each month contains between 4 and 6 weeks and each week contains 1-7
        days. Days are datetime.date objects.
        """
        months = [
            self.monthdatescalendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
    def yeardays2calendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting (similar to
        yeardatescalendar()). Entries in the week lists are
        (day number, weekday number) tuples. Day numbers outside this month are
        zero.
        """
        months = [
            self.monthdays2calendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
    def yeardayscalendar(self, year, width=3):
        """
        Return the data for the specified year ready for formatting (similar to
        yeardatescalendar()). Entries in the week lists are day numbers.
        Day numbers outside this month are zero.
        """
        months = [
            self.monthdayscalendar(year, i)
            for i in range(January, January+12)
        ]
        return [months[i:i+width] for i in range(0, len(months), width) ]
class TextCalendar(Calendar):
    """
    Subclass of Calendar that outputs a calendar as a simple plain text
    similar to the UNIX program cal.
    """
    # NOTE: the pr* methods below use Python 2 print statements; the
    # trailing comma suppresses the newline.
    def prweek(self, theweek, width):
        """
        Print a single week (no newline).
        """
        print self.formatweek(theweek, width),
    def formatday(self, day, weekday, width):
        """
        Returns a formatted day.
        """
        if day == 0:
            s = ''
        else:
            s = '%2i' % day # right-align single-digit days
        return s.center(width)
    def formatweek(self, theweek, width):
        """
        Returns a single week in a string (no newline).
        """
        return ' '.join(self.formatday(d, wd, width) for (d, wd) in theweek)
    def formatweekday(self, day, width):
        """
        Returns a formatted week day name.
        """
        # Full names only fit in wide columns; otherwise fall back to the
        # abbreviated names.
        if width >= 9:
            names = day_name
        else:
            names = day_abbr
        return names[day][:width].center(width)
    def formatweekheader(self, width):
        """
        Return a header for a week.
        """
        return ' '.join(self.formatweekday(i, width) for i in self.iterweekdays())
    def formatmonthname(self, theyear, themonth, width, withyear=True):
        """
        Return a formatted month name.
        """
        s = month_name[themonth]
        if withyear:
            s = "%s %r" % (s, theyear)
        return s.center(width)
    def prmonth(self, theyear, themonth, w=0, l=0):
        """
        Print a month's calendar.
        """
        print self.formatmonth(theyear, themonth, w, l),
    def formatmonth(self, theyear, themonth, w=0, l=0):
        """
        Return a month's calendar string (multi-line).
        """
        # Enforce minimum column width / line spacing.
        w = max(2, w)
        l = max(1, l)
        s = self.formatmonthname(theyear, themonth, 7 * (w + 1) - 1)
        s = s.rstrip()
        s += '\n' * l
        s += self.formatweekheader(w).rstrip()
        s += '\n' * l
        for week in self.monthdays2calendar(theyear, themonth):
            s += self.formatweek(week, w).rstrip()
            s += '\n' * l
        return s
    def formatyear(self, theyear, w=2, l=1, c=6, m=3):
        """
        Returns a year's calendar as a multi-line string.
        """
        # w: date column width, l: lines per week, c: inter-month spacing,
        # m: months per row.
        w = max(2, w)
        l = max(1, l)
        c = max(2, c)
        colwidth = (w + 1) * 7 - 1
        v = []
        a = v.append
        a(repr(theyear).center(colwidth*m+c*(m-1)).rstrip())
        a('\n'*l)
        header = self.formatweekheader(w)
        for (i, row) in enumerate(self.yeardays2calendar(theyear, m)):
            # months in this row
            months = range(m*i+1, min(m*(i+1)+1, 13))
            a('\n'*l)
            names = (self.formatmonthname(theyear, k, colwidth, False)
                     for k in months)
            a(formatstring(names, colwidth, c).rstrip())
            a('\n'*l)
            headers = (header for k in months)
            a(formatstring(headers, colwidth, c).rstrip())
            a('\n'*l)
            # max number of weeks for this row
            height = max(len(cal) for cal in row)
            for j in range(height):
                weeks = []
                for cal in row:
                    if j >= len(cal):
                        weeks.append('')
                    else:
                        weeks.append(self.formatweek(cal[j], w))
                a(formatstring(weeks, colwidth, c).rstrip())
                a('\n' * l)
        return ''.join(v)
    def pryear(self, theyear, w=0, l=0, c=6, m=3):
        """Print a year's calendar."""
        print self.formatyear(theyear, w, l, c, m)
class HTMLCalendar(Calendar):
    """
    This calendar returns complete HTML pages.
    """

    # CSS classes for the day <td>s
    cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
    def formatday(self, day, weekday):
        """
        Return a day as a table cell.
        """
        if day == 0:
            return '<td class="noday">&nbsp;</td>' # day outside month
        else:
            return '<td class="%s">%d</td>' % (self.cssclasses[weekday], day)
    def formatweek(self, theweek):
        """
        Return a complete week as a table row.
        """
        s = ''.join(self.formatday(d, wd) for (d, wd) in theweek)
        return '<tr>%s</tr>' % s
    def formatweekday(self, day):
        """
        Return a weekday name as a table header.
        """
        return '<th class="%s">%s</th>' % (self.cssclasses[day], day_abbr[day])
    def formatweekheader(self):
        """
        Return a header for a week as a table row.
        """
        s = ''.join(self.formatweekday(i) for i in self.iterweekdays())
        return '<tr>%s</tr>' % s
    def formatmonthname(self, theyear, themonth, withyear=True):
        """
        Return a month name as a table row.
        """
        if withyear:
            s = '%s %s' % (month_name[themonth], theyear)
        else:
            s = '%s' % month_name[themonth]
        return '<tr><th colspan="7" class="month">%s</th></tr>' % s
    def formatmonth(self, theyear, themonth, withyear=True):
        """
        Return a formatted month as a table.
        """
        v = []
        a = v.append
        a('<table border="0" cellpadding="0" cellspacing="0" class="month">')
        a('\n')
        a(self.formatmonthname(theyear, themonth, withyear=withyear))
        a('\n')
        a(self.formatweekheader())
        a('\n')
        for week in self.monthdays2calendar(theyear, themonth):
            a(self.formatweek(week))
            a('\n')
        a('</table>')
        a('\n')
        return ''.join(v)
    def formatyear(self, theyear, width=3):
        """
        Return a formatted year as a table of tables.
        """
        v = []
        a = v.append
        width = max(width, 1)
        a('<table border="0" cellpadding="0" cellspacing="0" class="year">')
        a('\n')
        a('<tr><th colspan="%d" class="year">%s</th></tr>' % (width, theyear))
        for i in range(January, January+12, width):
            # months in this row
            months = range(i, min(i+width, 13))
            a('<tr>')
            for m in months:
                a('<td>')
                a(self.formatmonth(theyear, m, withyear=False))
                a('</td>')
            a('</tr>')
        a('</table>')
        return ''.join(v)
    def formatyearpage(self, theyear, width=3, css='calendar.css', encoding=None):
        """
        Return a formatted year as a complete HTML page.
        """
        if encoding is None:
            encoding = sys.getdefaultencoding()
        v = []
        a = v.append
        a('<?xml version="1.0" encoding="%s"?>\n' % encoding)
        a('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n')
        a('<html>\n')
        a('<head>\n')
        a('<meta http-equiv="Content-Type" content="text/html; charset=%s" />\n' % encoding)
        if css is not None:
            a('<link rel="stylesheet" type="text/css" href="%s" />\n' % css)
        a('<title>Calendar for %d</title>\n' % theyear)
        a('</head>\n')
        a('<body>\n')
        a(self.formatyear(theyear, width))
        a('</body>\n')
        a('</html>\n')
        # Escape characters the chosen encoding cannot represent as HTML
        # character references.
        return ''.join(v).encode(encoding, "xmlcharrefreplace")
class TimeEncoding:
    """Context manager that temporarily installs *locale* for LC_TIME.

    Entering returns the encoding of the active LC_TIME locale (possibly
    None); exiting restores whatever locale was in effect before.
    """

    def __init__(self, locale):
        self.locale = locale

    def __enter__(self):
        # setlocale() returns the previous setting, which we stash for __exit__.
        self.oldlocale = _locale.setlocale(_locale.LC_TIME, self.locale)
        return _locale.getlocale(_locale.LC_TIME)[1]

    def __exit__(self, *exc_details):
        _locale.setlocale(_locale.LC_TIME, self.oldlocale)
class LocaleTextCalendar(TextCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """
    def __init__(self, firstweekday=0, locale=None):
        TextCalendar.__init__(self, firstweekday)
        if locale is None:
            locale = _locale.getdefaultlocale()
        self.locale = locale
    def formatweekday(self, day, width):
        with TimeEncoding(self.locale) as encoding:
            if width >= 9:
                names = day_name
            else:
                names = day_abbr
            name = names[day]
            # NOTE(review): str.decode is Python 2 semantics (byte string ->
            # unicode); this class predates Python 3.
            if encoding is not None:
                name = name.decode(encoding)
            return name[:width].center(width)
    def formatmonthname(self, theyear, themonth, width, withyear=True):
        with TimeEncoding(self.locale) as encoding:
            s = month_name[themonth]
            if encoding is not None:
                s = s.decode(encoding)
            if withyear:
                s = "%s %r" % (s, theyear)
            return s.center(width)
class LocaleHTMLCalendar(HTMLCalendar):
    """
    This class can be passed a locale name in the constructor and will return
    month and weekday names in the specified locale. If this locale includes
    an encoding all strings containing month and weekday names will be returned
    as unicode.
    """
    def __init__(self, firstweekday=0, locale=None):
        HTMLCalendar.__init__(self, firstweekday)
        if locale is None:
            locale = _locale.getdefaultlocale()
        self.locale = locale
    def formatweekday(self, day):
        with TimeEncoding(self.locale) as encoding:
            s = day_abbr[day]
            # NOTE(review): str.decode is Python 2 semantics (byte string ->
            # unicode); this class predates Python 3.
            if encoding is not None:
                s = s.decode(encoding)
            return '<th class="%s">%s</th>' % (self.cssclasses[day], s)
    def formatmonthname(self, theyear, themonth, withyear=True):
        with TimeEncoding(self.locale) as encoding:
            s = month_name[themonth]
            if encoding is not None:
                s = s.decode(encoding)
            if withyear:
                s = '%s %s' % (s, theyear)
            return '<tr><th colspan="7" class="month">%s</th></tr>' % s
# Support for old module level interface
c = TextCalendar()  # module-level singleton backing the legacy functions below
firstweekday = c.getfirstweekday
def setfirstweekday(firstweekday):
    # Legacy setter: validate the weekday before mutating the shared instance.
    if not MONDAY <= firstweekday <= SUNDAY:
        raise IllegalWeekdayError(firstweekday)
    c.firstweekday = firstweekday
# Each legacy name simply forwards to the corresponding bound method of ``c``.
monthcalendar = c.monthdayscalendar
prweek = c.prweek
week = c.formatweek
weekheader = c.formatweekheader
prmonth = c.prmonth
month = c.formatmonth
calendar = c.formatyear
prcal = c.pryear
# Spacing of month columns for multi-column year calendar
_colwidth = 7*3 - 1 # Amount printed by prweek()
_spacing = 6 # Number of spaces between columns
def format(cols, colwidth=_colwidth, spacing=_spacing):
    """Print *cols* side by side as centered columns (see formatstring)."""
    print formatstring(cols, colwidth, spacing)
def formatstring(cols, colwidth=_colwidth, spacing=_spacing):
    """Return the given strings joined side by side, each centered in a
    column of *colwidth* characters with *spacing* spaces in between."""
    gap = spacing * ' '
    return gap.join(text.center(colwidth) for text in cols)
EPOCH = 1970
_EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal()
def timegm(tuple):
    """Unrelated but handy function to calculate Unix timestamp from GMT.

    Accepts any time-tuple-like sequence whose first six items are
    (year, month, day, hour, minute, second); extra items are ignored.
    """
    year, month, day, hour, minute, second = tuple[:6]
    # Whole days elapsed since the Unix epoch (day is 1-based, hence -1).
    elapsed_days = datetime.date(year, month, 1).toordinal() - _EPOCH_ORD + day - 1
    return ((elapsed_days * 24 + hour) * 60 + minute) * 60 + second
def main(args):
    """Command-line entry point: print a text or HTML calendar.

    ``args`` is a ``sys.argv``-style list; the optional positional
    arguments are a year and a month.  With no positionals the current
    year is rendered.
    """
    import optparse
    parser = optparse.OptionParser(usage="usage: %prog [options] [year [month]]")
    parser.add_option(
        "-w", "--width",
        dest="width", type="int", default=2,
        help="width of date column (default 2, text only)"
    )
    parser.add_option(
        "-l", "--lines",
        dest="lines", type="int", default=1,
        help="number of lines for each week (default 1, text only)"
    )
    parser.add_option(
        "-s", "--spacing",
        dest="spacing", type="int", default=6,
        help="spacing between months (default 6, text only)"
    )
    parser.add_option(
        "-m", "--months",
        dest="months", type="int", default=3,
        help="months per row (default 3, text only)"
    )
    parser.add_option(
        "-c", "--css",
        dest="css", default="calendar.css",
        help="CSS to use for page (html only)"
    )
    parser.add_option(
        "-L", "--locale",
        dest="locale", default=None,
        help="locale to be used from month and weekday names"
    )
    parser.add_option(
        "-e", "--encoding",
        dest="encoding", default=None,
        help="Encoding to use for output"
    )
    parser.add_option(
        "-t", "--type",
        dest="type", default="text",
        choices=("text", "html"),
        help="output type (text or html)"
    )
    (options, args) = parser.parse_args(args)
    if options.locale and not options.encoding:
        # parser.error() already exits; sys.exit below is defensive only.
        parser.error("if --locale is specified --encoding is required")
        sys.exit(1)
    # Locales are passed around as (name, encoding) pairs.
    locale = options.locale, options.encoding
    if options.type == "html":
        if options.locale:
            cal = LocaleHTMLCalendar(locale=locale)
        else:
            cal = HTMLCalendar()
        encoding = options.encoding
        if encoding is None:
            encoding = sys.getdefaultencoding()
        optdict = dict(encoding=encoding, css=options.css)
        # args[0] is the program name, so len(args)==1 means "no positionals".
        if len(args) == 1:
            print cal.formatyearpage(datetime.date.today().year, **optdict)
        elif len(args) == 2:
            print cal.formatyearpage(int(args[1]), **optdict)
        else:
            parser.error("incorrect number of arguments")
            sys.exit(1)
    else:
        if options.locale:
            cal = LocaleTextCalendar(locale=locale)
        else:
            cal = TextCalendar()
        optdict = dict(w=options.width, l=options.lines)
        if len(args) != 3:
            # Month-grid options only apply to the whole-year layout.
            optdict["c"] = options.spacing
            optdict["m"] = options.months
        if len(args) == 1:
            result = cal.formatyear(datetime.date.today().year, **optdict)
        elif len(args) == 2:
            result = cal.formatyear(int(args[1]), **optdict)
        elif len(args) == 3:
            result = cal.formatmonth(int(args[1]), int(args[2]), **optdict)
        else:
            parser.error("incorrect number of arguments")
            sys.exit(1)
        if options.encoding:
            result = result.encode(options.encoding)
        print result
# Allow running the module directly: ``python calendar.py [year [month]]``.
if __name__ == "__main__":
    main(sys.argv)
| apache-2.0 |
richardfergie/googleads-python-lib | examples/dfp/v201502/contact_service/update_contacts.py | 4 | 2384 | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates contact addresses.
To determine which contacts exist, run get_all_contacts.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
# Import appropriate modules from the client library.
from googleads import dfp
# Set the ID of the contact to update.
CONTACT_ID = 'INSERT_CONTACT_ID_HERE'
def main(client, contact_id):
  """Update the address of the DFP contact identified by contact_id."""
  # Initialize appropriate service.
  contact_service = client.GetService('ContactService', version='v201502')
  # Create statement object to select the single contact by ID.
  values = [{
      'key': 'id',
      'value': {
          'xsi_type': 'NumberValue',
          'value': contact_id
      }
  }]
  query = 'WHERE id = :id'
  statement = dfp.FilterStatement(query, values, 1)
  # Get contacts by statement.
  response = contact_service.getContactsByStatement(
      statement.ToStatement())
  if 'results' in response:
    updated_contacts = []
    for contact in response['results']:
      # Example hard-codes the new address; edit before real use.
      contact['address'] = '123 New Street, New York, NY, 10011'
      updated_contacts.append(contact)
    # Update the contact on the server.
    contacts = contact_service.updateContacts(updated_contacts)
    # Display results.
    for contact in contacts:
      print (('Contact with ID \'%s\', name \'%s\', and address \'%s\' '
              'was updated.')
             % (contact['id'], contact['name'], contact['address']))
  else:
    print 'No contacts found to update.'
if __name__ == '__main__':
  # Initialize client object.
  # NOTE: CONTACT_ID above is a placeholder and must be replaced before running.
  dfp_client = dfp.DfpClient.LoadFromStorage()
  main(dfp_client, CONTACT_ID)
| apache-2.0 |
cloud9UG/odoo | addons/website_event_track/models/event.py | 300 | 8344 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp.addons.website.models.website import slug
import pytz
class event_track_tag(osv.osv):
    # Free-form tag attachable to event tracks (m2m from event.track.tag_ids).
    _name = "event.track.tag"
    _order = 'name'
    _columns = {
        'name': fields.char('Event Track Tag', translate=True)
    }
class event_tag(osv.osv):
    # Free-form tag attachable to events themselves (m2m from event.event.tag_ids).
    _name = "event.tag"
    _order = 'name'
    _columns = {
        'name': fields.char('Event Tag', translate=True)
    }
#
# Tracks: conferences
#
class event_track_stage(osv.osv):
    # Kanban stage for tracks (e.g. proposal / confirmed); columns are
    # ordered by the integer sequence, lowest first.
    _name = "event.track.stage"
    _order = 'sequence'
    _columns = {
        'name': fields.char('Track Stage', translate=True),
        'sequence': fields.integer('Sequence')
    }
    _defaults = {
        'sequence': 0
    }
class event_track_location(osv.osv):
    # Physical room where a track takes place.
    _name = "event.track.location"
    _columns = {
        'name': fields.char('Track Rooms')
    }
class event_track(osv.osv):
    # A talk/session scheduled within an event, with its own website page.
    _name = "event.track"
    _description = 'Event Tracks'
    _order = 'priority, date'
    _inherit = ['mail.thread', 'ir.needaction_mixin', 'website.seo.metadata']

    def _website_url(self, cr, uid, ids, field_name, arg, context=None):
        """Compute the public website URL of each track."""
        res = dict.fromkeys(ids, '')
        for track in self.browse(cr, uid, ids, context=context):
            res[track.id] = "/event/%s/track/%s" % (slug(track.event_id), slug(track))
        return res

    _columns = {
        'name': fields.char('Track Title', required=True, translate=True),
        'user_id': fields.many2one('res.users', 'Responsible'),
        'speaker_ids': fields.many2many('res.partner', string='Speakers'),
        'tag_ids': fields.many2many('event.track.tag', string='Tags'),
        'stage_id': fields.many2one('event.track.stage', 'Stage'),
        'description': fields.html('Track Description', translate=True),
        'date': fields.datetime('Track Date'),
        'duration': fields.float('Duration', digits=(16,2)),
        'location_id': fields.many2one('event.track.location', 'Location'),
        'event_id': fields.many2one('event.event', 'Event', required=True),
        'color': fields.integer('Color Index'),
        'priority': fields.selection([('3','Low'),('2','Medium (*)'),('1','High (**)'),('0','Highest (***)')], 'Priority', required=True),
        'website_published': fields.boolean('Available in the website', copy=False),
        'website_url': fields.function(_website_url, string="Website url", type="char"),
        'image': fields.related('speaker_ids', 'image', type='binary', readonly=True)
    }

    def set_priority(self, cr, uid, ids, priority, context=None):
        """Set the priority of the given tracks.

        ``context`` defaults to None (was a shared mutable ``{}``, the
        classic mutable-default pitfall).
        """
        return self.write(cr, uid, ids, {'priority' : priority})

    def _default_stage_id(self, cr, uid, context=None):
        """Return the first stage (lowest sequence) as default, or False."""
        stage_obj = self.pool.get('event.track.stage')
        ids = stage_obj.search(cr, uid, [], context=context)
        return ids and ids[0] or False

    _defaults = {
        'user_id': lambda self, cr, uid, ctx: uid,
        'website_published': lambda self, cr, uid, ctx: False,
        'duration': lambda *args: 1.5,
        'stage_id': _default_stage_id,
        'priority': '2'
    }

    def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None):
        """Make the kanban view show every stage as a column, even empty ones."""
        stage_obj = self.pool.get('event.track.stage')
        result = stage_obj.name_search(cr, uid, '', context=context)
        return result, {}

    _group_by_full = {
        'stage_id': _read_group_stage_ids,
    }
#
# Events
#
class event_event(osv.osv):
    # Extends the base event with tracks, sponsors, a blog and website menus.
    _inherit = "event.event"

    def _list_tz(self,cr,uid, context=None):
        # put POSIX 'Etc/*' entries at the end to avoid confusing users - see bug 1086728
        return [(tz,tz) for tz in sorted(pytz.all_timezones, key=lambda tz: tz if not tz.startswith('Etc/') else '_')]

    def _count_tracks(self, cr, uid, ids, field_name, arg, context=None):
        """Functional field: number of tracks per event."""
        return {
            event.id: len(event.track_ids)
            for event in self.browse(cr, uid, ids, context=context)
        }

    def _get_tracks_tag_ids(self, cr, uid, ids, field_names, arg=None, context=None):
        """Functional field: the de-duplicated union of all tags used by
        the event's tracks."""
        res = dict((res_id, []) for res_id in ids)
        for event in self.browse(cr, uid, ids, context=context):
            for track in event.track_ids:
                res[event.id] += [tag.id for tag in track.tag_ids]
            res[event.id] = list(set(res[event.id]))
        return res

    _columns = {
        'tag_ids': fields.many2many('event.tag', string='Tags'),
        'track_ids': fields.one2many('event.track', 'event_id', 'Tracks', copy=True),
        'sponsor_ids': fields.one2many('event.sponsor', 'event_id', 'Sponsorships', copy=True),
        'blog_id': fields.many2one('blog.blog', 'Event Blog'),
        'show_track_proposal': fields.boolean('Talks Proposals'),
        'show_tracks': fields.boolean('Multiple Tracks'),
        'show_blog': fields.boolean('News'),
        'count_tracks': fields.function(_count_tracks, type='integer', string='Tracks'),
        'tracks_tag_ids': fields.function(_get_tracks_tag_ids, type='one2many', relation='event.track.tag', string='Tags of Tracks'),
        'allowed_track_tag_ids': fields.many2many('event.track.tag', string='Accepted Tags', help="List of available tags for track proposals."),
        'timezone_of_event': fields.selection(_list_tz, 'Event Timezone', size=64),
    }
    _defaults = {
        'show_track_proposal': False,
        'show_tracks': False,
        'show_blog': False,
        'timezone_of_event':lambda self,cr,uid,c: self.pool.get('res.users').browse(cr, uid, uid, c).tz,
    }

    def _get_new_menu_pages(self, cr, uid, event, context=None):
        """Extend the website menu entries generated for an event."""
        context = context or {}
        result = super(event_event, self)._get_new_menu_pages(cr, uid, event, context=context)
        if event.show_tracks:
            result.append( (_('Talks'), '/event/%s/track' % slug(event)))
            result.append( (_('Agenda'), '/event/%s/agenda' % slug(event)))
        if event.blog_id:
            # Fixed: was ``event.blog_ig`` (nonexistent attribute) which
            # raised AttributeError whenever a blog was configured.
            # NOTE(review): '/blogpost'+slug(...) yields no '/' separator
            # before the slug — confirm the intended blog route.
            result.append( (_('News'), '/blogpost'+slug(event.blog_id)))
        if event.show_track_proposal:
            result.append( (_('Talk Proposals'), '/event/%s/track_proposal' % slug(event)))
        return result
#
# Sponsors
#
class event_sponsors_type(osv.osv):
    # Sponsorship level (e.g. gold/silver); display order follows sequence.
    _name = "event.sponsor.type"
    _order = "sequence"
    _columns = {
        "name": fields.char('Sponsor Type', required=True, translate=True),
        "sequence": fields.integer('Sequence')
    }
class event_sponsors(osv.osv):
    # A sponsorship line linking a partner to an event at a sponsor level.
    # 'sequence' is related to the type so sponsors sort by level.
    _name = "event.sponsor"
    _order = "sequence"
    _columns = {
        'event_id': fields.many2one('event.event', 'Event', required=True),
        'sponsor_type_id': fields.many2one('event.sponsor.type', 'Sponsoring Type', required=True),
        'partner_id': fields.many2one('res.partner', 'Sponsor/Customer', required=True),
        'url': fields.text('Sponsor Website'),
        'sequence': fields.related('sponsor_type_id', 'sequence', string='Sequence', store=True),
        'image_medium': fields.related('partner_id', 'image_medium', string='Logo', type='binary')
    }
    def has_access_to_partner(self, cr, uid, ids, context=None):
        # True when the current user can see (via search record rules) every
        # partner behind the given sponsorships: the count of visible partners
        # must match the number collected from the sponsorships.
        # NOTE(review): duplicate partner ids among ``ids`` would make the two
        # counts diverge — confirm callers pass sponsors with distinct partners.
        partner_ids = [sponsor.partner_id.id for sponsor in self.browse(cr, uid, ids, context=context)]
        return len(partner_ids) == self.pool.get("res.partner").search(cr, uid, [("id", "in", partner_ids)], count=True, context=context)
| agpl-3.0 |
the9000/python-crash-track | crashtrack.py | 1 | 2003 | # encoding: utf-8
"""
Track important values, print on exceptions.
Sample use:
@tracked
def foo(a=1):
divisor = a - 1
track("Dividing by %r", divisor)
return 1 / divisor
@tracked
def bar(x):
track('before foo(%r)', x)
return foo(x)
>>> bar(1)
bar: before foo(1)
foo: Dividing by 0
<Normal stacktrace>
"""
__all__ = ['track', 'tracked']
import functools
import sys
from threading import local
class _CrashTrackLocal(local):
    """Thread-local holder for the tracking stack.

    Subclassing ``threading.local`` makes ``__init__`` run once per
    thread, so every thread gets its own fresh ``stack``.  The previous
    ``_local.stack = []`` at module scope only initialized the attribute
    for the importing thread; any other thread calling track()/tracked()
    hit AttributeError.
    """
    def __init__(self):
        # Contains mutable [func_name, current_message] items.
        self.stack = []

_local = _CrashTrackLocal()
def _say(func_name, msg):
    """
    Output one "func_name: msg" stack line to stderr.
    """
    sys.stderr.write("%s: %s\n" % (func_name, msg))
def track(msg, *data):
    """
    Store a string msg % data in a tracking stack.
    It will be printed if an unhandled exception is raised
    in a function decorated with @tracked.

    Must be called from inside a @tracked function (otherwise the
    per-thread stack is empty and indexing [-1] raises IndexError).
    Note that msg is %-formatted even when data is empty, so a literal
    '%' in msg must be escaped as '%%'.
    """
    _local.stack[-1][1] = msg % data
def tracked(func):
    """
    Decorator to use on functions where track() is used.
    If an unhandled exception happens in the function,
    the whole stack of strings accumulated by track() calls
    in the decorated functions, with function names, is printed.
    The exception is then re-raised.
    """
    @functools.wraps(func)  # preserve __name__/__doc__ of the wrapped function
    def printCrashTrack(*args, **kwargs):
        # Push a frame for this call; track() rewrites its message slot.
        _local.stack.append([func.__name__, 'entered'])
        try:
            return func(*args, **kwargs)
        except:
            for func_name, msg in _local.stack:
                _say(func_name, msg)
            _local.stack = []  # so that upper levels don't print it
            raise
        finally:
            # After a print-and-clear above the stack is already empty,
            # so only pop this call's frame on the normal path.
            if _local.stack:
                _local.stack.pop()
    return printCrashTrack
# TODO:
# def trackedAllowing(*exception_classes):
# """
# Same as @tracked, but does not print the trace
# if an exception for exception_classes is raised.
# """
# Problem: how to cut the tracking stack exactly up to the handler?
# Storing actual call frame IDs could be a solution.
# It would allow matching the tracking stack with the exception stack trace if desired.
| bsd-2-clause |
kashif/chainer | chainer/initializer.py | 3 | 1298 | import numpy
class Initializer(object):
    """Base class for array initializers.

    Subclasses fill a given array in place when called.

    Attributes:
        ~Initializer.dtype: Data type specifier. It is for type check in
            ``__call__`` function.

    """

    def __init__(self, dtype=None):
        self.dtype = dtype

    def __call__(self, array):
        """Initialize the given array in place.

        Subclasses must override this; the base class only defines the
        interface and raises ``NotImplementedError``.

        Args:
            array (numpy.ndarray or cupy.ndarray):
                An array to be initialized by this initializer.

        """
        raise NotImplementedError()
# Original code forked from MIT licensed keras project
# https://github.com/fchollet/keras/blob/master/keras/initializations.py
def get_fans(shape):
    """Return (fan_in, fan_out) for a weight tensor of the given shape.

    For shapes longer than 2, the trailing dimensions are treated as the
    receptive field; fan_in = shape[1] * prod(shape[2:]) and
    fan_out = shape[0] * prod(shape[2:]).

    Raises:
        ValueError: if ``shape`` is not a tuple of length >= 2.
    """
    if not isinstance(shape, tuple):
        raise ValueError('shape must be tuple')
    if len(shape) < 2:
        # Fixed: the message was passed as a second ValueError argument
        # instead of being formatted into the string.
        raise ValueError(
            'shape must be of length >= 2: shape={}'.format(shape))
    receptive_field_size = numpy.prod(shape[2:], dtype=numpy.int32)
    fan_in = shape[1] * receptive_field_size
    fan_out = shape[0] * receptive_field_size
    return fan_in, fan_out
| mit |
Sodki/ansible | lib/ansible/modules/cloud/openstack/os_server_group.py | 27 | 5339 | #!/usr/bin/python
# Copyright (c) 2016 Catalyst IT Limited
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: os_server_group
short_description: Manage OpenStack server groups
extends_documentation_fragment: openstack
version_added: "2.2"
author: "Lingxian Kong (@kong)"
description:
- Add or remove server groups from OpenStack.
options:
state:
description:
- Indicate desired state of the resource. When I(state) is 'present',
then I(policies) is required.
choices: ['present', 'absent']
required: false
default: present
name:
description:
- Server group name.
required: true
policies:
description:
- A list of one or more policy names to associate with the server
group. The list must contain at least one policy name. The current
valid policy names are anti-affinity, affinity, soft-anti-affinity
and soft-affinity.
required: false
availability_zone:
description:
- Ignored. Present for backwards compatability
required: false
requirements:
- "python >= 2.6"
- "shade"
'''
EXAMPLES = '''
# Create a server group with 'affinity' policy.
- os_server_group:
state: present
auth:
auth_url: https://api.cloud.catalyst.net.nz:5000/v2.0
username: admin
password: admin
project_name: admin
name: my_server_group
policies:
- affinity
# Delete 'my_server_group' server group.
- os_server_group:
state: absent
auth:
auth_url: https://api.cloud.catalyst.net.nz:5000/v2.0
username: admin
password: admin
project_name: admin
name: my_server_group
'''
RETURN = '''
id:
description: Unique UUID.
returned: success
type: string
name:
description: The name of the server group.
returned: success
type: string
policies:
description: A list of one or more policy names of the server group.
returned: success
type: list of strings
members:
description: A list of members in the server group.
returned: success
type: list of strings
metadata:
description: Metadata key and value pairs.
returned: success
type: dict
project_id:
description: The project ID who owns the server group.
returned: success
type: string
user_id:
description: The user ID who owns the server group.
returned: success
type: string
'''
try:
import shade
HAS_SHADE = True
except ImportError:
HAS_SHADE = False
def _system_state_change(state, server_group):
    """Return True when the desired state differs from what exists,
    i.e. when applying the module would change something."""
    exists = bool(server_group)
    if state == 'present':
        return not exists
    if state == 'absent':
        return exists
    return False
def main():
    """Ansible module entry point: ensure an OpenStack server group
    exists (with the given policies) or is absent."""
    argument_spec = openstack_full_argument_spec(
        name=dict(required=True),
        policies=dict(required=False, type='list'),
        state=dict(default='present', choices=['absent', 'present']),
    )
    module_kwargs = openstack_module_kwargs()
    module = AnsibleModule(
        argument_spec,
        supports_check_mode=True,
        **module_kwargs
    )
    if not HAS_SHADE:
        module.fail_json(msg='shade is required for this module')
    name = module.params['name']
    policies = module.params['policies']
    state = module.params['state']
    try:
        cloud = shade.openstack_cloud(**module.params)
        server_group = cloud.get_server_group(name)
        if module.check_mode:
            # Check mode: report whether anything would change, do nothing.
            module.exit_json(
                changed=_system_state_change(state, server_group)
            )
        changed = False
        if state == 'present':
            if not server_group:
                # 'policies' is only mandatory when we actually create.
                if not policies:
                    module.fail_json(
                        msg="Parameter 'policies' is required in Server Group "
                            "Create"
                    )
                server_group = cloud.create_server_group(name, policies)
                changed = True
            module.exit_json(
                changed=changed,
                id=server_group['id'],
                server_group=server_group
            )
        if state == 'absent':
            if server_group:
                cloud.delete_server_group(server_group['id'])
                changed = True
            module.exit_json(changed=changed)
    except shade.OpenStackCloudException as e:
        module.fail_json(msg=str(e), extra_data=e.extra_data)
# this is magic, see lib/ansible/module_common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
redhat-openstack/python-openstackclient | openstackclient/tests/compute/v2/test_agent.py | 1 | 7429 | # Copyright 2016 Easystack. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import mock
from mock import call
from openstackclient.common import exceptions
from openstackclient.compute.v2 import agent
from openstackclient.tests.compute.v2 import fakes as compute_fakes
class TestAgent(compute_fakes.TestComputev2):
    """Shared base for the agent command tests: one fake agent plus the
    expected column/data layout of the show-style output."""
    fake_agent = compute_fakes.FakeAgent.create_one_agent()
    columns = (
        'agent_id',
        'architecture',
        'hypervisor',
        'md5hash',
        'os',
        'url',
        'version',
    )
    data = (
        fake_agent.agent_id,
        fake_agent.architecture,
        fake_agent.hypervisor,
        fake_agent.md5hash,
        fake_agent.os,
        fake_agent.url,
        fake_agent.version,
    )
    def setUp(self):
        super(TestAgent, self).setUp()
        # Shortcut to the mocked compute agents manager, reset per test.
        self.agents_mock = self.app.client_manager.compute.agents
        self.agents_mock.reset_mock()
class TestAgentCreate(TestAgent):
    """Tests for the ``compute agent create`` command."""
    def setUp(self):
        super(TestAgentCreate, self).setUp()
        self.agents_mock.create.return_value = self.fake_agent
        self.cmd = agent.CreateAgent(self.app, None)
    def test_agent_create(self):
        # Positional CLI arguments in the order the command expects them.
        arglist = [
            self.fake_agent.os,
            self.fake_agent.architecture,
            self.fake_agent.version,
            self.fake_agent.url,
            self.fake_agent.md5hash,
            self.fake_agent.hypervisor,
        ]
        verifylist = [
            ('os', self.fake_agent.os),
            ('architecture', self.fake_agent.architecture),
            ('version', self.fake_agent.version),
            ('url', self.fake_agent.url),
            ('md5hash', self.fake_agent.md5hash),
            ('hypervisor', self.fake_agent.hypervisor),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.agents_mock.create.assert_called_with(parsed_args.os,
                                                   parsed_args.architecture,
                                                   parsed_args.version,
                                                   parsed_args.url,
                                                   parsed_args.md5hash,
                                                   parsed_args.hypervisor)
        self.assertEqual(self.columns, columns)
        self.assertEqual(self.data, data)
class TestAgentDelete(TestAgent):
    """Tests for the ``compute agent delete`` command (single, multiple,
    and partial-failure cases)."""
    fake_agents = compute_fakes.FakeAgent.create_agents(count=2)
    def setUp(self):
        super(TestAgentDelete, self).setUp()
        self.agents_mock.get.return_value = self.fake_agents
        self.cmd = agent.DeleteAgent(self.app, None)
    def test_delete_one_agent(self):
        arglist = [
            self.fake_agents[0].agent_id
        ]
        verifylist = [
            ('id', [self.fake_agents[0].agent_id]),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.agents_mock.delete.assert_called_with(
            self.fake_agents[0].agent_id)
        self.assertIsNone(result)
    def test_delete_multiple_agents(self):
        arglist = []
        for n in self.fake_agents:
            arglist.append(n.agent_id)
        verifylist = [
            ('id', arglist),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        # One delete call per requested agent, in order.
        calls = []
        for n in self.fake_agents:
            calls.append(call(n.agent_id))
        self.agents_mock.delete.assert_has_calls(calls)
        self.assertIsNone(result)
    def test_delete_multiple_agents_exception(self):
        # Third id is bogus: delete raises NotFound on it.
        arglist = [
            self.fake_agents[0].agent_id,
            self.fake_agents[1].agent_id,
            'x-y-z',
        ]
        verifylist = [
            ('id', arglist),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        ret_delete = [
            None,
            None,
            exceptions.NotFound('404')
        ]
        self.agents_mock.delete = mock.Mock(side_effect=ret_delete)
        # The command surfaces the failure as CommandError ...
        self.assertRaises(exceptions.CommandError, self.cmd.take_action,
                          parsed_args)
        # ... but the first two deletions must still have been attempted.
        calls = [
            call(self.fake_agents[0].agent_id),
            call(self.fake_agents[1].agent_id),
        ]
        self.agents_mock.delete.assert_has_calls(calls)
class TestAgentList(TestAgent):
    """Tests for the ``compute agent list`` command."""
    agents = compute_fakes.FakeAgent.create_agents(count=3)
    list_columns = (
        "Agent ID",
        "Hypervisor",
        "OS",
        "Architecture",
        "Version",
        "Md5Hash",
        "URL",
    )
    # Expected rows, one per fake agent, in the same column order as above.
    list_data = []
    for _agent in agents:
        list_data.append((
            _agent.agent_id,
            _agent.hypervisor,
            _agent.os,
            _agent.architecture,
            _agent.version,
            _agent.md5hash,
            _agent.url,
        ))
    def setUp(self):
        super(TestAgentList, self).setUp()
        self.agents_mock.list.return_value = self.agents
        self.cmd = agent.ListAgent(self.app, None)
    def test_agent_list(self):
        arglist = []
        verifylist = []
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.assertEqual(self.list_columns, columns)
        self.assertEqual(self.list_data, list(data))
    def test_agent_list_with_hypervisor(self):
        arglist = [
            '--hypervisor',
            'hypervisor',
        ]
        verifylist = [
            ('hypervisor', 'hypervisor'),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        columns, data = self.cmd.take_action(parsed_args)
        self.assertEqual(self.list_columns, columns)
        self.assertEqual(self.list_data, list(data))
class TestAgentSet(TestAgent):
    """Tests for the ``compute agent set`` command."""
    def setUp(self):
        super(TestAgentSet, self).setUp()
        self.agents_mock.update.return_value = self.fake_agent
        self.cmd = agent.SetAgent(self.app, None)
    def test_agent_set(self):
        arglist = [
            'id',
            'new-version',
            'new-url',
            'new-md5hash',
        ]
        verifylist = [
            ('id', 'id'),
            ('version', 'new-version'),
            ('url', 'new-url'),
            ('md5hash', 'new-md5hash'),
        ]
        parsed_args = self.check_parser(self.cmd, arglist, verifylist)
        result = self.cmd.take_action(parsed_args)
        self.agents_mock.update.assert_called_with(parsed_args.id,
                                                   parsed_args.version,
                                                   parsed_args.url,
                                                   parsed_args.md5hash)
        self.assertIsNone(result)
| apache-2.0 |
cbrewster/servo | tests/wpt/web-platform-tests/tools/third_party/html5lib/html5lib/tests/test_encoding.py | 30 | 4801 | from __future__ import absolute_import, division, unicode_literals
import os
import pytest
from .support import get_data_files, test_dir, errorMessage, TestData as _TestData
from html5lib import HTMLParser, _inputstream
def test_basic_prescan_length():
    """The encoding prescan must honor a <meta charset> that sits exactly
    within the first 1024 bytes of the document."""
    data = "<title>Caf\u00E9</title><!--a--><meta charset='utf-8'>".encode('utf-8')
    # Grow the comment so the whole document is exactly 1024 bytes.
    pad = 1024 - len(data) + 1
    data = data.replace(b"-a-", b"-" + (b"a" * pad) + b"-")
    assert len(data) == 1024 # Sanity
    stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False)
    assert 'utf-8' == stream.charEncoding[0].name
def test_parser_reparse():
    """A <meta charset> beyond the prescan window (10240 bytes) is missed
    by the input stream but found by the parser, which must reparse with
    the declared encoding."""
    data = "<title>Caf\u00E9</title><!--a--><meta charset='utf-8'>".encode('utf-8')
    # Grow the comment so the meta tag sits just past the 10240-byte window.
    pad = 10240 - len(data) + 1
    data = data.replace(b"-a-", b"-" + (b"a" * pad) + b"-")
    assert len(data) == 10240 # Sanity
    stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False)
    assert 'windows-1252' == stream.charEncoding[0].name
    p = HTMLParser(namespaceHTMLElements=False)
    doc = p.parse(data, useChardet=False)
    assert 'utf-8' == p.documentEncoding
    assert doc.find(".//title").text == "Caf\u00E9"
# Each case: expected encoding, document bytes, and keyword hints, covering
# the precedence order BOM > override > transport > meta > parent > likely
# > default > windows-1252 fallback.
@pytest.mark.parametrize("expected,data,kwargs", [
    ("utf-16le", b"\xFF\xFE", {"override_encoding": "iso-8859-2"}),
    ("utf-16be", b"\xFE\xFF", {"override_encoding": "iso-8859-2"}),
    ("utf-8", b"\xEF\xBB\xBF", {"override_encoding": "iso-8859-2"}),
    ("iso-8859-2", b"", {"override_encoding": "iso-8859-2", "transport_encoding": "iso-8859-3"}),
    ("iso-8859-2", b"<meta charset=iso-8859-3>", {"transport_encoding": "iso-8859-2"}),
    ("iso-8859-2", b"<meta charset=iso-8859-2>", {"same_origin_parent_encoding": "iso-8859-3"}),
    ("iso-8859-2", b"", {"same_origin_parent_encoding": "iso-8859-2", "likely_encoding": "iso-8859-3"}),
    ("iso-8859-2", b"", {"same_origin_parent_encoding": "utf-16", "likely_encoding": "iso-8859-2"}),
    ("iso-8859-2", b"", {"same_origin_parent_encoding": "utf-16be", "likely_encoding": "iso-8859-2"}),
    ("iso-8859-2", b"", {"same_origin_parent_encoding": "utf-16le", "likely_encoding": "iso-8859-2"}),
    ("iso-8859-2", b"", {"likely_encoding": "iso-8859-2", "default_encoding": "iso-8859-3"}),
    ("iso-8859-2", b"", {"default_encoding": "iso-8859-2"}),
    ("windows-1252", b"", {"default_encoding": "totally-bogus-string"}),
    ("windows-1252", b"", {}),
])
def test_parser_args(expected, data, kwargs):
    """Both the raw input stream and the parser must resolve the same
    encoding for a given set of encoding hints."""
    stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False, **kwargs)
    assert expected == stream.charEncoding[0].name
    p = HTMLParser()
    p.parse(data, useChardet=False, **kwargs)
    assert expected == p.documentEncoding
# Any encoding-related keyword is rejected when the input is already text.
@pytest.mark.parametrize("kwargs", [
    {"override_encoding": "iso-8859-2"},
    {"override_encoding": None},
    {"transport_encoding": "iso-8859-2"},
    {"transport_encoding": None},
    {"same_origin_parent_encoding": "iso-8859-2"},
    {"same_origin_parent_encoding": None},
    {"likely_encoding": "iso-8859-2"},
    {"likely_encoding": None},
    {"default_encoding": "iso-8859-2"},
    {"default_encoding": None},
    {"foo_encoding": "iso-8859-2"},
    {"foo_encoding": None},
])
def test_parser_args_raises(kwargs):
    """Passing any *_encoding keyword with unicode input must raise
    TypeError with a stable message prefix."""
    with pytest.raises(TypeError) as exc_info:
        p = HTMLParser()
        p.parse("", useChardet=False, **kwargs)
    assert exc_info.value.args[0].startswith("Cannot set an encoding with a unicode input")
def runParserEncodingTest(data, encoding):
    """Assert that parsing *data* makes the parser report *encoding*
    (given as bytes in the test fixture)."""
    p = HTMLParser()
    assert p.documentEncoding is None
    p.parse(data, useChardet=False)
    encoding = encoding.lower().decode("ascii")
    assert encoding == p.documentEncoding, errorMessage(data, encoding, p.documentEncoding)
def runPreScanEncodingTest(data, encoding):
    """Assert that the byte-stream prescan alone detects *encoding* for
    documents small enough to fit in the prescan window."""
    stream = _inputstream.HTMLBinaryInputStream(data, useChardet=False)
    encoding = encoding.lower().decode("ascii")
    # Very crude way to ignore irrelevant tests
    if len(data) > stream.numBytesMeta:
        return
    assert encoding == stream.charEncoding[0].name, errorMessage(data, encoding, stream.charEncoding[0].name)
def test_encoding():
    """Generate one parser test and one prescan test per fixture case.

    NOTE(review): this is a nose-style yield-test generator; modern
    pytest no longer collects yielded tests — confirm the test runner.
    """
    for filename in get_data_files("encoding"):
        tests = _TestData(filename, b"data", encoding=None)
        for test in tests:
            yield (runParserEncodingTest, test[b'data'], test[b'encoding'])
            yield (runPreScanEncodingTest, test[b'data'], test[b'encoding'])
# pylint:disable=wrong-import-position
# The chardet-backed sniffing test only exists when chardet is installed.
try:
    import chardet # noqa
except ImportError:
    print("chardet not found, skipping chardet tests")
else:
    def test_chardet():
        """With no declared encoding, chardet sniffing should detect big5."""
        with open(os.path.join(test_dir, "encoding", "chardet", "test_big5.txt"), "rb") as fp:
            encoding = _inputstream.HTMLInputStream(fp.read()).charEncoding
            assert encoding[0].name == "big5"
# pylint:enable=wrong-import-position
| mpl-2.0 |
sustainingtechnologies/django-guardian | guardian/migrations/0002_auto__add_field_groupobjectpermission_object_pk__add_field_userobjectp.py | 85 | 5650 | # encoding: utf-8
from south.db import db
from south.v2 import SchemaMigration
from guardian.compat import user_model_label
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: add a text ``object_pk`` column (default
        empty string, default not kept) to both guardian permission tables."""
        # Adding field 'GroupObjectPermission.object_pk'
        db.add_column('guardian_groupobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)
        # Adding field 'UserObjectPermission.object_pk'
        db.add_column('guardian_userobjectpermission', 'object_pk', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)
def backwards(self, orm):
# Deleting field 'GroupObjectPermission.object_pk'
db.delete_column('guardian_groupobjectpermission', 'object_pk')
# Deleting field 'UserObjectPermission.object_pk'
db.delete_column('guardian_userobjectpermission', 'object_pk')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
user_model_label: {
'Meta': {'object_name': user_model_label.split('.')[-1]},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'guardian.groupobjectpermission': {
'Meta': {'unique_together': "(['group', 'permission', 'content_type', 'object_id'],)", 'object_name': 'GroupObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_pk': ('django.db.models.fields.TextField', [], {'default': "''"}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"})
},
'guardian.userobjectpermission': {
'Meta': {'unique_together': "(['user', 'permission', 'content_type', 'object_id'],)", 'object_name': 'UserObjectPermission'},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'object_pk': ('django.db.models.fields.TextField', [], {'default': "''"}),
'permission': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Permission']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['%s']" % user_model_label})
}
}
complete_apps = ['guardian']
| bsd-2-clause |
fgirault/smeuhsocial | apps/django_openid/forms.py | 1 | 5247 | from django.contrib.auth.models import User
from django import forms
import re
class RegistrationForm(forms.ModelForm):
    """User registration form with optional OpenID association.

    A password is only required when no OpenID is being attached to the
    new account (see clean_password).
    """
    no_password_error = 'You must either set a password or attach an OpenID'
    invalid_username_error = 'Usernames must consist of letters and numbers'
    reserved_username_error = 'That username cannot be registered'
    duplicate_email_error = 'That e-mail address is already in use'

    username_re = re.compile('^[a-zA-Z0-9]+$')

    # Additional required fields (above what the User model says)
    extra_required = ('first_name', 'last_name', 'email')

    def __init__(self, *args, **kwargs):
        """
        Accepts openid as optional keyword argument, for password validation.
        Also accepts optional reserved_usernames keyword argument which is a
        list of usernames that should not be registered (e.g. 'security'),
        and no_duplicate_emails to reject already-registered addresses.
        """
        # Pop our custom keyword arguments before ModelForm sees them.
        self.openid = kwargs.pop('openid', None)
        self.reserved_usernames = kwargs.pop('reserved_usernames', [])
        self.no_duplicate_emails = kwargs.pop('no_duplicate_emails', False)
        # Super's __init__ creates self.fields for us
        super(RegistrationForm, self).__init__(*args, **kwargs)
        # Now we can modify self.fields with our extra required information
        for field in self.extra_required:
            self.fields[field].required = True

    class Meta:
        model = User
        fields = ('username', 'first_name', 'last_name', 'email')

    # Password is NOT required as a general rule; we only validate that they
    # have set a password if an OpenID is not being associated
    password = forms.CharField(
        widget = forms.PasswordInput,
        required = False
    )

    def clean_username(self):
        """Reject malformed and reserved usernames."""
        username = self.cleaned_data.get('username', '')
        if not self.username_re.match(username):
            raise forms.ValidationError(self.invalid_username_error)
        if username in self.reserved_usernames:
            raise forms.ValidationError(self.reserved_username_error)
        return username

    def clean_password(self):
        "Password is only required if no OpenID was specified"
        password = self.cleaned_data.get('password', '')
        if not self.openid and not password:
            raise forms.ValidationError(self.no_password_error)
        return password

    def clean_email(self):
        """Optionally reject e-mail addresses that are already registered."""
        email = self.cleaned_data.get('email', '')
        # .exists() avoids fetching a full COUNT when we only need a boolean.
        if self.no_duplicate_emails and User.objects.filter(
            email = email
        ).exists():
            raise forms.ValidationError(self.duplicate_email_error)
        return email
class RegistrationFormPasswordConfirm(RegistrationForm):
    """Registration form that asks for the password twice."""
    password_mismatch_error = 'Your passwords do not match'

    password2 = forms.CharField(
        widget = forms.PasswordInput,
        required = False,
        label = "Confirm password"
    )

    def clean_password2(self):
        """Ensure the confirmation matches the first password."""
        password = self.cleaned_data.get('password', '')
        password2 = self.cleaned_data.get('password2', '')
        if password and (password != password2):
            raise forms.ValidationError(self.password_mismatch_error)
        return password2
class ChangePasswordForm(forms.Form):
    """Form for setting a new password, with confirmation field."""
    password = forms.CharField(
        widget = forms.PasswordInput,
        required = True
    )
    password2 = forms.CharField(
        widget = forms.PasswordInput,
        required = True,
        label = 'Confirm password'
    )
    password_mismatch_error = 'Your passwords do not match'

    def __init__(self, user, *args, **kwargs):
        # The user whose password is being changed; subclasses use it for
        # old-password verification.
        self.user = user
        super(ChangePasswordForm, self).__init__(*args, **kwargs)

    def clean_password2(self):
        """Ensure the confirmation matches the first password."""
        password = self.cleaned_data.get('password', '')
        password2 = self.cleaned_data.get('password2', '')
        if password and (password != password2):
            raise forms.ValidationError(self.password_mismatch_error)
        return password2
class ChangePasswordVerifyOldForm(ChangePasswordForm):
    """
    Use this if you want the user to enter their old password first.
    Careful though... if the user has just recovered their account, they
    should be able to reset their password without having to enter the old
    one. This case is not currently handled.
    """
    password_incorrect_error = 'Your password is incorrect'

    def __init__(self, *args, **kwargs):
        super(ChangePasswordVerifyOldForm, self).__init__(*args, **kwargs)
        if self.user.has_usable_password() and self.user.password:
            # Only ask for their old password if they have set it already
            self.fields['old_password'] = forms.CharField(
                widget = forms.PasswordInput,
                required = True
            )

    def clean_old_password(self):
        """Check the old password against the stored hash."""
        password = self.cleaned_data.get('old_password', '')
        if not self.user.check_password(password):
            raise forms.ValidationError(self.password_incorrect_error)
        # Return the value so it survives in cleaned_data (the original
        # implicitly returned None, wiping the field).
        return password
| mit |
hypnotika/namebench | nb_third_party/dns/rdtypes/IN/DHCID.py | 248 | 2116 | # Copyright (C) 2006, 2007, 2009, 2010 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
# NOTE(review): this module only imports dns.exception; it relies on
# dns.rdata already being imported by the package's rdata loader so that
# the dns.rdata attribute below resolves -- confirm against dnspython's
# module loading.
class DHCID(dns.rdata.Rdata):

    """DHCID record

    @ivar data: the data (the content of the RR is opaque as far as the
    DNS is concerned)
    @type data: string
    @see: RFC 4701"""

    __slots__ = ['data']

    def __init__(self, rdclass, rdtype, data):
        super(DHCID, self).__init__(rdclass, rdtype)
        self.data = data

    def to_text(self, origin=None, relativize=True, **kw):
        # Render the opaque payload as base64 text.
        return dns.rdata._base64ify(self.data)

    def from_text(cls, rdclass, rdtype, tok, origin = None, relativize = True):
        # Collect whitespace-separated base64 chunks up to end of line/file.
        chunks = []
        while 1:
            t = tok.get().unescape()
            if t.is_eol_or_eof():
                break
            if not t.is_identifier():
                raise dns.exception.SyntaxError
            chunks.append(t.value)
        b64 = ''.join(chunks)
        # Python 2 only: str.decode('base64_codec') does not exist on
        # Python 3 (there it would be base64.b64decode on bytes).
        data = b64.decode('base64_codec')
        return cls(rdclass, rdtype, data)

    from_text = classmethod(from_text)

    def to_wire(self, file, compress = None, origin = None):
        # The payload is written verbatim; no name compression applies.
        file.write(self.data)

    def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin = None):
        data = wire[current : current + rdlen]
        return cls(rdclass, rdtype, data)

    from_wire = classmethod(from_wire)

    def _cmp(self, other):
        # Python 2 only: cmp() was removed in Python 3.
        return cmp(self.data, other.data)
| apache-2.0 |
density215/d215-miniblog | unidecode/x076.py | 252 | 4639 | data = (
'Yu ', # 0x00
'Cui ', # 0x01
'Ya ', # 0x02
'Zhu ', # 0x03
'Cu ', # 0x04
'Dan ', # 0x05
'Shen ', # 0x06
'Zhung ', # 0x07
'Ji ', # 0x08
'Yu ', # 0x09
'Hou ', # 0x0a
'Feng ', # 0x0b
'La ', # 0x0c
'Yang ', # 0x0d
'Shen ', # 0x0e
'Tu ', # 0x0f
'Yu ', # 0x10
'Gua ', # 0x11
'Wen ', # 0x12
'Huan ', # 0x13
'Ku ', # 0x14
'Jia ', # 0x15
'Yin ', # 0x16
'Yi ', # 0x17
'Lu ', # 0x18
'Sao ', # 0x19
'Jue ', # 0x1a
'Chi ', # 0x1b
'Xi ', # 0x1c
'Guan ', # 0x1d
'Yi ', # 0x1e
'Wen ', # 0x1f
'Ji ', # 0x20
'Chuang ', # 0x21
'Ban ', # 0x22
'Lei ', # 0x23
'Liu ', # 0x24
'Chai ', # 0x25
'Shou ', # 0x26
'Nue ', # 0x27
'Dian ', # 0x28
'Da ', # 0x29
'Pie ', # 0x2a
'Tan ', # 0x2b
'Zhang ', # 0x2c
'Biao ', # 0x2d
'Shen ', # 0x2e
'Cu ', # 0x2f
'Luo ', # 0x30
'Yi ', # 0x31
'Zong ', # 0x32
'Chou ', # 0x33
'Zhang ', # 0x34
'Zhai ', # 0x35
'Sou ', # 0x36
'Suo ', # 0x37
'Que ', # 0x38
'Diao ', # 0x39
'Lou ', # 0x3a
'Lu ', # 0x3b
'Mo ', # 0x3c
'Jin ', # 0x3d
'Yin ', # 0x3e
'Ying ', # 0x3f
'Huang ', # 0x40
'Fu ', # 0x41
'Liao ', # 0x42
'Long ', # 0x43
'Qiao ', # 0x44
'Liu ', # 0x45
'Lao ', # 0x46
'Xian ', # 0x47
'Fei ', # 0x48
'Dan ', # 0x49
'Yin ', # 0x4a
'He ', # 0x4b
'Yan ', # 0x4c
'Ban ', # 0x4d
'Xian ', # 0x4e
'Guan ', # 0x4f
'Guai ', # 0x50
'Nong ', # 0x51
'Yu ', # 0x52
'Wei ', # 0x53
'Yi ', # 0x54
'Yong ', # 0x55
'Pi ', # 0x56
'Lei ', # 0x57
'Li ', # 0x58
'Shu ', # 0x59
'Dan ', # 0x5a
'Lin ', # 0x5b
'Dian ', # 0x5c
'Lin ', # 0x5d
'Lai ', # 0x5e
'Pie ', # 0x5f
'Ji ', # 0x60
'Chi ', # 0x61
'Yang ', # 0x62
'Xian ', # 0x63
'Jie ', # 0x64
'Zheng ', # 0x65
'[?] ', # 0x66
'Li ', # 0x67
'Huo ', # 0x68
'Lai ', # 0x69
'Shaku ', # 0x6a
'Dian ', # 0x6b
'Xian ', # 0x6c
'Ying ', # 0x6d
'Yin ', # 0x6e
'Qu ', # 0x6f
'Yong ', # 0x70
'Tan ', # 0x71
'Dian ', # 0x72
'Luo ', # 0x73
'Luan ', # 0x74
'Luan ', # 0x75
'Bo ', # 0x76
'[?] ', # 0x77
'Gui ', # 0x78
'Po ', # 0x79
'Fa ', # 0x7a
'Deng ', # 0x7b
'Fa ', # 0x7c
'Bai ', # 0x7d
'Bai ', # 0x7e
'Qie ', # 0x7f
'Bi ', # 0x80
'Zao ', # 0x81
'Zao ', # 0x82
'Mao ', # 0x83
'De ', # 0x84
'Pa ', # 0x85
'Jie ', # 0x86
'Huang ', # 0x87
'Gui ', # 0x88
'Ci ', # 0x89
'Ling ', # 0x8a
'Gao ', # 0x8b
'Mo ', # 0x8c
'Ji ', # 0x8d
'Jiao ', # 0x8e
'Peng ', # 0x8f
'Gao ', # 0x90
'Ai ', # 0x91
'E ', # 0x92
'Hao ', # 0x93
'Han ', # 0x94
'Bi ', # 0x95
'Wan ', # 0x96
'Chou ', # 0x97
'Qian ', # 0x98
'Xi ', # 0x99
'Ai ', # 0x9a
'Jiong ', # 0x9b
'Hao ', # 0x9c
'Huang ', # 0x9d
'Hao ', # 0x9e
'Ze ', # 0x9f
'Cui ', # 0xa0
'Hao ', # 0xa1
'Xiao ', # 0xa2
'Ye ', # 0xa3
'Po ', # 0xa4
'Hao ', # 0xa5
'Jiao ', # 0xa6
'Ai ', # 0xa7
'Xing ', # 0xa8
'Huang ', # 0xa9
'Li ', # 0xaa
'Piao ', # 0xab
'He ', # 0xac
'Jiao ', # 0xad
'Pi ', # 0xae
'Gan ', # 0xaf
'Pao ', # 0xb0
'Zhou ', # 0xb1
'Jun ', # 0xb2
'Qiu ', # 0xb3
'Cun ', # 0xb4
'Que ', # 0xb5
'Zha ', # 0xb6
'Gu ', # 0xb7
'Jun ', # 0xb8
'Jun ', # 0xb9
'Zhou ', # 0xba
'Zha ', # 0xbb
'Gu ', # 0xbc
'Zhan ', # 0xbd
'Du ', # 0xbe
'Min ', # 0xbf
'Qi ', # 0xc0
'Ying ', # 0xc1
'Yu ', # 0xc2
'Bei ', # 0xc3
'Zhao ', # 0xc4
'Zhong ', # 0xc5
'Pen ', # 0xc6
'He ', # 0xc7
'Ying ', # 0xc8
'He ', # 0xc9
'Yi ', # 0xca
'Bo ', # 0xcb
'Wan ', # 0xcc
'He ', # 0xcd
'Ang ', # 0xce
'Zhan ', # 0xcf
'Yan ', # 0xd0
'Jian ', # 0xd1
'He ', # 0xd2
'Yu ', # 0xd3
'Kui ', # 0xd4
'Fan ', # 0xd5
'Gai ', # 0xd6
'Dao ', # 0xd7
'Pan ', # 0xd8
'Fu ', # 0xd9
'Qiu ', # 0xda
'Sheng ', # 0xdb
'Dao ', # 0xdc
'Lu ', # 0xdd
'Zhan ', # 0xde
'Meng ', # 0xdf
'Li ', # 0xe0
'Jin ', # 0xe1
'Xu ', # 0xe2
'Jian ', # 0xe3
'Pan ', # 0xe4
'Guan ', # 0xe5
'An ', # 0xe6
'Lu ', # 0xe7
'Shu ', # 0xe8
'Zhou ', # 0xe9
'Dang ', # 0xea
'An ', # 0xeb
'Gu ', # 0xec
'Li ', # 0xed
'Mu ', # 0xee
'Cheng ', # 0xef
'Gan ', # 0xf0
'Xu ', # 0xf1
'Mang ', # 0xf2
'Mang ', # 0xf3
'Zhi ', # 0xf4
'Qi ', # 0xf5
'Ruan ', # 0xf6
'Tian ', # 0xf7
'Xiang ', # 0xf8
'Dun ', # 0xf9
'Xin ', # 0xfa
'Xi ', # 0xfb
'Pan ', # 0xfc
'Feng ', # 0xfd
'Dun ', # 0xfe
'Min ', # 0xff
)
| bsd-3-clause |
smarx/dropbox-sdk-python | test/test_dropbox.py | 2 | 19859 | from __future__ import absolute_import, division, print_function, unicode_literals
import datetime
import os
import random
import string
import sys
import unittest
from dropbox import Dropbox
from dropbox.exceptions import (
ApiError,
AuthError,
BadInputError,
)
from dropbox.files import (
ListFolderError,
)
# Get token from environment variable.
oauth2_token = os.environ.get('DROPBOX_TOKEN')
if oauth2_token is None:
    # Abort at import time: every test in this module needs a live token.
    print('Set DROPBOX_TOKEN environment variable to a valid token.',
          file=sys.stderr)
    sys.exit(1)

# Token that is not even shaped like a real token (triggers BadInputError).
MALFORMED_TOKEN = 'asdf'
# Reasonable-looking token that the server should reject (triggers AuthError).
INVALID_TOKEN = 'z' * 62
class TestDropbox(unittest.TestCase):
    """Integration tests for the v2 Dropbox client.

    These hit the live Dropbox API using the DROPBOX_TOKEN read above.
    """

    def setUp(self):
        self.dbx = Dropbox(oauth2_token)

    def test_bad_auth(self):
        """Malformed and invalid tokens raise distinct error types."""
        # Test malformed token
        malformed_token_dbx = Dropbox(MALFORMED_TOKEN)
        with self.assertRaises(BadInputError) as cm:
            malformed_token_dbx.files_list_folder('')
        self.assertIn('token is malformed', cm.exception.message)

        # Test reasonable-looking invalid token
        invalid_token_dbx = Dropbox(INVALID_TOKEN)
        with self.assertRaises(AuthError) as cm:
            invalid_token_dbx.files_list_folder('')
        self.assertEqual(cm.exception.reason['error']['.tag'],
                         'invalid_access_token')

    def test_rpc(self):
        """A basic RPC succeeds and an unknown path raises ApiError."""
        self.dbx.files_list_folder('')

        # Test API error
        random_folder_path = '/' + \
            ''.join(random.sample(string.ascii_letters, 15))
        with self.assertRaises(ApiError) as cm:
            self.dbx.files_list_folder(random_folder_path)
        self.assertIsInstance(cm.exception.reason, ListFolderError)

    def test_upload_download(self):
        """Round-trip a small file through upload and download."""
        # Upload file
        timestamp = str(datetime.datetime.utcnow())
        random_filename = ''.join(random.sample(string.ascii_letters, 15))
        random_path = '/Test/%s/%s' % (timestamp, random_filename)
        test_contents = string.ascii_letters
        self.dbx.files_upload(test_contents, random_path)

        # Download file
        metadata, resp = self.dbx.files_download(random_path)
        self.assertEqual(string.ascii_letters, resp.text)

        # Cleanup folder
        self.dbx.files_delete('/Test/%s' % timestamp)
from dropbox import client
class TestV1DropboxClient(unittest.TestCase):
    """Placeholder suite for the legacy v1 DropboxClient API."""

    def setUp(self):
        pass

    def test_upload(self):
        # TODO(review): this does not exercise upload at all -- it only
        # prints. Either implement the test or remove it.
        print("Hello!")
import json
import os
import posixpath
import six
import sys
import threading
from uuid import UUID
from dropbox import session, client
from dropbox.rest import ErrorResponse
if six.PY3:
from io import StringIO
else:
from StringIO import StringIO
try:
import json
except ImportError:
import simplejson as json
PY3 = sys.version_info[0] == 3
class BaseClientTests(unittest.TestCase):
    """Integration tests for the legacy v1 DropboxClient.

    These tests run against the live Dropbox API using DROPBOX_TOKEN and
    need the local fixture files 'foo.txt', 'Costa Rican Frog.jpg' and
    'dropbox_song.mp3' (see setUp).
    """

    def setUp(self):
        """Creates the API client and decides on a test directory."""
        self.client = client.DropboxClient(oauth2_token)
        self.test_dir = "/Test/%s" % str(datetime.datetime.utcnow())
        self.foo = 'foo.txt'
        self.frog = 'Costa Rican Frog.jpg'
        self.song = 'dropbox_song.mp3'

    def tearDown(self):
        # Best-effort cleanup: ignore "not found" (nothing was created),
        # re-raise anything else.
        try:
            self.client.file_delete(self.test_dir)
        except ErrorResponse as e:
            if 'not found' not in e.body['error']:
                raise

    def upload_file(self, src, target, **kwargs):
        # Helper: upload local file *src* to remote path *target*.
        with open(src, 'rb') as f:
            return self.client.put_file(target, f, **kwargs)

    def dict_has(self, dictionary, *args, **kwargs):
        # Positional args assert key presence; keyword args assert values.
        for key in args:
            self.assertTrue(key in dictionary)
        for (key, value) in kwargs.items():
            self.assertEqual(value, dictionary[key])

    def assert_file(self, dictionary, filename, *args, **kwargs):
        # Assert that *dictionary* (file metadata) matches the local file,
        # with caller-supplied overrides taking precedence over defaults.
        import os
        defaults = dict(
            bytes = os.path.getsize(filename),
            is_dir = False
        )
        combined = dict(list(defaults.items()) + list(kwargs.items()))
        self.dict_has(dictionary, *args,
            **combined
        )

    def test_account_info(self):
        """Tests if the account_info returns the expected fields."""
        account_info = self.client.account_info()
        self.dict_has(account_info,
            "country",
            "display_name",
            "referral_link",
            "quota_info",
            "uid"
        )

    def test_put_file(self):
        """Tests if put_file returns the expected metadata"""
        def test_put(file, path):
            file_path = posixpath.join(self.test_dir, path)
            f = open(file, "rb")
            metadata = self.client.put_file(file_path, f)
            self.assert_file(metadata, file, path = file_path)
        test_put(self.foo, "put_foo.txt")
        test_put(self.song, "put_song.mp3")
        test_put(self.frog, "put_frog.jpg")

    def test_put_file_overwrite(self):
        """Tests if put_file with overwrite=true returns the expected metadata"""
        path = posixpath.join(self.test_dir, "foo_overwrite.txt")
        self.upload_file(self.foo, path)
        f = StringIO("This Overwrites")
        metadata = self.client.put_file(path, f, overwrite=True)
        self.dict_has(metadata,
            size = "15 bytes",
            bytes = 15,
            is_dir = False,
            path = path,
            mime_type = "text/plain"
        )

    def test_get_file(self):
        """Tests if storing and retrieving a file returns the same file"""
        def test_get(file, path):
            file_path = posixpath.join(self.test_dir, path)
            self.upload_file(file, file_path)
            downloaded = self.client.get_file(file_path).read()
            local = open(file, "rb").read()
            self.assertEqual(len(downloaded), len(local))
            self.assertEqual(downloaded, local)
        test_get(self.foo, "get_foo.txt")
        test_get(self.frog, "get_frog.txt")
        test_get(self.song, "get_song.txt")

    def test_get_partial_file(self):
        """Tests if storing a file and retrieving part of it returns the correct part"""
        # start_frac/download_frac are fractions of the file size; None
        # means "unspecified", exercising the server's range defaults.
        def test_get(file, path, start_frac, download_frac):
            file_path = posixpath.join(self.test_dir, path)
            self.upload_file(file, file_path)
            local = open(file, "rb").read()
            local_len = len(local)
            download_start = int(start_frac * local_len) if start_frac is not None else None
            download_length = int(download_frac * local_len) if download_frac is not None else None
            downloaded = self.client.get_file(file_path, start=download_start,
                                              length=download_length).read()
            local_file = open(file, "rb")
            if download_start:
                local_file.seek(download_start)
                if download_length is None:
                    local_partial = local_file.read()
                else:
                    local_partial = local_file.read(download_length)
            elif download_length:
                # No start given: a bare length means a suffix range.
                local_file.seek(-1 * download_length, 2)
                local_partial = local_file.read(download_length)
            self.assertEqual(len(downloaded), len(local_partial))
            self.assertEqual(downloaded, local_partial)
        test_get(self.foo, "get_foo.txt", 0.25, 0.5)
        test_get(self.frog, "get_frog.txt", None, 0.5)
        test_get(self.song, "get_song.txt", 0.25, None)

    def test_metadata(self):
        """Tests if metadata returns the expected values for a files uploaded earlier"""
        path = posixpath.join(self.test_dir, "foo_upload.txt")
        self.upload_file(self.foo, path)
        metadata = self.client.metadata(path)
        self.assert_file(metadata, self.foo, path = path)
        # Test root metadata
        self.client.metadata('/')
        self.client.metadata('')

    def test_metadata_bad(self):
        """Tests if metadata returns an error for nonexistent file"""
        self.assertRaises(
            ErrorResponse,
            lambda: self.client.metadata(posixpath.join(self.test_dir, "foo_does_not_exist.txt"))
        )

    def test_create_folder(self):
        """Tests if creating a folder works"""
        path = posixpath.join(self.test_dir, u"new_fold\xe9r")
        metadata = self.client.file_create_folder(path)
        self.dict_has(metadata,
                      size="0 bytes",
                      bytes=0,
                      is_dir=True,
                      path=path)

    def test_create_folder_dupe(self):
        """Tests if creating a folder fails correctly if one already exists"""
        path = posixpath.join(self.test_dir, u"new_fold\xe9r_dupe")
        self.client.file_create_folder(path)
        self.assertRaises(
            ErrorResponse,
            lambda: self.client.file_create_folder(path)
        )

    def test_delete(self):
        """Tests if deleting a file really makes it disappear"""
        path = posixpath.join(self.test_dir, u"d\xe9lfoo.txt")
        self.upload_file(self.foo, path)
        metadata = self.client.metadata(path)
        self.assert_file(metadata, self.foo, path = path)
        self.client.file_delete(path)
        # Deleted entries still have metadata, flagged with is_deleted.
        metadata = self.client.metadata(path)
        self.assert_file(metadata, self.foo,
            path = path,
            bytes = 0,
            size = "0 bytes",
            is_deleted = True
        )

    def test_copy(self):
        """Tests copying a file, to ensure that two copies exist after the operation"""
        path = posixpath.join(self.test_dir, "copyfoo.txt")
        path2 = posixpath.join(self.test_dir, "copyfoo2.txt")
        self.upload_file(self.foo, path)
        self.client.file_copy(path, path2)
        metadata = self.client.metadata(path)
        metadata2 = self.client.metadata(path2)
        self.assert_file(metadata, self.foo, path = path)
        self.assert_file(metadata2, self.foo, path = path2)

    def test_move(self):
        """Tests moving a file, to ensure the new copy exists and the old copy is removed"""
        path = posixpath.join(self.test_dir, "movefoo.txt")
        path2 = posixpath.join(self.test_dir, "movefoo2.txt")
        self.upload_file(self.foo, path)
        self.client.file_move(path, path2)
        metadata = self.client.metadata(path)
        self.assert_file(metadata, self.foo, path = path, is_deleted = True, size = "0 bytes", bytes = 0)
        metadata = self.client.metadata(path2)
        self.assert_file(metadata, self.foo, path = path2)

    def test_thumbnail(self):
        """Thumbnails grow with requested size and default to 'm'."""
        path = posixpath.join(self.test_dir, "frog.jpeg")
        orig_md = self.upload_file(self.frog, path)
        path = orig_md['path']
        for fmt in ('JPEG', 'PNG'):
            prev_len = 0
            for ident in ('xs', 's', 'm', 'l', 'xl'):
                with self.client.thumbnail(path, ident, fmt) as r:
                    data1 = r.read()
                r, md = self.client.thumbnail_and_metadata(path, ident, fmt)
                with r:
                    data2 = r.read()
                self.assertEquals(data1, data2)
                # Make sure the amount of data returned increases as we increase the size.
                self.assertTrue(len(data1) > prev_len)
                prev_len = len(data1)
        # Make sure the default is 'm'
        with self.client.thumbnail(path, 'm') as r:
            data_m = r.read()
        with self.client.thumbnail(path) as r:
            data1 = r.read()
        r, md = self.client.thumbnail_and_metadata(path)
        with r:
            data2 = r.read()
        self.assertEqual(data_m, data1)
        self.assertEqual(data_m, data2)

    def test_stream(self):
        """Tests file streaming using the /media endpoint"""
        path = posixpath.join(self.test_dir, "stream_song.mp3")
        self.upload_file(self.song, path)
        link = self.client.media(path)
        self.dict_has(link,
            "url",
            "expires",
        )

    def test_share(self):
        """Tests creating a shareable link using the /shares endpoint"""
        path = posixpath.join(self.test_dir, "stream_song.mp3")
        self.upload_file(self.song, path)
        link = self.client.share(path)
        self.dict_has(link,
            "url",
            "expires",
        )

    def test_search(self):
        """Tests searching for a file in a folder"""
        path = posixpath.join(self.test_dir, "search/")
        j = posixpath.join
        self.upload_file(self.foo, j(path, "text.txt"))
        self.upload_file(self.foo, j(path, u"t\xe9xt.txt"))
        self.upload_file(self.foo, j(path, "subFolder/text.txt"))
        self.upload_file(self.foo, j(path, "subFolder/cow.txt"))
        self.upload_file(self.frog, j(path, "frog.jpg"))
        self.upload_file(self.frog, j(path, "frog2.jpg"))
        self.upload_file(self.frog, j(path, "subFolder/frog2.jpg"))
        results = self.client.search(path, "sasdfasdf")
        self.assertEquals(results, [])
        results = self.client.search(path, "jpg")
        self.assertEquals(len(results), 3)
        for metadata in results:
            self.assert_file(metadata, self.frog)
        results = self.client.search(j(path, "subFolder"), "jpg")
        self.assertEquals(len(results), 1)
        self.assert_file(results[0], self.frog)
        # Both the ASCII and accented queries should match all three names.
        all_tex_files = {j(path, n) for n in ["text.txt", u"t\xe9xt.txt", "subFolder/text.txt"]}
        results = self.client.search(path, "tex")
        self.assertEquals({r["path"] for r in results}, all_tex_files)
        results = self.client.search(path, u"t\xe9x")
        self.assertEquals({r["path"] for r in results}, all_tex_files)

    def test_revisions_restore(self):
        """Tests getting the old revisions of a file"""
        path = posixpath.join(self.test_dir, "foo_revs.txt")
        self.upload_file(self.foo, path)
        self.upload_file(self.frog, path, overwrite = True)
        self.upload_file(self.song, path, overwrite = True)
        revs = self.client.revisions(path)
        metadata = self.client.metadata(path)
        self.assert_file(metadata, self.song, path = path, mime_type = "text/plain")
        # Revisions come back newest-first.
        self.assertEquals(len(revs), 3)
        self.assert_file(revs[0], self.song, path = path, mime_type = "text/plain")
        self.assert_file(revs[1], self.frog, path = path, mime_type = "text/plain")
        self.assert_file(revs[2], self.foo, path = path, mime_type = "text/plain")
        metadata = self.client.restore(path, revs[2]["rev"])
        self.assert_file(metadata, self.foo, path = path, mime_type = "text/plain")
        metadata = self.client.metadata(path)
        self.assert_file(metadata, self.foo, path = path, mime_type = "text/plain")

    def test_copy_ref(self):
        """Tests using the /copy_ref endpoint to move data within a single dropbox"""
        path = posixpath.join(self.test_dir, "foo_copy_ref.txt")
        path2 = posixpath.join(self.test_dir, "foo_copy_ref_target.txt")
        self.upload_file(self.foo, path)
        copy_ref = self.client.create_copy_ref(path)
        self.dict_has(copy_ref,
            "expires",
            "copy_ref"
        )
        self.client.add_copy_ref(copy_ref["copy_ref"], path2)
        metadata = self.client.metadata(path2)
        self.assert_file(metadata, self.foo, path = path2)
        copied_foo = self.client.get_file(path2).read()
        local_foo = open(self.foo, "rb").read()
        self.assertEqual(len(copied_foo), len(local_foo))
        self.assertEqual(copied_foo, local_foo)

    def test_chunked_upload(self):
        """Uploads two chunks manually and commits them as one file."""
        target_path = posixpath.join(self.test_dir, 'chunked_upload_file.txt')
        chunk_size = 4 * 1024
        random_string1, random_data1 = make_random_data(chunk_size)
        random_string2, random_data2 = make_random_data(chunk_size)
        new_offset, upload_id = self.client.upload_chunk(StringIO(random_string1), 0)
        self.assertEquals(new_offset, chunk_size)
        self.assertIsNotNone(upload_id)
        new_offset, upload_id2 = self.client.upload_chunk(StringIO(random_string2), 0,
                                                          new_offset, upload_id)
        self.assertEquals(new_offset, chunk_size * 2)
        self.assertEquals(upload_id2, upload_id)
        metadata = self.client.commit_chunked_upload('/auto' + target_path, upload_id,
                                                     overwrite=True)
        self.dict_has(metadata, bytes=chunk_size * 2, path=target_path)
        downloaded = self.client.get_file(target_path).read()
        self.assertEquals(chunk_size * 2, len(downloaded))
        self.assertEquals(random_data1, downloaded[:chunk_size])
        self.assertEquals(random_data2, downloaded[chunk_size:])

    def test_chunked_uploader(self):
        """Uploads 10 MB through the ChunkedUploader helper, with retries."""
        path = posixpath.join(self.test_dir, "chunked_uploader_file.txt")
        size = 10 * 1024 * 1024
        chunk_size = 4 * 1024 * 1102
        random_string, random_data = make_random_data(size)
        uploader = self.client.get_chunked_uploader(StringIO(random_string), len(random_string))
        error_count = 0
        # Tolerate up to 5 transient server errors before giving up.
        while uploader.offset < size and error_count < 5:
            try:
                upload = uploader.upload_chunked(chunk_size=chunk_size)
            except ErrorResponse as e:
                error_count += 1
        uploader.finish(path)
        downloaded = self.client.get_file(path).read()
        self.assertEquals(size, len(downloaded))
        self.assertEquals(random_data, downloaded)

    def test_delta(self):
        """Walks /delta cursors, with and without a path_prefix filter."""
        prefix = posixpath.join(self.test_dir, "delta")
        a = posixpath.join(prefix, "a.txt")
        self.upload_file(self.foo, a)
        b = posixpath.join(prefix, "b.txt")
        self.upload_file(self.foo, b)
        c = posixpath.join(prefix, "c")
        c_1 = posixpath.join(prefix, "c/1.txt")
        self.upload_file(self.foo, c_1)
        c_2 = posixpath.join(prefix, "c/2.txt")
        self.upload_file(self.foo, c_2)

        # Delta entries report lowercased paths, hence the .lower() calls.
        prefix_lc = prefix.lower()
        c_lc = c.lower()

        # /delta on everything
        expected = { p.lower() for p in (prefix, a, b, c, c_1, c_2) }
        entries = set()
        cursor = None
        while True:
            r = self.client.delta(cursor)
            if r['reset']: entries = set()
            for path_lc, md in r['entries']:
                if path_lc.startswith(prefix_lc+'/') or path_lc == prefix_lc:
                    assert md is not None, "we should never get deletes under 'prefix'"
                    entries.add(path_lc)
            if not r['has_more']: break
            cursor = r['cursor']
        self.assertEqual(expected, entries)

        # /delta where path_prefix=c
        expected = { p.lower() for p in (c, c_1, c_2) }
        entries = set()
        cursor = None
        while True:
            r = self.client.delta(cursor, path_prefix=c)
            if r['reset']: entries = set()
            for path_lc, md in r['entries']:
                assert path_lc.startswith(c_lc+'/') or path_lc == c_lc
                assert md is not None, "we should never get deletes"
                entries.add(path_lc)
            if not r['has_more']: break
            cursor = r['cursor']
        self.assertEqual(expected, entries)

    def test_longpoll_delta(self):
        """An upload should wake up a pending longpoll_delta call."""
        cursor = self.client.delta()['cursor']
        def assert_longpoll():
            r = self.client.longpoll_delta(cursor)
            assert (r['changes'])
        t = threading.Thread(target=assert_longpoll)
        t.start()
        self.upload_file(self.foo, posixpath.join(self.test_dir, "foo.txt"))
        t.join()
def make_random_data(size):
    """Return a (string, bytes) pair of *size* random bytes.

    On Python 3 the string member is the latin1 decoding of the bytes;
    on Python 2 both members are the same str object.
    """
    random_data = os.urandom(size)
    if PY3:
        return random_data.decode('latin1'), random_data
    return random_data, random_data
| mit |
nuncjo/odoo | addons/portal_project/tests/__init__.py | 260 | 1086 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2012-TODAY OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import test_access_rights
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pylp/pylp | pylp/lib/file.py | 1 | 1687 | """
Store the contents of a file.
Copyright (C) 2017 The Pylp Authors.
This file is under the MIT License.
"""
import os
import asyncio
from concurrent.futures import ThreadPoolExecutor
from pylp.lib.transformer import Transformer
# Infinity value for file order
_inf = float('Inf')
def read_file(path):
    """Return the full contents of the local file at *path* as text."""
    with open(path, "r") as handle:
        contents = handle.read()
    return contents
class File():
    """A file flowing through the pipeline: a path plus (optional) contents."""

    def __init__(self, path, **options):
        """Create a file.

        Args:
            path: path of the file (absolute, or relative to *cwd*).
            **options:
                cwd: working directory used to resolve relative paths.
                base: base directory used to compute the relative path
                    (defaults to the directory containing *path*).
                contents: initial contents of the file (defaults to "").
        """
        # Options.  `order` is the file's position in the stream; infinity
        # means "not yet ordered".
        self.order = float("inf")
        self.base = options.get("base", os.path.dirname(path))

        # Path of the file
        self.cwd = options.get("cwd")
        self.set_path(path)
        self.relpath = None

        # Contents of the file
        self.contents = options.get("contents", "")

    def set_path(self, path):
        """Set the path of the file, anchoring relative paths to `cwd`.

        Bug fix: the original tested ``os.path.isabs(path)`` (not its
        negation), so *relative* paths were never resolved against the
        working directory, and an absolute path with ``cwd=None`` crashed
        in ``os.path.join``.
        """
        if not os.path.isabs(path) and self.cwd:
            path = os.path.normpath(os.path.join(self.cwd, path))
        self.path = path
        self.relative = os.path.relpath(self.path, self.base)

    def clone(self, path=None, *, with_contents=True, **options):
        """Return a copy of this file.

        Args:
            path: override the path (defaults to this file's path).
            with_contents: whether to copy/override the contents.
            **options: `cwd`, `base` and `contents` overrides.
        """
        file = File(path if path else self.path, cwd=options.get("cwd", self.cwd))
        file.base = options.get("base", self.base)
        if with_contents:
            file.contents = options.get("contents", self.contents)
        return file
class FileReader(Transformer):
    """Transformer that fills in each file's contents from the local disk."""

    def __init__(self):
        super().__init__()
        # Worker pool so blocking disk reads never stall the event loop.
        self.exe = ThreadPoolExecutor()
        self.loop = asyncio.get_event_loop()

    async def transform(self, file):
        """Load the contents of *file* in a worker thread and return it."""
        contents = await self.loop.run_in_executor(self.exe, read_file, file.path)
        file.contents = contents
        return file
| mit |
GeotrekCE/Geotrek-admin | geotrek/outdoor/tests/test_functional.py | 2 | 4673 | from geotrek.common.tests import CommonTest
from geotrek.outdoor.models import Site, Course
from geotrek.outdoor.factories import SiteFactory, CourseFactory, OutdoorManagerFactory
from geotrek.authent.factories import StructureFactory
from django.utils.translation import gettext as _
class SiteViewsTests(CommonTest):
    """View/API tests for outdoor ``Site`` objects.

    ``CommonTest`` drives the actual scenarios; this class only supplies
    the model, the factories, and the expected serialized payloads.
    """
    model = Site
    modelfactory = SiteFactory
    userfactory = OutdoorManagerFactory
    # Geometry expected in serialized responses for the factory object.
    expected_json_geom = {
        'type': 'GeometryCollection',
        'geometries': [{'type': 'Point', 'coordinates': [-1.3630812, -5.9838563]}],
    }

    def get_expected_json_attrs(self):
        """Expected serialized attributes of the factory-created site."""
        return {
            'advice': 'Warning!',
            'ambiance': 'Party time!',
            'areas': [],
            'children': [],
            'cities': [],
            'description': 'Blah',
            'description_teaser': 'More blah',
            'districts': [],
            'eid': '42',
            'filelist_url': '/paperclip/get/outdoor/site/{}/'.format(self.obj.pk),
            'information_desks': [],
            'labels': [],
            'map_image_url': '/image/site-{}.png'.format(self.obj.pk),
            'name': 'Site',
            'orientation': ['S', 'SW'],
            'parent': None,
            'period': 'Summer',
            'portal': [],
            'practice': {
                'id': self.obj.practice.pk,
                'name': 'Practice',
            },
            'printable': '/api/en/sites/{}/site.pdf'.format(self.obj.pk),
            'publication_date': '2020-03-17',
            'published': True,
            'published_status': [
                {'lang': 'en', 'language': 'English', 'status': True},
                {'lang': 'es', 'language': 'Spanish', 'status': False},
                {'lang': 'fr', 'language': 'French', 'status': False},
                {'lang': 'it', 'language': 'Italian', 'status': False},
            ],
            'slug': 'site',
            'source': [],
            'structure': {
                'id': self.obj.structure.pk,
                'name': 'My structure',
            },
            'themes': [],
            'type': {
                'id': self.obj.type.pk,
                'name': 'Site type'
            },
            'web_links': [],
            'ratings_min': [],
            'ratings_max': [],
            'wind': ['N'],
        }

    def get_bad_data(self):
        """Invalid payload and the validation error it must trigger."""
        return {
            'geom': 'doh!'
        }, _('Invalid geometry value.')

    def get_good_data(self):
        """Minimal valid payload for creating a site."""
        return {
            'structure': StructureFactory.create().pk,
            'name_en': 'test en',
            'name_fr': 'test fr',
            'geom': '{"type": "GeometryCollection", "geometries": [{"type": "Point", "coordinates":[0, 0]}]}',
        }
class CourseViewsTests(CommonTest):
    """View/API tests for outdoor ``Course`` objects.

    ``CommonTest`` drives the actual scenarios; this class only supplies
    the model, the factories, and the expected serialized payloads.
    """
    model = Course
    modelfactory = CourseFactory
    userfactory = OutdoorManagerFactory
    # Geometry expected in serialized responses for the factory object.
    expected_json_geom = {
        'type': 'GeometryCollection',
        'geometries': [{'type': 'Point', 'coordinates': [-1.3630812, -5.9838563]}],
    }

    def get_expected_json_attrs(self):
        """Expected serialized attributes of the factory-created course."""
        return {
            'advice': 'Warning!',
            'areas': [],
            'cities': [],
            'description': 'Blah',
            'districts': [],
            'eid': '43',
            'equipment': 'Rope',
            'filelist_url': '/paperclip/get/outdoor/course/{}/'.format(self.obj.pk),
            'height': 42,
            'map_image_url': '/image/course-{}.png'.format(self.obj.pk),
            'name': 'Course',
            'site': self.obj.site.pk,
            'printable': '/api/en/courses/{}/course.pdf'.format(self.obj.pk),
            'publication_date': '2020-03-17',
            'published': True,
            'published_status': [
                {'lang': 'en', 'language': 'English', 'status': True},
                {'lang': 'es', 'language': 'Spanish', 'status': False},
                {'lang': 'fr', 'language': 'French', 'status': False},
                {'lang': 'it', 'language': 'Italian', 'status': False},
            ],
            'slug': 'course',
            'structure': {
                'id': self.obj.structure.pk,
                'name': 'My structure',
            },
            'ratings': [],
        }

    def get_bad_data(self):
        """Invalid payload and the validation error it must trigger."""
        return {
            'geom': 'doh!'
        }, _('Invalid geometry value.')

    def get_good_data(self):
        """Minimal valid payload for creating a course."""
        return {
            'structure': StructureFactory.create().pk,
            'site': SiteFactory.create().pk,
            'name_en': 'test en',
            'name_fr': 'test fr',
            'geom': '{"type": "GeometryCollection", "geometries": [{"type": "Point", "coordinates":[0, 0]}]}',
        }
| bsd-2-clause |
yebrahim/pydatalab | tests/mlworkbench_magic/ml_tests.py | 2 | 8279 | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
"""Tests the \%\%ml magics functions without runing any jobs."""
from __future__ import absolute_import
from __future__ import unicode_literals
import unittest
import mock
import os
# import Python so we can mock the parts we need to here.
import IPython.core.display
import IPython.core.magic
def noop_decorator(func):
    """Identity decorator used to stub out IPython's magic registration."""
    return func
# Replace IPython's magic-registration decorators with no-ops and stub out
# display/JSON/get_ipython, so the magics module below can be imported in a
# plain (non-notebook) test process without a running IPython kernel.
IPython.core.magic.register_line_cell_magic = noop_decorator
IPython.core.magic.register_line_magic = noop_decorator
IPython.core.magic.register_cell_magic = noop_decorator
IPython.core.display.HTML = lambda x: x
IPython.core.display.JSON = lambda x: x
IPython.get_ipython = mock.Mock()
IPython.get_ipython().user_ns = {}
import google.datalab.contrib.mlworkbench.commands._ml as mlmagic # noqa
def find_key_value(arg_list, key, value):
    """Return True if '--key value' appears as adjacent items in arg_list.

    Iterates only up to the second-to-last index: the original ranged over
    the full list and raised IndexError on ``arg_list[i + 1]`` whenever
    `key` happened to be the final element.  Also returns an explicit
    False (rather than falling through to None) when there is no match.
    """
    return any(arg_list[i] == key and arg_list[i + 1] == value
               for i in range(len(arg_list) - 1))
def find_key_endswith(arg_list, key, value):
    """Return True if '--key <anything-ending-with-value>' is in arg_list.

    Iterates only up to the second-to-last index: the original ranged over
    the full list and raised IndexError on ``arg_list[i + 1]`` whenever
    `key` happened to be the final element.  Also returns an explicit
    False (rather than falling through to None) when there is no match.
    """
    return any(arg_list[i] == key and arg_list[i + 1].endswith(value)
               for i in range(len(arg_list) - 1))
def find_startswith_endswith(arg_list, key, value):
    """Checks that some element of arg_list starts with key and ends with value.

    Like the other helpers, returns True on a match and falls through
    (returning None) otherwise; callers only rely on truthiness.
    """
    if any(arg.startswith(key) and arg.endswith(value) for arg in arg_list):
        return True
class TestMLMagic(unittest.TestCase):
@mock.patch('google.datalab.contrib.mlworkbench._shell_process.run_and_monitor')
@mock.patch('subprocess.Popen') # Because of the trainer help menu
def test_analyze_csv_local(self, popen_mock, run_and_monitor_mock):
mlmagic.ml(
line='dataset create',
cell="""\
format: csv
train: ./taxi/train.csv
eval: ./taxi/eval.csv
name: taxi_data
schema:
- name: unique_key
type: STRING
- name: fare
type: FLOAT"""
)
mlmagic.ml(
line='analyze',
cell="""\
output: my_out_dir
data: taxi_data
features: dummy_features""")
cmd_list = run_and_monitor_mock.call_args[0][0]
# cmd_list = [u'python', u'analyze.py', u'--output', 'path/my_out_dir',
# u'--csv=path/file*.csv', u'--schema', u'/path/schema.json',
# u'--features', u'path/features.json']
self.assertEqual('python', cmd_list[0])
self.assertEqual('analyze.py', cmd_list[1])
self.assertIn('--schema', cmd_list)
self.assertIn('--features', cmd_list)
self.assertTrue(find_key_endswith(cmd_list, '--output', 'my_out_dir'))
self.assertTrue(find_startswith_endswith(cmd_list, '--csv=', 'train.csv'))
@mock.patch('google.datalab.contrib.mlworkbench._shell_process.run_and_monitor')
@mock.patch('subprocess.Popen') # Because of the trainer help menu
def test_transform_csv(self, popen_mock, run_and_monitor_mock):
mlmagic.ml(
line='dataset create',
cell="""\
format: csv
train: ./taxi/train.csv
eval: ./taxi/eval.csv
name: taxi_data
schema:
- name: unique_key
type: STRING
- name: fare
type: FLOAT"""
)
mlmagic.ml(
line='transform --shuffle --cloud',
cell="""\
output: my_out_dir
analysis: my_analyze_dir
batch_size: 123
data: taxi_data
cloud_config:
project_id: my_id
num_workers: 987
worker_machine_type: BLUE
job_name: RED""")
cmd_list = run_and_monitor_mock.call_args[0][0]
# cmd_list = [u'python', u'transform.py', u'--output', 'path/my_out_dir',
# u'--analysis', 'path/my_analyze_dir', u'--prefix', 'my_prefix',
# u'--shuffle', u'--batch-size', '100', u'--csv=/path/file*.csv'
# ...
self.assertEqual('python', cmd_list[0])
self.assertEqual('transform.py', cmd_list[1])
self.assertIn('--shuffle', cmd_list)
self.assertTrue(find_key_endswith(cmd_list, '--output', 'my_out_dir'))
self.assertTrue(find_key_endswith(cmd_list, '--analysis', 'my_analyze_dir'))
self.assertTrue(find_key_value(cmd_list, '--prefix', 'train') or
find_key_value(cmd_list, '--prefix', 'eval'))
self.assertTrue(find_key_value(cmd_list, '--batch-size', '123'))
self.assertTrue(find_startswith_endswith(cmd_list, '--csv=', 'train.csv') or
find_startswith_endswith(cmd_list, '--csv=', 'eval.csv'))
self.assertTrue(find_key_value(cmd_list, '--project-id', 'my_id'))
self.assertTrue(find_key_value(cmd_list, '--num-workers', '987'))
self.assertTrue(find_key_value(cmd_list, '--worker-machine-type', 'BLUE'))
self.assertTrue(find_key_value(cmd_list, '--job-name', 'RED'))
@mock.patch('google.datalab.contrib.mlworkbench.commands._ml._show_job_link')
@mock.patch('google.datalab.ml.package_and_copy')
@mock.patch('google.datalab.ml.Job.submit_training')
@mock.patch('subprocess.Popen') # Because of the trainer help menu
def test_train_csv(self, popen_mock, submit_training_mock,
package_and_copy_mock, _show_job_link_mock):
mlmagic.ml(
line='dataset create',
cell="""\
format: transformed
train: ./taxi/train_tfrecord.tar.gz
eval: ./taxi/eval_tfrecord.tar.gz
name: taxi_data_transformed"""
)
mlmagic.ml(
line='train --cloud',
cell="""\
output: gs://my_out_dir
analysis: my_analyze_dir
data: $taxi_data_transformed
model_args:
key: value
cloud_config:
job_name: job1
project_id: id""")
job_request = submit_training_mock.call_args[0][0]
cmd_list = job_request['args']
self.assertEqual(job_request['project_id'], 'id')
self.assertEqual(job_request['job_dir'], 'gs://my_out_dir')
self.assertEqual(job_request['python_module'], 'trainer.task')
self.assertEqual(job_request['package_uris'], ['gs://my_out_dir/staging/trainer.tar.gz'])
self.assertTrue(find_key_value(cmd_list, '--job-dir', 'gs://my_out_dir'))
self.assertTrue(find_key_endswith(cmd_list, '--analysis', 'my_analyze_dir'))
self.assertTrue(find_startswith_endswith(cmd_list, '--train=', 'train_tfrecord.tar.gz'))
self.assertTrue(find_startswith_endswith(cmd_list, '--eval=', 'eval_tfrecord.tar.gz'))
self.assertTrue(find_key_value(cmd_list, '--key', 'value'))
@mock.patch('google.datalab.contrib.mlworkbench.commands._ml._show_job_link')
@mock.patch('google.datalab.Context.default')
@mock.patch('google.datalab.ml.Job.submit_batch_prediction')
@mock.patch('subprocess.Popen') # Because of the trainer help menu
def test_batch_predict_csv(self, popen_mock, submit_batch_prediction_mock,
default_mock, _show_job_link_mock):
default_mock.return_value = mock.Mock(project_id='my_project_id')
mlmagic.ml(
line='batch_predict --cloud',
cell="""\
model: my_model.my_version
output: gs://output
format: json
batch_size: 10
data:
csv: %s""" % os.path.abspath(__file__))
job_args = submit_batch_prediction_mock.call_args[0][0]
self.assertEqual(job_args['input_paths'], [os.path.abspath(__file__)])
self.assertEqual(
job_args['version_name'],
'projects/my_project_id/models/my_model/versions/my_version')
self.assertEqual(job_args['output_path'], 'gs://output')
self.assertEqual(job_args['data_format'], 'TEXT')
| apache-2.0 |
azurestandard/django | django/utils/simplejson.py | 126 | 1094 | # Django 1.5 only supports Python >= 2.6, where the standard library includes
# the json module. Previous version of Django shipped a copy for Python < 2.6.
# For backwards compatibility, we're keeping an importable json module
# at this location, with the same lookup sequence.

# Avoid shadowing the simplejson module
from __future__ import absolute_import

import warnings

warnings.warn("django.utils.simplejson is deprecated; use json instead.",
    PendingDeprecationWarning)

try:
    import simplejson
except ImportError:
    use_simplejson = False
else:
    # The system-installed version has priority providing it is either not an
    # earlier version or it contains the C speedups.
    from json import __version__ as stdlib_json_version
    # NOTE(review): this compares version components as *strings*, so e.g.
    # '10' sorts before '9'. Fine for the versions this shim targets, but not
    # a general-purpose version comparison — confirm before reusing.
    use_simplejson = (hasattr(simplejson, '_speedups') or
        simplejson.__version__.split('.') >= stdlib_json_version.split('.'))

# Make sure we copy over the version. See #17071
if use_simplejson:
    from simplejson import *
    from simplejson import __version__
else:
    from json import *
    from json import __version__
| bsd-3-clause |
andfoy/margffoy-tuay-server | env/lib/python2.7/site-packages/django/contrib/gis/geos/polygon.py | 450 | 6843 | from ctypes import byref, c_uint
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.contrib.gis.geos.libgeos import GEOM_PTR, get_pointer_arr
from django.contrib.gis.geos.linestring import LinearRing
from django.utils import six
from django.utils.six.moves import range
class Polygon(GEOSGeometry):
    # A polygon must have at least one ring (the exterior shell).
    _minlength = 1

    def __init__(self, *args, **kwargs):
        """
        Initializes on an exterior ring and a sequence of holes (both
        instances may be either LinearRing instances, or a tuple/list
        that may be constructed into a LinearRing).

        Examples of initialization, where shell, hole1, and hole2 are
        valid LinearRing geometries:
        >>> from django.contrib.gis.geos import LinearRing, Polygon
        >>> shell = hole1 = hole2 = LinearRing()
        >>> poly = Polygon(shell, hole1, hole2)
        >>> poly = Polygon(shell, (hole1, hole2))

        >>> # Example where a tuple parameters are used:
        >>> poly = Polygon(((0, 0), (0, 10), (10, 10), (0, 10), (0, 0)),
        ... ((4, 4), (4, 6), (6, 6), (6, 4), (4, 4)))
        """
        if not args:
            raise TypeError('Must provide at least one LinearRing, or a tuple, to initialize a Polygon.')

        # Getting the ext_ring and init_holes parameters from the argument list
        ext_ring = args[0]
        init_holes = args[1:]
        n_holes = len(init_holes)

        # If initialized as Polygon(shell, (LinearRing, LinearRing)) [for backward-compatibility]
        if n_holes == 1 and isinstance(init_holes[0], (tuple, list)):
            if len(init_holes[0]) == 0:
                init_holes = ()
                n_holes = 0
            elif isinstance(init_holes[0][0], LinearRing):
                init_holes = init_holes[0]
                n_holes = len(init_holes)

        # Shell plus holes are handed to GEOS in a single call.
        polygon = self._create_polygon(n_holes + 1, (ext_ring,) + init_holes)
        super(Polygon, self).__init__(polygon, **kwargs)

    def __iter__(self):
        "Iterates over each ring in the polygon."
        for i in range(len(self)):
            yield self[i]

    def __len__(self):
        "Returns the number of rings in this Polygon."
        # Exterior ring plus all interior rings.
        return self.num_interior_rings + 1

    @classmethod
    def from_bbox(cls, bbox):
        "Constructs a Polygon from a bounding box (4-tuple)."
        x0, y0, x1, y1 = bbox
        # Non-numeric corner values fall back to WKT string formatting.
        for z in bbox:
            if not isinstance(z, six.integer_types + (float,)):
                return GEOSGeometry('POLYGON((%s %s, %s %s, %s %s, %s %s, %s %s))' %
                    (x0, y0, x0, y1, x1, y1, x1, y0, x0, y0))
        return Polygon(((x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)))

    # ### These routines are needed for list-like operation w/ListMixin ###
    def _create_polygon(self, length, items):
        # Instantiate LinearRing objects if necessary, but don't clone them yet
        # _construct_ring will throw a TypeError if a parameter isn't a valid ring
        # If we cloned the pointers here, we wouldn't be able to clean up
        # in case of error.
        rings = []
        for r in items:
            if isinstance(r, GEOM_PTR):
                rings.append(r)
            else:
                rings.append(self._construct_ring(r))

        # First ring is the shell; GEOS takes ownership of the clones.
        shell = self._clone(rings.pop(0))

        n_holes = length - 1
        if n_holes:
            holes = get_pointer_arr(n_holes)
            for i, r in enumerate(rings):
                holes[i] = self._clone(r)
            holes_param = byref(holes)
        else:
            holes_param = None

        return capi.create_polygon(shell, holes_param, c_uint(n_holes))

    def _clone(self, g):
        # Accept either a raw GEOS pointer or a GEOSGeometry wrapper.
        if isinstance(g, GEOM_PTR):
            return capi.geom_clone(g)
        else:
            return capi.geom_clone(g.ptr)

    def _construct_ring(self, param, msg=(
            'Parameter must be a sequence of LinearRings or objects that can initialize to LinearRings')):
        "Helper routine for trying to construct a ring from the given parameter."
        if isinstance(param, LinearRing):
            return param
        try:
            ring = LinearRing(param)
            return ring
        except TypeError:
            raise TypeError(msg)

    def _set_list(self, length, items):
        # Getting the current pointer, replacing with the newly constructed
        # geometry, and destroying the old geometry.
        prev_ptr = self.ptr
        srid = self.srid
        self.ptr = self._create_polygon(length, items)
        # Re-apply the SRID, which is not carried over by _create_polygon.
        if srid:
            self.srid = srid
        capi.destroy_geom(prev_ptr)

    def _get_single_internal(self, index):
        """
        Returns the ring at the specified index. The first index, 0, will
        always return the exterior ring.  Indices > 0 will return the
        interior ring at the given index (e.g., poly[1] and poly[2] would
        return the first and second interior ring, respectively).

        CAREFUL: Internal/External are not the same as Interior/Exterior!
        _get_single_internal returns a pointer from the existing geometries for use
        internally by the object's methods.  _get_single_external returns a clone
        of the same geometry for use by external code.
        """
        if index == 0:
            return capi.get_extring(self.ptr)
        else:
            # Getting the interior ring, have to subtract 1 from the index.
            return capi.get_intring(self.ptr, index - 1)

    def _get_single_external(self, index):
        # Clone so callers can't mutate/free this polygon's internal rings.
        return GEOSGeometry(capi.geom_clone(self._get_single_internal(index)), srid=self.srid)

    _set_single = GEOSGeometry._set_single_rebuild
    _assign_extended_slice = GEOSGeometry._assign_extended_slice_rebuild

    # #### Polygon Properties ####
    @property
    def num_interior_rings(self):
        "Returns the number of interior rings."
        # Getting the number of rings
        return capi.get_nrings(self.ptr)

    def _get_ext_ring(self):
        "Gets the exterior ring of the Polygon."
        return self[0]

    def _set_ext_ring(self, ring):
        "Sets the exterior ring of the Polygon."
        self[0] = ring

    # Properties for the exterior ring/shell.
    exterior_ring = property(_get_ext_ring, _set_ext_ring)
    shell = exterior_ring

    @property
    def tuple(self):
        "Gets the tuple for each ring in this Polygon."
        return tuple(self[i].tuple for i in range(len(self)))
    coords = tuple

    @property
    def kml(self):
        "Returns the KML representation of this Polygon."
        inner_kml = ''.join("<innerBoundaryIs>%s</innerBoundaryIs>" % self[i + 1].kml
            for i in range(self.num_interior_rings))
        return "<Polygon><outerBoundaryIs>%s</outerBoundaryIs>%s</Polygon>" % (self[0].kml, inner_kml)
| gpl-2.0 |
google/makerspace-auth | software/authbox/__init__.py | 2 | 1147 | # Copyright 2017-2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Authbox client modules.
"""
__version__ = "1.0.0"
# To facilitate testing, this makes things importable on non-Raspberry Pi
# This module isn't perfect (for example, input() doesn't read what output()
# writes), but at least supports the api, and we can mock where it matters.
try:
from RPi import GPIO
del GPIO
except ImportError:
import warnings
warnings.warn("Using fake_rpi suitable for testing only!")
del warnings
import sys
import fake_rpi
sys.modules["RPi"] = fake_rpi.RPi
del sys, fake_rpi
| apache-2.0 |
thomazs/geraldo | site/newsite/django_1_0/tests/modeltests/generic_relations/models.py | 12 | 5058 | """
34. Generic relations
Generic relations let an object have a foreign key to any object through a
content-type/object-id field. A generic foreign key can point to any object,
be it animal, vegetable, or mineral.
The canonical example is tags (although this example implementation is *far*
from complete).
"""
from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
class TaggedItem(models.Model):
    """A tag on an item.

    Uses the contenttypes framework: ``content_type`` + ``object_id``
    form a generic foreign key, so a tag can point at any model instance.
    """
    tag = models.SlugField()
    content_type = models.ForeignKey(ContentType)
    object_id = models.PositiveIntegerField()
    content_object = generic.GenericForeignKey()

    class Meta:
        ordering = ["tag"]

    def __unicode__(self):
        # Python 2-style unicode representation (this is a Django 1.0-era codebase).
        return self.tag
class Animal(models.Model):
    """An animal; taggable through an explicit GenericRelation."""
    common_name = models.CharField(max_length=150)
    latin_name = models.CharField(max_length=150)

    # Reverse accessor: deleting an Animal also deletes its TaggedItems.
    tags = generic.GenericRelation(TaggedItem)

    def __unicode__(self):
        return self.common_name
class Vegetable(models.Model):
    """A vegetable; taggable through an explicit GenericRelation."""
    name = models.CharField(max_length=150)
    is_yucky = models.BooleanField(default=True)

    tags = generic.GenericRelation(TaggedItem)

    def __unicode__(self):
        return self.name
class Mineral(models.Model):
    """A mineral; deliberately has *no* GenericRelation, so its TaggedItems
    survive when the mineral itself is deleted (exercised by the doctests)."""
    name = models.CharField(max_length=150)
    hardness = models.PositiveSmallIntegerField()

    # note the lack of an explicit GenericRelation here...

    def __unicode__(self):
        return self.name
__test__ = {'API_TESTS':"""
# Create the world in 7 lines of code...
>>> lion = Animal(common_name="Lion", latin_name="Panthera leo")
>>> platypus = Animal(common_name="Platypus", latin_name="Ornithorhynchus anatinus")
>>> eggplant = Vegetable(name="Eggplant", is_yucky=True)
>>> bacon = Vegetable(name="Bacon", is_yucky=False)
>>> quartz = Mineral(name="Quartz", hardness=7)
>>> for o in (lion, platypus, eggplant, bacon, quartz):
... o.save()
# Objects with declared GenericRelations can be tagged directly -- the API
# mimics the many-to-many API.
>>> bacon.tags.create(tag="fatty")
<TaggedItem: fatty>
>>> bacon.tags.create(tag="salty")
<TaggedItem: salty>
>>> lion.tags.create(tag="yellow")
<TaggedItem: yellow>
>>> lion.tags.create(tag="hairy")
<TaggedItem: hairy>
>>> lion.tags.all()
[<TaggedItem: hairy>, <TaggedItem: yellow>]
>>> bacon.tags.all()
[<TaggedItem: fatty>, <TaggedItem: salty>]
# You can easily access the content object like a foreign key.
>>> t = TaggedItem.objects.get(tag="salty")
>>> t.content_object
<Vegetable: Bacon>
# Recall that the Mineral class doesn't have an explicit GenericRelation
# defined. That's OK, because you can create TaggedItems explicitly.
>>> tag1 = TaggedItem(content_object=quartz, tag="shiny")
>>> tag2 = TaggedItem(content_object=quartz, tag="clearish")
>>> tag1.save()
>>> tag2.save()
# However, excluding GenericRelations means your lookups have to be a bit more
# explicit.
>>> from django.contrib.contenttypes.models import ContentType
>>> ctype = ContentType.objects.get_for_model(quartz)
>>> TaggedItem.objects.filter(content_type__pk=ctype.id, object_id=quartz.id)
[<TaggedItem: clearish>, <TaggedItem: shiny>]
# You can set a generic foreign key in the way you'd expect.
>>> tag1.content_object = platypus
>>> tag1.save()
>>> platypus.tags.all()
[<TaggedItem: shiny>]
>>> TaggedItem.objects.filter(content_type__pk=ctype.id, object_id=quartz.id)
[<TaggedItem: clearish>]
# If you delete an object with an explicit Generic relation, the related
# objects are deleted when the source object is deleted.
# Original list of tags:
>>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
[(u'clearish', <ContentType: mineral>, 1), (u'fatty', <ContentType: vegetable>, 2), (u'hairy', <ContentType: animal>, 1), (u'salty', <ContentType: vegetable>, 2), (u'shiny', <ContentType: animal>, 2), (u'yellow', <ContentType: animal>, 1)]
>>> lion.delete()
>>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
[(u'clearish', <ContentType: mineral>, 1), (u'fatty', <ContentType: vegetable>, 2), (u'salty', <ContentType: vegetable>, 2), (u'shiny', <ContentType: animal>, 2)]
# If Generic Relation is not explicitly defined, any related objects
# remain after deletion of the source object.
>>> quartz.delete()
>>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
[(u'clearish', <ContentType: mineral>, 1), (u'fatty', <ContentType: vegetable>, 2), (u'salty', <ContentType: vegetable>, 2), (u'shiny', <ContentType: animal>, 2)]
# If you delete a tag, the objects using the tag are unaffected
# (other than losing a tag)
>>> tag = TaggedItem.objects.get(id=1)
>>> tag.delete()
>>> bacon.tags.all()
[<TaggedItem: salty>]
>>> [(t.tag, t.content_type, t.object_id) for t in TaggedItem.objects.all()]
[(u'clearish', <ContentType: mineral>, 1), (u'salty', <ContentType: vegetable>, 2), (u'shiny', <ContentType: animal>, 2)]
>>> ctype = ContentType.objects.get_for_model(lion)
>>> Animal.objects.filter(tags__content_type=ctype)
[<Animal: Platypus>]
"""}
| lgpl-3.0 |
Djabbz/wakatime | wakatime/packages/pygments_py3/pygments/lexers/graph.py | 72 | 2293 | # -*- coding: utf-8 -*-
"""
pygments.lexers.graph
~~~~~~~~~~~~~~~~~~~~~
Lexers for graph query languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this
from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
String, Number, Whitespace
__all__ = ['CypherLexer']
class CypherLexer(RegexLexer):
    """
    For `Cypher Query Language
    <http://docs.neo4j.org/chunked/milestone/cypher-query-lang.html>`_

    For the Cypher version in Neo4J 2.0

    .. versionadded:: 2.0
    """
    name = 'Cypher'
    aliases = ['cypher']
    filenames = ['*.cyp', '*.cypher']

    # Cypher keywords are case-insensitive and queries regularly span lines.
    flags = re.MULTILINE | re.IGNORECASE

    tokens = {
        'root': [
            include('comment'),
            include('keywords'),
            include('clauses'),
            include('relations'),
            include('strings'),
            include('whitespace'),
            include('barewords'),
        ],
        'comment': [
            # NOTE(review): this matches the *entire* line as a comment when
            # '//' appears anywhere in it, even after code or inside a string
            # literal — confirm this simplistic behavior is intended.
            (r'^.*//.*\n', Comment.Single),
        ],
        'keywords': [
            (r'(create|order|match|limit|set|skip|start|return|with|where|'
             r'delete|foreach|not|by)\b', Keyword),
        ],
        'clauses': [
            # TODO: many missing ones, see http://docs.neo4j.org/refcard/2.0/
            (r'(all|any|as|asc|create|create\s+unique|delete|'
             r'desc|distinct|foreach|in|is\s+null|limit|match|none|'
             r'order\s+by|return|set|skip|single|start|union|where|with)\b',
             Keyword),
        ],
        'relations': [
            # Relationship arrows; the bracketed body is re-lexed with `this`.
            (r'(-\[)(.*?)(\]->)', bygroups(Operator, using(this), Operator)),
            (r'(<-\[)(.*?)(\]-)', bygroups(Operator, using(this), Operator)),
            (r'-->|<--|\[|\]', Operator),
            (r'<|>|<>|=|<=|=>|\(|\)|\||:|,|;', Punctuation),
            (r'[.*{}]', Punctuation),
        ],
        'strings': [
            (r'"(?:\\[tbnrf\'"\\]|[^\\"])*"', String),
            # Backtick-quoted identifiers (`` escapes a literal backtick).
            (r'`(?:``|[^`])+`', Name.Variable),
        ],
        'whitespace': [
            (r'\s+', Whitespace),
        ],
        'barewords': [
            (r'[a-z]\w*', Name),
            (r'\d+', Number),
        ],
    }
| bsd-3-clause |
ContinuumIO/chaco | chaco/tools/pan_tool.py | 1 | 10094 | """ Defines the PanTool class.
"""
from numpy import inf
# Enthought library imports
from enable.api import BaseTool, Pointer, KeySpec
from traits.api import Bool, Enum, Float, Tuple, Instance
class PanTool(BaseTool):
""" A tool that enables the user to pan a plot by clicking a mouse
button and dragging.
"""
# The mouse button that initiates the drag operation.
drag_button = Enum("left", "middle", "right")
# The cursor to use when panning.
drag_pointer = Pointer("hand")
# Scaling factor on the panning "speed".
speed = Float(1.0)
# The modifier key that, if depressed when the drag is initiated, constrains
# the panning to happen in the only direction of largest initial motion.
# It is possible to permanently restrict this tool to always drag along one
# direction. To do so, set constrain=True, constrain_key=None, and
# constrain_direction to the desired direction.
constrain_key = Enum(None, "shift", "control", "alt")
# Keys to Pan via keyboard
pan_right_key = Instance(KeySpec, args=("Right",))
pan_left_key = Instance(KeySpec, args=("Left",))
pan_up_key = Instance(KeySpec, args=("Up",))
pan_down_key = Instance(KeySpec, args=("Down",))
# number of pixels the keys should pan
# disabled if 0.0
pan_keys_step = Float(0.0)
# Constrain the panning to one direction?
constrain = Bool(False)
# The direction of constrained draw. A value of None means that the user
# has initiated the drag and pressed the constrain_key, but hasn't moved
# the mouse yet; the magnitude of the components of the next mouse_move
# event will determine the constrain_direction.
constrain_direction = Enum(None, "x", "y")
# Restrict to the bounds of the plot data
restrict_to_data = Bool(False)
# (x,y) of the point where the mouse button was pressed.
_original_xy = Tuple
# Data coordinates of **_original_xy**. This may be either (index,value)
# or (value,index) depending on the component's orientation.
_original_data = Tuple
# Was constrain=True triggered by the **contrain_key**? If False, it was
# set programmatically.
_auto_constrain = Bool(False)
#------------------------------------------------------------------------
# Inherited BaseTool traits
#------------------------------------------------------------------------
# The tool does not have a visual representation (overrides
# BaseTool).
draw_mode = "none"
# The tool is not visible (overrides BaseTool).
visible = False
# The possible event states of this tool (overrides enable.Interactor).
event_state = Enum("normal", "panning")
def normal_key_pressed(self, event):
""" Handles a key being pressed when the tool is in the 'normal'
state.
"""
if self.pan_keys_step == 0.0:
return
src = self.component.bounds[0]/2, self.component.bounds[1]/2
dest = src
if self.pan_left_key.match(event):
dest = (src[0] - self.pan_keys_step,
src[1])
elif self.pan_right_key.match(event):
dest = (src[0] + self.pan_keys_step,
src[1])
elif self.pan_down_key.match(event):
dest = (src[0],
src[1] - self.pan_keys_step)
elif self.pan_up_key.match(event):
dest = (src[0],
src[1] + self.pan_keys_step)
if src != dest:
self._original_xy = src
event.x = dest[0]
event.y = dest[1]
self.panning_mouse_move(event)
return
def normal_left_down(self, event):
""" Handles the left mouse button being pressed when the tool is in
the 'normal' state.
Starts panning if the left mouse button is the drag button.
"""
if self.drag_button == "left":
self._start_pan(event)
return
def normal_right_down(self, event):
""" Handles the right mouse button being pressed when the tool is in
the 'normal' state.
Starts panning if the right mouse button is the drag button.
"""
if self.drag_button == "right":
self._start_pan(event)
return
def normal_middle_down(self, event):
""" Handles the middle mouse button being pressed when the tool is in
the 'normal' state.
Starts panning if the middle mouse button is the drag button.
"""
if self.drag_button == "middle":
self._start_pan(event)
return
def panning_left_up(self, event):
""" Handles the left mouse button coming up when the tool is in the
'panning' state.
Stops panning if the left mouse button is the drag button.
"""
if self.drag_button == "left":
self._end_pan(event)
return
def panning_right_up(self, event):
""" Handles the right mouse button coming up when the tool is in the
'panning' state.
Stops panning if the right mouse button is the drag button.
"""
if self.drag_button == "right":
self._end_pan(event)
return
def panning_middle_up(self, event):
""" Handles the middle mouse button coming up when the tool is in the
'panning' state.
Stops panning if the middle mouse button is the drag button.
"""
if self.drag_button == "middle":
self._end_pan(event)
return
    def panning_mouse_move(self, event):
        """ Handles the mouse being moved when the tool is in the 'panning'
        state.

        Translates each unconstrained axis of the plot's data range by the
        screen-space mouse delta since the last event, clamping so the view
        never pans past the mapper's domain limits.
        """
        plot = self.component
        if self._auto_constrain and self.constrain_direction is None:
            # Determine the constraint direction from the dominant axis of
            # the initial mouse motion; it stays fixed for this gesture.
            if abs(event.x - self._original_xy[0]) > abs(event.y - self._original_xy[1]):
                self.constrain_direction = "x"
            else:
                self.constrain_direction = "y"
        for direction, bound_name, ndx in [("x","width",0), ("y","height",1)]:
            if not self.constrain or self.constrain_direction == direction:
                mapper = getattr(plot, direction + "_mapper")
                # NOTE: shadows the ``range`` builtin; kept as-is here.
                range = mapper.range
                domain_min, domain_max = mapper.domain_limits
                eventpos = getattr(event, direction)
                origpos = self._original_xy[ndx]
                screenlow, screenhigh = mapper.screen_bounds
                # Screen-space pan distance, scaled by the tool's speed.
                screendelta = self.speed * (eventpos - origpos)
                #if getattr(plot, direction + "_direction", None) == "flipped":
                #    screendelta = -screendelta
                # Map the shifted screen bounds back into data space.
                newlow = mapper.map_data(screenlow - screendelta)
                newhigh = mapper.map_data(screenhigh - screendelta)
                # Don't set the range in this dimension if the panning
                # would exceed the domain limits.
                # To do this offset properly, we would need to iteratively
                # solve for a root using map_data on successive trial
                # values.  As a first approximation, we're just going to
                # use a linear approximation, which works perfectly for
                # linear mappers (which is used 99% of the time).
                if domain_min is None:
                    if self.restrict_to_data:
                        # Fall back to the extent of the plotted data.
                        domain_min = min([source.get_data().min() for source in range.sources])
                    else:
                        # ``inf`` is assumed to come from a numpy import at
                        # module level -- TODO confirm outside this view.
                        domain_min = -inf
                if domain_max is None:
                    if self.restrict_to_data:
                        domain_max = max([source.get_data().max() for source in range.sources])
                    else:
                        domain_max = inf
                if (newlow <= domain_min) and (newhigh >= domain_max):
                    # Don't do anything; effectively, freeze the pan
                    continue
                if newlow <= domain_min:
                    delta = newhigh - newlow
                    newlow = domain_min
                    # Don't let the adjusted newhigh exceed domain_max; this
                    # can happen with a nonlinear mapper.
                    newhigh = min(domain_max, domain_min + delta)
                elif newhigh >= domain_max:
                    delta = newhigh - newlow
                    newhigh = domain_max
                    # Don't let the adjusted newlow go below domain_min; this
                    # can happen with a nonlinear mapper.
                    newlow = max(domain_min, domain_max - delta)
                # Use .set_bounds() so that we don't generate two range_changed
                # events on the DataRange
                range.set_bounds(newlow, newhigh)
        event.handled = True
        # Re-anchor so the next move event pans relative to this position.
        self._original_xy = (event.x, event.y)
        plot.request_redraw()
        return
def panning_mouse_leave(self, event):
""" Handles the mouse leaving the plot when the tool is in the 'panning'
state.
Ends panning.
"""
return self._end_pan(event)
def _start_pan(self, event, capture_mouse=True):
self._original_xy = (event.x, event.y)
if self.constrain_key is not None:
if getattr(event, self.constrain_key + "_down"):
self.constrain = True
self._auto_constrain = True
self.constrain_direction = None
self.event_state = "panning"
if capture_mouse:
event.window.set_pointer(self.drag_pointer)
event.window.set_mouse_owner(self, event.net_transform())
event.handled = True
return
def _end_pan(self, event):
if self._auto_constrain:
self.constrain = False
self.constrain_direction = None
self.event_state = "normal"
event.window.set_pointer("arrow")
if event.window.mouse_owner == self:
event.window.set_mouse_owner(None)
event.handled = True
return
# EOF
| bsd-3-clause |
multani/sonata | sonata/pluginsystem.py | 2 | 7574 | # Copyright 2006-2009 Scott Horowitz <stonecrest@gmail.com>
# Copyright 2009-2014 Jonathan Ballet <jon@multani.info>
#
# This file is part of Sonata.
#
# Sonata is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sonata is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sonata. If not, see <http://www.gnu.org/licenses/>.
import configparser
from io import StringIO
import logging
import os
import pkgutil
import re
import sonata.plugins
def find_plugin_dirs():
    """Return the sonata-specific plugin directories, user dir first."""
    user_dir = os.path.expanduser('~/.config/sonata/plugins')
    system_dir = '/usr/local/lib/sonata/plugins'
    return [user_dir, system_dir]
# Make ``sonata.plugins`` behave like a namespace package: first pick up
# any ``sonata/plugins`` directories found along sys.path ...
sonata.plugins.__path__ = pkgutil.extend_path(sonata.plugins.__path__,
                                              sonata.plugins.__name__)
# ... then prepend the sonata-specific locations (user config dir and the
# system-wide install dir) so plugins there take precedence.
sonata.plugins.__path__ = find_plugin_dirs() + sonata.plugins.__path__
class Plugin:
    """Metadata and lazy loader for one on-disk plugin.

    *info* is a ConfigParser pre-loaded with the plugin's embedded info
    block; *load* is a zero-argument callable that imports and returns
    the plugin module on first use.
    """

    def __init__(self, path, name, info, load):
        self.logger = logging.getLogger(__name__)
        self.path = path
        self.name = name
        self._info = info
        self._load = load

        # Obligatory plugin info: a malformed info file raises here, and
        # the caller (PluginSystem.find_plugins) logs and skips the file.
        format_value = info.get('plugin', 'plugin_format')
        self.plugin_format = tuple(map(int, format_value.split(',')))
        self.longname = info.get('plugin', 'name')
        versionvalue = info.get('plugin', 'version')
        self.version = tuple(map(int, versionvalue.split(',')))
        self.version_string = '.'.join(map(str, self.version))
        self._capabilities = dict(info.items('capabilities'))

        # Optional plugin info.  The bare ``except:`` clauses of the
        # original were narrowed to NoOptionError so unrelated errors
        # (and KeyboardInterrupt/SystemExit) are no longer swallowed.
        try:
            self.description = info.get('plugin', 'description')
        except configparser.NoOptionError:
            self.description = ""
        try:
            self.author = info.get('plugin', 'author')
        except configparser.NoOptionError:
            self.author = ""
        try:
            self.author_email = info.get('plugin', 'author_email')
        except configparser.NoOptionError:
            self.author_email = ""
        try:
            self.iconurl = info.get('plugin', 'icon')
        except configparser.NoOptionError:
            self.iconurl = None
        try:
            self.url = info.get('plugin', 'url')
        except configparser.NoOptionError:
            self.url = ""

        # State: the module is imported lazily on first feature access.
        self._module = None
        self._enabled = False

    def get_enabled(self):
        """Return True if the plugin is currently enabled."""
        return self._enabled

    def get_features(self, capability):
        """Return this plugin's feature objects for *capability*.

        Returns [] when the plugin is disabled, does not declare the
        capability, fails to import, or a declared feature is broken.
        """
        if not self._enabled or capability not in self._capabilities:
            return []
        module = self._get_module()
        if not module:
            return []
        features = self._capabilities[capability]
        try:
            return [self.get_feature(module, f)
                    for f in features.split(', ')]
        except Exception:
            # ``Exception`` (not a bare except) so KeyboardInterrupt and
            # SystemExit still propagate; a broken plugin must not take
            # the application down.
            self.logger.exception("Failed to access features in plugin %s.",
                                  self.name)
            return []

    def get_feature(self, module, feature):
        """Resolve a dotted *feature* path (e.g. ``foo.bar``) on *module*."""
        obj = module
        for name in feature.split('.'):
            obj = getattr(obj, name)
        return obj

    def _get_module(self):
        """Import the plugin module on first use; None if loading fails."""
        if not self._module:
            try:
                self._module = self._load()
            except Exception:
                self.logger.exception("Failed to load plugin %s.", self.name)
                return None
        return self._module

    def force_loaded(self):
        """Eagerly import the module; return True if it is (now) loaded."""
        return bool(self._get_module())
class BuiltinPlugin(Plugin):
    """A Plugin whose implementation object ships with sonata itself."""

    def __init__(self, name, longname, description, capabilities, object):
        # Built-ins skip the on-disk info file entirely, so fill in the
        # attributes Plugin.__init__ would normally parse out of it.
        self.name = name
        self.longname = longname
        self.description = description
        self.version_string = "Built-in"
        self.author = ""
        self.author_email = ""
        self.url = ""
        self.iconurl = None
        self._capabilities = capabilities
        self._module = object
        self._enabled = False

    def _get_module(self):
        # The module object is supplied up front; nothing to load lazily.
        return self._module
class PluginSystem:
    """Registry of discovered plugins and capability subscriptions."""

    def __init__(self):
        self.logger = logging.getLogger(__name__)
        self.plugin_infos = []
        # (capability, enable_cb, disable_cb) triples, in registration order.
        self.notifys = []

    def get_info(self):
        """Return the list of known Plugin objects."""
        return self.plugin_infos

    def get(self, capability):
        """Return (plugin, feature) pairs for every enabled feature of
        *capability* across all plugins.
        """
        return [(plugin, feature)
                for plugin in self.plugin_infos
                for feature in plugin.get_features(capability)]

    def get_from_name(self, name):
        """Return the plugin whose long name is *name*, or None."""
        for plugin in self.plugin_infos:
            if plugin.longname == name:
                return plugin
        return None

    def notify_of(self, capability, enable_cb, disable_cb):
        """Register callbacks for *capability* and immediately fire
        *enable_cb* for features that are already enabled.
        """
        self.notifys.append((capability, enable_cb, disable_cb))
        for plugin, feature in self.get(capability):
            enable_cb(plugin, feature)

    def set_enabled(self, plugin, state):
        """Enable or disable *plugin*, notifying subscribers per feature."""
        if plugin._enabled != state:
            # make notify callbacks for each feature of the plugin:
            plugin._enabled = True  # XXX for plugin.get_features
            # Process the notifys in registration order when enabling,
            # reverse order when disabling.
            order = (lambda x: x) if state else reversed
            for capability, enable, disable in order(self.notifys):
                callback = enable if state else disable
                for feature in plugin.get_features(capability):
                    callback(plugin, feature)
            plugin._enabled = state

    def find_plugins(self):
        """Scan the plugin directories and register every info block found."""
        for path in sonata.plugins.__path__:
            if not os.path.isdir(path):
                continue
            for entry in os.listdir(path):
                if entry.startswith('_'):
                    continue  # __init__.py etc.
                if entry.endswith('.py'):
                    try:
                        self.load_info(path, entry[:-3])
                    except Exception:
                        # Narrowed from a bare ``except:`` so Ctrl-C and
                        # SystemExit still propagate.
                        self.logger.exception("Failed to load info: %s",
                                              os.path.join(path, entry))

    def load_info(self, path, name):
        """Parse the embedded info block of ``path/name.py`` and register
        the plugin, keeping only the newest version of each long name.
        """
        # ``with`` guarantees the handle is closed even when parsing below
        # raises (the original leaked the file object on error).
        with open(os.path.join(path, name + ".py"), "rt",
                  encoding="utf-8") as f:
            text = f.read()
        pat = r'^### BEGIN PLUGIN INFO.*((\n#.*)*)\n### END PLUGIN INFO'
        # No match raises AttributeError here; find_plugins logs and skips.
        infotext = re.search(pat, text, re.MULTILINE).group(1)
        uncommented = '\n'.join(line[1:].strip()
                                for line in infotext.split('\n'))
        # SafeConfigParser and readfp() are deprecated aliases of
        # ConfigParser/read_file() since Python 3.2.
        info = configparser.ConfigParser()
        info.read_file(StringIO(uncommented))
        plugin = Plugin(path, name, info,
                        lambda: self.import_plugin(name))
        # add only newest version of each name
        old_plugin = self.get_from_name(plugin.longname)
        if old_plugin:
            if plugin.version > old_plugin.version:
                self.plugin_infos.remove(old_plugin)
                self.plugin_infos.append(plugin)
        else:
            self.plugin_infos.append(plugin)
        if not info.options('capabilities'):
            self.logger.warning("No capabilities in plugin %s.", name)

    def import_plugin(self, name):
        # XXX load from a .py file - no .pyc etc.
        __import__('sonata.plugins', {}, {}, [name], 0)
        plugin = getattr(sonata.plugins, name)
        return plugin
# Module-level singleton: every importer shares this one plugin registry.
pluginsystem = PluginSystem()
| gpl-3.0 |
pgmillon/ansible | test/units/module_utils/facts/test_collectors.py | 78 | 16892 | # unit tests for ansible fact collectors
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
from units.compat.mock import Mock, patch
from . base import BaseFactsTest
from ansible.module_utils.facts import collector
from ansible.module_utils.facts.system.apparmor import ApparmorFactCollector
from ansible.module_utils.facts.system.caps import SystemCapabilitiesFactCollector
from ansible.module_utils.facts.system.cmdline import CmdLineFactCollector
from ansible.module_utils.facts.system.distribution import DistributionFactCollector
from ansible.module_utils.facts.system.dns import DnsFactCollector
from ansible.module_utils.facts.system.env import EnvFactCollector
from ansible.module_utils.facts.system.fips import FipsFactCollector
from ansible.module_utils.facts.system.pkg_mgr import PkgMgrFactCollector, OpenBSDPkgMgrFactCollector
from ansible.module_utils.facts.system.platform import PlatformFactCollector
from ansible.module_utils.facts.system.python import PythonFactCollector
from ansible.module_utils.facts.system.selinux import SelinuxFactCollector
from ansible.module_utils.facts.system.service_mgr import ServiceMgrFactCollector
from ansible.module_utils.facts.system.ssh_pub_keys import SshPubKeyFactCollector
from ansible.module_utils.facts.system.user import UserFactCollector
from ansible.module_utils.facts.virtual.base import VirtualCollector
from ansible.module_utils.facts.network.base import NetworkCollector
from ansible.module_utils.facts.hardware.base import HardwareCollector
class CollectorException(Exception):
    """Marker exception raised by ExceptionThrowingCollector in these tests."""
    pass
class ExceptionThrowingCollector(collector.BaseFactCollector):
    """Fact collector stub whose collect() always raises, for failure tests."""

    name = 'exc_throwing'

    def __init__(self, collectors=None, namespace=None, exception=None):
        super(ExceptionThrowingCollector, self).__init__(collectors, namespace)
        # Default to a generic CollectorException unless the test supplies one.
        self._exception = exception or CollectorException('collection failed')

    def collect(self, module=None, collected_facts=None):
        raise self._exception
class TestExceptionThrowingCollector(BaseFactsTest):
    """Verify collector exceptions propagate out of both collect entry points."""

    __test__ = True
    gather_subset = ['exc_throwing']
    valid_subsets = ['exc_throwing']
    collector_class = ExceptionThrowingCollector

    def test_collect(self):
        """collect() must surface the collector's exception unchanged."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        self.assertRaises(CollectorException,
                          fact_collector.collect,
                          module=module,
                          collected_facts=self.collected_facts)

    def test_collect_with_namespace(self):
        """collect_with_namespace() must also surface the exception."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        self.assertRaises(CollectorException,
                          fact_collector.collect_with_namespace,
                          module=module,
                          collected_facts=self.collected_facts)
class TestApparmorFacts(BaseFactsTest):
    """Collection test for the ``apparmor`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'apparmor']
    valid_subsets = ['apparmor']
    fact_namespace = 'ansible_apparmor'
    collector_class = ApparmorFactCollector

    def test_collect(self):
        """Beyond the base checks, the apparmor dict must carry a status."""
        facts_dict = super(TestApparmorFacts, self).test_collect()
        self.assertIn('status', facts_dict['apparmor'])
class TestCapsFacts(BaseFactsTest):
    """Collection test for the ``caps`` (capsh) fact subset."""

    __test__ = True
    gather_subset = ['!all', 'caps']
    valid_subsets = ['caps']
    fact_namespace = 'ansible_system_capabilities'
    collector_class = SystemCapabilitiesFactCollector

    def _mock_module(self):
        """Mock a module whose capsh binary reports 'Current: =ep'."""
        mock_module = Mock()
        mock_module.params = {'gather_subset': self.gather_subset,
                              'gather_timeout': 10,
                              'filter': '*'}
        mock_module.get_bin_path = Mock(return_value='/usr/sbin/capsh')
        mock_module.run_command = Mock(return_value=(0, 'Current: =ep', ''))
        return mock_module
class TestCmdLineFacts(BaseFactsTest):
    """Tests for parsing /proc/cmdline into the ``cmdline`` fact."""

    __test__ = True
    gather_subset = ['!all', 'cmdline']
    valid_subsets = ['cmdline']
    fact_namespace = 'ansible_cmdline'
    collector_class = CmdLineFactCollector

    def test_parse_proc_cmdline_uefi(self):
        """Backslashes in UEFI initrd paths must survive parsing."""
        uefi_cmdline = r'initrd=\70ef65e1a04a47aea04f7b5145ea3537\4.10.0-19-generic\initrd root=UUID=50973b75-4a66-4bf0-9764-2b7614489e64 ro quiet'
        expected = {'initrd': r'\70ef65e1a04a47aea04f7b5145ea3537\4.10.0-19-generic\initrd',
                    'root': 'UUID=50973b75-4a66-4bf0-9764-2b7614489e64',
                    'quiet': True,
                    'ro': True}
        fact_collector = self.collector_class()
        facts_dict = fact_collector._parse_proc_cmdline(uefi_cmdline)
        self.assertDictEqual(facts_dict, expected)

    def test_parse_proc_cmdline_fedora(self):
        """Valueless flags become True; key=value pairs keep their value."""
        cmdline_fedora = r'BOOT_IMAGE=/vmlinuz-4.10.16-200.fc25.x86_64 root=/dev/mapper/fedora-root ro rd.lvm.lv=fedora/root rd.luks.uuid=luks-c80b7537-358b-4a07-b88c-c59ef187479b rd.lvm.lv=fedora/swap rhgb quiet LANG=en_US.UTF-8'  # noqa

        expected = {'BOOT_IMAGE': '/vmlinuz-4.10.16-200.fc25.x86_64',
                    'LANG': 'en_US.UTF-8',
                    'quiet': True,
                    'rd.luks.uuid': 'luks-c80b7537-358b-4a07-b88c-c59ef187479b',
                    'rd.lvm.lv': 'fedora/swap',
                    'rhgb': True,
                    'ro': True,
                    'root': '/dev/mapper/fedora-root'}

        fact_collector = self.collector_class()
        facts_dict = fact_collector._parse_proc_cmdline(cmdline_fedora)
        self.assertDictEqual(facts_dict, expected)

    def test_parse_proc_cmdline_dup_console(self):
        """Documents (not endorses) last-wins behavior for repeated keys."""
        example = r'BOOT_IMAGE=/boot/vmlinuz-4.4.0-72-generic root=UUID=e12e46d9-06c9-4a64-a7b3-60e24b062d90 ro console=tty1 console=ttyS0'

        # FIXME: Two 'console' keywords? Using a dict for the fact value here loses info. Currently the 'last' one wins
        expected = {'BOOT_IMAGE': '/boot/vmlinuz-4.4.0-72-generic',
                    'root': 'UUID=e12e46d9-06c9-4a64-a7b3-60e24b062d90',
                    'ro': True,
                    'console': 'ttyS0'}

        fact_collector = self.collector_class()
        facts_dict = fact_collector._parse_proc_cmdline(example)

        # TODO: fails because we lose a 'console'
        self.assertDictEqual(facts_dict, expected)
class TestDistributionFacts(BaseFactsTest):
    """Collection test for the ``distribution`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'distribution']
    valid_subsets = ['distribution']
    fact_namespace = 'ansible_distribution'
    collector_class = DistributionFactCollector
class TestDnsFacts(BaseFactsTest):
    """Collection test for the ``dns`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'dns']
    valid_subsets = ['dns']
    fact_namespace = 'ansible_dns'
    collector_class = DnsFactCollector
class TestEnvFacts(BaseFactsTest):
    """Collection test for the ``env`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'env']
    valid_subsets = ['env']
    fact_namespace = 'ansible_env'
    collector_class = EnvFactCollector

    def test_collect(self):
        """The environment facts should at least contain HOME."""
        facts_dict = super(TestEnvFacts, self).test_collect()
        self.assertIn('HOME', facts_dict['env'])
class TestFipsFacts(BaseFactsTest):
    """Collection test for the ``fips`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'fips']
    valid_subsets = ['fips']
    fact_namespace = 'ansible_fips'
    collector_class = FipsFactCollector
class TestHardwareCollector(BaseFactsTest):
    """Collection test for the ``hardware`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'hardware']
    valid_subsets = ['hardware']
    fact_namespace = 'ansible_hardware'
    collector_class = HardwareCollector
    # Hardware collection branches on architecture; pre-seed it.
    collected_facts = {'ansible_architecture': 'x86_64'}
class TestNetworkCollector(BaseFactsTest):
    """Collection test for the ``network`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'network']
    valid_subsets = ['network']
    fact_namespace = 'ansible_network'
    collector_class = NetworkCollector
class TestPkgMgrFacts(BaseFactsTest):
    """Collection test for the ``pkg_mgr`` fact subset on a Fedora host."""

    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = PkgMgrFactCollector
    # The collector branches on distribution facts; simulate Fedora 28.
    collected_facts = {
        "ansible_distribution": "Fedora",
        "ansible_distribution_major_version": "28",
        "ansible_os_family": "RedHat"
    }

    def test_collect(self):
        """The collector must always report some pkg_mgr value."""
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
def _sanitize_os_path_apt_get(path):
if path == '/usr/bin/apt-get':
return True
else:
return False
class TestPkgMgrFactsAptFedora(BaseFactsTest):
    """pkg_mgr detection on a Fedora host where apt-get is also installed."""

    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = PkgMgrFactCollector
    collected_facts = {
        "ansible_distribution": "Fedora",
        "ansible_distribution_major_version": "28",
        "ansible_os_family": "RedHat",
        "ansible_pkg_mgr": "apt"
    }

    # Pretend that only /usr/bin/apt-get exists on the filesystem.
    @patch('ansible.module_utils.facts.system.pkg_mgr.os.path.exists', side_effect=_sanitize_os_path_apt_get)
    def test_collect(self, mock_os_path_exists):
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
class TestOpenBSDPkgMgrFacts(BaseFactsTest):
    """OpenBSD hosts must always resolve pkg_mgr to openbsd_pkg."""

    __test__ = True
    gather_subset = ['!all', 'pkg_mgr']
    valid_subsets = ['pkg_mgr']
    fact_namespace = 'ansible_pkgmgr'
    collector_class = OpenBSDPkgMgrFactCollector

    def test_collect(self):
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertIn('pkg_mgr', facts_dict)
        self.assertEqual(facts_dict['pkg_mgr'], 'openbsd_pkg')
class TestPlatformFactCollector(BaseFactsTest):
    """Collection test for the ``platform`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'platform']
    valid_subsets = ['platform']
    fact_namespace = 'ansible_platform'
    collector_class = PlatformFactCollector
class TestPythonFactCollector(BaseFactsTest):
    """Collection test for the ``python`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'python']
    valid_subsets = ['python']
    fact_namespace = 'ansible_python'
    collector_class = PythonFactCollector
class TestSelinuxFacts(BaseFactsTest):
    """Collection tests for the ``selinux`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'selinux']
    valid_subsets = ['selinux']
    fact_namespace = 'ansible_selinux'
    collector_class = SelinuxFactCollector

    def test_no_selinux(self):
        """Without the selinux python lib, status must say it is missing."""
        with patch('ansible.module_utils.facts.system.selinux.HAVE_SELINUX', False):
            module = self._mock_module()
            fact_collector = self.collector_class()
            facts_dict = fact_collector.collect(module=module)
            self.assertIsInstance(facts_dict, dict)
            self.assertEqual(facts_dict['selinux']['status'], 'Missing selinux Python library')
            return facts_dict
class TestServiceMgrFacts(BaseFactsTest):
    """Fallback-path tests for service manager detection."""

    __test__ = True
    gather_subset = ['!all', 'service_mgr']
    valid_subsets = ['service_mgr']
    fact_namespace = 'ansible_service_mgr'
    collector_class = ServiceMgrFactCollector

    # TODO: dedupe some of this test code

    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
    def test_no_proc1(self, mock_gfc):
        # no /proc/1/comm, ps returns non-0
        # should fallback to 'service'
        module = self._mock_module()
        module.run_command = Mock(return_value=(1, '', 'wat'))
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['service_mgr'], 'service')

    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
    def test_no_proc1_ps_random_init(self, mock_gfc):
        # no /proc/1/comm, ps returns '/sbin/sys11' which we dont know
        # should end up return 'sys11'
        module = self._mock_module()
        module.run_command = Mock(return_value=(0, '/sbin/sys11', ''))
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['service_mgr'], 'sys11')

    @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
    def test_clowncar(self, mock_gfc):
        # no /proc/1/comm, ps fails, distro and system are clowncar
        # should end up return 'service' (the generic fallback)
        module = self._mock_module()
        module.run_command = Mock(return_value=(1, '', ''))
        collected_facts = {'distribution': 'clowncar',
                           'system': 'ClownCarOS'}
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module,
                                            collected_facts=collected_facts)
        self.assertIsInstance(facts_dict, dict)
        self.assertEqual(facts_dict['service_mgr'], 'service')

    # TODO: reenable these tests when we can mock more easily

#     @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
#     def test_sunos_fallback(self, mock_gfc):
#         # no /proc/1/comm, ps fails, 'system' is SunOS
#         # should end up return 'smf'?
#         module = self._mock_module()
#         # FIXME: the result here is a kluge to at least cover more of service_mgr.collect
#         # TODO: remove
#         # FIXME: have to force a pid for results here to get into any of the system/distro checks
#         module.run_command = Mock(return_value=(1, ' 37 ', ''))
#         collected_facts = {'system': 'SunOS'}
#         fact_collector = self.collector_class(module=module)
#         facts_dict = fact_collector.collect(collected_facts=collected_facts)
#         print('facts_dict: %s' % facts_dict)
#         self.assertIsInstance(facts_dict, dict)
#         self.assertEqual(facts_dict['service_mgr'], 'smf')

#     @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
#     def test_aix_fallback(self, mock_gfc):
#         # no /proc/1/comm, ps fails, 'system' is SunOS
#         # should end up return 'smf'?
#         module = self._mock_module()
#         module.run_command = Mock(return_value=(1, '', ''))
#         collected_facts = {'system': 'AIX'}
#         fact_collector = self.collector_class(module=module)
#         facts_dict = fact_collector.collect(collected_facts=collected_facts)
#         print('facts_dict: %s' % facts_dict)
#         self.assertIsInstance(facts_dict, dict)
#         self.assertEqual(facts_dict['service_mgr'], 'src')

#     @patch('ansible.module_utils.facts.system.service_mgr.get_file_content', return_value=None)
#     def test_linux_fallback(self, mock_gfc):
#         # no /proc/1/comm, ps fails, 'system' is SunOS
#         # should end up return 'smf'?
#         module = self._mock_module()
#         module.run_command = Mock(return_value=(1, ' 37 ', ''))
#         collected_facts = {'system': 'Linux'}
#         fact_collector = self.collector_class(module=module)
#         facts_dict = fact_collector.collect(collected_facts=collected_facts)
#         print('facts_dict: %s' % facts_dict)
#         self.assertIsInstance(facts_dict, dict)
#         self.assertEqual(facts_dict['service_mgr'], 'sdfadf')
class TestSshPubKeyFactCollector(BaseFactsTest):
    """Collection test for the ``ssh_pub_keys`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'ssh_pub_keys']
    valid_subsets = ['ssh_pub_keys']
    # NOTE(review): 'leys' looks like a typo for 'keys'; left unchanged
    # because the value may be compared elsewhere -- confirm before fixing.
    fact_namespace = 'ansible_ssh_pub_leys'
    collector_class = SshPubKeyFactCollector
class TestUserFactCollector(BaseFactsTest):
    """Collection test for the ``user`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'user']
    valid_subsets = ['user']
    fact_namespace = 'ansible_user'
    collector_class = UserFactCollector
class TestVirtualFacts(BaseFactsTest):
    """Collection test for the ``virtual`` fact subset."""

    __test__ = True
    gather_subset = ['!all', 'virtual']
    valid_subsets = ['virtual']
    fact_namespace = 'ansible_virtual'
    collector_class = VirtualCollector
| gpl-3.0 |
dhanunjaya/neutron | neutron/plugins/ml2/drivers/brocade/mechanism_brocade.py | 42 | 1534 | # Copyright 2014 Brocade Communications System, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Implentation of Brocade ML2 Mechanism driver for ML2 Plugin."""
from oslo_config import cfg
# Option schema for the [ml2_brocade] section of the ML2 config file.
# NOTE(review): ``_`` is assumed to be the translation hook neutron
# installs as a builtin at startup; it is not defined in this module.
ML2_BROCADE = [cfg.StrOpt('address', default='',
                          help=_('The address of the host to SSH to')),
               cfg.StrOpt('username', default='admin',
                          help=_('The SSH username to use')),
               # secret=True keeps the password out of logged config dumps.
               cfg.StrOpt('password', default='password', secret=True,
                          help=_('The SSH password to use')),
               cfg.StrOpt('physical_networks', default='',
                          help=_('Allowed physical networks')),
               cfg.StrOpt('ostype', default='NOS',
                          help=_('OS Type of the switch')),
               cfg.StrOpt('osversion', default='4.0.0',
                          help=_('OS Version number'))
               ]
# Expose the options as cfg.CONF.ml2_brocade.<name>.
cfg.CONF.register_opts(ML2_BROCADE, "ml2_brocade")
| apache-2.0 |
agile-geoscience/bruges | bruges/transform/test/depthtime_test.py | 2 | 1941 | # -*- coding: utf-8 -*-
"""
Tests.
"""
import unittest
import numpy as np
from bruges.transform import time_to_depth, depth_to_time
class TimeDepthTest(unittest.TestCase):
    """
    Tests the basic functionality of the time to depth and depth to
    time conversions in bruges.
    """

    def test_depthtotime(self):
        """A velocity step in depth should land at the predicted TWT sample."""
        data = np.zeros((100, 100))
        vmodel = np.zeros((100, 100))
        # Two-layer model: amplitude and velocity both change at row 50.
        data[0:50, :] += 100.0
        data[50:, :] += 200.0
        vmodel[0:50, :] += 1500.0
        vmodel[50:, :] += 3000.0
        dt = 0.001
        dz = 1.0
        # Expected time sample of the interface for the slow upper layer.
        face_change = int(np.floor(((49 * dz) / 1500.0) / dt))
        output = depth_to_time(data, vmodel, dz, dt, twt=False)
        # The amplitude jump (100 -> 200) must occur across that sample.
        self.assertTrue((output[face_change+1, 50] -
                         output[face_change, 50]) == 100)

    def test_timetodepth(self):
        """A velocity step in time should land at the predicted depth sample."""
        data = np.zeros((100, 100))
        vmodel = np.zeros((100, 100))
        data[0:50, :] += 100.0
        data[50:, :] += 200.0
        vmodel[0:50, :] += 1500.0
        vmodel[50:, :] += 5000.0
        dt = 0.001
        dz = 1.0
        output = time_to_depth(data, vmodel, dt, dz, twt=False)
        # Expected depth sample of the interface for the upper layer.
        face_change = int(np.floor(((49.5 * dt) * 1500.0) / dz))
        self.assertTrue((output[face_change+1, 50] -
                         output[face_change, 50]) == 100)

    def test_recip(self):
        """Round-trip depth->time->depth conversion (no assertions yet)."""
        data = np.zeros((100, 100))
        vmodel = np.zeros((100, 100))
        data[0:50, :] += 100.0
        data[50:, :] += 200.0
        vmodel[0:50, :] += 1500.0
        vmodel[50:, :] += 1800.0
        dt = 0.001
        dz = 1.
        out1 = depth_to_time(data, vmodel, dz, dt)
        v2 = depth_to_time(vmodel, vmodel, dz, dt)
        out2 = time_to_depth(out1, v2, dt, dz)
        # This test is not finished!  out2 is never compared against data.
if __name__ == '__main__':
    # Allow running this module's tests directly from the command line.
    loader = unittest.TestLoader()
    runner = unittest.TextTestRunner(verbosity=2)
    runner.run(loader.loadTestsFromTestCase(TimeDepthTest))
| apache-2.0 |
molobrakos/home-assistant | homeassistant/components/avion/light.py | 7 | 3809 | """Support for Avion dimmers."""
import importlib
import logging
import time
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS, PLATFORM_SCHEMA, SUPPORT_BRIGHTNESS, Light)
from homeassistant.const import (
CONF_API_KEY, CONF_DEVICES, CONF_ID, CONF_NAME, CONF_PASSWORD,
CONF_USERNAME)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)

# Avion devices are dimmers: brightness is the only supported feature.
SUPPORT_AVION_LED = SUPPORT_BRIGHTNESS

# Schema for one manually-configured dimmer (keyed by MAC in ``devices``).
DEVICE_SCHEMA = vol.Schema({
    vol.Required(CONF_API_KEY): cv.string,
    vol.Optional(CONF_ID): cv.positive_int,
    vol.Optional(CONF_NAME): cv.string,
})

# Platform config: either cloud credentials for discovery, explicit
# ``devices`` entries, or both.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_DEVICES, default={}): {cv.string: DEVICE_SCHEMA},
    vol.Optional(CONF_USERNAME): cv.string,
    vol.Optional(CONF_PASSWORD): cv.string,
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up an Avion switch."""
    # pylint: disable=no-member
    avion = importlib.import_module('avion')

    lights = []
    # Cloud path: with credentials, discover every device on the account.
    if CONF_USERNAME in config and CONF_PASSWORD in config:
        devices = avion.get_devices(
            config[CONF_USERNAME], config[CONF_PASSWORD])
        for device in devices:
            lights.append(AvionLight(device))

    # Manual path: one entry per configured MAC; connect=False defers the
    # Bluetooth connection until the first command.
    for address, device_config in config[CONF_DEVICES].items():
        device = avion.Avion(
            mac=address,
            passphrase=device_config[CONF_API_KEY],
            name=device_config.get(CONF_NAME),
            object_id=device_config.get(CONF_ID),
            connect=False)
        lights.append(AvionLight(device))

    add_entities(lights)
class AvionLight(Light):
    """Representation of an Avion light."""

    def __init__(self, device):
        """Initialize the light from an ``avion`` device object."""
        self._name = device.name
        self._address = device.mac
        # Assume full brightness until first commanded; the hardware
        # cannot be read back (see assumed_state).
        self._brightness = 255
        self._state = False
        self._switch = device

    @property
    def unique_id(self):
        """Return the ID of this light."""
        # The Bluetooth MAC address is stable and unique per device.
        return self._address

    @property
    def name(self):
        """Return the name of the device if any."""
        return self._name

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    @property
    def brightness(self):
        """Return the brightness of this light between 0..255."""
        return self._brightness

    @property
    def supported_features(self):
        """Flag supported features."""
        return SUPPORT_AVION_LED

    @property
    def should_poll(self):
        """Don't poll."""
        return False

    @property
    def assumed_state(self):
        """We can't read the actual state, so assume it matches."""
        return True

    def set_state(self, brightness):
        """Set the state of this lamp to the provided brightness.

        Retries for up to ~10 seconds, reconnecting on each failure;
        returns False on timeout, True on success.  NOTE(review): both
        turn_on and turn_off ignore this return value.
        """
        # pylint: disable=no-member
        avion = importlib.import_module('avion')

        # Bluetooth LE is unreliable, and the connection may drop at any
        # time.  Make an effort to re-establish the link.
        initial = time.monotonic()
        while True:
            if time.monotonic() - initial >= 10:
                return False
            try:
                self._switch.set_brightness(brightness)
                break
            except avion.AvionException:
                self._switch.connect()
        return True

    def turn_on(self, **kwargs):
        """Turn the specified or all lights on."""
        brightness = kwargs.get(ATTR_BRIGHTNESS)

        if brightness is not None:
            self._brightness = brightness

        self.set_state(self.brightness)
        self._state = True

    def turn_off(self, **kwargs):
        """Turn the specified or all lights off."""
        # Brightness 0 switches the dimmer off; remembered brightness is kept.
        self.set_state(0)
        self._state = False
| apache-2.0 |
fle-internal/fle-home | fle_site/apps/about/views.py | 1 | 1524 | import json
import os
import random
from django.conf import settings
from annoying.decorators import render_to
from models import TeamMember, BoardMember, PressArticle, Internship, Job, SupportingOrganization, Translator
@render_to("about/team.html")
def team(request):
    """Render the team page with current members and alumni."""
    return {
        "team_members": TeamMember.objects.current(),
        "alumni": TeamMember.objects.alumni(),
    }
@render_to("about/board.html")
def board(request):
    """Render the board page with board members and advisors."""
    return {
        "board_members": BoardMember.objects.board(),
        "advisor_members": BoardMember.objects.advisor()
    }
@render_to("about/press.html")
def press(request):
    """Render press coverage, newest article first."""
    return {
        "press_articles": PressArticle.objects.order_by('-publish_date')
    }
@render_to("about/internships.html")
def internships(request):
    """Render the internships page with every internship listing."""
    return {
        "internships": Internship.objects.all()
    }
@render_to("about/jobs.html")
def jobs(request):
    """Render the jobs page with currently-active postings."""
    return {
        "jobs": Job.objects.active()
    }
@render_to("about/grants.html")
def grants(request):
    """Render the grants page.

    NOTE(review): this passes the same context as ``jobs`` (active Job
    objects under the "jobs" key) -- looks like a copy/paste; confirm
    the grants template actually consumes ``jobs``.
    """
    return {
        "jobs": Job.objects.active()
    }
@render_to("about/supporters.html")
def supporters(request):
    """Render the supporters page, split into sponsors and partners."""
    sponsors = SupportingOrganization.objects.filter(organization_type__title="sponsor")
    partners = SupportingOrganization.objects.filter(organization_type__title="partner")
    return {
        "sponsors": sponsors,
        "partners": partners
    }
@render_to("about/translators.html")
def translators(request):
    """Render the translators page in a random order per request."""
    return {
        "translators": Translator.objects.order_by("?")
    }
| mit |
rruebner/odoo | addons/l10n_ro/__openerp__.py | 77 | 2045 | # -*- encoding: utf-8 -*-
##############################################################################
#
# @author - Fekete Mihai <feketemihai@gmail.com>
# Copyright (C) 2011 TOTAL PC SYSTEMS (http://www.www.erpsystems.ro).
# Copyright (C) 2009 (<http://www.filsystem.ro>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# OpenERP module manifest: a dictionary literal evaluated by the addons
# loader, describing the Romanian accounting localization module.
{
    "name" : "Romania - Accounting",
    "version" : "1.0",
    "author" : "TOTAL PC SYSTEMS",
    "website": "http://www.erpsystems.ro",
    "category" : "Localization/Account Charts",
    # Requires the generic chart-of-accounts and VAT-number modules.
    "depends" : ['account','account_chart','base_vat'],
    "description": """
This is the module to manage the Accounting Chart, VAT structure, Fiscal Position and Tax Mapping.
It also adds the Registration Number for Romania in OpenERP.
================================================================================================================
Romanian accounting chart and localization.
""",
    "demo" : [],
    # XML data files loaded at install time, in dependency order.
    "data" : ['partner_view.xml',
              'account_chart.xml',
              'account_tax_code_template.xml',
              'account_chart_template.xml',
              'account_tax_template.xml',
              'fiscal_position_template.xml',
              'l10n_chart_ro_wizard.xml',
              ],
    "installable": True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Distrotech/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/geos/prototypes/misc.py | 334 | 1438 | """
This module is for the miscellaneous GEOS routines, particularly the
ones that return the area, distance, and length.
"""
from ctypes import c_int, c_double, POINTER
from django.contrib.gis.geos.libgeos import GEOM_PTR, GEOS_PREPARE
from django.contrib.gis.geos.prototypes.errcheck import check_dbl, check_string
from django.contrib.gis.geos.prototypes.geom import geos_char_p
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc
__all__ = ['geos_area', 'geos_distance', 'geos_length']
### ctypes generator function ###
def dbl_from_geom(func, num_geom=1):
    """
    Attach ctypes prototypes to a GEOS routine that takes `num_geom`
    geometry pointers plus an output double passed by reference as the
    last argument, and returns an integer status code.
    """
    func.argtypes = [GEOM_PTR] * num_geom + [POINTER(c_double)]
    func.restype = c_int  # integer status code
    func.errcheck = check_dbl
    return func
### ctypes prototypes ###
# Area, distance, and length prototypes.
# Each takes geometry pointer(s) plus an output double* and returns a
# status code (see dbl_from_geom); GEOSDistance is the only binary one.
geos_area = dbl_from_geom(GEOSFunc('GEOSArea'))
geos_distance = dbl_from_geom(GEOSFunc('GEOSDistance'), num_geom=2)
geos_length = dbl_from_geom(GEOSFunc('GEOSLength'))
# Validity reason; only in GEOS 3.1+
if GEOS_PREPARE:
    geos_isvalidreason = GEOSFunc('GEOSisValidReason')
    geos_isvalidreason.argtypes = [GEOM_PTR]
    geos_isvalidreason.restype = geos_char_p  # result handled by check_string
    geos_isvalidreason.errcheck = check_string
    # Exported conditionally: the symbol does not exist on older GEOS.
    __all__.append('geos_isvalidreason')
| apache-2.0 |
1013553207/django | tests/migrations/test_commands.py | 29 | 50059 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import codecs
import importlib
import os
from django.apps import apps
from django.core.management import CommandError, call_command
from django.db import DatabaseError, connection, models
from django.db.migrations.recorder import MigrationRecorder
from django.test import ignore_warnings, mock, override_settings
from django.utils import six
from django.utils.deprecation import RemovedInDjango110Warning
from django.utils.encoding import force_text
from .models import UnicodeModel, UnserializableModel
from .test_base import MigrationTestBase
class MigrateTests(MigrationTestBase):
"""
Tests running the migrate command.
"""
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate(self):
    """
    Tests basic usage of the migrate command.

    Exercises a full lifecycle: forward to 0001, forward to the latest
    migration (after which the tribble table is gone), then back to zero.
    """
    # Make sure no tables are created
    self.assertTableNotExists("migrations_author")
    self.assertTableNotExists("migrations_tribble")
    self.assertTableNotExists("migrations_book")
    # Run the migrations to 0001 only
    call_command("migrate", "migrations", "0001", verbosity=0)
    # Make sure the right tables exist
    self.assertTableExists("migrations_author")
    self.assertTableExists("migrations_tribble")
    self.assertTableNotExists("migrations_book")
    # Run migrations all the way
    call_command("migrate", verbosity=0)
    # Make sure the right tables exist
    self.assertTableExists("migrations_author")
    self.assertTableNotExists("migrations_tribble")
    self.assertTableExists("migrations_book")
    # Unmigrate everything
    call_command("migrate", "migrations", "zero", verbosity=0)
    # Make sure it's all gone
    self.assertTableNotExists("migrations_author")
    self.assertTableNotExists("migrations_tribble")
    self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_initial_false"})
def test_migrate_initial_false(self):
"""
`Migration.initial = False` skips fake-initial detection.
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Fake rollback
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure fake-initial detection does not run
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", fake_initial=True, verbosity=0)
call_command("migrate", "migrations", "0001", fake=True, verbosity=0)
# Real rollback
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_fake_initial(self):
"""
#24184 - Tests that --fake-initial only works if all tables created in
the initial migration of an app exists
"""
# Make sure no tables are created
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run the migrations to 0001 only
call_command("migrate", "migrations", "0001", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", "0001", verbosity=0)
# Run initial migration with an explicit --fake-initial
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command("migrate", "migrations", "0001", fake_initial=True, stdout=out, verbosity=1)
self.assertIn(
"migrations.0001_initial... faked",
out.getvalue().lower()
)
# Run migrations all the way
call_command("migrate", verbosity=0)
# Make sure the right tables exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Fake a roll-back
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
# Make sure the tables still exist
self.assertTableExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableExists("migrations_book")
# Try to run initial migration
with self.assertRaises(DatabaseError):
call_command("migrate", "migrations", verbosity=0)
# Run initial migration with an explicit --fake-initial
with self.assertRaises(DatabaseError):
# Fails because "migrations_tribble" does not exist but needs to in
# order to make --fake-initial work.
call_command("migrate", "migrations", fake_initial=True, verbosity=0)
# Fake a apply
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
# Make sure it's all gone
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_fake_split_initial"})
def test_migrate_fake_split_initial(self):
"""
Split initial migrations can be faked with --fake-initial.
"""
call_command("migrate", "migrations", "0002", verbosity=0)
call_command("migrate", "migrations", "zero", fake=True, verbosity=0)
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: False):
call_command("migrate", "migrations", "0002", fake_initial=True, stdout=out, verbosity=1)
value = out.getvalue().lower()
self.assertIn("migrations.0001_initial... faked", value)
self.assertIn("migrations.0002_second... faked", value)
# Fake an apply
call_command("migrate", "migrations", fake=True, verbosity=0)
# Unmigrate everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_conflict"})
def test_migrate_conflict_exit(self):
    """
    migrate must abort with a CommandError when conflicting migrations exist.
    """
    expected_message = "Conflicting migrations detected"
    with self.assertRaisesMessage(CommandError, expected_message):
        call_command("migrate", "migrations")
@ignore_warnings(category=RemovedInDjango110Warning)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_migrate_list(self):
"""
Tests --list output of migrate command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("migrate", list=True, stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
# Giving the explicit app_label tests for selective `show_migration_list` in the command
call_command("migrate", "migrations", list=True, stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_showmigrations_list(self):
"""
Tests --list output of showmigrations command
"""
out = six.StringIO()
with mock.patch('django.core.management.color.supports_color', lambda *args: True):
call_command("showmigrations", format='list', stdout=out, verbosity=0, no_color=False)
self.assertEqual(
'\x1b[1mmigrations\n\x1b[0m'
' [ ] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
call_command("migrate", "migrations", "0001", verbosity=0)
out = six.StringIO()
# Giving the explicit app_label tests for selective `show_list` in the command
call_command("showmigrations", "migrations", format='list', stdout=out, verbosity=0, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_initial\n'
' [ ] 0002_second\n',
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_run_before"})
def test_showmigrations_plan(self):
"""
Tests --plan output of showmigrations command
"""
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertIn(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third\n"
"[ ] migrations.0002_second",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertIn(
"[ ] migrations.0001_initial\n"
"[ ] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial)",
out.getvalue().lower()
)
call_command("migrate", "migrations", "0003", verbosity=0)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertIn(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third\n"
"[ ] migrations.0002_second",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertIn(
"[x] migrations.0001_initial\n"
"[x] migrations.0003_third ... (migrations.0001_initial)\n"
"[ ] migrations.0002_second ... (migrations.0001_initial)",
out.getvalue().lower()
)
# Cleanup by unmigrating everything
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_empty"})
def test_showmigrations_plan_no_migrations(self):
    """
    Tests --plan output of showmigrations command without migrations
    """
    # With an empty migration module the plan must be empty at the
    # default verbosity and at verbosity 2.
    out = six.StringIO()
    call_command("showmigrations", format='plan', stdout=out)
    self.assertEqual("", out.getvalue().lower())
    out = six.StringIO()
    call_command("showmigrations", format='plan', stdout=out, verbosity=2)
    self.assertEqual("", out.getvalue().lower())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed_complex"})
def test_showmigrations_plan_squashed(self):
"""
Tests --plan output of showmigrations command with squashed migrations.
"""
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto\n"
"[ ] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[ ] migrations.1_auto\n"
"[ ] migrations.2_auto ... (migrations.1_auto)\n"
"[ ] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower()
)
call_command("migrate", "migrations", "3_squashed_5", verbosity=0)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto\n"
"[x] migrations.3_squashed_5\n"
"[ ] migrations.6_auto\n"
"[ ] migrations.7_auto\n",
out.getvalue().lower()
)
out = six.StringIO()
call_command("showmigrations", format='plan', stdout=out, verbosity=2)
self.assertEqual(
"[x] migrations.1_auto\n"
"[x] migrations.2_auto ... (migrations.1_auto)\n"
"[x] migrations.3_squashed_5 ... (migrations.2_auto)\n"
"[ ] migrations.6_auto ... (migrations.3_squashed_5)\n"
"[ ] migrations.7_auto ... (migrations.6_auto)\n",
out.getvalue().lower()
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_forwards(self):
"""
Makes sure that sqlmigrate does something.
"""
out = six.StringIO()
call_command("sqlmigrate", "migrations", "0001", stdout=out)
output = out.getvalue().lower()
index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
index_op_desc_author = output.find('-- create model author')
index_create_table = output.find('create table')
index_op_desc_tribble = output.find('-- create model tribble')
index_op_desc_unique_together = output.find('-- alter unique_together')
index_tx_end = output.find(connection.ops.end_transaction_sql().lower())
self.assertGreater(index_tx_start, -1, "Transaction start not found")
self.assertGreater(index_op_desc_author, index_tx_start,
"Operation description (author) not found or found before transaction start")
self.assertGreater(index_create_table, index_op_desc_author,
"CREATE TABLE not found or found before operation description (author)")
self.assertGreater(index_op_desc_tribble, index_create_table,
"Operation description (tribble) not found or found before CREATE TABLE (author)")
self.assertGreater(index_op_desc_unique_together, index_op_desc_tribble,
"Operation description (unique_together) not found or found before operation description (tribble)")
self.assertGreater(index_tx_end, index_op_desc_unique_together,
"Transaction end not found or found before operation description (unique_together)")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_sqlmigrate_backwards(self):
    """
    sqlmigrate --backwards emits the reverse SQL in reverse operation
    order, wrapped in a transaction.
    """
    # Cannot generate the reverse SQL unless we've applied the migration.
    call_command("migrate", "migrations", verbosity=0)
    out = six.StringIO()
    call_command("sqlmigrate", "migrations", "0001", stdout=out, backwards=True)
    output = out.getvalue().lower()
    # Locate each marker; find() returns -1 when absent, so the ordering
    # assertions below also double as presence checks.
    index_tx_start = output.find(connection.ops.start_transaction_sql().lower())
    index_op_desc_unique_together = output.find('-- alter unique_together')
    index_op_desc_tribble = output.find('-- create model tribble')
    index_op_desc_author = output.find('-- create model author')
    index_drop_table = output.rfind('drop table')
    index_tx_end = output.find(connection.ops.end_transaction_sql().lower())
    self.assertGreater(index_tx_start, -1, "Transaction start not found")
    self.assertGreater(index_op_desc_unique_together, index_tx_start,
        "Operation description (unique_together) not found or found before transaction start")
    self.assertGreater(index_op_desc_tribble, index_op_desc_unique_together,
        "Operation description (tribble) not found or found before operation description (unique_together)")
    self.assertGreater(index_op_desc_author, index_op_desc_tribble,
        "Operation description (author) not found or found before operation description (tribble)")
    self.assertGreater(index_drop_table, index_op_desc_author,
        "DROP TABLE not found or found before operation description (author)")
    # BUGFIX: compare the transaction end against the last DROP TABLE, as
    # the failure message states; the original compared against
    # index_op_desc_unique_together, which made this assertion redundant
    # with the ones above and never verified the transaction actually
    # closed after the final statement.
    self.assertGreater(index_tx_end, index_drop_table,
        "Transaction end not found or found before DROP TABLE")
    # Cleanup by unmigrating everything
    call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.migrated_app",
"migrations.migrations_test_apps.migrated_unapplied_app",
"migrations.migrations_test_apps.unmigrated_app"])
def test_regression_22823_unmigrated_fk_to_migrated_model(self):
"""
https://code.djangoproject.com/ticket/22823
Assuming you have 3 apps, `A`, `B`, and `C`, such that:
* `A` has migrations
* `B` has a migration we want to apply
* `C` has no migrations, but has an FK to `A`
When we try to migrate "B", an exception occurs because the
"B" was not included in the ProjectState that is used to detect
soft-applied migrations.
"""
call_command("migrate", "migrated_unapplied_app", stdout=six.StringIO())
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_migrate_record_replaced(self):
"""
Running a single squashed migration should record all of the original
replaced migrations as run.
"""
recorder = MigrationRecorder(connection)
out = six.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower()
)
applied_migrations = recorder.applied_migrations()
self.assertIn(("migrations", "0001_initial"), applied_migrations)
self.assertIn(("migrations", "0002_second"), applied_migrations)
self.assertIn(("migrations", "0001_squashed_0002"), applied_migrations)
# Rollback changes
call_command("migrate", "migrations", "zero", verbosity=0)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_migrate_record_squashed(self):
"""
Running migrate for a squashed migration should record as run
if all of the replaced migrations have been run (#25231).
"""
recorder = MigrationRecorder(connection)
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
out = six.StringIO()
call_command("migrate", "migrations", verbosity=0)
call_command("showmigrations", "migrations", stdout=out, no_color=True)
self.assertEqual(
'migrations\n'
' [x] 0001_squashed_0002 (2 squashed migrations)\n',
out.getvalue().lower()
)
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations()
)
# No changes were actually applied so there is nothing to rollback
class MakeMigrationsTests(MigrationTestBase):
"""
Tests running the makemigrations command.
"""
def setUp(self):
    """Snapshot the registered models so tearDown can restore them."""
    # BUGFIX: the original called super(MigrationTestBase, self).setUp(),
    # which starts MRO lookup *after* MigrationTestBase and therefore
    # silently skips any setUp defined on MigrationTestBase itself.
    # Naming the current class runs the full inherited chain.
    super(MakeMigrationsTests, self).setUp()
    self._old_models = apps.app_configs['migrations'].models.copy()
def tearDown(self):
    """Restore the model registry captured in setUp and clear app caches."""
    apps.app_configs['migrations'].models = self._old_models
    apps.all_models['migrations'] = self._old_models
    apps.clear_cache()
    # BUGFIX: use this class (not MigrationTestBase) as super()'s first
    # argument so MigrationTestBase.tearDown is not skipped in the MRO.
    super(MakeMigrationsTests, self).tearDown()
def test_files_content(self):
self.assertTableNotExists("migrations_unicodemodel")
apps.register_model('migrations', UnicodeModel)
with self.temporary_migration_module() as migration_dir:
call_command("makemigrations", "migrations", verbosity=0)
# Check for empty __init__.py file in migrations folder
init_file = os.path.join(migration_dir, "__init__.py")
self.assertTrue(os.path.exists(init_file))
with open(init_file, 'r') as fp:
content = force_text(fp.read())
self.assertEqual(content, '')
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with codecs.open(initial_file, 'r', encoding='utf-8') as fp:
content = fp.read()
self.assertIn('# -*- coding: utf-8 -*-', content)
self.assertIn('migrations.CreateModel', content)
self.assertIn('initial = True', content)
if six.PY3:
self.assertIn('úñí©óðé µóðéø', content) # Meta.verbose_name
self.assertIn('úñí©óðé µóðéøß', content) # Meta.verbose_name_plural
self.assertIn('ÚÑÍ¢ÓÐÉ', content) # title.verbose_name
self.assertIn('“Ðjáñgó”', content) # title.default
else:
# Meta.verbose_name
self.assertIn('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8', content)
# Meta.verbose_name_plural
self.assertIn('\\xfa\\xf1\\xed\\xa9\\xf3\\xf0\\xe9 \\xb5\\xf3\\xf0\\xe9\\xf8\\xdf', content)
self.assertIn('\\xda\\xd1\\xcd\\xa2\\xd3\\xd0\\xc9', content) # title.verbose_name
self.assertIn('\\u201c\\xd0j\\xe1\\xf1g\\xf3\\u201d', content) # title.default
def test_makemigrations_order(self):
"""
makemigrations should recognize number-only migrations (0001.py).
"""
module = 'migrations.test_migrations_order'
with self.temporary_migration_module(module=module) as migration_dir:
if hasattr(importlib, 'invalidate_caches'):
# Python 3 importlib caches os.listdir() on some platforms like
# Mac OS X (#23850).
importlib.invalidate_caches()
call_command('makemigrations', 'migrations', '--empty', '-n', 'a', '-v', '0')
self.assertTrue(os.path.exists(os.path.join(migration_dir, '0002_a.py')))
def test_failing_migration(self):
    # If a migration fails to serialize, it shouldn't generate an empty file. #21280
    apps.register_model('migrations', UnserializableModel)
    with self.temporary_migration_module() as migration_dir:
        with six.assertRaisesRegex(self, ValueError, r'Cannot serialize'):
            call_command("makemigrations", "migrations", verbosity=0)
        # The aborted run must not leave a partial or empty migration
        # file behind in the temporary migrations directory.
        initial_file = os.path.join(migration_dir, "0001_initial.py")
        self.assertFalse(os.path.exists(initial_file))
def test_makemigrations_conflict_exit(self):
    """
    makemigrations must abort with CommandError on conflicting migrations.
    """
    conflict_module = "migrations.test_migrations_conflict"
    with self.temporary_migration_module(module=conflict_module):
        with self.assertRaises(CommandError):
            call_command("makemigrations")
def test_makemigrations_merge_no_conflict(self):
"""
Makes sure that makemigrations exits if in merge mode with no conflicts.
"""
out = six.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations"):
try:
call_command("makemigrations", merge=True, stdout=out)
except CommandError:
self.fail("Makemigrations errored in merge mode with no conflicts")
self.assertIn("No conflicts detected to merge.", out.getvalue())
def test_makemigrations_no_app_sys_exit(self):
"""
Makes sure that makemigrations exits if a non-existent app is specified.
"""
err = six.StringIO()
with self.assertRaises(SystemExit):
call_command("makemigrations", "this_app_does_not_exist", stderr=err)
self.assertIn("'this_app_does_not_exist' could not be found.", err.getvalue())
def test_makemigrations_empty_no_app_specified(self):
"""
Makes sure that makemigrations exits if no app is specified with 'empty' mode.
"""
with self.assertRaises(CommandError):
call_command("makemigrations", empty=True)
def test_makemigrations_empty_migration(self):
"""
Makes sure that makemigrations properly constructs an empty migration.
"""
with self.temporary_migration_module() as migration_dir:
try:
call_command("makemigrations", "migrations", empty=True, verbosity=0)
except CommandError:
self.fail("Makemigrations errored in creating empty migration for a proper app.")
# Check for existing 0001_initial.py file in migration folder
initial_file = os.path.join(migration_dir, "0001_initial.py")
self.assertTrue(os.path.exists(initial_file))
with codecs.open(initial_file, 'r', encoding='utf-8') as fp:
content = fp.read()
self.assertIn('# -*- coding: utf-8 -*-', content)
# Remove all whitespace to check for empty dependencies and operations
content = content.replace(' ', '')
self.assertIn('dependencies=[\n]', content)
self.assertIn('operations=[\n]', content)
@override_settings(MIGRATION_MODULES={"migrations": None})
def test_makemigrations_disabled_migrations_for_app(self):
"""
makemigrations raises a nice error when migrations are disabled for an
app.
"""
msg = (
"Django can't create migrations for app 'migrations' because migrations "
"have been disabled via the MIGRATION_MODULES setting."
)
with self.assertRaisesMessage(ValueError, msg):
call_command("makemigrations", "migrations", empty=True, verbosity=0)
def test_makemigrations_no_changes_no_apps(self):
"""
Makes sure that makemigrations exits when there are no changes and no apps are specified.
"""
out = six.StringIO()
call_command("makemigrations", stdout=out)
self.assertIn("No changes detected", out.getvalue())
def test_makemigrations_no_changes(self):
"""
Makes sure that makemigrations exits when there are no changes to an app.
"""
out = six.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("No changes detected in app 'migrations'", out.getvalue())
def test_makemigrations_no_apps_initial(self):
"""
makemigrations should detect initial is needed on empty migration
modules if no app provided.
"""
out = six.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_empty"):
call_command("makemigrations", stdout=out)
self.assertIn("0001_initial.py", out.getvalue())
def test_makemigrations_migrations_announce(self):
"""
Makes sure that makemigrations announces the migration at the default verbosity level.
"""
out = six.StringIO()
with self.temporary_migration_module():
call_command("makemigrations", "migrations", stdout=out)
self.assertIn("Migrations for 'migrations'", out.getvalue())
def test_makemigrations_no_common_ancestor(self):
"""
Makes sure that makemigrations fails to merge migrations with no common ancestor.
"""
with self.assertRaises(ValueError) as context:
with self.temporary_migration_module(module="migrations.test_migrations_no_ancestor"):
call_command("makemigrations", "migrations", merge=True)
exception_message = str(context.exception)
self.assertIn("Could not find common ancestor of", exception_message)
self.assertIn("0002_second", exception_message)
self.assertIn("0002_conflicting_second", exception_message)
def test_makemigrations_interactive_reject(self):
"""
Makes sure that makemigrations enters and exits interactive mode properly.
"""
# Monkeypatch interactive questioner to auto reject
with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='N')):
try:
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", merge=True, interactive=True, verbosity=0)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
except CommandError:
self.fail("Makemigrations failed while running interactive questioner")
def test_makemigrations_interactive_accept(self):
"""
Makes sure that makemigrations enters interactive mode and merges properly.
"""
# Monkeypatch interactive questioner to auto accept
with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='y')):
out = six.StringIO()
try:
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", merge=True, interactive=True, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertTrue(os.path.exists(merge_file))
except CommandError:
self.fail("Makemigrations failed while running interactive questioner")
self.assertIn("Created new merge migration", force_text(out.getvalue()))
def test_makemigrations_non_interactive_not_null_addition(self):
"""
Tests that non-interactive makemigrations fails when a default is missing on a new not-null field.
"""
class SillyModel(models.Model):
silly_field = models.BooleanField(default=False)
silly_int = models.IntegerField()
class Meta:
app_label = "migrations"
out = six.StringIO()
with self.assertRaises(SystemExit):
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
def test_makemigrations_non_interactive_not_null_alteration(self):
"""
Tests that non-interactive makemigrations fails when a default is missing on a field changed to not-null.
"""
class Author(models.Model):
name = models.CharField(max_length=255)
slug = models.SlugField()
age = models.IntegerField(default=0)
class Meta:
app_label = "migrations"
out = six.StringIO()
try:
with self.temporary_migration_module(module="migrations.test_migrations"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
except CommandError:
self.fail("Makemigrations failed while running non-interactive questioner.")
self.assertIn("Alter field slug on author", force_text(out.getvalue()))
def test_makemigrations_non_interactive_no_model_rename(self):
"""
Makes sure that makemigrations adds and removes a possible model rename in non-interactive mode.
"""
class RenamedModel(models.Model):
silly_field = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = six.StringIO()
try:
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
except CommandError:
self.fail("Makemigrations failed while running non-interactive questioner")
self.assertIn("Delete model SillyModel", force_text(out.getvalue()))
self.assertIn("Create model RenamedModel", force_text(out.getvalue()))
def test_makemigrations_non_interactive_no_field_rename(self):
"""
Makes sure that makemigrations adds and removes a possible field rename in non-interactive mode.
"""
class SillyModel(models.Model):
silly_rename = models.BooleanField(default=False)
class Meta:
app_label = "migrations"
out = six.StringIO()
try:
with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
call_command("makemigrations", "migrations", interactive=False, stdout=out)
except CommandError:
self.fail("Makemigrations failed while running non-interactive questioner")
self.assertIn("Remove field silly_field from sillymodel", force_text(out.getvalue()))
self.assertIn("Add field silly_rename to sillymodel", force_text(out.getvalue()))
def test_makemigrations_handle_merge(self):
"""
Makes sure that makemigrations properly merges the conflicting migrations with --noinput.
"""
out = six.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", merge=True, interactive=False, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertTrue(os.path.exists(merge_file))
output = force_text(out.getvalue())
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertIn("Created new merge migration", output)
def test_makemigration_merge_dry_run(self):
"""
Makes sure that makemigrations respects --dry-run option when fixing
migration conflicts (#24427).
"""
out = six.StringIO()
with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
call_command("makemigrations", "migrations", dry_run=True, merge=True, interactive=False, stdout=out)
merge_file = os.path.join(migration_dir, '0003_merge.py')
self.assertFalse(os.path.exists(merge_file))
output = force_text(out.getvalue())
self.assertIn("Merging migrations", output)
self.assertIn("Branch 0002_second", output)
self.assertIn("Branch 0002_conflicting_second", output)
self.assertNotIn("Created new merge migration", output)
    def test_makemigration_merge_dry_run_verbosity_3(self):
        """
        Makes sure that `makemigrations --merge --dry-run` writes the merge
        migration file to stdout with `verbosity == 3` (#24427).
        """
        out = six.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
            call_command("makemigrations", "migrations", dry_run=True, merge=True, interactive=False,
                         stdout=out, verbosity=3)
            # Dry-run: nothing may be written to disk even at high verbosity.
            merge_file = os.path.join(migration_dir, '0003_merge.py')
            self.assertFalse(os.path.exists(merge_file))
        output = force_text(out.getvalue())
        self.assertIn("Merging migrations", output)
        self.assertIn("Branch 0002_second", output)
        self.assertIn("Branch 0002_conflicting_second", output)
        self.assertNotIn("Created new merge migration", output)
        # Additional output caused by verbosity 3
        # The complete merge migration file that would be written
        self.assertIn("# -*- coding: utf-8 -*-", output)
        self.assertIn("class Migration(migrations.Migration):", output)
        self.assertIn("dependencies = [", output)
        self.assertIn("('migrations', '0002_second')", output)
        self.assertIn("('migrations', '0002_conflicting_second')", output)
        self.assertIn("operations = [", output)
        self.assertIn("]", output)
    def test_makemigrations_dry_run(self):
        """
        Ticket #22676 -- `makemigrations --dry-run` should not ask for defaults.
        """
        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)
            silly_date = models.DateField()  # Added field without a default
            class Meta:
                app_label = "migrations"
        out = six.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
            # Must complete without prompting even though silly_date needs a default.
            call_command("makemigrations", "migrations", dry_run=True, stdout=out)
        # Output the expected changes directly, without asking for defaults
        self.assertIn("Add field silly_date to sillymodel", out.getvalue())
    def test_makemigrations_dry_run_verbosity_3(self):
        """
        Ticket #22675 -- Allow `makemigrations --dry-run` to output the
        migrations file to stdout (with verbosity == 3).
        """
        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)
            silly_char = models.CharField(default="")
            class Meta:
                app_label = "migrations"
        out = six.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations_no_default"):
            call_command("makemigrations", "migrations", dry_run=True, stdout=out, verbosity=3)
        # Normal --dry-run output
        self.assertIn("- Add field silly_char to sillymodel", out.getvalue())
        # Additional output caused by verbosity 3
        # The complete migrations file that would be written
        self.assertIn("# -*- coding: utf-8 -*-", out.getvalue())
        self.assertIn("class Migration(migrations.Migration):", out.getvalue())
        self.assertIn("dependencies = [", out.getvalue())
        self.assertIn("('migrations', '0001_initial'),", out.getvalue())
        self.assertIn("migrations.AddField(", out.getvalue())
        self.assertIn("model_name='sillymodel',", out.getvalue())
        self.assertIn("name='silly_char',", out.getvalue())
    def test_makemigrations_migrations_modules_path_not_exist(self):
        """
        Ticket #22682 -- Makemigrations fails when specifying custom location
        for migration files (using MIGRATION_MODULES) if the custom path
        doesn't already exist.
        """
        class SillyModel(models.Model):
            silly_field = models.BooleanField(default=False)
            class Meta:
                app_label = "migrations"
        out = six.StringIO()
        # Intermediate packages 'foo' and 'bar' do not exist yet; the command
        # must create them along with the migration file.
        migration_module = "migrations.test_migrations_path_doesnt_exist.foo.bar"
        with self.temporary_migration_module(module=migration_module) as migration_dir:
            call_command("makemigrations", "migrations", stdout=out)
            # Migrations file is actually created in the expected path.
            initial_file = os.path.join(migration_dir, "0001_initial.py")
            self.assertTrue(os.path.exists(initial_file))
        # Command output indicates the migration is created.
        self.assertIn(" - Create model SillyModel", out.getvalue())
    def test_makemigrations_interactive_by_default(self):
        """
        Makes sure that the user is prompted to merge by default if there are
        conflicts and merge is True. Answer negative to differentiate it from
        behavior when --noinput is specified.
        """
        # Monkeypatch interactive questioner to auto reject
        out = six.StringIO()
        with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='N')):
            try:
                with self.temporary_migration_module(module="migrations.test_migrations_conflict") as migration_dir:
                    call_command("makemigrations", "migrations", merge=True, stdout=out)
                    merge_file = os.path.join(migration_dir, '0003_merge.py')
                    # This will fail if interactive is False by default
                    self.assertFalse(os.path.exists(merge_file))
            except CommandError:
                self.fail("Makemigrations failed while running interactive questioner")
            # The rejected merge must not have been reported as created.
            self.assertNotIn("Created new merge migration", out.getvalue())
    @override_settings(
        INSTALLED_APPS=[
            "migrations",
            "migrations.migrations_test_apps.unspecified_app_with_conflict"])
    def test_makemigrations_unspecified_app_with_conflict_no_merge(self):
        """
        Makes sure that makemigrations does not raise a CommandError when an
        unspecified app has conflicting migrations.
        """
        # Only 'migrations' is passed; the conflicting app is installed but
        # not named on the command line and must be ignored.
        try:
            with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
                call_command("makemigrations", "migrations", merge=False, verbosity=0)
        except CommandError:
            self.fail("Makemigrations fails resolving conflicts in an unspecified app")
    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.migrated_app",
            "migrations.migrations_test_apps.unspecified_app_with_conflict"])
    def test_makemigrations_unspecified_app_with_conflict_merge(self):
        """
        Makes sure that makemigrations does not create a merge for an
        unspecified app even if it has conflicting migrations.
        """
        # Monkeypatch interactive questioner to auto accept
        with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='y')):
            out = six.StringIO()
            try:
                with self.temporary_migration_module(app_label="migrated_app") as migration_dir:
                    # Merge only 'migrated_app'; the conflicting sibling app
                    # must not get a merge migration.
                    call_command("makemigrations", "migrated_app", merge=True, interactive=True, stdout=out)
                    merge_file = os.path.join(migration_dir, '0003_merge.py')
                    self.assertFalse(os.path.exists(merge_file))
                self.assertIn("No conflicts detected to merge.", out.getvalue())
            except CommandError:
                self.fail("Makemigrations fails resolving conflicts in an unspecified app")
    @override_settings(
        INSTALLED_APPS=[
            "migrations.migrations_test_apps.migrated_app",
            "migrations.migrations_test_apps.conflicting_app_with_dependencies"])
    def test_makemigrations_merge_dont_output_dependency_operations(self):
        """
        Makes sure that makemigrations --merge does not output any operations
        from apps that don't belong to a given app.
        """
        # Monkeypatch interactive questioner to auto accept
        with mock.patch('django.db.migrations.questioner.input', mock.Mock(return_value='N')):
            out = six.StringIO()
            # Disable terminal colors so the output strings compare cleanly.
            with mock.patch('django.core.management.color.supports_color', lambda *args: False):
                call_command(
                    "makemigrations", "conflicting_app_with_dependencies",
                    merge=True, interactive=True, stdout=out
                )
            val = out.getvalue().lower()
            self.assertIn('merging conflicting_app_with_dependencies\n', val)
            self.assertIn(
                ' branch 0002_conflicting_second\n'
                ' - create model something\n',
                val
            )
            # Only this app's operations may appear; nothing from migrated_app.
            self.assertIn(
                ' branch 0002_second\n'
                ' - delete model tribble\n'
                ' - remove field silly_field from author\n'
                ' - add field rating to author\n'
                ' - create model book\n',
                val
            )
    def test_makemigrations_with_custom_name(self):
        """
        Makes sure that makemigrations generate a custom migration.
        """
        with self.temporary_migration_module() as migration_dir:
            def cmd(migration_count, migration_name, *args):
                # Run makemigrations with --name and return the generated
                # file's content with all spaces stripped for easy matching.
                try:
                    call_command("makemigrations", "migrations", "--verbosity", "0", "--name", migration_name, *args)
                except CommandError:
                    self.fail("Makemigrations errored in creating empty migration with custom name for a proper app.")
                migration_file = os.path.join(migration_dir, "%s_%s.py" % (migration_count, migration_name))
                # Check for existing migration file in migration folder
                self.assertTrue(os.path.exists(migration_file))
                with codecs.open(migration_file, "r", encoding="utf-8") as fp:
                    content = fp.read()
                    self.assertIn("# -*- coding: utf-8 -*-", content)
                    content = content.replace(" ", "")
                return content
            # generate an initial migration
            migration_name_0001 = "my_initial_migration"
            content = cmd("0001", migration_name_0001)
            self.assertIn("dependencies=[\n]", content)
            # Python 3 importlib caches os.listdir() on some platforms like
            # Mac OS X (#23850).
            if hasattr(importlib, 'invalidate_caches'):
                importlib.invalidate_caches()
            # generate an empty migration
            migration_name_0002 = "my_custom_migration"
            content = cmd("0002", migration_name_0002, "--empty")
            # The empty migration must depend on the custom-named initial one.
            self.assertIn("dependencies=[\n('migrations','0001_%s'),\n]" % migration_name_0001, content)
            self.assertIn("operations=[\n]", content)
def test_makemigrations_exit(self):
"""
makemigrations --exit should exit with sys.exit(1) when there are no
changes to an app.
"""
with self.temporary_migration_module():
call_command("makemigrations", "--exit", "migrations", verbosity=0)
with self.temporary_migration_module(module="migrations.test_migrations_no_changes"):
with self.assertRaises(SystemExit):
call_command("makemigrations", "--exit", "migrations", verbosity=0)
class SquashMigrationsTests(MigrationTestBase):
    """
    Tests running the squashmigrations command.
    """
    def _run_squash(self, **extra):
        # Common invocation shared by every test below: squash the
        # 'migrations' app up to 0002, non-interactively.
        call_command("squashmigrations", "migrations", "0002", interactive=False, **extra)
    def test_squashmigrations_squashes(self):
        """
        Tests that squashmigrations squashes migrations.
        """
        with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir:
            self._run_squash(verbosity=0)
            squashed = os.path.join(migration_dir, "0001_squashed_0002_second.py")
            self.assertTrue(os.path.exists(squashed))
    def test_squashmigrations_initial_attribute(self):
        # The squashed migration must be flagged as an initial migration.
        with self.temporary_migration_module(module="migrations.test_migrations") as migration_dir:
            self._run_squash(verbosity=0)
            squashed = os.path.join(migration_dir, "0001_squashed_0002_second.py")
            with codecs.open(squashed, "r", encoding="utf-8") as handle:
                source = handle.read()
            self.assertIn("initial = True", source)
    def test_squashmigrations_optimizes(self):
        """
        Tests that squashmigrations optimizes operations.
        """
        stdout = six.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            self._run_squash(verbosity=1, stdout=stdout)
        self.assertIn("Optimized from 7 operations to 3 operations.", force_text(stdout.getvalue()))
    def test_ticket_23799_squashmigrations_no_optimize(self):
        """
        Makes sure that squashmigrations --no-optimize really doesn't optimize operations.
        """
        stdout = six.StringIO()
        with self.temporary_migration_module(module="migrations.test_migrations"):
            self._run_squash(verbosity=1, no_optimize=True, stdout=stdout)
        self.assertIn("Skipping optimization", force_text(stdout.getvalue()))
| bsd-3-clause |
viz4biz/PyDataNYC2015 | vtklib/vtk_surface_properties.py | 1 | 2442 | """
VTK surface properties helper class
"""
from atom.api import Atom, Str, Bool, Float, Typed, Dict
from vtk_surface import VTKSurfaceConfig
class VTKSurfaceProperties(Atom):
    """Display attributes for a VTK 3D surface plot.

    Collects axis labels (via a VTKSurfaceConfig), cut-plane toggles and
    positions, scaling options and cosmetic switches, and exposes them as a
    plain dict (``SurfaceProperties``) suitable for a surface renderer.
    """
    Config = Typed(VTKSurfaceConfig)
    Title = Str('')
    CurrentColorMap = Str('rainbow')
    # Cut-plane toggles, positions and scale factors, one set per axis.
    XCutterOn = Bool()
    YCutterOn = Bool()
    ZCutterOn = Bool()
    XCutterPos = Float()
    YCutterPos = Float()
    ZCutterPos = Float()
    XFactor = Float()
    YFactor = Float()
    ZFactor = Float()
    # Cosmetic switches for the scene decoration.
    DrawBox = Bool()
    DrawGrid = Bool()
    DrawAxes = Bool()
    DrawLegend = Bool()
    ZoomFactor = Float()
    Autoscale = Bool()
    ScaleFactor = Float()
    AutoZRange = Bool()
    # Snapshot of all attributes as a plain dict (built in __init__).
    SurfaceProperties = Dict()
    def __init__(self, config):
        # Atom only permits assignment to declared members, so the config is
        # stored on the declared 'Config' member. (The previous code assigned
        # to an undeclared lowercase 'config', which Atom rejects at runtime.)
        self.Config = config or VTKSurfaceConfig()
        self.SurfaceProperties = self.surface_properties()
    def surface_properties(self):
        """Return the surface 3D attributes as a plain dict."""
        # 'Config' already holds an instance; it must not be called.
        # XLabel()/YLabel()/ZLabel() are assumed to be accessor methods on
        # VTKSurfaceConfig -- TODO confirm against vtk_surface.py.
        return dict(
            xlabel = self.Config.XLabel(),
            ylabel = self.Config.YLabel(),
            zlabel = self.Config.ZLabel(),
            # Atom Str/Bool/Float members yield plain values on attribute
            # access; calling them (e.g. self.Title()) raises TypeError.
            title = self.Title,
            colorMap = self.CurrentColorMap,
            XCutterOn = self.XCutterOn,
            XCutterPos = self.XCutterPos,
            XCutterFactor = self.XFactor,
            YCutterOn = self.YCutterOn,
            YCutterPos = self.YCutterPos,
            YCutterFactor = self.YFactor,
            ZCutterOn = self.ZCutterOn,
            ZCutterPos = self.ZCutterPos,
            ZCutterFactor = self.ZFactor,
            drawBox = self.DrawBox,
            drawGrid = self.DrawGrid,
            drawAxes = self.DrawAxes,
            drawLegend = self.DrawLegend,
            rotateX = 30,
            rotateZ = 180,
            zoomFactor = self.ZoomFactor,
            xtics = 1,
            ytics = 1,
            ztics = 1,
            nlabels = 5,
            logToFile = True,
            autoScale = self.Autoscale,
            scaleFactor = self.ScaleFactor,
            autoZRange = self.AutoZRange,
            labelFormat = '%6.2f',)
| apache-2.0 |
schreiberx/sweet | benchmarks_sphere/paper_jrn_nla_rexi_linear/sph_rexi_linear_paper_gaussian_ts_comparison_earth_scale_cheyenne_performance/pp_plot_csv_pdf.py | 1 | 3040 | #! /usr/bin/python3
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
import sys
# Plot state shared across the per-file loop below; 'first' triggers the
# one-time color-range / tick setup when processing the first file.
first = True
# 'zoom_lat' restricts the latitude range; the first two assignments are
# dead and only the last one ('eta' in the first filename) takes effect.
zoom_lat = True
zoom_lat = False
zoom_lat = 'eta' in sys.argv[1]
fontsize=8
figsize=(9, 3)
files = sys.argv[1:]
refdataavailable = False
# Optional leading arguments 'reference <file>': load a reference field
# that is overlaid as dashed contours on every plot.
if files[0] == 'reference':
    reffilename = files[1]
    files = files[2:]
    print("Loading reference solution from '"+reffilename+"'")
    refdata = np.loadtxt(reffilename, skiprows=3)
    # Strip the coordinate row/column; keep only the field values.
    refdata = refdata[1:,1:]
    refdataavailable = True
# Render one PDF per input CSV. The CSV layout is: 3 header rows, then a
# first row of longitude labels and a first column of latitude labels.
for filename in files:
    print(filename)
    data = np.loadtxt(filename, skiprows=3)
    labelsx = data[0, 1:]
    labelsy = data[1:, 0]
    data = data[1:, 1:]
    if zoom_lat:
        # Trim latitudes outside roughly (10, 80) degrees.
        while labelsy[1] < 10:
            labelsy = labelsy[1:]
            data = data[1:]
        while labelsy[-2] > 80:
            labelsy = labelsy[0:-2]
            data = data[0:-2]
    if first:
        # NOTE(review): tick labels are downsampled to 7 values only for the
        # first file; later files keep the full label arrays -- confirm this
        # is intended.
        lon_min = labelsx[0]
        lon_max = labelsx[-1]
        lat_min = labelsy[0]
        lat_max = labelsy[-1]
        new_labelsx = np.linspace(lon_min, lon_max, 7)
        new_labelsy = np.linspace(lat_min, lat_max, 7)
        labelsx = np.interp(new_labelsx, labelsx, labelsx)
        labelsy = np.interp(new_labelsy, labelsy, labelsy)
    if first:
        # Color range and contour levels are fixed from the first file so
        # that all plots share the same scale.
        cmin = np.amin(data)
        cmax = np.amax(data)
        if 'eta' in filename:
            cmin *= 1.2
            cmax *= 1.2
        if cmax - cmin < 0.3 and cmin > 0.9 and cmax < 1.1:
            # Normalized height field around 1.0.
            hs = 0.005
            cmin = 0.96
            cmax = 1.04
            cmid = 0.5 * (cmax - cmin)
            contour_levels = np.append(np.arange(cmin, cmid - hs, hs), np.arange(cmid + hs, cmax, hs))
        elif cmax - cmin < 3000 and cmin > 9000 and cmax < 11000:
            # Geopotential height field (~10 km).
            hs = 30
            cmin = 9000
            cmax = 11000
            cmid = 0.5 * (cmax + cmin)
            contour_levels = np.arange(cmin, cmax, hs)
        else:
            if 'eta' in filename:
                hs = 2e-5
                # BUGFIX: the step was referenced as undefined name 's',
                # raising NameError; it must be 'hs'.
                contour_levels = np.append(np.arange(-1e-4, 0, hs), np.arange(hs, 1e-4, hs))
            else:
                hs = 5
                contour_levels = np.append(np.arange(900, 1000 - hs, hs), np.arange(1000 + hs, 1100, hs))
    extent = (labelsx[0], labelsx[-1], labelsy[0], labelsy[-1])
    plt.figure(figsize=figsize)
    plt.imshow(data, interpolation='nearest', extent=extent, origin='lower', aspect='auto', cmap=plt.get_cmap('rainbow'))
    plt.clim(cmin, cmax)
    cbar = plt.colorbar()
    cbar.ax.tick_params(labelsize=fontsize)
    plt.title(filename, fontsize=fontsize)
    if refdataavailable:
        # Reference solution as dashed black contours behind the solid ones.
        CS = plt.contour(refdata, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=contour_levels, linewidths=0.35)
        for c in CS.collections:
            c.set_dashes([(0, (2.0, 2.0))])
    plt.contour(data, colors="black", origin='lower', extent=extent, vmin=cmin, vmax=cmax, levels=contour_levels, linewidths=0.35)
    ax = plt.gca()
    ax.xaxis.set_label_coords(0.5, -0.075)
    plt.xticks(labelsx, fontsize=fontsize)
    plt.xlabel("Longitude", fontsize=fontsize)
    plt.yticks(labelsy, fontsize=fontsize)
    plt.ylabel("Latitude", fontsize=fontsize)
    outfilename = filename.replace('.csv', '.pdf')
    print(outfilename)
    plt.savefig(outfilename)
    plt.close()
    first = False
| mit |
TeravoxelTwoPhotonTomography/fetch | 3rdParty/gtest-1.5.0/test/gtest_env_var_test.py | 233 | 3509 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Platform flags used to decide which flags are testable on this host.
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
# Path to the C++ test binary whose flag parsing is being exercised.
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
# Mutable copy of the environment; mutated by SetEnvVar between runs.
environ = os.environ.copy()
def AssertEq(expected, actual):
  """Raises AssertionError (after printing both values) unless expected == actual."""
  if expected != actual:
    print 'Expected: %s' % (expected,)
    print ' Actual: %s' % (actual,)
    raise AssertionError
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""
  if value is None:
    # Remove the variable if present; silently no-op otherwise.
    environ.pop(env_var, None)
  else:
    environ[env_var] = value
def GetFlag(flag):
  """Runs gtest_env_var_test_ and returns its output."""
  args = [COMMAND]
  # 'flag' is the bare flag name the binary should echo; None means no arg.
  if flag is not None:
    args += [flag]
  # Run under the (possibly modified) copied environment.
  return gtest_test_utils.Subprocess(args, env=environ).output
def TestFlag(flag, test_val, default_val):
  """Verifies that the given flag is affected by the corresponding env var."""
  env_var = 'GTEST_' + flag.upper()
  # With the env var set, the binary must report test_val...
  SetEnvVar(env_var, test_val)
  AssertEq(test_val, GetFlag(flag))
  # ...and with it unset, the documented default.
  SetEnvVar(env_var, None)
  AssertEq(default_val, GetFlag(flag))
class GTestEnvVarTest(gtest_test_utils.TestCase):
  def testEnvVarAffectsFlag(self):
    """Tests that environment variable should affect the corresponding flag."""
    TestFlag('break_on_failure', '1', '0')
    TestFlag('color', 'yes', 'auto')
    TestFlag('filter', 'FooTest.Bar', '*')
    TestFlag('output', 'xml:tmp/foo.xml', '')
    TestFlag('print_time', '0', '1')
    TestFlag('repeat', '999', '1')
    TestFlag('throw_on_failure', '1', '0')
    TestFlag('death_test_style', 'threadsafe', 'fast')
    # Flags that only exist on specific platforms.
    if IS_WINDOWS:
      TestFlag('catch_exceptions', '1', '0')
    if IS_LINUX:
      TestFlag('death_test_use_fork', '1', '0')
      TestFlag('stack_trace_depth', '0', '100')
# Run the test suite when executed directly.
if __name__ == '__main__':
  gtest_test_utils.Main()
| bsd-3-clause |
ganeshgore/myremolab | server/launch/sample_balanced2_concurrent_experiments/main_machine/lab_and_experiment2/laboratory/server_config.py | 5 | 7406 | ##################################
# Laboratory Server configuration #
##################################
# Experiments 40..80 all follow the same naming pattern and have no
# checkers, so build the mapping programmatically instead of spelling
# out 41 identical entries by hand.
laboratory_assigned_experiments = {
    'exp%d:ud-dummy@Dummy experiments' % number: {
        'coord_address': 'experiment%d:lab_and_experiment2@main_machine' % number,
        'checkers': (),
    }
    for number in range(40, 81)
}
| bsd-2-clause |
yoer/hue | desktop/core/ext-py/requests-2.6.0/requests/packages/chardet/constants.py | 3008 | 1335 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Module-wide debug switch for the chardet probers.
_debug = 0
# Detector-level results returned while feeding data.
eDetecting = 0  # still gathering evidence
eFoundIt = 1    # positive identification
eNotMe = 2      # charset ruled out
# Coding state machine states.
eStart = 0
eError = 1
eItsMe = 2
# Confidence above which detection stops early.
SHORTCUT_THRESHOLD = 0.95
| apache-2.0 |
sputnick-dev/weboob | modules/blablacar/test.py | 2 | 1038 | # -*- coding: utf-8 -*-
# Copyright(C) 2015 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.test import BackendTest
from datetime import datetime
class BlablacarTest(BackendTest):
    # Integration test for the blablacar backend (hits the live service).
    MODULE = 'blablacar'
    def test_blablacar(self):
        # A common route queried for "now" should always yield departures.
        departures = list(self.backend.iter_station_departures('lille', 'lens', datetime.now()))
        self.assertTrue(len(departures) > 0)
| agpl-3.0 |
ansible/ansible | test/units/plugins/filter/test_core.py | 58 | 1522 | # -*- coding: utf-8 -*-
# Copyright (c) 2019 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils._text import to_native
from ansible.plugins.filter.core import to_uuid
from ansible.errors import AnsibleFilterError
# (value, expected uuid) pairs using the filter's built-in default namespace.
UUID_DEFAULT_NAMESPACE_TEST_CASES = (
    ('example.com', 'ae780c3a-a3ab-53c2-bfb4-098da300b3fe'),
    ('test.example', '8e437a35-c7c5-50ea-867c-5c254848dbc2'),
    ('café.example', '8a99d6b1-fb8f-5f78-af86-879768589f56'),
)
# (namespace uuid, value, expected uuid) triples for explicit namespaces.
UUID_TEST_CASES = (
    ('361E6D51-FAEC-444A-9079-341386DA8E2E', 'example.com', 'ae780c3a-a3ab-53c2-bfb4-098da300b3fe'),
    ('361E6D51-FAEC-444A-9079-341386DA8E2E', 'test.example', '8e437a35-c7c5-50ea-867c-5c254848dbc2'),
    ('11111111-2222-3333-4444-555555555555', 'example.com', 'e776faa5-5299-55dc-9057-7a00e6be2364'),
)
@pytest.mark.parametrize('value, expected', UUID_DEFAULT_NAMESPACE_TEST_CASES)
def test_to_uuid_default_namespace(value, expected):
    # Without an explicit namespace the filter must use its built-in one.
    assert expected == to_uuid(value)
@pytest.mark.parametrize('namespace, value, expected', UUID_TEST_CASES)
def test_to_uuid(namespace, value, expected):
    # An explicit namespace must change (and determine) the resulting uuid.
    assert expected == to_uuid(value, namespace=namespace)
def test_to_uuid_invalid_namespace():
    # A malformed namespace uuid (too short) must raise AnsibleFilterError.
    with pytest.raises(AnsibleFilterError) as e:
        to_uuid('example.com', namespace='11111111-2222-3333-4444-555555555')
    assert 'Invalid value' in to_native(e.value)
| gpl-3.0 |
mitar/django | django/conf/locale/tr/formats.py | 329 | 1281 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Display formats (Django date-format syntax) for the Turkish locale.
DATE_FORMAT = 'd F Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'd F Y H:i:s'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'd F'
SHORT_DATE_FORMAT = 'd M Y'
SHORT_DATETIME_FORMAT = 'd M Y H:i:s'
FIRST_DAY_OF_WEEK = 1 # Pazartesi
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = (
    '%d/%m/%Y', '%d/%m/%y', # '25/10/2006', '25/10/06'
    '%Y-%m-%d', '%y-%m-%d', # '2006-10-25', '06-10-25'
    # '%d %B %Y', '%d %b. %Y', # '25 Ekim 2006', '25 Eki. 2006'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
    '%d/%m/%Y %H:%M', # '25/10/2006 14:30'
    '%d/%m/%Y', # '25/10/2006'
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
)
# Number formatting: Turkish uses comma decimals and dot thousands separators.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
ghosthawkone/micro-http-daemon | libevent/make_epoll_table.py | 16 | 1807 | #!/usr/bin/python2
def get(old, wc, rc, cc):
    """Return the (epoll event mask, epoll op) for one table cell.

    'old' encodes the previously-registered events ('0', 'r', 'w', 'c' and
    combinations); wc/rc/cc are the requested write/read/close changes,
    each one of '0', 'add', 'del' or 'xxx' (invalid).
    """
    CHANGES = (('r', "EPOLLIN"), ('w', "EPOLLOUT"), ('c', "EPOLLRDHUP"))
    # Any invalid request marks the whole cell as unusable.
    if 'xxx' in (rc, wc, cc):
        return "0", 255
    requested = dict(zip(('r', 'w', 'c'), (rc, wc, cc)))
    if 'add' in (rc, wc, cc):
        # New mask = everything being added plus whatever stays registered.
        wanted = [ev for flag, ev in CHANGES
                  if requested[flag] == 'add'
                  or (requested[flag] != 'del' and flag in old)]
        op = "EPOLL_CTL_ADD" if old == "0" else "EPOLL_CTL_MOD"
        return "|".join(wanted), op
    if 'del' in (rc, wc, cc):
        # Start from the currently registered events, then strip deletions.
        kept = [ev for flag, ev in CHANGES if flag in old]
        dropped = []
        for flag, ev in CHANGES:
            if requested[flag] == 'del':
                dropped.append(ev)
                if ev in kept:
                    kept.remove(ev)
        if kept:
            # Something remains registered: modify instead of removing.
            return "|".join(kept), "EPOLL_CTL_MOD"
        return "|".join(dropped), "EPOLL_CTL_DEL"
    # No change requested at all.
    return 0, 0
def fmt(op, ev, old, wc, rc, cc):
    """Print one initializer entry of the generated C table and return its width.

    Note: the table stores { events, op } pairs, so the first argument is the
    event mask returned by get() and the second is the EPOLL_CTL_* opcode.
    """
    entry = "{ %s, %s },"%(op, ev)
    # Single-argument print(...) parses identically under Python 2 (as a
    # parenthesized expression) and Python 3, replacing the 2-only statement.
    print("\t/* old=%3s, write:%3s, read:%3s, close:%3s */\n\t%s" % (
        old, wc, rc, cc, entry))
    return len(entry)
# Enumerate every combination of current interest set and requested
# read/write/close transition, printing one C table entry per combination.
# The output is pasted into libevent's epoll change table.
for old in ('0','r','w','rw','c','cr','cw','crw'):
    for wc in ('0', 'add', 'del', 'xxx'):
        for rc in ('0', 'add', 'del', 'xxx'):
            for cc in ('0', 'add', 'del', 'xxx'):
                # get() returns (events, op); fmt() prints them in that order.
                op,ev = get(old,wc,rc,cc)
                fmt(op, ev, old, wc, rc, cc)
| mit |
Emilgardis/falloutsnip | Vendor/IronPython/Lib/encodings/cp1258.py | 593 | 13620 | """ Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless cp1258 codec backed by the charmap tables in this module."""

    def encode(self, input, errors='strict'):
        result = codecs.charmap_encode(input, errors, encoding_table)
        return result

    def decode(self, input, errors='strict'):
        result = codecs.charmap_decode(input, errors, decoding_table)
        return result
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental encoder; cp1258 keeps no state between chunks."""

    def encode(self, input, final=False):
        encoded, _consumed = codecs.charmap_encode(input, self.errors, encoding_table)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental decoder; cp1258 keeps no state between chunks."""

    def decode(self, input, final=False):
        decoded, _consumed = codecs.charmap_decode(input, self.errors, decoding_table)
        return decoded
class StreamWriter(Codec,codecs.StreamWriter):
    # Stream support comes entirely from the Codec / StreamWriter bases.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Stream support comes entirely from the Codec / StreamReader bases.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo used to register this codec under 'cp1258'."""
    # The codec is stateless, so a single instance can serve both directions.
    codec = Codec()
    return codecs.CodecInfo(
        name='cp1258',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\u20ac' # 0x80 -> EURO SIGN
u'\ufffe' # 0x81 -> UNDEFINED
u'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
u'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
u'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
u'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
u'\u2020' # 0x86 -> DAGGER
u'\u2021' # 0x87 -> DOUBLE DAGGER
u'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
u'\u2030' # 0x89 -> PER MILLE SIGN
u'\ufffe' # 0x8A -> UNDEFINED
u'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
u'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
u'\ufffe' # 0x8D -> UNDEFINED
u'\ufffe' # 0x8E -> UNDEFINED
u'\ufffe' # 0x8F -> UNDEFINED
u'\ufffe' # 0x90 -> UNDEFINED
u'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
u'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
u'\u2022' # 0x95 -> BULLET
u'\u2013' # 0x96 -> EN DASH
u'\u2014' # 0x97 -> EM DASH
u'\u02dc' # 0x98 -> SMALL TILDE
u'\u2122' # 0x99 -> TRADE MARK SIGN
u'\ufffe' # 0x9A -> UNDEFINED
u'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
u'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
u'\ufffe' # 0x9D -> UNDEFINED
u'\ufffe' # 0x9E -> UNDEFINED
u'\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
u'\xa2' # 0xA2 -> CENT SIGN
u'\xa3' # 0xA3 -> POUND SIGN
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\xa5' # 0xA5 -> YEN SIGN
u'\xa6' # 0xA6 -> BROKEN BAR
u'\xa7' # 0xA7 -> SECTION SIGN
u'\xa8' # 0xA8 -> DIAERESIS
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xac' # 0xAC -> NOT SIGN
u'\xad' # 0xAD -> SOFT HYPHEN
u'\xae' # 0xAE -> REGISTERED SIGN
u'\xaf' # 0xAF -> MACRON
u'\xb0' # 0xB0 -> DEGREE SIGN
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\xb2' # 0xB2 -> SUPERSCRIPT TWO
u'\xb3' # 0xB3 -> SUPERSCRIPT THREE
u'\xb4' # 0xB4 -> ACUTE ACCENT
u'\xb5' # 0xB5 -> MICRO SIGN
u'\xb6' # 0xB6 -> PILCROW SIGN
u'\xb7' # 0xB7 -> MIDDLE DOT
u'\xb8' # 0xB8 -> CEDILLA
u'\xb9' # 0xB9 -> SUPERSCRIPT ONE
u'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
u'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
u'\xbf' # 0xBF -> INVERTED QUESTION MARK
u'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
u'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
u'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
u'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE
u'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
u'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
u'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
u'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
u'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
u'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
u'\u0300' # 0xCC -> COMBINING GRAVE ACCENT
u'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
u'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
u'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
u'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
u'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
u'\u0309' # 0xD2 -> COMBINING HOOK ABOVE
u'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
u'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
u'\u01a0' # 0xD5 -> LATIN CAPITAL LETTER O WITH HORN
u'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xd7' # 0xD7 -> MULTIPLICATION SIGN
u'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
u'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
u'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
u'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
u'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u01af' # 0xDD -> LATIN CAPITAL LETTER U WITH HORN
u'\u0303' # 0xDE -> COMBINING TILDE
u'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
u'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
u'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE
u'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
u'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
u'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
u'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
u'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
u'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
u'\u0301' # 0xEC -> COMBINING ACUTE ACCENT
u'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
u'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
u'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
u'\u0323' # 0xF2 -> COMBINING DOT BELOW
u'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
u'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\u01a1' # 0xF5 -> LATIN SMALL LETTER O WITH HORN
u'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xf7' # 0xF7 -> DIVISION SIGN
u'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
u'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
u'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
u'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u01b0' # 0xFD -> LATIN SMALL LETTER U WITH HORN
u'\u20ab' # 0xFE -> DONG SIGN
u'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
# Inverse mapping (unicode -> byte) derived from decoding_table above.
encoding_table=codecs.charmap_build(decoding_table)
| gpl-3.0 |
DualSpark/ansible | v1/ansible/runner/connection_plugins/accelerate.py | 109 | 15527 | # (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import json
import os
import base64
import socket
import struct
import time
from ansible.callbacks import vvv, vvvv
from ansible.errors import AnsibleError, AnsibleFileNotFound
from ansible.runner.connection_plugins.ssh import Connection as SSHConnection
from ansible.runner.connection_plugins.paramiko_ssh import Connection as ParamikoConnection
from ansible import utils
from ansible import constants
# the chunk size to read and send, assuming mtu 1500 and
# leaving room for base64 (+33%) encoding and header (8 bytes)
# ((1400-8)/4)*3) = 1044
# which leaves room for the TCP/IP header. We set this to a
# multiple of the value to speed up file reads.
CHUNK_SIZE=1044*20
class Connection(object):
''' raw socket accelerated connection '''
    def __init__(self, runner, host, port, user, password, private_key_file, *args, **kwargs):
        """Set up the accelerated connection and its SSH fallback transport.

        ``port`` is a two-element sequence: (ssh_port, accelerate_port);
        either element may be None or a string and is normalized below.
        """
        self.runner = runner
        self.host = host
        self.context = None
        self.conn = None
        self.user = user
        # Shared AES key used to encrypt all traffic with the daemon.
        self.key = utils.key_for_hostname(host)
        self.port = port[0]
        self.accport = port[1]
        self.is_connected = False
        self.has_pipelining = False
        self.become_methods_supported=['sudo']

        # Normalize both ports to ints, applying defaults when unset.
        if not self.port:
            self.port = constants.DEFAULT_REMOTE_PORT
        elif not isinstance(self.port, int):
            self.port = int(self.port)
        if not self.accport:
            self.accport = constants.ACCELERATE_PORT
        elif not isinstance(self.accport, int):
            self.accport = int(self.accport)

        # SSH transport used only as a fallback to launch the daemon;
        # honor the transport the user originally selected.
        if self.runner.original_transport == "paramiko":
            self.ssh = ParamikoConnection(
                runner=self.runner,
                host=self.host,
                port=self.port,
                user=self.user,
                password=password,
                private_key_file=private_key_file
            )
        else:
            self.ssh = SSHConnection(
                runner=self.runner,
                host=self.host,
                port=self.port,
                user=self.user,
                password=password,
                private_key_file=private_key_file
            )

        if not getattr(self.ssh, 'shell', None):
            self.ssh.shell = utils.plugins.shell_loader.get('sh')

        # attempt to work around shared-memory funness
        if getattr(self.runner, 'aes_keys', None):
            utils.AES_KEYS = self.runner.aes_keys
    def _execute_accelerate_module(self):
        """Launch the accelerate daemon on the remote host over plain SSH.

        Builds the daemon's key=value argument string (the shared key is
        base64-encoded for transport) and runs the 'accelerate' module.
        """
        args = "password=%s port=%s minutes=%d debug=%d ipv6=%s" % (
            base64.b64encode(self.key.__str__()),
            str(self.accport),
            constants.ACCELERATE_DAEMON_TIMEOUT,
            int(utils.VERBOSITY),
            self.runner.accelerate_ipv6,
        )
        if constants.ACCELERATE_MULTI_KEY:
            args += " multi_key=yes"
        # Inject the key plus host variables (preferring the dedicated
        # accelerate inventory host when one is configured).
        inject = dict(password=self.key)
        if getattr(self.runner, 'accelerate_inventory_host', False):
            inject = utils.combine_vars(inject, self.runner.inventory.get_variables(self.runner.accelerate_inventory_host))
        else:
            inject = utils.combine_vars(inject, self.runner.inventory.get_variables(self.host))
        vvvv("attempting to start up the accelerate daemon...")
        self.ssh.connect()
        tmp_path = self.runner._make_tmp_path(self.ssh)
        return self.runner._execute_module(self.ssh, tmp_path, 'accelerate', args, inject=inject)
    def connect(self, allow_ssh=True):
        ''' activates the connection object '''
        # Tries the raw accelerated socket first; on failure (and when
        # allow_ssh is True) falls back to SSH to start the daemon, then
        # recurses once with allow_ssh=False.
        try:
            if not self.is_connected:
                wrong_user = False
                tries = 3
                self.conn = socket.socket()
                self.conn.settimeout(constants.ACCELERATE_CONNECT_TIMEOUT)
                vvvv("attempting connection to %s via the accelerated port %d" % (self.host,self.accport))
                while tries > 0:
                    try:
                        self.conn.connect((self.host,self.accport))
                        break
                    except socket.error:
                        vvvv("connection to %s failed, retrying..." % self.host)
                        time.sleep(0.1)
                        tries -= 1
                if tries == 0:
                    vvv("Could not connect via the accelerated connection, exceeded # of tries")
                    raise AnsibleError("FAILED")
                elif wrong_user:
                    # NOTE(review): wrong_user is still False here on the first
                    # pass -- it is only set below, after this check; confirm
                    # whether this branch is ever reachable.
                    vvv("Restarting daemon with a different remote_user")
                    raise AnsibleError("WRONG_USER")

                self.conn.settimeout(constants.ACCELERATE_TIMEOUT)
                if not self.validate_user():
                    # the accelerated daemon was started with a
                    # different remote_user. The above command
                    # should have caused the accelerate daemon to
                    # shutdown, so we'll reconnect.
                    wrong_user = True

        except AnsibleError, e:
            if allow_ssh:
                if "WRONG_USER" in e:
                    vvv("Switching users, waiting for the daemon on %s to shutdown completely..." % self.host)
                    time.sleep(5)
                vvv("Falling back to ssh to startup accelerated mode")
                res = self._execute_accelerate_module()
                if not res.is_successful():
                    raise AnsibleError("Failed to launch the accelerated daemon on %s (reason: %s)" % (self.host,res.result.get('msg')))
                # Retry the socket exactly once now that the daemon is up.
                return self.connect(allow_ssh=False)
            else:
                raise AnsibleError("Failed to connect to %s:%s" % (self.host,self.accport))

        self.is_connected = True
        return self
def send_data(self, data):
packed_len = struct.pack('!Q',len(data))
return self.conn.sendall(packed_len + data)
    def recv_data(self):
        """Receive one length-prefixed frame from the daemon.

        Returns the payload bytes, or None if the peer closed the socket
        mid-frame; raises AnsibleError on socket timeout.
        """
        header_len = 8 # size of a packed unsigned long long
        data = b""
        try:
            # First read exactly 8 header bytes; recv() may return short
            # reads, so loop until the header is complete.
            vvvv("%s: in recv_data(), waiting for the header" % self.host)
            while len(data) < header_len:
                d = self.conn.recv(header_len - len(data))
                if not d:
                    vvvv("%s: received nothing, bailing out" % self.host)
                    return None
                data += d
            vvvv("%s: got the header, unpacking" % self.host)
            data_len = struct.unpack('!Q',data[:header_len])[0]
            data = data[header_len:]
            vvvv("%s: data received so far (expecting %d): %d" % (self.host,data_len,len(data)))
            # Then accumulate until the advertised payload length is reached.
            while len(data) < data_len:
                d = self.conn.recv(data_len - len(data))
                if not d:
                    vvvv("%s: received nothing, bailing out" % self.host)
                    return None
                vvvv("%s: received %d bytes" % (self.host, len(d)))
                data += d
            vvvv("%s: received all of the data, returning" % self.host)
            return data
        except socket.timeout:
            raise AnsibleError("timed out while waiting to receive data")
    def validate_user(self):
        '''
        Checks the remote uid of the accelerated daemon vs. the
        one specified for this play and will cause the accel
        daemon to exit if they don't match

        Returns True when the daemon reports the expected user (rc == 0),
        False when the request failed or the user differs.
        '''

        vvvv("%s: sending request for validate_user" % self.host)
        data = dict(
            mode='validate_user',
            username=self.user,
        )
        data = utils.jsonify(data)
        data = utils.encrypt(self.key, data)
        # send_data returns a truthy value only on failure (sendall error).
        if self.send_data(data):
            raise AnsibleError("Failed to send command to %s" % self.host)

        vvvv("%s: waiting for validate_user response" % self.host)
        while True:
            # we loop here while waiting for the response, because a
            # long running command may cause us to receive keepalive packets
            # ({"pong":"true"}) rather than the response we want.
            response = self.recv_data()
            if not response:
                raise AnsibleError("Failed to get a response from %s" % self.host)
            response = utils.decrypt(self.key, response)
            response = utils.parse_json(response)
            if "pong" in response:
                # it's a keepalive, go back to waiting
                vvvv("%s: received a keepalive packet" % self.host)
                continue
            else:
                vvvv("%s: received the validate_user response: %s" % (self.host, response))
                break

        if response.get('failed'):
            return False
        else:
            return response.get('rc') == 0
def exec_command(self, cmd, tmp_path, become_user=None, sudoable=False, executable='/bin/sh', in_data=None):
''' run a command on the remote host '''
if sudoable and self.runner.become and self.runner.become_method not in self.become_methods_supported:
raise errors.AnsibleError("Internal Error: this module does not support running commands via %s" % self.runner.become_method)
if in_data:
raise AnsibleError("Internal Error: this module does not support optimized module pipelining")
if executable == "":
executable = constants.DEFAULT_EXECUTABLE
if self.runner.become and sudoable:
cmd, prompt, success_key = utils.make_become_cmd(cmd, become_user, executable, self.runner.become_method, '', self.runner.become_exe)
vvv("EXEC COMMAND %s" % cmd)
data = dict(
mode='command',
cmd=cmd,
tmp_path=tmp_path,
executable=executable,
)
data = utils.jsonify(data)
data = utils.encrypt(self.key, data)
if self.send_data(data):
raise AnsibleError("Failed to send command to %s" % self.host)
while True:
# we loop here while waiting for the response, because a
# long running command may cause us to receive keepalive packets
# ({"pong":"true"}) rather than the response we want.
response = self.recv_data()
if not response:
raise AnsibleError("Failed to get a response from %s" % self.host)
response = utils.decrypt(self.key, response)
response = utils.parse_json(response)
if "pong" in response:
# it's a keepalive, go back to waiting
vvvv("%s: received a keepalive packet" % self.host)
continue
else:
vvvv("%s: received the response" % self.host)
break
return (response.get('rc',None), '', response.get('stdout',''), response.get('stderr',''))
def put_file(self, in_path, out_path):
''' transfer a file from local to remote '''
vvv("PUT %s TO %s" % (in_path, out_path), host=self.host)
if not os.path.exists(in_path):
raise AnsibleFileNotFound("file or module does not exist: %s" % in_path)
fd = file(in_path, 'rb')
fstat = os.stat(in_path)
try:
vvv("PUT file is %d bytes" % fstat.st_size)
last = False
while fd.tell() <= fstat.st_size and not last:
vvvv("file position currently %ld, file size is %ld" % (fd.tell(), fstat.st_size))
data = fd.read(CHUNK_SIZE)
if fd.tell() >= fstat.st_size:
last = True
data = dict(mode='put', data=base64.b64encode(data), out_path=out_path, last=last)
if self.runner.become:
data['user'] = self.runner.become_user
data = utils.jsonify(data)
data = utils.encrypt(self.key, data)
if self.send_data(data):
raise AnsibleError("failed to send the file to %s" % self.host)
response = self.recv_data()
if not response:
raise AnsibleError("Failed to get a response from %s" % self.host)
response = utils.decrypt(self.key, response)
response = utils.parse_json(response)
if response.get('failed',False):
raise AnsibleError("failed to put the file in the requested location")
finally:
fd.close()
vvvv("waiting for final response after PUT")
response = self.recv_data()
if not response:
raise AnsibleError("Failed to get a response from %s" % self.host)
response = utils.decrypt(self.key, response)
response = utils.parse_json(response)
if response.get('failed',False):
raise AnsibleError("failed to put the file in the requested location")
    def fetch_file(self, in_path, out_path):
        ''' save a remote file to the specified path '''
        # The daemon streams the file back in base64-encoded chunks; each
        # chunk is acknowledged with an empty (encrypted) response before
        # the next one is sent.
        vvv("FETCH %s TO %s" % (in_path, out_path), host=self.host)

        data = dict(mode='fetch', in_path=in_path)
        data = utils.jsonify(data)
        data = utils.encrypt(self.key, data)
        if self.send_data(data):
            raise AnsibleError("failed to initiate the file fetch with %s" % self.host)

        fh = open(out_path, "w")
        try:
            bytes = 0
            while True:
                response = self.recv_data()
                if not response:
                    raise AnsibleError("Failed to get a response from %s" % self.host)
                response = utils.decrypt(self.key, response)
                response = utils.parse_json(response)
                if response.get('failed', False):
                    raise AnsibleError("Error during file fetch, aborting")
                out = base64.b64decode(response['data'])
                fh.write(out)
                bytes += len(out)
                # send an empty response back to signify we
                # received the last chunk without errors
                data = utils.jsonify(dict())
                data = utils.encrypt(self.key, data)
                if self.send_data(data):
                    raise AnsibleError("failed to send ack during file fetch")
                if response.get('last', False):
                    break
        finally:
            # we don't currently care about this final response,
            # we just receive it and drop it. It may be used at some
            # point in the future or we may just have the put/fetch
            # operations not send back a final response at all
            response = self.recv_data()
            vvv("FETCH wrote %d bytes to %s" % (bytes, out_path))
            fh.close()
def close(self):
''' terminate the connection '''
# Be a good citizen
try:
self.conn.close()
except:
pass
| gpl-3.0 |
jamesblunt/content | labs/lab8/generate_friends.py | 41 | 1594 | #!/usr/bin/python
"""
generate_friends.py
Generates data file "baseball_friends.csv" to be used for lab8 MapReduce
example.
Reads list of names from "names.txt", randomly assigns team alligiences,
then assigns friendships based on super simple algorithm, and finally
writes out the file in the following csv format:
name, team, friend1, friend2, friend3, ...
"""
import numpy as np
from numpy.random import binomial
# Read list of names from file; a context manager guarantees the handle is
# closed (the original open() call leaked it).  np.unique also sorts.
with open("names.txt") as namefile:
    names = [line.strip() for line in namefile]
names = np.unique(names)

# Randomly generate team affiliations for each person (1 -> "Red Sox").
team = binomial(1, 0.5, len(names))

# Probability that two people who are fans of the same team are friends.
friendliness_same = 0.05
# Probability that two people who are fans of opposite teams are friends.
friendliness_diff = 0.03

# Create symmetric 0/1 matrix to store friend relationships.
friends = np.zeros([len(names), len(names)])
for i1 in range(len(names)):
    for i2 in range(i1 + 1, len(names)):
        # Friendship probability depends on whether the pair shares a team.
        if team[i1] == team[i2]:
            flip = binomial(1, friendliness_same)
        else:
            flip = binomial(1, friendliness_diff)
        friends[i1, i2] = flip
        friends[i2, i1] = flip

# Write output file: one row per person -> name, team, friend1, friend2, ...
with open("baseball_friends.csv", 'w') as outfile:
    for i in range(len(names)):
        # Get data for this row.
        this_name = names[i]
        this_team = "Red Sox" if team[i] else "Cardinals"
        friend_list = np.array(names)[friends[i, :] == 1]
        # Write to file.
        outstr = ", ".join((this_name, this_team) + tuple(friend_list))
        outfile.write(outstr + "\n")
| mit |
apepper/elasticsearch | dev-tools/get-bwc-version.py | 136 | 3149 | # Licensed to Elasticsearch under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on
# an 'AS IS' BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
'''
Downloads and extracts elasticsearch for backwards compatibility tests.
'''
import argparse
import os
import platform
import shutil
import subprocess
import urllib.request
import zipfile
def parse_config():
    """Build the command-line parser for this script and parse sys.argv."""
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument(
        '--path', metavar='DIR', default='./backwards',
        help='Where to extract elasticsearch')
    parser.add_argument(
        '--force', action='store_true', default=False,
        help='Delete and redownload if the version already exists')
    parser.add_argument(
        'version', metavar='X.Y.Z',
        help='Version of elasticsearch to grab')
    parsed = parser.parse_args()
    return parsed
def main():
    """Download the requested elasticsearch release into c.path and unpack it.

    Skips versions already present unless --force was given; picks a zip on
    Windows and a tar.gz elsewhere, and chooses the download URL by version.
    """
    c = parse_config()

    if not os.path.exists(c.path):
        print('Creating %s' % c.path)
        os.mkdir(c.path)

    is_windows = platform.system() == 'Windows'

    # All remaining paths are relative to the extraction directory.
    os.chdir(c.path)
    version_dir = 'elasticsearch-%s' % c.version
    if os.path.exists(version_dir):
        if c.force:
            print('Removing old download %s' % version_dir)
            shutil.rmtree(version_dir)
        else:
            print('Version %s exists at %s' % (c.version, version_dir))
            return

    # before 1.4.0, the zip file contains windows scripts, and tar.gz contained *nix scripts
    if is_windows:
        filename = '%s.zip' % version_dir
    else:
        filename = '%s.tar.gz' % version_dir

    if c.version == '1.2.0':
        # 1.2.0 was pulled from download.elasticsearch.org because of routing bug:
        url = 'http://central.maven.org/maven2/org/elasticsearch/elasticsearch/1.2.0/%s' % filename
    elif c.version.startswith('0.') or c.version.startswith('1.'):
        url = 'https://download.elasticsearch.org/elasticsearch/elasticsearch/%s' % filename
    else:
        url = 'http://download.elasticsearch.org/elasticsearch/release/org/elasticsearch/distribution/tar/elasticsearch/%s/%s' % (c.version, filename)
    print('Downloading %s' % url)
    urllib.request.urlretrieve(url, filename)

    print('Extracting to %s' % version_dir)
    if is_windows:
        archive = zipfile.ZipFile(filename)
        archive.extractall()
    else:
        # for some reason python's tarfile module has trouble with ES tgz?
        subprocess.check_call('tar -xzf %s' % filename, shell=True)

    # Remove the downloaded archive, keeping only the extracted tree.
    print('Cleaning up %s' % filename)
    os.remove(filename)
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C abort without a traceback.
        print('Ctrl-C caught, exiting')
| apache-2.0 |
cfelton/rhea | examples/boards/icestick/test_blinky_host_icestick.py | 2 | 2293 |
from __future__ import print_function, division
import myhdl
from myhdl import (Signal, intbv, instance, delay, StopSimulation)
from rhea.system import Global, Clock, Reset
from rhea.models.uart import UARTModel
from rhea.utils.test import run_testbench, tb_args, tb_default_args, tb_convert
from rhea.utils import CommandPacket
from rhea.utils.command_packet import PACKET_LENGTH
from icestick_blinky_host import icestick_blinky_host
def test_ibh(args=None):
    """Simulate the icestick blinky host: write 0xFF to register 0x20 over a
    modeled UART and check the command-packet response echoes the byte;
    finally convert the design to HDL."""
    args = tb_default_args(args)

    numbytes = 13
    clock = Clock(0, frequency=50e6)
    glbl = Global(clock, None)
    led = Signal(intbv(0)[8:])
    pmod = Signal(intbv(0)[8:])
    uart_tx = Signal(bool(0))
    uart_rx = Signal(bool(0))
    uart_dtr = Signal(bool(0))
    uart_rts = Signal(bool(0))
    uartmdl = UARTModel()

    @myhdl.block
    def bench_ibh():
        # Clock generator, UART behavioral model, and the device under test.
        tbclk = clock.gen()
        tbmdl = uartmdl.process(glbl, uart_tx, uart_rx)
        tbdut = icestick_blinky_host(clock, led, pmod,
                                     uart_tx, uart_rx,
                                     uart_dtr, uart_rts)

        @instance
        def tbstim():
            yield delay(1000)

            # send a write that should enable all five LEDs
            pkt = CommandPacket(False, address=0x20, vals=[0xFF])
            for bb in pkt.rawbytes:
                uartmdl.write(bb)
            # ns per bit at 115200 baud * 10 bits/byte * 28 bytes.
            waitticks = int((1/115200.) / 1e-9) * 10 * 28
            yield delay(waitticks)
            timeout = 100
            # NOTE(review): this second identical delay looks duplicated --
            # confirm whether a single wait period was intended.
            yield delay(waitticks)
            # get the response packet
            for ii in range(PACKET_LENGTH):
                rb = uartmdl.read()
                while rb is None and timeout > 0:
                    yield clock.posedge
                    rb = uartmdl.read()
                    timeout -= 1
                if rb is None:
                    raise TimeoutError
            # the last byte should be the byte written
            assert rb == 0xFF

            yield delay(1000)
            raise StopSimulation

        return tbclk, tbmdl, tbdut, tbstim

    run_testbench(bench_ibh, args=args)
    # Also exercise HDL conversion of the top-level design.
    inst = icestick_blinky_host(
        clock, led, pmod,
        uart_tx, uart_rx, uart_dtr, uart_rts
    )
    tb_convert(inst)
if __name__ == '__main__':
test_ibh(tb_args()) | mit |
xguse/outspline | src/outspline/plugins/wxscheduler_basicrules/occur_selected_months_weekday_inverse.py | 1 | 10851 | # Outspline - A highly modular and extensible outliner.
# Copyright (C) 2011-2014 Dario Giovannetti <dev@dariogiovannetti.net>
#
# This file is part of Outspline.
#
# Outspline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Outspline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Outspline. If not, see <http://www.gnu.org/licenses/>.
import time as _time
import datetime as _datetime
import calendar as _calendar
import random
from outspline.static.wxclasses.timectrls import (MonthInverseWeekdayHourCtrl,
TimeSpanCtrl, MonthsCtrl)
import outspline.extensions.organism_basicrules_api as organism_basicrules_api
import outspline.plugins.wxscheduler_api as wxscheduler_api
import interface
import msgboxes
class Rule(object):
    def __init__(self, parent, filename, id_, standard, rule):
        # Pre-compute the widget values from the stored rule so the dialog
        # can be populated, then build the rule-editing interface panels.
        self.original_values = self._compute_values(standard, rule)

        self.ui = interface.Interface(parent, filename, id_,
                                      (interface.Months,
                                       interface.StartNthWeekDayInverse,
                                       interface.EndTime,
                                       interface.AlarmTime,
                                       interface.Standard),
                                      self.original_values)
def apply_rule(self, filename, id_):
values = self.ui.get_values()
smonths = values['selected_months']
rstartin = values['start_weekday_number']
rstartA = values['start_weekday']
weekday = values['start_weekday_index']
rstartH = values['start_hour']
rstartM = values['start_minute']
endtype = values['end_type']
rend = values['end_relative_time']
fend = values['end_next_day']
rendn = values['end_relative_number']
rendu = values['end_relative_unit']
rendH = values['end_hour']
rendM = values['end_minute']
alarmtype = values['alarm_type']
ralarm = values['alarm_relative_time']
palarm = values['alarm_previous_day']
ralarmn = values['alarm_relative_number']
ralarmu = values['alarm_relative_unit']
ralarmH = values['alarm_hour']
ralarmM = values['alarm_minute']
standard = values['time_standard']
try:
if standard == 'UTC':
ruled = organism_basicrules_api.make_occur_monthly_weekday_inverse_rule_UTC(
smonths, weekday, rstartin, rstartH, rstartM,
rend, ralarm, (None, endtype, alarmtype))
else:
ruled = organism_basicrules_api.make_occur_monthly_weekday_inverse_rule_local(
smonths, weekday, rstartin, rstartH, rstartM,
rend, ralarm, (None, endtype, alarmtype))
except organism_basicrules_api.BadRuleError:
msgboxes.warn_bad_rule(msgboxes.generic).ShowModal()
else:
label = self._make_label(smonths, rstartin, rstartA, rstartH,
rstartM, rendH, rendM, ralarmH, ralarmM,
rendn, rendu, ralarmn, ralarmu,
endtype, alarmtype, fend, palarm, standard)
wxscheduler_api.apply_rule(filename, id_, ruled, label)
@classmethod
def insert_rule(cls, filename, id_, rule, rulev):
standard = 'UTC' if rule['rule'] == \
'occur_monthly_weekday_inverse_UTC' else 'local'
values = cls._compute_values(standard, rulev)
label = cls._make_label(values['selected_months'],
values['start_weekday_number'],
values['start_weekday'],
values['start_hour'],
values['start_minute'],
values['end_hour'],
values['end_minute'],
values['alarm_hour'],
values['alarm_minute'],
values['end_relative_number'],
values['end_relative_unit'],
values['alarm_relative_number'],
values['alarm_relative_unit'],
values['end_type'],
values['alarm_type'],
values['end_next_day'],
values['alarm_previous_day'],
values['time_standard'])
wxscheduler_api.insert_rule(filename, id_, rule, label)
@classmethod
def _compute_values(cls, standard, rule):
# Remember to support also time zones that differ from UTC by not
# exact hours (e.g. Australia/Adelaide)
if not rule:
nh = _datetime.datetime.now() + _datetime.timedelta(hours=1)
lday = _calendar.monthrange(nh.year, nh.month)[1]
win = (lday - nh.day) // 7 + 1
values = {
'selected_months': range(1, 13),
'start_weekday_raw': nh.weekday(),
'start_hour': nh.hour,
'start_minute': 0,
'end_relative_time': 3600,
'alarm_relative_time': 0,
'end_type': 0,
'alarm_type': 0,
'time_standard': standard,
}
else:
values = {
'selected_months_raw': rule[0],
'start_weekday_raw': rule[1],
'start_weekday_number_raw': rule[2],
'max_overlap': rule[3],
'start_hour': rule[4],
'start_minute': rule[5],
'end_relative_time': rule[6] if rule[6] is not None else 3600,
'alarm_relative_time': rule[7] if rule[7] is not None else 0,
'end_type': rule[8][1],
'alarm_type': rule[8][2],
'time_standard': standard,
}
values['selected_months'] = list(set(
values['selected_months_raw']))
values['selected_months'].sort()
win = values['start_weekday_number_raw'] + 1
values['end_relative_number'], values['end_relative_unit'] = \
TimeSpanCtrl.compute_widget_values(
values['end_relative_time'])
# ralarm could be negative
values['alarm_relative_number'], values['alarm_relative_unit'] = \
TimeSpanCtrl.compute_widget_values(
max((0, values['alarm_relative_time'])))
rrstart = values['start_hour'] * 3600 + values['start_minute'] * 60
rrend = rrstart + values['end_relative_time']
values['end_next_day'] = False
# End time could be set after 23:59 of the start day
if rrend > 86399:
rrend = rrend % 86400
values['end_next_day'] = True
rralarm = rrstart - values['alarm_relative_time']
values['alarm_previous_day'] = False
# Alarm time could be set before 00:00 of the start day
if rralarm < 0:
rralarm = 86400 - abs(rralarm) % 86400
values['alarm_previous_day'] = True
values.update({
'start_weekday_number': win,
'start_weekday': MonthInverseWeekdayHourCtrl.compute_widget_weekday(
values['start_weekday_raw']),
'end_hour': rrend // 3600,
'end_minute': rrend % 3600 // 60,
'alarm_hour': rralarm // 3600,
'alarm_minute': rralarm % 3600 // 60,
})
return values
@staticmethod
def _make_label(smonths, rstartin, rstartA, rstartH, rstartM, rendH, rendM,
ralarmH, ralarmM, rendn, rendu, ralarmn, ralarmu,
endtype, alarmtype, fend, palarm, standard):
label = 'Occur on the {} {} of {} at {}:{} ({})'.format(
MonthInverseWeekdayHourCtrl.compute_weekday_number_label(
rstartin), rstartA,
', '.join([MonthsCtrl.compute_month_name(m)
for m in smonths]),
str(rstartH).zfill(2), str(rstartM).zfill(2), standard)
if endtype == 1:
label += ' for {} {}'.format(rendn, rendu)
elif endtype == 2:
label += ' until {}:{}'.format(str(rendH).zfill(2),
str(rendM).zfill(2))
if fend:
label += ' of the following day'
if alarmtype == 1:
label += ', activate alarm {} {} before'.format(ralarmn, ralarmu)
elif alarmtype == 2:
label += ', activate alarm at {}:{}'.format(
str(ralarmH).zfill(2), str(ralarmM).zfill(2))
if palarm:
label += ' of the previous day'
return label
@staticmethod
def create_random_rule():
smonths = random.sample(range(1, 13), random.randint(1, 12))
weekday = random.randint(0, 6)
inumber = random.randint(1, 5)
hour = random.randint(0, 23)
minute = random.randint(0, 59)
endtype = random.randint(0, 2)
if endtype == 0:
rend = None
else:
rend = random.randint(1, 360) * 60
alarmtype = random.randint(0, 2)
if alarmtype == 0:
ralarm = None
else:
ralarm = random.randint(0, 360) * 60
stdn = random.randint(0, 1)
if stdn == 0:
return organism_basicrules_api.make_occur_monthly_weekday_inverse_rule_local(
smonths, weekday, inumber, hour, minute, rend, ralarm,
(None, endtype, alarmtype))
else:
return organism_basicrules_api.make_occur_monthly_weekday_inverse_rule_UTC(
smonths, weekday, inumber, hour, minute, rend, ralarm,
(None, endtype, alarmtype))
| gpl-3.0 |
caveman-dick/ansible | lib/ansible/plugins/strategy/free.py | 14 | 11816 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
strategy: free
short_description: Executes tasks on each host independently
description:
- Task execution is as fast as possible per host in batch as defined by C(serial) (default all).
Ansible will not wait for other hosts to finish the current task before queuing the next task for a host that has finished.
      Once a host is done with the play, it opens its slot to a new host that was waiting to start.
version_added: "2.0"
author: Ansible Core Team
'''
import time
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.playbook.included_file import IncludedFile
from ansible.plugins.loader import action_loader
from ansible.plugins.strategy import StrategyBase
from ansible.template import Templar
from ansible.module_utils._text import to_text
try:
    # Reuse the display object already created by the Ansible CLI entry
    # point, when one exists.
    from __main__ import display
except ImportError:
    # Otherwise fall back to a standalone Display instance.
    from ansible.utils.display import Display
    display = Display()
class StrategyModule(StrategyBase):
    """Implementation of the 'free' strategy: every host advances through
    its own task list as fast as possible, without lock-stepping with the
    other hosts."""

    def run(self, iterator, play_context):
        '''
        The "free" strategy is a bit more complex, in that it allows tasks to
        be sent to hosts as quickly as they can be processed. This means that
        some hosts may finish very quickly if run tasks result in little or no
        work being done versus other systems.

        The algorithm used here also tries to be more "fair" when iterating
        through hosts by remembering the last host in the list to be given a task
        and starting the search from there as opposed to the top of the hosts
        list again, which would end up favoring hosts near the beginning of the
        list.
        '''

        # the last host to be given a task
        last_host = 0

        result = self._tqm.RUN_OK

        work_to_do = True
        while work_to_do and not self._tqm._terminated:

            hosts_left = self.get_hosts_left(iterator)

            if len(hosts_left) == 0:
                self._tqm.send_callback('v2_playbook_on_no_hosts_remaining')
                result = False
                break

            work_to_do = False  # assume we have no more work to do
            starting_host = last_host  # save current position so we know when we've looped back around and need to break

            # try and find an unblocked host with a task to run
            host_results = []
            while True:
                host = hosts_left[last_host]
                display.debug("next free host: %s" % host)
                host_name = host.get_name()

                # peek at the next task for the host, to see if there's
                # anything to do do for this host
                (state, task) = iterator.get_next_task_for_host(host, peek=True)
                display.debug("free host state: %s" % state)
                display.debug("free host task: %s" % task)
                if host_name not in self._tqm._unreachable_hosts and task:

                    # set the flag so the outer loop knows we've still found
                    # some work which needs to be done
                    work_to_do = True

                    display.debug("this host has work to do")

                    # check to see if this host is blocked (still executing a previous task)
                    if host_name not in self._blocked_hosts or not self._blocked_hosts[host_name]:
                        # pop the task, mark the host blocked, and queue it
                        self._blocked_hosts[host_name] = True
                        (state, task) = iterator.get_next_task_for_host(host)

                        try:
                            action = action_loader.get(task.action, class_only=True)
                        except KeyError:
                            # we don't care here, because the action may simply not have a
                            # corresponding action plugin
                            action = None

                        display.debug("getting variables")
                        task_vars = self._variable_manager.get_vars(play=iterator._play, host=host, task=task)
                        self.add_tqm_variables(task_vars, play=iterator._play)
                        templar = Templar(loader=self._loader, variables=task_vars)
                        display.debug("done getting variables")

                        try:
                            task.name = to_text(templar.template(task.name, fail_on_undefined=False), nonstring='empty')
                            display.debug("done templating")
                        except:
                            # just ignore any errors during task name templating,
                            # we don't care if it just shows the raw name
                            display.debug("templating failed for some reason")

                        run_once = templar.template(task.run_once) or action and getattr(action, 'BYPASS_HOST_LOOP', False)
                        if run_once:
                            if action and getattr(action, 'BYPASS_HOST_LOOP', False):
                                raise AnsibleError("The '%s' module bypasses the host loop, which is currently not supported in the free strategy "
                                                   "and would instead execute for every host in the inventory list." % task.action, obj=task._ds)
                            else:
                                display.warning("Using run_once with the free strategy is not currently supported. This task will still be "
                                                "executed for every host in the inventory list.")

                        # check to see if this task should be skipped, due to it being a member of a
                        # role which has already run (and whether that role allows duplicate execution)
                        if task._role and task._role.has_run(host):
                            # If there is no metadata, the default behavior is to not allow duplicates,
                            # if there is metadata, check to see if the allow_duplicates flag was set to true
                            if task._role._metadata is None or task._role._metadata and not task._role._metadata.allow_duplicates:
                                display.debug("'%s' skipped because role has already run" % task)
                                del self._blocked_hosts[host_name]
                                continue

                        if task.action == 'meta':
                            self._execute_meta(task, play_context, iterator, target_host=host)
                            self._blocked_hosts[host_name] = False
                        else:
                            # handle step if needed, skip meta actions as they are used internally
                            if not self._step or self._take_step(task, host_name):
                                if task.any_errors_fatal:
                                    display.warning("Using any_errors_fatal with the free strategy is not supported, "
                                                    "as tasks are executed independently on each host")
                                self._tqm.send_callback('v2_playbook_on_task_start', task, is_conditional=False)
                                self._queue_task(host, task, task_vars, play_context)
                                del task_vars
                    else:
                        display.debug("%s is blocked, skipping for now" % host_name)

                # move on to the next host and make sure we
                # haven't gone past the end of our hosts list
                last_host += 1
                if last_host > len(hosts_left) - 1:
                    last_host = 0

                # if we've looped around back to the start, break out
                if last_host == starting_host:
                    break

            results = self._process_pending_results(iterator)
            host_results.extend(results)

            try:
                included_files = IncludedFile.process_include_results(
                    host_results,
                    self._tqm,
                    iterator=iterator,
                    inventory=self._inventory,
                    loader=self._loader,
                    variable_manager=self._variable_manager
                )
            except AnsibleError as e:
                # fatal problem while processing the include results, so
                # abort the whole run
                return self._tqm.RUN_ERROR

            if len(included_files) > 0:
                all_blocks = dict((host, []) for host in hosts_left)
                for included_file in included_files:
                    display.debug("collecting new blocks for %s" % included_file)
                    try:
                        if included_file._is_role:
                            new_ir = included_file._task.copy()
                            new_ir.vars.update(included_file._args)

                            new_blocks, handler_blocks = new_ir.get_block_list(
                                play=iterator._play,
                                variable_manager=self._variable_manager,
                                loader=self._loader,
                            )
                            self._tqm.update_handler_list([handler for handler_block in handler_blocks for handler in handler_block.block])
                        else:
                            new_blocks = self._load_included_file(included_file, iterator=iterator)
                    except AnsibleError as e:
                        # only the hosts that used this include are failed
                        for host in included_file._hosts:
                            iterator.mark_host_failed(host)
                        display.warning(str(e))
                        continue

                    for new_block in new_blocks:
                        task_vars = self._variable_manager.get_vars(play=iterator._play, task=included_file._task)
                        final_block = new_block.filter_tagged_tasks(play_context, task_vars)
                        for host in hosts_left:
                            if host in included_file._hosts:
                                all_blocks[host].append(final_block)
                    display.debug("done collecting new blocks for %s" % included_file)

                display.debug("adding all collected blocks from %d included file(s) to iterator" % len(included_files))
                for host in hosts_left:
                    iterator.add_tasks(host, all_blocks[host])
                display.debug("done adding collected blocks to iterator")

            # pause briefly so we don't spin lock
            time.sleep(C.DEFAULT_INTERNAL_POLL_INTERVAL)

        # collect all the final results
        results = self._wait_on_pending_results(iterator)

        # run the base class run() method, which executes the cleanup function
        # and runs any outstanding handlers which have been triggered
        return super(StrategyModule, self).run(iterator, play_context, result)
| gpl-3.0 |
alisidd/tensorflow | tensorflow/python/tools/selective_registration_header_lib.py | 27 | 6109 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Computes a header file to be used with SELECTIVE_REGISTRATION.
See the executable wrapper, print_selective_registration_header.py, for more
information.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import sys
from google.protobuf import text_format
from tensorflow.core.framework import graph_pb2
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.platform import gfile
from tensorflow.python.platform import tf_logging
def get_ops_and_kernels(proto_fileformat, proto_files, default_ops_str):
  """Gets the ops and kernels needed from the model files.

  Args:
    proto_fileformat: format of the graph files, either 'rawproto'
      (serialized GraphDef) or 'textproto'.
    proto_files: list of paths to GraphDef files.
    default_ops_str: comma-separated 'op:kernel' pairs to always include, or
      'all' / empty to add no defaults here.

  Returns:
    A sorted list of unique (op_name, kernel_class_name) tuples.
  """
  ops = set()

  for proto_file in proto_files:
    tf_logging.info('Loading proto file %s', proto_file)

    # Load GraphDef.
    file_data = gfile.GFile(proto_file, 'rb').read()
    if proto_fileformat == 'rawproto':
      graph_def = graph_pb2.GraphDef.FromString(file_data)
    else:
      assert proto_fileformat == 'textproto'
      graph_def = text_format.Parse(file_data, graph_pb2.GraphDef())

    # Find all ops and kernels used by the graph.
    for node_def in graph_def.node:
      if not node_def.device:
        node_def.device = '/cpu:0'

      kernel_class = pywrap_tensorflow.TryFindKernelClass(
          node_def.SerializeToString())
      if kernel_class:
        # set.add is idempotent, so no membership check is needed.
        ops.add((str(node_def.op), kernel_class.decode('utf-8')))
      else:
        print(
            'Warning: no kernel found for op %s' % node_def.op, file=sys.stderr)

  # Add default ops.
  if default_ops_str and default_ops_str != 'all':
    for s in default_ops_str.split(','):
      op, kernel = s.split(':')
      ops.add((op, kernel))

  # sorted() already returns a new list.
  return sorted(ops)
def get_header_from_ops_and_kernels(ops_and_kernels,
                                    include_all_ops_and_kernels):
  """Returns a header for use with tensorflow SELECTIVE_REGISTRATION.

  Args:
    ops_and_kernels: a set of (op_name, kernel_class_name) pairs to include.
    include_all_ops_and_kernels: if True, ops_and_kernels is ignored and all op
    kernels are included.

  Returns:
    the string of the header that should be written as ops_to_register.h.
  """
  # Use a set comprehension instead of set([listcomp]); same result, no
  # intermediate list.
  ops = {op for op, _ in ops_and_kernels}
  result_list = []

  def append(s):
    # Accumulate output lines; joined with '\n' at the end.
    result_list.append(s)

  _, script_name = os.path.split(sys.argv[0])
  append('// This file was autogenerated by %s' % script_name)
  append('#ifndef OPS_TO_REGISTER')
  append('#define OPS_TO_REGISTER')

  if include_all_ops_and_kernels:
    append('#define SHOULD_REGISTER_OP(op) true')
    append('#define SHOULD_REGISTER_OP_KERNEL(clz) true')
    append('#define SHOULD_REGISTER_OP_GRADIENT true')
  else:
    # Emit a constexpr op predicate with one strcmp per selected op.
    append('constexpr inline bool ShouldRegisterOp(const char op[]) {')
    append('  return false')
    for op in sorted(ops):
      append('      || (strcmp(op, "%s") == 0)' % op)
    append('  ;')
    append('}')
    append('#define SHOULD_REGISTER_OP(op) ShouldRegisterOp(op)')
    append('')

    # Constexpr helpers for matching a kernel class name against the
    # whitelist, ignoring leading spaces.
    line = '''
    namespace {
      constexpr const char* skip(const char* x) {
        return (*x) ? (*x == ' ' ? skip(x + 1) : x) : x;
      }
      constexpr bool isequal(const char* x, const char* y) {
        return (*skip(x) && *skip(y))
                   ? (*skip(x) == *skip(y) && isequal(skip(x) + 1, skip(y) + 1))
                   : (!*skip(x) && !*skip(y));
      }
      template<int N>
      struct find_in {
        static constexpr bool f(const char* x, const char* const y[N]) {
          return isequal(x, y[0]) || find_in<N - 1>::f(x, y + 1);
        }
      };
      template<>
      struct find_in<0> {
        static constexpr bool f(const char* x, const char* const y[]) {
          return false;
        }
      };
    }  // end namespace
    '''
    line += 'constexpr const char* kNecessaryOpKernelClasses[] = {\n'
    for _, kernel_class in ops_and_kernels:
      line += '"%s",\n' % kernel_class
    line += '};'
    append(line)
    append('#define SHOULD_REGISTER_OP_KERNEL(clz) '
           '(find_in<sizeof(kNecessaryOpKernelClasses) '
           '/ sizeof(*kNecessaryOpKernelClasses)>::f(clz, '
           'kNecessaryOpKernelClasses))')
    append('')

    append('#define SHOULD_REGISTER_OP_GRADIENT ' + (
        'true' if 'SymbolicGradient' in ops else 'false'))

  append('#endif')
  return '\n'.join(result_list)
def get_header(graphs,
               proto_fileformat='rawproto',
               default_ops='NoOp:NoOp,_Recv:RecvOp,_Send:SendOp'):
  """Computes a header for use with tensorflow SELECTIVE_REGISTRATION.

  Args:
    graphs: a list of paths to GraphDef files to include.
    proto_fileformat: optional format of proto file, either 'textproto' or
      'rawproto' (default).
    default_ops: optional comma-separated string of operator:kernel pairs to
      always include implementation for. Pass 'all' to have all operators and
      kernels included. Default: 'NoOp:NoOp,_Recv:RecvOp,_Send:SendOp'.

  Returns:
    the string of the header that should be written as ops_to_register.h.
  """
  selected = get_ops_and_kernels(proto_fileformat, graphs, default_ops)
  if selected:
    return get_header_from_ops_and_kernels(selected, default_ops == 'all')
  # Nothing was selected: report the problem and signal failure with 1.
  print('Error reading graph!')
  return 1
| apache-2.0 |
pdellaert/ansible | lib/ansible/modules/network/aci/aci_bd_subnet.py | 8 | 13616 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_bd_subnet
short_description: Manage Subnets (fv:Subnet)
description:
- Manage Subnets on Cisco ACI fabrics.
version_added: '2.4'
options:
bd:
description:
- The name of the Bridge Domain.
type: str
aliases: [ bd_name ]
description:
description:
- The description for the Subnet.
type: str
aliases: [ descr ]
enable_vip:
description:
- Determines if the Subnet should be treated as a VIP; used when the BD is extended to multiple sites.
- The APIC defaults to C(no) when unset during creation.
type: bool
gateway:
description:
- The IPv4 or IPv6 gateway address for the Subnet.
type: str
aliases: [ gateway_ip ]
mask:
description:
- The subnet mask for the Subnet.
- This is the number associated with CIDR notation.
- For IPv4 addresses, accepted values range between C(0) and C(32).
- For IPv6 addresses, accepted Values range between C(0) and C(128).
type: int
aliases: [ subnet_mask ]
nd_prefix_policy:
description:
- The IPv6 Neighbor Discovery Prefix Policy to associate with the Subnet.
type: str
preferred:
description:
    - Determines if the Subnet is preferred over all available Subnets. Only one Subnet per Address Family (IPv4/IPv6)
      can be preferred in the Bridge Domain.
- The APIC defaults to C(no) when unset during creation.
type: bool
route_profile:
description:
- The Route Profile to the associate with the Subnet.
type: str
route_profile_l3_out:
description:
- The L3 Out that contains the associated Route Profile.
type: str
scope:
description:
- Determines the scope of the Subnet.
- The C(private) option only allows communication with hosts in the same VRF.
- The C(public) option allows the Subnet to be advertised outside of the ACI Fabric, and allows communication with
hosts in other VRFs.
- The shared option limits communication to hosts in either the same VRF or the shared VRF.
- The value is a list of options, C(private) and C(public) are mutually exclusive, but both can be used with C(shared).
- The APIC defaults to C(private) when unset during creation.
type: list
choices:
- private
- public
- shared
subnet_control:
description:
- Determines the Subnet's Control State.
- The C(querier_ip) option is used to treat the gateway_ip as an IGMP querier source IP.
- The C(nd_ra) option is used to treat the gateway_ip address as a Neighbor Discovery Router Advertisement Prefix.
- The C(no_gw) option is used to remove default gateway functionality from the gateway address.
- The APIC defaults to C(nd_ra) when unset during creation.
type: str
choices: [ nd_ra, no_gw, querier_ip, unspecified ]
subnet_name:
description:
- The name of the Subnet.
type: str
aliases: [ name ]
tenant:
description:
- The name of the Tenant.
type: str
aliases: [ tenant_name ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
extends_documentation_fragment: aci
notes:
- The C(gateway) parameter is the root key used to access the Subnet (not name), so the C(gateway)
is required when the state is C(absent) or C(present).
- The C(tenant) and C(bd) used must exist before using this module in your playbook.
The M(aci_tenant) module and M(aci_bd) can be used for these.
seealso:
- module: aci_bd
- module: aci_tenant
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(fv:Subnet).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Jacob McGill (@jmcgill298)
'''
EXAMPLES = r'''
- name: Create a tenant
aci_tenant:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
state: present
delegate_to: localhost
- name: Create a bridge domain
aci_bd:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
bd: database
state: present
delegate_to: localhost
- name: Create a subnet
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
bd: database
gateway: 10.1.1.1
mask: 24
state: present
delegate_to: localhost
- name: Create a subnet with options
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
bd: database
subnet_name: sql
gateway: 10.1.2.1
mask: 23
description: SQL Servers
scope: public
route_profile_l3_out: corp
route_profile: corp_route_profile
state: present
delegate_to: localhost
- name: Update a subnets scope to private and shared
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
bd: database
gateway: 10.1.1.1
mask: 24
scope: [private, shared]
state: present
delegate_to: localhost
- name: Get all subnets
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
state: query
delegate_to: localhost
- name: Get all subnets of specific gateway in specified tenant
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
gateway: 10.1.1.1
mask: 24
state: query
delegate_to: localhost
register: query_result
- name: Get specific subnet
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
bd: database
gateway: 10.1.1.1
mask: 24
state: query
delegate_to: localhost
register: query_result
- name: Delete a subnet
aci_bd_subnet:
host: apic
username: admin
password: SomeSecretPassword
tenant: production
bd: database
gateway: 10.1.1.1
mask: 24
state: absent
delegate_to: localhost
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
# Maps the module's subnet_control choices to the values the APIC expects
# in the fvSubnet 'ctrl' attribute.
SUBNET_CONTROL_MAPPING = dict(
    nd_ra='nd',
    no_gw='no-default-gateway',
    querier_ip='querier',
    unspecified='',
)
def main():
    """Module entry point.

    Builds the argument spec, validates the parameters, then creates/updates,
    deletes or queries the fvSubnet object on the APIC according to `state`.
    """
    argument_spec = aci_argument_spec()
    argument_spec.update(
        bd=dict(type='str', aliases=['bd_name']),  # Not required for querying all objects
        description=dict(type='str', aliases=['descr']),
        enable_vip=dict(type='bool'),
        gateway=dict(type='str', aliases=['gateway_ip']),  # Not required for querying all objects
        mask=dict(type='int', aliases=['subnet_mask']),  # Not required for querying all objects
        subnet_name=dict(type='str', aliases=['name']),
        nd_prefix_policy=dict(type='str'),
        preferred=dict(type='bool'),
        route_profile=dict(type='str'),
        route_profile_l3_out=dict(type='str'),
        scope=dict(type='list', choices=['private', 'public', 'shared']),
        subnet_control=dict(type='str', choices=['nd_ra', 'no_gw', 'querier_ip', 'unspecified']),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        tenant=dict(type='str', aliases=['tenant_name']),  # Not required for querying all objects
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        required_together=[['gateway', 'mask']],
        required_if=[
            ['state', 'present', ['bd', 'gateway', 'mask', 'tenant']],
            ['state', 'absent', ['bd', 'gateway', 'mask', 'tenant']],
        ],
    )

    aci = ACIModule(module)

    description = module.params['description']
    enable_vip = aci.boolean(module.params['enable_vip'])
    tenant = module.params['tenant']
    bd = module.params['bd']
    gateway = module.params['gateway']
    mask = module.params['mask']
    if mask is not None and mask not in range(0, 129):
        # TODO: split checks between IPv4 and IPv6 Addresses
        module.fail_json(msg='Valid Subnet Masks are 0 to 32 for IPv4 Addresses and 0 to 128 for IPv6 addresses')
    if gateway is not None:
        # The APIC identifies the subnet by its gateway in CIDR notation.
        gateway = '{0}/{1}'.format(gateway, str(mask))
    subnet_name = module.params['subnet_name']
    nd_prefix_policy = module.params['nd_prefix_policy']
    preferred = aci.boolean(module.params['preferred'])
    route_profile = module.params['route_profile']
    route_profile_l3_out = module.params['route_profile_l3_out']
    scope = module.params['scope']
    if scope is not None:
        if 'private' in scope and 'public' in scope:
            # 'private' and 'public' are mutually exclusive; 'shared' may
            # combine with either.
            module.fail_json(msg="Parameter 'scope' cannot be both 'private' and 'public', got: %s" % scope)
        else:
            # The APIC expects a sorted, comma-separated string.
            scope = ','.join(sorted(scope))
    state = module.params['state']
    subnet_control = module.params['subnet_control']
    if subnet_control:
        # Translate the module choice into the APIC 'ctrl' value.
        subnet_control = SUBNET_CONTROL_MAPPING[subnet_control]

    # Build the request URL: tenant -> bridge domain -> subnet, including
    # the child objects for route profile and ND prefix policy bindings.
    aci.construct_url(
        root_class=dict(
            aci_class='fvTenant',
            aci_rn='tn-{0}'.format(tenant),
            module_object=tenant,
            target_filter={'name': tenant},
        ),
        subclass_1=dict(
            aci_class='fvBD',
            aci_rn='BD-{0}'.format(bd),
            module_object=bd,
            target_filter={'name': bd},
        ),
        subclass_2=dict(
            aci_class='fvSubnet',
            aci_rn='subnet-[{0}]'.format(gateway),
            module_object=gateway,
            target_filter={'ip': gateway},
        ),
        child_classes=['fvRsBDSubnetToProfile', 'fvRsNdPfxPol'],
    )

    aci.get_existing()

    if state == 'present':
        aci.payload(
            aci_class='fvSubnet',
            class_config=dict(
                ctrl=subnet_control,
                descr=description,
                ip=gateway,
                name=subnet_name,
                preferred=preferred,
                scope=scope,
                virtual=enable_vip,
            ),
            child_configs=[
                {'fvRsBDSubnetToProfile': {'attributes': {'tnL3extOutName': route_profile_l3_out, 'tnRtctrlProfileName': route_profile}}},
                {'fvRsNdPfxPol': {'attributes': {'tnNdPfxPolName': nd_prefix_policy}}},
            ],
        )

        # Only push the delta between the existing and proposed config.
        aci.get_diff(aci_class='fvSubnet')

        aci.post_config()

    elif state == 'absent':
        aci.delete_config()

    aci.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
robjohnson189/home-assistant | homeassistant/components/camera/foscam.py | 17 | 2226 | """
This component provides basic support for Foscam IP cameras.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/camera.foscam/
"""
import logging
import requests
import voluptuous as vol
from homeassistant.components.camera import (Camera, PLATFORM_SCHEMA)
from homeassistant.const import (
CONF_NAME, CONF_USERNAME, CONF_PASSWORD, CONF_PORT)
from homeassistant.helpers import config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_IP = 'ip'
DEFAULT_NAME = 'Foscam Camera'
DEFAULT_PORT = 88
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_IP): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
})
# pylint: disable=unused-argument
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Setup a Foscam IP Camera.

    Creates a single FoscamCamera from the validated platform config
    and registers it with Home Assistant.
    """
    add_devices([FoscamCamera(config)])
class FoscamCamera(Camera):
    """An implementation of a Foscam IP camera."""

    def __init__(self, device_info):
        """Initialize a Foscam camera from the validated platform config."""
        super(FoscamCamera, self).__init__()
        ip_address = device_info.get(CONF_IP)
        port = device_info.get(CONF_PORT)
        self._base_url = 'http://{}:{}/'.format(ip_address, port)
        self._username = device_info.get(CONF_USERNAME)
        self._password = device_info.get(CONF_PASSWORD)
        self._snap_picture_url = self._base_url \
            + 'cgi-bin/CGIProxy.fcgi?cmd=snapPicture2&usr=' \
            + self._username + '&pwd=' + self._password
        self._name = device_info.get(CONF_NAME)
        # Log only the base URL: the snapshot URL embeds the username and
        # password in its query string and must not end up in log files.
        _LOGGER.info('Using the following URL for %s: %s',
                     self._name, self._base_url)

    def camera_image(self):
        """Return a still image response from the camera, or None on error."""
        # Send the request to snap a picture and return raw jpg data.
        try:
            response = requests.get(self._snap_picture_url, timeout=10)
        except requests.RequestException:
            # Network hiccups should not take the platform down.
            _LOGGER.error('Error getting camera image for %s', self._name)
            return None
        return response.content

    @property
    def name(self):
        """Return the name of this camera."""
        return self._name
| mit |
s40223154/2017springvcp_hw | 2017springvcp_hw-gh-pages/plugin/liquid_tags/test_notebook.py | 311 | 3042 | import re
from pelican.tests.support import unittest
from . import notebook
class TestNotebookTagRegex(unittest.TestCase):
    """Tests for the notebook liquid-tag FORMAT regex (src, cells, language)."""
    def get_argdict(self, markup):
        # Helper: run the FORMAT regex over *markup* and unpack the named
        # groups, or return None when the markup does not match at all.
        match = notebook.FORMAT.search(markup)
        if match:
            argdict = match.groupdict()
            src = argdict['src']
            start = argdict['start']
            end = argdict['end']
            language = argdict['language']
            return src, start, end, language
        return None
    def test_basic_notebook_tag(self):
        # Plain path: cells range and language default to None.
        markup = u'path/to/thing.ipynb'
        src, start, end, language = self.get_argdict(markup)
        self.assertEqual(src, u'path/to/thing.ipynb')
        self.assertIsNone(start)
        self.assertIsNone(end)
        self.assertIsNone(language)
    def test_basic_notebook_tag_insensitive_to_whitespace(self):
        # Leading/trailing whitespace must not leak into the src group.
        markup = u' path/to/thing.ipynb '
        src, start, end, language = self.get_argdict(markup)
        self.assertEqual(src, u'path/to/thing.ipynb')
        self.assertIsNone(start)
        self.assertIsNone(end)
        self.assertIsNone(language)
    def test_notebook_tag_with_cells(self):
        markup = u'path/to/thing.ipynb cells[1:5]'
        src, start, end, language = self.get_argdict(markup)
        self.assertEqual(src, u'path/to/thing.ipynb')
        self.assertEqual(start, u'1')
        self.assertEqual(end, u'5')
        self.assertIsNone(language)
    def test_notebook_tag_with_alphanumeric_language(self):
        markup = u'path/to/thing.ipynb language[python3]'
        src, start, end, language = self.get_argdict(markup)
        self.assertEqual(src, u'path/to/thing.ipynb')
        self.assertIsNone(start)
        self.assertIsNone(end)
        self.assertEqual(language, u'python3')
    def test_notebook_tag_with_symbol_in_name_language(self):
        # Language short names may contain '+' and '-' (pygments lexers).
        for short_name in [u'c++', u'cpp-objdump', u'c++-objdumb', u'cxx-objdump']:
            markup = u'path/to/thing.ipynb language[{}]'.format(short_name)
            src, start, end, language = self.get_argdict(markup)
            self.assertEqual(src, u'path/to/thing.ipynb')
            self.assertIsNone(start)
            self.assertIsNone(end)
            self.assertEqual(language, short_name)
    def test_notebook_tag_with_language_and_cells(self):
        markup = u'path/to/thing.ipynb cells[1:5] language[julia]'
        src, start, end, language = self.get_argdict(markup)
        self.assertEqual(src, u'path/to/thing.ipynb')
        self.assertEqual(start, u'1')
        self.assertEqual(end, u'5')
        self.assertEqual(language, u'julia')
    def test_notebook_tag_with_language_and_cells_and_weird_spaces(self):
        markup = u' path/to/thing.ipynb cells[1:5] language[julia] '
        src, start, end, language = self.get_argdict(markup)
        self.assertEqual(src, u'path/to/thing.ipynb')
        self.assertEqual(start, u'1')
        self.assertEqual(end, u'5')
        self.assertEqual(language, u'julia')
if __name__ == '__main__':
unittest.main() | agpl-3.0 |
h3biomed/ansible | test/units/modules/network/nos/test_nos_config.py | 38 | 6757 | #
# (c) 2018 Extreme Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from units.modules.utils import set_module_args
from ansible.modules.network.nos import nos_config
from .nos_module import TestNosModule, load_fixture
class TestNosConfigModule(TestNosModule):
    """Unit tests for the nos_config Ansible module (Extreme NOS).

    All device I/O helpers are mocked out in setUp(), so every test runs
    against the nos_config_config.cfg fixture instead of real hardware.
    """
    module = nos_config
    def setUp(self):
        super(TestNosConfigModule, self).setUp()
        # Patch the module's device-facing helpers so no device is contacted.
        self.mock_get_config = patch('ansible.modules.network.nos.nos_config.get_config')
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch('ansible.modules.network.nos.nos_config.load_config')
        self.load_config = self.mock_load_config.start()
        self.mock_run_commands = patch('ansible.modules.network.nos.nos_config.run_commands')
        self.run_commands = self.mock_run_commands.start()
    def tearDown(self):
        super(TestNosConfigModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_run_commands.stop()
    def load_fixtures(self, commands=None):
        # Every test sees the same running-config fixture.
        config_file = 'nos_config_config.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None
    def test_nos_config_unchanged(self):
        # Pushing the current config verbatim must be a no-op.
        src = load_fixture('nos_config_config.cfg')
        set_module_args(dict(src=src))
        self.execute_module()
    def test_nos_config_src(self):
        src = load_fixture('nos_config_src.cfg')
        set_module_args(dict(src=src))
        commands = ['hostname foo', 'interface TenGigabitEthernet 104/0/0',
                    'no ip address']
        self.execute_module(changed=True, commands=commands)
    def test_nos_config_backup(self):
        set_module_args(dict(backup=True))
        result = self.execute_module()
        self.assertIn('__backup__', result)
    def test_nos_config_lines_wo_parents(self):
        set_module_args(dict(lines=['hostname foo']))
        commands = ['hostname foo']
        self.execute_module(changed=True, commands=commands)
    def test_nos_config_lines_w_parents(self):
        set_module_args(dict(lines=['shutdown'], parents=['interface TenGigabitEthernet 104/0/0']))
        commands = ['interface TenGigabitEthernet 104/0/0', 'shutdown']
        self.execute_module(changed=True, commands=commands)
    def test_nos_config_before(self):
        # 'before' commands must precede the change; order matters (sort=False).
        set_module_args(dict(lines=['hostname foo'], before=['test1', 'test2']))
        commands = ['test1', 'test2', 'hostname foo']
        self.execute_module(changed=True, commands=commands, sort=False)
    def test_nos_config_after(self):
        set_module_args(dict(lines=['hostname foo'], after=['test1', 'test2']))
        commands = ['hostname foo', 'test1', 'test2']
        self.execute_module(changed=True, commands=commands, sort=False)
    def test_nos_config_before_after_no_change(self):
        # before/after are only emitted when the base lines cause a change.
        set_module_args(dict(lines=['hostname router'],
                             before=['test1', 'test2'],
                             after=['test3', 'test4']))
        self.execute_module()
    def test_nos_config_config(self):
        config = 'hostname localhost'
        set_module_args(dict(lines=['hostname router'], config=config))
        commands = ['hostname router']
        self.execute_module(changed=True, commands=commands)
    def test_nos_config_replace_block(self):
        lines = ['description test string', 'test string']
        parents = ['interface TenGigabitEthernet 104/0/0']
        set_module_args(dict(lines=lines, replace='block', parents=parents))
        commands = parents + lines
        self.execute_module(changed=True, commands=commands)
    def test_nos_config_match_none(self):
        lines = ['hostname router']
        set_module_args(dict(lines=lines, match='none'))
        self.execute_module(changed=True, commands=lines)
    def test_nos_config_match_none_parents(self):
        lines = ['ip address 1.2.3.4 255.255.255.0', 'description test string']
        parents = ['interface TenGigabitEthernet 104/0/0']
        set_module_args(dict(lines=lines, parents=parents, match='none'))
        commands = parents + lines
        self.execute_module(changed=True, commands=commands, sort=False)
    def test_nos_config_match_strict(self):
        # With strict matching only the out-of-position line is re-sent.
        lines = ['ip address 1.2.3.4 255.255.255.0', 'description test string',
                 'shutdown']
        parents = ['interface TenGigabitEthernet 104/0/0']
        set_module_args(dict(lines=lines, parents=parents, match='strict'))
        commands = parents + ['shutdown']
        self.execute_module(changed=True, commands=commands, sort=False)
    def test_nos_config_match_exact(self):
        lines = ['ip address 1.2.3.4 255.255.255.0', 'description test string',
                 'shutdown']
        parents = ['interface TenGigabitEthernet 104/0/0']
        set_module_args(dict(lines=lines, parents=parents, match='exact'))
        commands = parents + lines
        self.execute_module(changed=True, commands=commands, sort=False)
    def test_nos_config_src_and_lines_fails(self):
        # Mutually exclusive / dependent parameter combinations must fail.
        args = dict(src='foo', lines='foo')
        set_module_args(args)
        self.execute_module(failed=True)
    def test_nos_config_src_and_parents_fails(self):
        args = dict(src='foo', parents='foo')
        set_module_args(args)
        self.execute_module(failed=True)
    def test_nos_config_match_exact_requires_lines(self):
        args = dict(match='exact')
        set_module_args(args)
        self.execute_module(failed=True)
    def test_nos_config_match_strict_requires_lines(self):
        args = dict(match='strict')
        set_module_args(args)
        self.execute_module(failed=True)
    def test_nos_config_replace_block_requires_lines(self):
        args = dict(replace='block')
        set_module_args(args)
        self.execute_module(failed=True)
    def test_nos_config_replace_config_requires_src(self):
        args = dict(replace='config')
        set_module_args(args)
        self.execute_module(failed=True)
| gpl-3.0 |
remotesyssupport/cobbler-1 | cobbler/modules/serializer_catalog.py | 1 | 8126 | """
Serializer code for cobbler.
As of 8/2009, this is the "best" serializer option.
It uses multiple files in /var/lib/cobbler/config/distros.d, profiles.d, etc
And JSON, when possible, and YAML, when not.
It is particularly fast, especially when using JSON. YAML, not so much.
It also knows how to upgrade the old "single file" configs to .d versions.
Copyright 2006-2009, Red Hat, Inc
Michael DeHaan <mdehaan@redhat.com>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
import distutils.sysconfig
import os
import sys
import glob
import traceback
import yaml # PyYAML
import simplejson
import exceptions
plib = distutils.sysconfig.get_python_lib()
mod_path="%s/cobbler" % plib
sys.path.insert(0, mod_path)
from utils import _
import utils
from cexceptions import *
import os
def can_use_json():
    """Return True when the running interpreter can safely use JSON.

    simplejson releases before 2.0 had unicode problems on Python 2.3,
    so JSON output is only enabled on Python 2.4 and newer.
    """
    # sys.version_info compares correctly for every release, unlike the
    # old float(sys.version[:3]) trick, which misparses e.g. "3.10".
    return sys.version_info[:2] > (2, 3)
def register():
    """Mandatory cobbler module registration hook.

    Identifies this module to cobbler's plugin loader as a serializer.
    """
    return "serializer"
def what():
    """Module identification function.

    Returns the serializer-family identifier for this implementation.
    """
    return "serializer/catalog"
def serialize_item(obj, item):
    """Write a single *item* of collection *obj* to its own config file.

    Uses JSON (preferred, faster) when the interpreter supports it and
    YAML otherwise, converting any stale file of the other format in
    place.  Raises RuntimeError when the item has no name.
    """
    if item.name is None or item.name == "":
        raise exceptions.RuntimeError("name unset for object!")
    # FIXME: Need a better way to support collections/items
    # appending an 's' does not work in all cases
    if obj.collection_type() in [ 'mgmtclass' ]:
        filename = "/var/lib/cobbler/config/%ses.d/%s" % (obj.collection_type(),item.name)
    else:
        filename = "/var/lib/cobbler/config/%ss.d/%s" % (obj.collection_type(),item.name)
    # NOTE(review): datastruct is recomputed inside each branch below,
    # making this first call redundant — confirm before cleaning up.
    datastruct = item.to_datastruct()
    jsonable = can_use_json()
    if jsonable:
        # avoid using JSON on python 2.3 where we can encounter
        # unicode problems with simplejson pre 2.0
        if os.path.exists(filename):
            # a YAML twin exists from an older run; replace it with JSON
            print "upgrading yaml file to json: %s" % filename
            os.remove(filename)
        filename = filename + ".json"
        datastruct = item.to_datastruct()
        fd = open(filename,"w+")
        data = simplejson.dumps(datastruct, encoding="utf-8")
        #data = data.encode('utf-8')
        fd.write(data)
    else:
        if os.path.exists(filename + ".json"):
            # JSON twin from a newer interpreter; fall back to YAML
            print "downgrading json file back to yaml: %s" % filename
            os.remove(filename + ".json")
        datastruct = item.to_datastruct()
        fd = open(filename,"w+")
        data = yaml.dump(datastruct)
        fd.write(data)
    fd.close()
    return True
def serialize_delete(obj, item):
    """Remove the on-disk file(s) for *item*, in both YAML and JSON form."""
    # FIXME: Need a better way to support collections/items
    # appending an 's' does not work in all cases
    ctype = obj.collection_type()
    if ctype in ( 'mgmtclass', ):
        base = "/var/lib/cobbler/config/%ses.d/%s" % (ctype, item.name)
    else:
        base = "/var/lib/cobbler/config/%ss.d/%s" % (ctype, item.name)
    # delete whichever variants of the file exist (YAML and/or JSON)
    for path in (base, base + ".json"):
        if os.path.exists(path):
            os.remove(path)
    return True
def deserialize_item_raw(collection_type, item_name):
    """Load a single item's raw datastruct from disk, or None if absent.

    Tries the legacy YAML file first, then the newer ``.json`` variant.
    (Still not implemented performantly in this module.)

    :param collection_type: collection name string, e.g. "distro"
    :param item_name: name of the item within the collection
    """
    # FIXME: Need a better way to support collections/items
    # appending an 's' does not work in all cases
    # BUG FIX: the original tested `item_name` instead of the collection
    # type and *called* the collection_type string (TypeError for
    # mgmtclass lookups).
    if collection_type in [ 'mgmtclass' ]:
        filename = "/var/lib/cobbler/config/%ses.d/%s" % (collection_type, item_name)
    else:
        filename = "/var/lib/cobbler/config/%ss.d/%s" % (collection_type, item_name)
    filename2 = filename + ".json"
    if os.path.exists(filename):
        # close the handle even if parsing fails (the original leaked it)
        fd = open(filename)
        try:
            data = fd.read()
        finally:
            fd.close()
        return yaml.safe_load(data)
    elif os.path.exists(filename2):
        fd = open(filename2)
        try:
            data = fd.read()
        finally:
            fd.close()
        return simplejson.loads(data, encoding="utf-8")
    else:
        return None
def serialize(obj):
    """
    Save an object to disk. Object must "implement" Serializable.
    FIXME: Return False on access/permission errors.
    This should NOT be used by API if serialize_item is available.
    """
    # settings are read-only from cobbler's point of view; never write back
    if obj.collection_type() == "settings":
        return True
    for entry in obj:
        serialize_item(obj, entry)
    return True
def deserialize_raw(collection_type):
    """Load the raw datastruct(s) for *collection_type* from disk.

    Returns a single datastruct for "settings", the legacy single-file
    contents during migration, or a list of per-item datastructs from
    the <type>s.d directory (JSON files take priority over YAML twins).
    """
    # FIXME: Need a better way to support collections/items
    # appending an 's' does not work in all cases
    if collection_type in [ 'mgmtclass' ]:
        old_filename = "/var/lib/cobbler/%ses" % collection_type
    else:
        old_filename = "/var/lib/cobbler/%ss" % collection_type
    if collection_type == "settings":
        fd = open("/etc/cobbler/settings")
        datastruct = yaml.safe_load(fd.read())
        fd.close()
        return datastruct
    elif os.path.exists(old_filename):
        # for use in migration from serializer_yaml to serializer_catalog (yaml/json)
        fd = open(old_filename)
        datastruct = yaml.safe_load(fd.read())
        fd.close()
        return datastruct
    else:
        results = []
        # FIXME: Need a better way to support collections/items
        # appending an 's' does not work in all cases
        if collection_type in [ 'mgmtclass' ]:
            all_files = glob.glob("/var/lib/cobbler/config/%ses.d/*" % collection_type)
        else:
            all_files = glob.glob("/var/lib/cobbler/config/%ss.d/*" % collection_type)
        # prefer .json over a YAML file of the same base name
        all_files = filter_upgrade_duplicates(all_files)
        for f in all_files:
            fd = open(f)
            ydata = fd.read()
            # ydata = ydata.decode()
            if f.endswith(".json"):
                datastruct = simplejson.loads(ydata, encoding='utf-8')
            else:
                datastruct = yaml.safe_load(ydata)
            results.append(datastruct)
            fd.close()
        return results
def filter_upgrade_duplicates(file_list):
    """
    In a set of files, some ending with .json, some not, return
    the list of files with the .json ones taking priority over
    the ones that are not.
    """
    winners = {}
    for path in file_list:
        base = path.replace(".json", "")
        current = winners.get(base, "")
        # A .json candidate always claims the slot; a non-.json one only
        # replaces an earlier non-.json entry, never a .json entry.
        if path.endswith(".json") or not current.endswith(".json"):
            winners[base] = path
    return winners.values()
def deserialize(obj,topological=True):
    """
    Populate an existing object with the contents of datastruct.
    Object must "implement" Serializable.

    When *topological* is set, list results are sorted by their "depth"
    key so parent items are restored before their children.
    """
    # FIXME: Need a better way to support collections/items
    # appending an 's' does not work in all cases
    if obj.collection_type() in [ 'mgmtclass' ]:
        old_filename = "/var/lib/cobbler/%ses" % obj.collection_type()
    else:
        old_filename = "/var/lib/cobbler/%ss" % obj.collection_type()
    datastruct = deserialize_raw(obj.collection_type())
    if topological and type(datastruct) == list:
        # NOTE: list.sort(cmp_function) is Python 2 only; Python 3 would
        # need key=lambda i: i.get("depth", 1) here.
        datastruct.sort(__depth_cmp)
    obj.from_datastruct(datastruct)
    if os.path.exists(old_filename):
        # we loaded it in from the old filename, so now migrate to new fmt
        sys.stderr.write("auto-removing old config format: %s\n" % old_filename)
        serialize(obj)
        os.remove(old_filename)
    return True
def __depth_cmp(item1, item2):
    """Comparator ordering datastructs by their "depth" key (default 1).

    Returns -1/0/1 like the Python 2 ``cmp`` builtin, computed portably
    so the function keeps working on Python 3, where ``cmp`` was removed.
    """
    d1 = item1.get("depth", 1)
    d2 = item2.get("depth", 1)
    # (a > b) - (a < b) is the documented cmp() replacement
    return (d1 > d2) - (d1 < d2)
if __name__ == "__main__":
print deserialize_item_raw("distro","D1")
| gpl-2.0 |
tompecina/legal | legal/common/fields.py | 1 | 4257 | # -*- coding: utf-8 -*-
#
# common/fields.py
#
# Copyright (C) 2011-19 Tomáš Pecina <tomas@pecina.cz>
#
# This file is part of legal.pecina.cz, a web-based toolbox for lawyers.
#
# This application is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This application is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from decimal import Decimal
from datetime import datetime, date
from django import forms
from django.core.validators import EMPTY_VALUES
from django.core.exceptions import ValidationError
from legal.common.utils import LocalFloat
from legal.common import widgets
def proc_num(string):
    """Normalise a Czech-formatted numeric string for float()/Decimal().

    Drops spaces and '.' (thousands separators), turns the ',' decimal
    comma into '.', and maps the Unicode minus (U+2212) to an ASCII '-'.
    """
    cleaned = string.replace(' ', '')
    cleaned = cleaned.replace('.', '')
    cleaned = cleaned.replace(',', '.')
    return cleaned.replace('−', '-')
STYPES = (str,)
class BooleanField(forms.BooleanField):
    """BooleanField rendered with the project's CheckboxWidget."""
    widget = widgets.CheckboxWidget
class InlineBooleanField(BooleanField):
    """BooleanField whose label renders without a suffix (for inline use)."""
    def __init__(self, *args, **kwargs):
        # Suppress the default label suffix unless the caller provides one.
        kwargs.setdefault('label_suffix', '')
        super().__init__(*args, **kwargs)
class CharField(forms.CharField):
    """Unmodified subclass of Django's CharField."""
    pass
class ChoiceField(forms.ChoiceField):
    """ChoiceField rendered with the project's SelectWidget."""
    widget = widgets.SelectWidget
class EmailField(forms.EmailField):
    """Unmodified subclass of Django's EmailField."""
    pass
class DateField(forms.DateField):
    """DateField that accepts datetime/date values and space-padded input."""
    def to_python(self, value):
        # Empty input normalises to None.
        if value in EMPTY_VALUES:
            return None
        # datetime must be checked before date: datetime is a date subclass.
        if isinstance(value, datetime):
            return value.date()
        if isinstance(value, date):
            return value
        # Strip spaces so inputs like "1. 1. 2019" parse.
        return super().to_python(value.replace(' ', ''))
class AmountField(forms.FloatField):
    """Form field for amounts rounded to ``rounding`` decimal places.

    Accepts Czech-formatted strings (spaces, decimal comma, Unicode
    minus) via proc_num() and renders numbers back with the configured
    precision.
    """
    rounding = 0  # number of decimal places; subclasses may override
    def prepare_value(self, value):
        """Render *value* as a string with ``rounding`` decimal places."""
        if value in EMPTY_VALUES:
            return None
        if not isinstance(value, STYPES):
            value = '{:.{prec}f}'.format(
                LocalFloat(value),
                prec=self.rounding)
        return value
    def to_python(self, value):
        """Convert user input to a rounded float, or None when empty."""
        if value in EMPTY_VALUES:
            return None
        if isinstance(value, STYPES):
            value = proc_num(value)
        try:
            return round(float(value), self.rounding)
        except (TypeError, ValueError, OverflowError):
            # Narrowed from a bare `except:` so programming errors and
            # KeyboardInterrupt are no longer swallowed.
            raise ValidationError('Conversion error')
class DecimalField(forms.DecimalField):
    """DecimalField accepting Czech-formatted numeric strings."""
    def to_python(self, value):
        """Convert user input to Decimal, or None when empty."""
        if value in EMPTY_VALUES:
            return None
        if isinstance(value, STYPES):
            value = proc_num(value)
        try:
            return Decimal(value)
        except (TypeError, ValueError, ArithmeticError):
            # ArithmeticError covers decimal.InvalidOperation; narrowed
            # from a bare `except:` so unrelated errors propagate.
            raise ValidationError('Conversion error')
class FloatField(forms.FloatField):
    """FloatField accepting Czech-formatted numeric strings."""
    def to_python(self, value):
        """Convert user input to float, or None when empty."""
        if value in EMPTY_VALUES:
            return None
        if isinstance(value, STYPES):
            value = proc_num(value)
        try:
            return float(value)
        except (TypeError, ValueError):
            # Narrowed from a bare `except:`; only conversion failures
            # should become validation errors.
            raise ValidationError('Conversion error')
class IntegerField(forms.IntegerField):
    """IntegerField accepting Czech-formatted numeric strings."""
    def to_python(self, value):
        """Convert input to int (truncating any decimal part), or None."""
        if value in EMPTY_VALUES:
            return None
        if isinstance(value, STYPES):
            value = proc_num(value)
        try:
            # float() first so decimal input like '1,5' truncates to 1.
            return int(float(value))
        except (TypeError, ValueError, OverflowError):
            # Narrowed from a bare `except:`; only conversion failures
            # (including int(inf) overflow) become validation errors.
            raise ValidationError('Conversion error')
class CurrencyField(forms.MultiValueField):
    """Currency selector: a preset choice plus a free-form 3-letter code.

    The first subfield holds the selected currency (or 'OTH'); the
    second holds the manually entered code used when 'OTH' is chosen.
    """
    def __init__(self, czk=True, *args, **kwargs):
        # Requiredness is enforced manually in validate() instead of by
        # the individual subfields.
        kwargs['required'] = False
        super().__init__(
            widget=widgets.SelectCurrencyWidget(czk=czk),
            fields=(
                CharField(),
                CharField(min_length=3, max_length=3)),
            *args,
            **kwargs)
    def compress(self, data_list):
        # 'OTH' means "other": take the free-form code, upper-cased.
        if data_list:
            return data_list[1].upper() if data_list[0] == 'OTH' else data_list[0]
        return None
    def validate(self, value):
        if not value:
            # NOTE(review): appending ' err' here accumulates on repeated
            # failed validations of the same bound widget — confirm intended.
            self.widget.widgets[1].attrs['class'] += ' err'
            raise ValidationError('Currency is required')
| gpl-3.0 |
erik-sn/xlwrap | xlwrap.py | 1 | 14765 | import os
import ntpath
import xlrd
import openpyxl
from openpyxl.utils import coordinate_from_string, column_index_from_string
from openpyxl.utils.exceptions import CellCoordinatesException
class ExcelManager:
"""
Wrapper that opens and operates on .xls, .xlsx or .xlsm excel files. By
default we take in a string representing the excel file path (extension
included), and depending on the file type use xlrd or openpyxl to operate
on it.
The dev facing api is identical for either - internally we use xlrd or
openpyxl methods depending on the file type.
For rows & columns we use 1 based indexing to stay with the more modern
openpyxl (and most users are more familiar with it if they are coming from
an office environment, not a programming one). Be aware of which file type
you are using if you retrieve the sheet object - it could be using zero OR
one based indexing.
Public Variables:
file_path: full file path with extension of the file we are operating on
workbook: openpyxl/xlrd workbook object for this file
sheet: currently in use openpxl/xlrd sheet object for this work book
read_count: number of sheet reads this object has done
write_count: number of sheet writes this object has done
Public Methods:
select_sheet - choose which sheet to use (by index or name)
cell - retrieve an openpyxl/xlrd cell object by row/column or index
read - retrieve the value from the current sheet at row/column or index
write - write a value to the current sheet at row/column or index
save - save the workbook at the initial file path, or a new file path
if one is specified
info - return basic information/status of the object
to_array - return a 2D numpy array representation of the current sheet
find_index - return the first index of the match or None if it does not exist
find_indexes - return a list of tuples containing the indexes of all matches
"""
write_count = 0
read_count = 0
sheet_array = None
def __init__(self, file_path, sheet_name=None, sheet_index=None):
self.file_path = file_path
self.__check_file_extension(file_path)
self.__check_file_exists(file_path)
if file_path.endswith('.xls'):
self.__is_xls = True
self.__init_xls(sheet_name, sheet_index)
else:
self.__is_xls = False
self.__init_excel(sheet_name, sheet_index)
def change_sheet(self, *args):
"""
Change the current active sheet object
:param name: sheet name
:param index: sheet index (1 index)
:return: None
"""
if isinstance(args[0], str):
name = args[0]
index = None
elif isinstance(args[0], int):
name = None
index = args[0]
else:
raise ValueError('Specify either the sheet name or sheet index to change sheets')
if self.__is_xls:
self.__select_xls_sheet(name, index - 1 if index else None)
else:
self.__select_excel_sheet(name, index - 1 if index else None)
def row(self, row_index):
"""
Return the row at the specified index
:row_index row_index: 1 based index
:return: list of values
"""
self.sheet_array = self.array()
return self.sheet_array[row_index - 1]
def column(self, column_index):
"""
return the column at the specified index
:param column_index: string or (1 based) int index
:return: list of values
"""
if isinstance(column_index, int):
column = column_index - 1
else:
column = column_index_from_string(column_index.upper()) - 1
self.sheet_array = self.array()
return [row[column] for row in self.sheet_array]
def cell(self, *args):
"""
Return the cell at the specified location
:param args: tuple with either a 1 based representation for row/column
or string based index
:return: xlrd/openpyxl cell object
"""
row, column = self.__parse_row_column_from_args(*args)
if self.__is_xls:
return self.__get_xls_cell(row - 1, column - 1) # xlrd is a 1 based index
else:
return self.__get_excel_cell(row, column)
def read(self, *args):
"""
Read the value from the target cell
:param args: tuple with either a 1 based representation for row/column
or string based index
:return: string
"""
self.read_count += 1
value = self.cell(*args).value
return value if value else ''
def write(self, *args, value=None):
"""
Input the value at the specified target
:param args: tuple with either a 1 based representation for row/column
or string based index
:param value:
:return:
"""
if self.__is_xls:
raise TypeError('Writing to a cell is not supported for .xls files')
self.cell(*args).value = value
self.write_count += 1
def save(self, *args):
"""
Save the current sheet either at the original file_path (if none
specified) or at the file_path parameter
:param file_path: new file path to save file
:return: None
"""
if len(args) == 1:
self.__check_file_extension(args[0])
file_path = args[0]
else:
file_path = self.file_path
if self.__is_xls:
raise TypeError('Saving is not supported for .xls files')
else:
self.workbook.save(file_path)
def info(self, string=False):
"""
return basic information about this ExcelWrapper instance
:return: string
"""
sheet_name = self.sheet.name if self.__is_xls else self.sheet.title
if string:
return 'File: {}\nSheet: {}\nReads: {}\nWrites: {}' \
.format(self.file_path, sheet_name, self.read_count, self.write_count)
else:
return {
'file': self.file_path,
'sheet': sheet_name,
'reads': self.read_count,
'writes': self.write_count
}
def array(self):
"""
Return a 2D list representing the spreadsheet
:return: list(list())
"""
if self.__is_xls:
self.sheet_array = self.__xls_to_array()
return self.sheet_array
else:
self.sheet_array = self.__excel_to_array()
return self.sheet_array
def search(self, value, match=1, case_insensitive=True, contains=False, many=False):
"""
Given a value find the 1 based index where that value is located
on the sheet or None if it does not exist. If 'many' is set true then
an empty list is returned if no matches are found
:param value: the value we are searching for
:param match: if multiple results are found we return only one - this
parameter determines which index of the list we return with a 1 based index
:param case_insensitive: whether or not the search should be case insensitive
:param contains: whether or not the search should use 'in' or equality to
check if the spreadsheet value is a match
:param many: whether or not to return a singular value or a list of values
:return:
"""
indexes = self.__find_indexes(value, case_insensitive=case_insensitive, contains=contains)
if many:
return indexes
try:
match = indexes[match - 1]
return match[0], match[1]
except IndexError:
return None, None
def __find_indexes(self, value, case_insensitive, contains):
"""
Iterate over the 2D list representation of the sheet and determine
if the input value exists based on search parameters
:param value: value we are looking for
:param case_insensitive: whether or not search is case_insensitive
:param contains: use 'in' to find matches
:return:
"""
self.sheet_array = self.array()
indexes = []
for i, row in enumerate(self.sheet_array):
for j, column in enumerate(row):
input_val, column_val = self.__check_case_sensitive(case_insensitive, value, column)
if contains and input_val in column_val:
indexes.append((i + 1, j + 1))
elif input_val == column_val:
indexes.append((i + 1, j + 1))
return indexes
@staticmethod
def __check_case_sensitive(case_insensitive, value, column):
column_val = column.lower() if case_insensitive else column
input_val = value.lower() if case_insensitive else value
return input_val, column_val
def __parse_row_column_from_args(self, *args):
"""
convert a generic arguments tuple into a 1-based ow/column index. This
is to support both numeric (1, 1) and string (A1) representation of
cells with the same API.
:param args: args tuple
:return: int, int tuple
"""
if len(args) == 1 and isinstance(args[0], str):
row, column = self.__parse_row_column_from_index(args[0])
elif len(args) == 2 and isinstance(args[0], int) and isinstance(args[1], int):
row = args[0]
column = args[1]
else:
raise ValueError('Specify either row and column numbers (1, 1) OR a cell index ("A1")')
return row, column
@staticmethod
def __parse_row_column_from_index(cell_index):
"""
Given a string based excel index return the int based row, column
representation
:param cell_index: string based excel input
:return: row, column ints
"""
try:
xy = coordinate_from_string(cell_index.upper())
row = xy[1]
column = column_index_from_string(xy[0])
return row, column
except CellCoordinatesException:
raise ValueError('The index must be a valid Excel index (A1, E17, etc.)')
def __init_xls(self, sheet_name, sheet_index):
"""
initialize a .xls file with xlrd
"""
self.workbook = xlrd.open_workbook(self.file_path)
self.__select_xls_sheet(sheet_name, sheet_index)
def __select_xls_sheet(self, sheet_name, sheet_index):
"""
change the currently active xlrd sheet
"""
if sheet_name:
self.sheet = self.workbook.sheet_by_name(sheet_name)
elif sheet_index:
self.sheet = self.workbook.sheet_by_index(sheet_index)
else:
self.sheet = self.workbook.sheet_by_index(0)
def __get_xls_cell(self, row, column):
"""
retrieve the xlrd cell object at the specified row/column
:param row: 1-based row index
:param column: 1-based column index
:return: cell object
"""
return self.sheet.cell(row, column)
def __xls_to_array(self):
"""
convert an xlrd sheet to a 2D list of values.
:return:
"""
sheet_array = []
for row in range(1, self.__get_max_rows() + 1):
row_array = []
for column in range(1, self.__get_max_columns() + 1):
value = self.read(row, column)
row_array.append(value)
sheet_array.append(row_array)
return sheet_array
def __init_excel(self, sheet_name, sheet_index):
"""
initialize a .xlsx file with openpyxl
"""
self.workbook = openpyxl.load_workbook(self.file_path)
self.__select_excel_sheet(sheet_name, sheet_index)
def __select_excel_sheet(self, sheet_name, sheet_index):
"""
change the currently active openpyxl sheet
"""
if sheet_name:
self.sheet = self.workbook[sheet_name]
elif sheet_index:
sheet_names = self.workbook.sheetnames
self.sheet = self.workbook[sheet_names[sheet_index]]
else:
sheet_names = self.workbook.sheetnames
self.sheet = self.workbook[(sheet_names[0])]
def __get_excel_cell(self, row, column):
"""
retrieve the openpyxl cell object at the specified row/column
:param row: 1-based row index
:param column: 1-based column index
:return: cell object
"""
return self.sheet.cell(row=row, column=column)
def __excel_to_array(self):
"""
convert an openpyxl sheet to a 2D list of values.
:return:
"""
sheet_array = []
for row in range(1, self.sheet.max_row + 1):
row_array = []
for column in range(1, self.sheet.max_column + 1):
value = self.read(row, column)
row_array.append(value)
sheet_array.append(row_array)
return sheet_array
def __get_max_rows(self):
"""
return the number of rows in the current xlrd sheet object
:return: int
"""
if self.__is_xls:
return self.sheet.nrows
return self.sheet.max_rows
def __get_max_columns(self):
"""
return the number of columns in the current xlrd sheet object
:return: int
"""
if self.__is_xls:
return self.sheet.ncols
return self.sheet.max_column
@staticmethod
def __check_file_extension(file_path):
extensions = ['.xls', '.xlsx', '.xlsm']
if not any(file_path.endswith(extension) for extension in extensions):
raise ValueError("""
No extension found on file path - make sure you include the FULL file path with the extension. Valid
extensions include: {}
""".format(', '.join(extensions)))
@staticmethod
def __check_file_exists(file_path):
"""
Check to see if the input file exists - if not, raise an error
that lists other excel files in the same directory
:param file_path: full file path to excel file
:return:
"""
if not os.path.exists(file_path):
file_name = ntpath.basename(file_path)
file_directory = file_path.replace(file_name, '')
valid_files = [file for file in os.listdir(file_directory) if 'xls' in file]
base_error = 'The file {} was not found. Maybe you were looking for one of these?\n\n'.format(file_name)
raise FileNotFoundError(base_error + '\n'.join(valid_files[:10]))
| mit |
AASHE/python-membersuite-api-client | membersuite_api_client/subscriptions/services.py | 1 | 2115 | """
The service for connecting to MemberSuite for SubscriptionService
http://api.docs.membersuite.com/#References/Objects/Subscription.htm
@todo
set up fixtures in MemberSuite sandbox for integration testing
@todo
add date modified param for performance
"""
from ..mixins import ChunkQueryMixin
from ..utils import get_new_client
from .models import Subscription
import datetime
class SubscriptionService(ChunkQueryMixin, object):
    """Service for querying Subscription objects from MemberSuite."""

    def __init__(self, client=None):
        """
        Accepts a ConciergeClient to connect with MemberSuite
        """
        super(SubscriptionService, self).__init__()
        self.client = client or get_new_client()

    def get_subscriptions(self, publication_id=None, owner_id=None,
                          since_when=None, limit_to=200, max_calls=None,
                          start_record=0, verbose=False):
        """
        Fetches all subscriptions from Membersuite of a particular
        `publication_id` if set.
        """
        # Accumulate (field, operator, value) clauses for the WHERE section.
        clauses = []
        if owner_id:
            clauses.append(('owner', '=', "'%s'" % owner_id))
        if publication_id:
            clauses.append(('publication', '=', "'%s'" % publication_id))
        if since_when:
            cutoff = datetime.date.today() - datetime.timedelta(days=since_when)
            clauses.append(
                ('LastModifiedDate', ">", "'%s 00:00:00'" % cutoff))

        query = "SELECT Objects() FROM Subscription"
        if clauses:
            query = "%s WHERE %s" % (
                query,
                " AND ".join("%s %s %s" % clause for clause in clauses))

        return self.get_long_query(
            query, limit_to=limit_to, max_calls=max_calls,
            start_record=start_record, verbose=verbose)

    def ms_object_to_model(self, ms_obj):
        """Wrap one raw MemberSuite result in a Subscription model."""
        return Subscription(membersuite_object_data=ms_obj)
| mit |
spacemansteve/Japan-Digital-Archive | web/js/lib/OpenLayers-2.12/tools/toposort.py | 305 | 1086 | """
toposort.py
Sorts dictionary keys based on lists of dependencies.
"""
class MissingDependency(Exception):
    """Raised when a listed dependency key is absent from the dictionary."""
class Sorter(object):
    """Depth-first topological sorter over a dependency dictionary."""

    def __init__(self, dependencies):
        """
        :param dependencies: dict mapping each key to the list of keys it
            depends on
        """
        self.dependencies = dependencies
        self.visited = set()
        self.sorted = ()

    def sort(self):
        """Visit every key and return the dependency-ordered tuple."""
        for key in self.dependencies:
            self._visit(key)
        return self.sorted

    def _visit(self, key):
        # Emit a key only after all of its dependencies have been emitted.
        if key not in self.visited:
            self.visited.add(key)
            # `dict.has_key()` was removed in Python 3; the `in` operator
            # is equivalent and works on both Python 2 and 3.
            if key not in self.dependencies:
                raise MissingDependency(key)
            for depends in self.dependencies[key]:
                self._visit(depends)
            self.sorted += (key,)
def toposort(dependencies):
    """Returns a tuple of the dependencies dictionary keys sorted by entries
    in the dependency lists. Given circular dependencies, sort will impose
    an order. Raises MissingDependency if a key is not found.
    """
    sorter = Sorter(dependencies)
    return sorter.sort()
| mit |
mavit/ansible | test/units/module_utils/network/aci/test_aci.py | 7 | 13750 | # -*- coding: utf-8 -*-
# Copyright 2017 Dag Wieers <dag@wieers.com>
# This file is part of Ansible by Red Hat
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
from ansible.compat.tests import unittest
from ansible.module_utils.network.aci.aci import ACIModule
from ansible.module_utils.six import PY2, PY3
from ansible.module_utils._text import to_native
from nose.plugins.skip import SkipTest
class AltModule():
    # Minimal stand-in for an AnsibleModule: only the `params` mapping that
    # ACIModule reads during construction.
    params = {
        'hostname': 'dummy',
        'port': 123,
        'protocol': 'https',
        'state': 'present',
    }
class AltACIModule(ACIModule):
    """ACIModule variant whose __init__ skips real Ansible module setup."""
    def __init__(self):
        # Deliberately bypass the parent constructor: just wire the fake
        # module and a fresh result dict onto the instance.
        self.result = {'changed': False}
        self.module = AltModule
        self.params = self.module.params
# Shared fixture: a single ACIModule instance reused by every test below.
aci = AltACIModule()
try:
    from lxml import etree
    if sys.version_info >= (2, 7):
        from xmljson import cobra  # xmljson requires Python 2.7+
except ImportError:
    raise SkipTest("ACI Ansible modules require the lxml and xmljson Python libraries")
class AciRest(unittest.TestCase):
    """Unit tests for ACIModule.response_json() and response_xml() parsing."""

    def test_invalid_aci_login(self):
        # A rejected login must surface the APIC error payload via aci.error.
        self.maxDiff = None
        error = dict(
            code='401',
            text='Username or password is incorrect - FAILED local authentication',
        )
        imdata = [{
            'error': {
                'attributes': {
                    'code': '401',
                    'text': 'Username or password is incorrect - FAILED local authentication',
                },
            },
        }]
        totalCount = 1
        json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)
        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
        <error code="401" text="Username or password is incorrect - FAILED local authentication"/>
        </imdata>
        '''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_valid_aci_login(self):
        # A successful login must expose the fully parsed aaaLogin payload.
        self.maxDiff = None
        imdata = [{
            'aaaLogin': {
                'attributes': {
                    'token': 'ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem',  # NOQA
                    'siteFingerprint': 'NdxD72K/uXaUK0wn',
                    'refreshTimeoutSeconds': '600',
                    'maximumLifetimeSeconds': '86400',
                    'guiIdleTimeoutSeconds': '1200',
                    'restTimeoutSeconds': '90',
                    'creationTime': '1500134817',
                    'firstLoginTime': '1500134817',
                    'userName': 'admin',
                    'remoteUser': 'false',
                    'unixUserId': '15374',
                    'sessionId': 'o7hObsqNTfCmDGcZI5c4ng==',
                    'lastName': '',
                    'firstName': '',
                    'version': '2.0(2f)',
                    'buildTime': 'Sat Aug 20 23:07:07 PDT 2016',
                    'node': 'topology/pod-1/node-1',
                },
                'children': [{
                    'aaaUserDomain': {
                        'attributes': {
                            'name': 'all',
                            'rolesR': 'admin',
                            'rolesW': 'admin',
                        },
                        'children': [{
                            'aaaReadRoles': {
                                'attributes': {},
                            },
                        }, {
                            'aaaWriteRoles': {
                                'attributes': {},
                                'children': [{
                                    'role': {
                                        'attributes': {
                                            'name': 'admin',
                                        },
                                    },
                                }],
                            },
                        }],
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-common',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-infra',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }, {
                    'DnDomainMapEntry': {
                        'attributes': {
                            'dn': 'uni/tn-mgmt',
                            'readPrivileges': 'admin',
                            'writePrivileges': 'admin',
                        },
                    },
                }],
            },
        }]
        totalCount = 1
        json_response = '{"totalCount":"1","imdata":[{"aaaLogin":{"attributes":{"token":"ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem","siteFingerprint":"NdxD72K/uXaUK0wn","refreshTimeoutSeconds":"600","maximumLifetimeSeconds":"86400","guiIdleTimeoutSeconds":"1200","restTimeoutSeconds":"90","creationTime":"1500134817","firstLoginTime":"1500134817","userName":"admin","remoteUser":"false","unixUserId":"15374","sessionId":"o7hObsqNTfCmDGcZI5c4ng==","lastName":"","firstName":"","version":"2.0(2f)","buildTime":"Sat Aug 20 23:07:07 PDT 2016","node":"topology/pod-1/node-1"},"children":[{"aaaUserDomain":{"attributes":{"name":"all","rolesR":"admin","rolesW":"admin"},"children":[{"aaaReadRoles":{"attributes":{}}},{"aaaWriteRoles":{"attributes":{},"children":[{"role":{"attributes":{"name":"admin"}}}]}}]}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-common","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-infra","readPrivileges":"admin","writePrivileges":"admin"}}},{"DnDomainMapEntry":{"attributes":{"dn":"uni/tn-mgmt","readPrivileges":"admin","writePrivileges":"admin"}}}]}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)
        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        xml_response = '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">\n<aaaLogin token="ZldYAsoO9d0FfAQM8xaEVWvQPSOYwpnqzhwpIC1r4MaToknJjlIuAt9+TvXqrZ8lWYIGPj6VnZkWiS8nJfaiaX/AyrdD35jsSxiP3zydh+849xym7ALCw/fFNsc7b5ik1HaMuSUtdrN8fmCEUy7Pq/QNpGEqkE8m7HaxAuHpmvXgtdW1bA+KKJu2zY1c/tem" siteFingerprint="NdxD72K/uXaUK0wn" refreshTimeoutSeconds="600" maximumLifetimeSeconds="86400" guiIdleTimeoutSeconds="1200" restTimeoutSeconds="90" creationTime="1500134817" firstLoginTime="1500134817" userName="admin" remoteUser="false" unixUserId="15374" sessionId="o7hObsqNTfCmDGcZI5c4ng==" lastName="" firstName="" version="2.0(2f)" buildTime="Sat Aug 20 23:07:07 PDT 2016" node="topology/pod-1/node-1">\n<aaaUserDomain name="all" rolesR="admin" rolesW="admin">\n<aaaReadRoles/>\n<aaaWriteRoles>\n<role name="admin"/>\n</aaaWriteRoles>\n</aaaUserDomain>\n<DnDomainMapEntry dn="uni/tn-common" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-infra" readPrivileges="admin" writePrivileges="admin"/>\n<DnDomainMapEntry dn="uni/tn-mgmt" readPrivileges="admin" writePrivileges="admin"/>\n</aaaLogin></imdata>\n'''  # NOQA
        aci.response_xml(xml_response)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_invalid_input(self):
        # NOTE: mirrors test_invalid_aci_login; kept for parity with upstream.
        self.maxDiff = None
        error = dict(
            code='401',
            text='Username or password is incorrect - FAILED local authentication',
        )
        imdata = [{
            'error': {
                'attributes': {
                    'code': '401',
                    'text': 'Username or password is incorrect - FAILED local authentication',
                },
            },
        }]
        totalCount = 1
        json_response = '{"totalCount":"1","imdata":[{"error":{"attributes":{"code":"401","text":"Username or password is incorrect - FAILED local authentication"}}}]}'  # NOQA
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)
        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        xml_response = '''<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1">
        <error code="401" text="Username or password is incorrect - FAILED local authentication"/>
        </imdata>
        '''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.imdata, imdata)
        self.assertEqual(aci.totalCount, totalCount)

    def test_empty_response(self):
        # Empty payloads must yield a parse error and keep the raw text.
        # Fixed typo: the attribute was previously misspelled `maxDiffi`.
        self.maxDiff = None
        if PY2:
            error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
        else:
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 1 (char 0)"
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = ''
        json_response = ''
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)
        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        elif etree.LXML_VERSION < (3, 3, 0, 0):
            # Fixed: a trailing comma here used to make error_text a tuple.
            error_text = "Unable to parse output as XML, see 'raw' output. None"
        elif etree.LXML_VERSION < (4, 0, 0, 0):
            error_text = to_native(u"Unable to parse output as XML, see 'raw' output. None (line 0)", errors='surrogate_or_strict')
        elif PY2:
            error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (line 1)"
        else:
            error_text = "Unable to parse output as XML, see 'raw' output. Document is empty, line 1, column 1 (<string>, line 1)"
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = ''
        xml_response = ''
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)

    def test_invalid_response(self):
        # Truncated payloads must yield a parse error and keep the raw text.
        self.maxDiff = None
        if sys.version_info < (2, 7):
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting object: line 1 column 8 (char 8)"
        elif PY2:
            error_text = "Unable to parse output as JSON, see 'raw' output. No JSON object could be decoded"
        else:
            error_text = "Unable to parse output as JSON, see 'raw' output. Expecting value: line 1 column 9 (char 8)"
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = '{ "aaa":'
        json_response = '{ "aaa":'
        aci.response_json(json_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)
        # Python 2.7+ is needed for xmljson
        if sys.version_info < (2, 7):
            return
        elif etree.LXML_VERSION < (3, 3, 0, 0):
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 5"  # NOQA
        elif PY2:
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (line 1)"  # NOQA
        else:
            error_text = "Unable to parse output as XML, see 'raw' output. Couldn't find end of Start Tag aaa line 1, line 1, column 6 (<string>, line 1)"  # NOQA
        error = dict(
            code=-1,
            text=error_text,
        )
        raw = '<aaa '
        xml_response = '<aaa '
        aci.response_xml(xml_response)
        self.assertEqual(aci.error, error)
        self.assertEqual(aci.result['raw'], raw)
| gpl-3.0 |
fire-hawk-86/plugin.video.lastship | resources/lib/sources/de/streamking.py | 2 | 5226 | # -*- coding: UTF-8 -*-
"""
Lastship Add-on (C) 2019
Credits to Lastship, Placenta and Covenant; our thanks go to their creators
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Addon Name: Lastship
# Addon id: plugin.video.lastship
# Addon Provider: Lastship
import base64
import json
import re
import urllib
import urlparse
import requests
from resources.lib.modules import cleantitle
from resources.lib.modules import source_utils
from resources.lib.modules import dom_parser
from resources.lib.modules import source_faultlog
from resources.lib.modules.handler.requestHandler import cRequestHandler
class source:
    """Lastship scraper source for streamking.media (German-language streams)."""

    def __init__(self):
        # Scraper metadata consumed by the framework's source dispatcher.
        self.priority = 1
        self.language = ['de']
        self.domains = ['streamking.media']
        self.base_link = 'https://streamking.media'
        self.search_link = '/search?q=%s'

    def movie(self, imdb, title, localtitle, aliases, year):
        """Resolve a movie to a site-relative URL string, or None on failure."""
        try:
            titles = [localtitle] + source_utils.aliases_to_array(aliases)
            url = self.__search(titles, year)
            # Retry with the original title, then fall back to a DuckDuckGo
            # site search if the on-site search found nothing.
            if not url and title != localtitle: url = self.__search([title] + source_utils.aliases_to_array(aliases), year)
            if not url:
                from resources.lib.modules import duckduckgo
                url = duckduckgo.search(titles, year, self.domains[0], '(.*?)\sstream')
            return url
        except:
            # Broad except: any scrape failure is treated as "no result".
            return

    def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year):
        """Pack show identifiers into a urlencoded string for episode()."""
        try:
            url = {'imdb': imdb, 'tvdb': tvdb, 'tvshowtitle': tvshowtitle, 'localtvshowtitle': localtvshowtitle, 'aliases': aliases, 'year': year}
            url = urllib.urlencode(url)
            return url
        except:
            return

    def episode(self, url, imdb, tvdb, title, premiered, season, episode):
        # NOTE(review): this only validates that `url` is truthy and returns
        # None either way — episode resolution appears unimplemented; confirm.
        try:
            if not url:
                return
        except:
            return

    def sources(self, url, hostDict, hostprDict):
        """Scrape the detail page at `url` for hoster links.

        :return: list of source dicts (hoster, quality, language, url, ...)
        """
        sources = []
        try:
            if not url:
                return sources
            oRequest = cRequestHandler(urlparse.urljoin(self.base_link, url))
            oRequest.removeBreakLines(False)
            oRequest.removeNewLines(False)
            moviecontent = oRequest.request()
            results = re.findall(r'data-video-url=\"(.*?)\"', moviecontent)
            quality = re.findall(r'<span class="label label-primary">(.*?)</span>', moviecontent)
            # `quality` is a list of label strings here; membership test
            # matches only an exact "HD" label element.
            if "HD" in quality:
                quality = "720p"
            else:
                quality = "SD"
            for link in results:
                valid, hoster = source_utils.is_host_valid(link, hostDict)
                if not valid: continue
                sources.append(
                    {'source': hoster, 'quality': quality, 'language': 'de', 'url': link, 'direct': False,
                     'debridonly': False, 'checkquality': True})
            return sources
        except:
            source_faultlog.logFault(__name__, source_faultlog.tagScrape)
            return sources

    def resolve(self, url):
        # Links are already direct hoster URLs; nothing to resolve.
        return url

    def __search(self, titles, year, season='0'):
        # NOTE(review): `year` is accepted but never used in the matching
        # below — confirm whether year filtering was intended.
        try:
            query = self.search_link % (urllib.quote_plus(cleantitle.query(titles[0])))
            query = urlparse.urljoin(self.base_link, query)
            titles = [cleantitle.get(i) for i in set(titles) if i]
            oRequest = cRequestHandler(query)
            oRequest.addHeaderEntry('Referer', 'https://streamking.eu/')
            oRequest.addHeaderEntry('Upgrade-Insecure-Requests', '1')
            oRequest.removeBreakLines(False)
            oRequest.removeNewLines(False)
            searchResult = oRequest.request()
            results = dom_parser.parse_dom(searchResult, 'div', attrs={'id': 'section-opt'})
            results = re.findall(r'<a href=\"(.*?)\">(.*?)</a>', results[0].content)
            # NOTE(review): `usedIndex` is incremented but never read —
            # looks like dead code left from an earlier version.
            usedIndex = 0
            #Find result with matching name and season
            for x in range(0, len(results)):
                title = cleantitle.get(results[x][1])
                if any(i in title for i in titles):
                    if season == "0" or ("staffel" in title and ("0"+str(season) in title or str(season) in title)):
                        #We have the suspected link!
                        return source_utils.strip_domain(results[x][0]).decode('utf-8')
                usedIndex += 1
            return
        except:
            try:
                source_faultlog.logFault(__name__, source_faultlog.tagSearch, titles[0])
            except:
                return
            return
| gpl-3.0 |
dannykopping/mysql-utilities | mysql-test/suite/experimental/t/t3.py | 1 | 1489 | #
# Copyright (c) 2010, 2013, Oracle and/or its affiliates. All rights reserved.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
import mutlib
class test(mutlib.System_test):
    """Experimental test #3

    Demonstrates the return codes of each test-phase method.  Change any
    of the True returns below to False (or get_result's tuple to a failing
    one) to observe a failed execution.
    """

    def check_prerequisites(self):
        # Return False to simulate an unmet prerequisite.
        return True

    def setup(self):
        # Return False to simulate a setup failure.
        return True

    def run(self):
        # Return False to simulate a run failure.
        return True

    def get_result(self):
        # Return (False, "Test message\nAnother test message\n") to fail.
        return (True, None)

    def record(self):
        # Not a comparative test, returning True
        return True

    def cleanup(self):
        # Return False to simulate a cleanup failure.
        return True
| gpl-2.0 |
XiaosongWei/chromium-crosswalk | tools/telemetry/telemetry/internal/platform/network_controller_backend_unittest.py | 8 | 9982 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import tempfile
import unittest
from telemetry.internal.platform import network_controller_backend
from telemetry.internal import forwarders
from telemetry.util import wpr_modes
class FakePlatformBackend(object):
  """Platform backend stub: no CA cert, canned forwarder factory."""

  @property
  def wpr_ca_cert_path(self):
    # No certificate installed on the fake platform.
    return None

  @property
  def forwarder_factory(self):
    # A fresh factory on every access, mirroring the real backend's API.
    return FakeForwarderFactory()
class FakeForwarderFactory(object):
  """Forwarder factory stub with a fixed, recognisable host IP."""

  def __init__(self):
    self.host_ip = '123.321.123.321'
    self.port_pairs = None

  def Create(self, port_pairs):
    # Hand back a real Forwarder namedtuple so callers can inspect ports.
    return forwarders.Forwarder(port_pairs)
class FakeBrowserBackend(object):
  """Browser backend stub exposing only the WPR port-pair configuration."""

  def __init__(self, http_ports, https_ports, dns_ports):
    dns_pair = forwarders.PortPair(*dns_ports) if dns_ports else None
    self.wpr_port_pairs = forwarders.PortPairs(
        http=forwarders.PortPair(*http_ports),
        https=forwarders.PortPair(*https_ports),
        dns=dns_pair)
class FakeReplayServer(object):
  """In-memory stand-in for webpagereplay's ReplayServer."""

  def __init__(self, archive_path, host_ip, http_port, https_port, dns_port,
               replay_args):
    # Record everything the controller passed so tests can inspect it.
    self.archive_path = archive_path
    self.host_ip = host_ip
    self.http_port = http_port
    self.https_port = https_port
    self.dns_port = dns_port
    self.replay_args = replay_args
    self.is_stopped = False

  def StartServer(self):
    # Pretend the server bound exactly the requested ports.
    return self.http_port, self.https_port, self.dns_port

  def StopServer(self):
    # Stopping an already-stopped server is a controller bug; fail loudly.
    assert not self.is_stopped
    self.is_stopped = True
class TestNetworkControllerBackend(
    network_controller_backend.NetworkControllerBackend):
  """NetworkControllerBackend wired to produce FakeReplayServer instances."""

  def __init__(self, platform_backend, fake_started_http_port,
               fake_started_https_port, fake_started_dns_port):
    super(TestNetworkControllerBackend, self).__init__(platform_backend)
    # Ports the fake server reports when asked to pick one (requested 0).
    self.fake_started_http_port = fake_started_http_port
    self.fake_started_https_port = fake_started_https_port
    self.fake_started_dns_port = fake_started_dns_port
    self.fake_replay_server = None

  def _ReplayServer(self, archive_path, host_ip, http_port, https_port,
                    dns_port, replay_args):
    # A requested port of 0 means "pick one": substitute the fake port.
    chosen_http = http_port or self.fake_started_http_port
    chosen_https = https_port or self.fake_started_https_port
    if dns_port is None:
      chosen_dns = None
    else:
      chosen_dns = dns_port or self.fake_started_dns_port
    self.fake_replay_server = FakeReplayServer(
        archive_path, host_ip, chosen_http, chosen_https, chosen_dns,
        replay_args)
    return self.fake_replay_server
class NetworkControllerBackendTest(unittest.TestCase):
  """Exercises replay-server lifecycle handling (start, reuse, restart)."""

  def setUp(self):
    # Browser requests auto-assigned ports (0); the fake server reports
    # 222/444 as the ports it actually "started" on.
    self.browser_backend = FakeBrowserBackend(
        http_ports=(0, 0),
        https_ports=(0, 0),
        dns_ports=None)
    self.network_controller_backend = TestNetworkControllerBackend(
        FakePlatformBackend(),
        fake_started_http_port=222,
        fake_started_https_port=444,
        fake_started_dns_port=None)

  def testSameArgsReuseServer(self):
    # Identical replay args must reuse the running server instance.
    b = self.network_controller_backend
    with tempfile.NamedTemporaryFile() as temp_file:
      archive_path = temp_file.name
      # Create Replay server.
      b.SetReplayArgs(archive_path, wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
      b.UpdateReplay(self.browser_backend)
      self.assertEqual(archive_path, b.fake_replay_server.archive_path)
      self.assertEqual('123.321.123.321', b.fake_replay_server.host_ip)
      self.assertEqual(
          ['--some-arg', '--net=3g', '--inject_scripts='],
          b.fake_replay_server.replay_args)
      self.assertEqual(222, b.wpr_http_device_port)
      self.assertEqual(444, b.wpr_https_device_port)
      # Reuse Replay server.
      fake_replay_server = b.fake_replay_server
      b.SetReplayArgs(archive_path, wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
      b.UpdateReplay(self.browser_backend)
      self.assertIs(fake_replay_server, b.fake_replay_server)
      b.StopReplay()
      self.assertTrue(b.fake_replay_server.is_stopped)

  def testDifferentArgsUseDifferentServer(self):
    # Changing any replay arg must stop the old server and start a new one.
    b = self.network_controller_backend
    with tempfile.NamedTemporaryFile() as temp_file:
      archive_file = temp_file.name
      # Create Replay server.
      b.SetReplayArgs(archive_file, wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
      b.UpdateReplay(self.browser_backend)
      self.assertEqual(
          ['--some-arg', '--net=3g', '--inject_scripts='],
          b.fake_replay_server.replay_args)
      self.assertEqual(222, b.wpr_http_device_port)
      self.assertEqual(444, b.wpr_https_device_port)
      # If Replay restarts, it uses these ports when passed "0" for ports.
      b.fake_started_http_port = 212
      b.fake_started_https_port = 323
      b.fake_started_dns_port = None
      # Create different Replay server (set netsim to None instead of 3g).
      fake_replay_server = b.fake_replay_server
      b.SetReplayArgs(archive_file, wpr_modes.WPR_REPLAY, None, ['--some-arg'])
      b.UpdateReplay(self.browser_backend)
      self.assertIsNot(fake_replay_server, b.fake_replay_server)
      self.assertTrue(fake_replay_server.is_stopped)
      self.assertFalse(b.fake_replay_server.is_stopped)
      self.assertEqual(
          ['--some-arg', '--inject_scripts='],
          b.fake_replay_server.replay_args)
      self.assertEqual(212, b.wpr_http_device_port)
      self.assertEqual(323, b.wpr_https_device_port)
      b.StopReplay()
      self.assertTrue(b.fake_replay_server.is_stopped)

  def testUpdateReplayWithoutArchivePathDoesNotStopReplay(self):
    # A None archive path means "keep whatever is currently running".
    b = TestNetworkControllerBackend(
        FakePlatformBackend(),
        fake_started_http_port=222,
        fake_started_https_port=444,
        fake_started_dns_port=None)
    with tempfile.NamedTemporaryFile() as temp_file:
      archive_file = temp_file.name
      # Create Replay server.
      b.SetReplayArgs(archive_file, wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
      browser_backend = FakeBrowserBackend(
          http_ports=(0, 0), https_ports=(0, 0), dns_ports=None)
      b.UpdateReplay(browser_backend)
      self.assertFalse(b.fake_replay_server.is_stopped)
      b.SetReplayArgs(None, wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
      b.UpdateReplay()
      self.assertFalse(b.fake_replay_server.is_stopped)

  def testUpdateReplayWithoutArgsIsOkay(self):
    # Calling UpdateReplay before SetReplayArgs must be a no-op, not a crash.
    b = self.network_controller_backend
    b.UpdateReplay(self.browser_backend)  # does not raise

  def testBadArchivePathRaises(self):
    # Replay mode with a missing archive must raise.
    b = self.network_controller_backend
    b.SetReplayArgs('/tmp/nonexistant', wpr_modes.WPR_REPLAY, '3g', [])
    with self.assertRaises(network_controller_backend.ArchiveDoesNotExistError):
      b.UpdateReplay(self.browser_backend)

  def testBadArchivePathOnRecordIsOkay(self):
    """No ArchiveDoesNotExistError for record mode."""
    b = self.network_controller_backend
    b.SetReplayArgs('/tmp/nonexistant', wpr_modes.WPR_RECORD, '3g', [])
    b.UpdateReplay(self.browser_backend)  # does not raise

  def testModeOffDoesNotCreateReplayServer(self):
    # WPR_OFF must never instantiate a replay server.
    b = self.network_controller_backend
    b.SetReplayArgs('/tmp/nonexistant', wpr_modes.WPR_OFF, '3g', [])
    b.UpdateReplay(self.browser_backend)
    self.assertIsNone(b.fake_replay_server)

  def testSameBrowserUsesSamePorts(self):
    # Once a browser is configured to a port set, a server restart must
    # keep forwarding those same device ports.
    b = self.network_controller_backend
    with tempfile.NamedTemporaryFile() as temp_file:
      archive_path = temp_file.name
      # Create Replay server.
      b.SetReplayArgs(archive_path, wpr_modes.WPR_REPLAY, '3g', ['--some-arg'])
      b.UpdateReplay(self.browser_backend)
      self.assertEqual(archive_path, b.fake_replay_server.archive_path)
      self.assertEqual('123.321.123.321', b.fake_replay_server.host_ip)
      self.assertEqual(
          ['--some-arg', '--net=3g', '--inject_scripts='],
          b.fake_replay_server.replay_args)
      self.assertEqual(222, b.wpr_http_device_port)
      self.assertEqual(444, b.wpr_https_device_port)
      # If Replay restarts, it uses these ports when passed "0" for ports.
      b.fake_started_http_port = 212
      b.fake_started_https_port = 434
      b.fake_started_dns_port = None
      # Reuse Replay server.
      fake_replay_server = b.fake_replay_server
      b.SetReplayArgs(archive_path, wpr_modes.WPR_REPLAY, None, ['--NEW-ARG'])
      b.UpdateReplay()  # no browser backend means use the previous one
      # Even though WPR is restarted, it uses the same ports because
      # the browser was configured to a particular port set.
      self.assertIsNot(fake_replay_server, b.fake_replay_server)
      self.assertEqual(222, b.wpr_http_device_port)
      self.assertEqual(444, b.wpr_https_device_port)
      b.StopReplay()
      self.assertTrue(b.fake_replay_server.is_stopped)
# pylint: disable=protected-access
class ForwarderPortPairsTest(unittest.TestCase):
  """Tests the private _ForwarderPortPairs port-merging helper."""

  def testZeroIsOkayForRemotePorts(self):
    # Remote port 0 is preserved; local ports come from the started server.
    started_ports = (8080, 8443, None)
    wpr_port_pairs = forwarders.PortPairs(
        http=forwarders.PortPair(0, 0),
        https=forwarders.PortPair(0, 0),
        dns=None)
    expected_port_pairs = forwarders.PortPairs(
        http=forwarders.PortPair(8080, 0),
        https=forwarders.PortPair(8443, 0),
        dns=None)
    self.assertEqual(
        expected_port_pairs,
        network_controller_backend._ForwarderPortPairs(started_ports,
                                                       wpr_port_pairs))

  def testCombineStartedAndRemotePorts(self):
    # Started (local) ports pair up with the requested remote ports.
    started_ports = (8888, 4343, 5353)
    wpr_port_pairs = forwarders.PortPairs(
        http=forwarders.PortPair(0, 80),
        https=forwarders.PortPair(0, 443),
        dns=forwarders.PortPair(0, 53))
    expected_port_pairs = forwarders.PortPairs(
        http=forwarders.PortPair(8888, 80),
        https=forwarders.PortPair(4343, 443),
        dns=forwarders.PortPair(5353, 53))
    self.assertEqual(
        expected_port_pairs,
        network_controller_backend._ForwarderPortPairs(started_ports,
                                                       wpr_port_pairs))
| bsd-3-clause |
FusionSP/android_external_chromium_org | third_party/jinja2/defaults.py | 659 | 1068 | # -*- coding: utf-8 -*-
"""
jinja2.defaults
~~~~~~~~~~~~~~~
Jinja default filters and tags.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import range_type
from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner
# defaults for the parser / lexer
BLOCK_START_STRING = '{%'
BLOCK_END_STRING = '%}'
VARIABLE_START_STRING = '{{'
VARIABLE_END_STRING = '}}'
COMMENT_START_STRING = '{#'
COMMENT_END_STRING = '#}'
# line statements/comments are opt-in features; disabled by default
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
# whitespace-control defaults
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = '\n'
KEEP_TRAILING_NEWLINE = False
# default filters, tests and namespace
from jinja2.filters import FILTERS as DEFAULT_FILTERS
from jinja2.tests import TESTS as DEFAULT_TESTS
# names available in every template's global namespace
DEFAULT_NAMESPACE = {
    'range': range_type,
    'dict': lambda **kw: kw,
    'lipsum': generate_lorem_ipsum,
    'cycler': Cycler,
    'joiner': Joiner
}
# export all constants
__all__ = tuple(x for x in locals().keys() if x.isupper())
| bsd-3-clause |
makinacorpus/django | django/core/management/commands/runserver.py | 6 | 5656 | from optparse import make_option
from datetime import datetime
import os
import re
import sys
import socket
from django.core.management.base import BaseCommand, CommandError
from django.core.servers.basehttp import run, WSGIServerException, get_internal_wsgi_application
from django.utils import autoreload
naiveip_re = re.compile(r"""^(?:
(?P<addr>
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
):)?(?P<port>\d+)$""", re.X)
DEFAULT_PORT = "8000"
class Command(BaseCommand):
    # Extra command-line options on top of BaseCommand's defaults.
    option_list = BaseCommand.option_list + (
        make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
            help='Tells Django to use a IPv6 address.'),
        make_option('--nothreading', action='store_false', dest='use_threading', default=True,
            help='Tells Django to NOT use threading.'),
        make_option('--noreload', action='store_false', dest='use_reloader', default=True,
            help='Tells Django to NOT use the auto-reloader.'),
    )
    help = "Starts a lightweight Web server for development."
    args = '[optional port number, or ipaddr:port]'
    # Validation is called explicitly each time the server is reloaded.
    requires_model_validation = False

    def get_handler(self, *args, **options):
        """
        Returns the default WSGI handler for the runner.
        """
        return get_internal_wsgi_application()

    def handle(self, addrport='', *args, **options):
        # Parse and validate the addr:port argument, then delegate to run().
        from django.conf import settings
        if not settings.DEBUG and not settings.ALLOWED_HOSTS:
            raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.')
        self.use_ipv6 = options.get('use_ipv6')
        if self.use_ipv6 and not socket.has_ipv6:
            raise CommandError('Your Python does not support IPv6.')
        if args:
            raise CommandError('Usage is runserver %s' % self.args)
        self._raw_ipv6 = False
        if not addrport:
            self.addr = ''
            self.port = DEFAULT_PORT
        else:
            # naiveip_re splits the argument into address kind and port.
            m = re.match(naiveip_re, addrport)
            if m is None:
                raise CommandError('"%s" is not a valid port number '
                                   'or address:port pair.' % addrport)
            self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
            if not self.port.isdigit():
                raise CommandError("%r is not a valid port number." % self.port)
            if self.addr:
                if _ipv6:
                    # Strip the surrounding brackets from "[::1]".
                    self.addr = self.addr[1:-1]
                    self.use_ipv6 = True
                    self._raw_ipv6 = True
                elif self.use_ipv6 and not _fqdn:
                    raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
        if not self.addr:
            # Default to the loopback address for the chosen IP family.
            self.addr = '::1' if self.use_ipv6 else '127.0.0.1'
            self._raw_ipv6 = bool(self.use_ipv6)
        self.run(*args, **options)

    def run(self, *args, **options):
        """
        Runs the server, using the autoreloader if needed
        """
        use_reloader = options.get('use_reloader')
        if use_reloader:
            # autoreload restarts inner_run whenever source files change.
            autoreload.main(self.inner_run, args, options)
        else:
            self.inner_run(*args, **options)

    def inner_run(self, *args, **options):
        # Validate models, print the banner, then serve until interrupted.
        from django.conf import settings
        from django.utils import translation
        threading = options.get('use_threading')
        shutdown_message = options.get('shutdown_message', '')
        quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
        self.stdout.write("Validating models...\n\n")
        self.validate(display_num_errors=True)
        self.stdout.write((
            "%(started_at)s\n"
            "Django version %(version)s, using settings %(settings)r\n"
            "Starting development server at http://%(addr)s:%(port)s/\n"
            "Quit the server with %(quit_command)s.\n"
        ) % {
            "started_at": datetime.now().strftime('%B %d, %Y - %X'),
            "version": self.get_version(),
            "settings": settings.SETTINGS_MODULE,
            "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
            "port": self.port,
            "quit_command": quit_command,
        })
        # django.core.management.base forces the locale to en-us. We should
        # set it up correctly for the first request (particularly important
        # in the "--noreload" case).
        translation.activate(settings.LANGUAGE_CODE)
        try:
            handler = self.get_handler(*args, **options)
            run(self.addr, int(self.port), handler,
                ipv6=self.use_ipv6, threading=threading)
        except WSGIServerException as e:
            # Use helpful error messages instead of ugly tracebacks.
            ERRORS = {
                13: "You don't have permission to access that port.",
                98: "That port is already in use.",
                99: "That IP address can't be assigned-to.",
            }
            try:
                error_text = ERRORS[e.args[0].args[0]]
            except (AttributeError, KeyError):
                error_text = str(e)
            self.stderr.write("Error: %s" % error_text)
            # Need to use an OS exit because sys.exit doesn't work in a thread
            os._exit(1)
        except KeyboardInterrupt:
            if shutdown_message:
                self.stdout.write(shutdown_message)
            sys.exit(0)
# Kept for backward compatibility
BaseRunserverCommand = Command
| bsd-3-clause |
erinspace/osf.io | scripts/utils.py | 21 | 1576 | # -*- coding: utf-8 -*-
import os
import logging
import sys
from django.utils import timezone
from website import settings
def format_now():
    """Return the current (timezone-aware) time as an ISO-8601 string."""
    current = timezone.now()
    return current.isoformat()
def add_file_logger(logger, script_name, suffix=None):
    """Attach a FileHandler to *logger*, writing to a timestamped file in
    ``settings.LOG_PATH``.

    :param logger: ``logging.Logger`` to attach the handler to
    :param script_name: path of the running script (typically ``__file__``);
        only the basename is used for the log file name
    :param suffix: optional extra token appended to the log file name
    """
    _, name = os.path.split(script_name)
    # If invoked from a compiled '.pyc' file, log under the '.py' name.
    # (The previous ``name.rstrip('c')`` stripped *all* trailing 'c'
    # characters, which would mangle an extensionless script name such
    # as 'misc'.)
    if name.endswith('.pyc'):
        name = name[:-1]
    if suffix is not None:
        name = '{0}-{1}'.format(name, suffix)
    file_handler = logging.FileHandler(
        os.path.join(
            settings.LOG_PATH,
            '.'.join([name, format_now(), 'log'])
        )
    )
    logger.addHandler(file_handler)
class Progress(object):
    """Render a simple in-place text progress bar on stdout.

    Usage::

        p = Progress()
        p.start(total, 'prefix')
        for item in items:
            p.increment()
        p.stop()
    """
    def __init__(self, bar_len=50, precision=1):
        self.bar_len = bar_len
        self.precision = precision
        # Renders as: "prefix[====----] 42.0% ... total\r"
        self.bar_format = '{}[{}] {:0.' + str(self.precision) + 'f}% ... {}\r'
    def start(self, total, prefix):
        """Begin tracking ``total`` units of work, labelled with ``prefix``."""
        self.total = total
        self.count = 0
        self.prefix = prefix
        self.last_percents = None
    def increment(self, inc=1):
        """Advance by ``inc`` units; redraw only when the rounded percentage
        changes, to avoid flooding stdout."""
        self.count = self.count + inc
        percents = round(100.0 * self.count / float(self.total), self.precision)
        if self.last_percents == percents:
            return
        self.last_percents = percents
        filled_len = int(round(self.bar_len * self.count / float(self.total)))
        bar = '=' * filled_len + '-' * (self.bar_len - filled_len)
        sys.stdout.write(self.bar_format.format(self.prefix, bar, percents, str(self.total)))
        # Flush *after* writing so the freshly drawn bar appears immediately.
        # (The original flushed before writing, leaving the newest frame
        # buffered until the next call.)
        sys.stdout.flush()
    def stop(self):
        """Finish: emit a newline so the final bar line is preserved."""
        # To preserve line, there is probably a better way to do this
        print('')
| apache-2.0 |
abhishekgahlot/scikit-learn | examples/cluster/plot_feature_agglomeration_vs_univariate_selection.py | 30 | 3909 | """
==============================================
Feature agglomeration vs. univariate selection
==============================================
This example compares 2 dimensionality reduction strategies:
- univariate feature selection with Anova
- feature agglomeration with Ward hierarchical clustering
Both methods are compared in a regression problem using
a BayesianRidge as supervised estimator.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# License: BSD 3 clause
print(__doc__)
import shutil
import tempfile
import numpy as np
import matplotlib.pyplot as plt
from scipy import linalg, ndimage
from sklearn.feature_extraction.image import grid_to_graph
from sklearn import feature_selection
from sklearn.cluster import FeatureAgglomeration
from sklearn.linear_model import BayesianRidge
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn.externals.joblib import Memory
from sklearn.cross_validation import KFold
###############################################################################
# Generate data
n_samples = 200
size = 40 # image size
roi_size = 15
snr = 5.
np.random.seed(0)
# NOTE(review): `mask` is assigned but never used below; `np.bool` is also
# deprecated in recent NumPy — confirm before modernizing.
mask = np.ones([size, size], dtype=np.bool)
# Ground-truth weights: a negative square ROI in the top-left corner and a
# positive one in the bottom-right corner of the image.
coef = np.zeros((size, size))
coef[0:roi_size, 0:roi_size] = -1.
coef[-roi_size:, -roi_size:] = 1.
X = np.random.randn(n_samples, size ** 2)
for x in X: # smooth data
    x[:] = ndimage.gaussian_filter(x.reshape(size, size), sigma=1.0).ravel()
# Standardize features to zero mean, unit variance.
X -= X.mean(axis=0)
X /= X.std(axis=0)
y = np.dot(X, coef.ravel())
# Add Gaussian noise scaled to the requested signal-to-noise ratio (in dB).
noise = np.random.randn(y.shape[0])
noise_coef = (linalg.norm(y, 2) / np.exp(snr / 20.)) / linalg.norm(noise, 2)
y += noise_coef * noise # add noise
###############################################################################
# Compute the coefs of a Bayesian Ridge with GridSearch
cv = KFold(len(y), 2) # cross-validation generator for model selection
ridge = BayesianRidge()
# Cache transformer fits on disk so the grid search does not recompute them.
cachedir = tempfile.mkdtemp()
mem = Memory(cachedir=cachedir, verbose=1)
# Ward agglomeration followed by BayesianRidge
connectivity = grid_to_graph(n_x=size, n_y=size)
ward = FeatureAgglomeration(n_clusters=10, connectivity=connectivity,
                            memory=mem, n_components=1)
clf = Pipeline([('ward', ward), ('ridge', ridge)])
# Select the optimal number of parcels with grid search
clf = GridSearchCV(clf, {'ward__n_clusters': [10, 20, 30]}, n_jobs=1, cv=cv)
clf.fit(X, y) # set the best parameters
# Map the ridge coefficients (in reduced space) back to the image pixels.
coef_ = clf.best_estimator_.steps[-1][1].coef_
coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_)
coef_agglomeration_ = coef_.reshape(size, size)
# Anova univariate feature selection followed by BayesianRidge
f_regression = mem.cache(feature_selection.f_regression) # caching function
anova = feature_selection.SelectPercentile(f_regression)
clf = Pipeline([('anova', anova), ('ridge', ridge)])
# Select the optimal percentage of features with grid search
clf = GridSearchCV(clf, {'anova__percentile': [5, 10, 20]}, cv=cv)
clf.fit(X, y) # set the best parameters
coef_ = clf.best_estimator_.steps[-1][1].coef_
coef_ = clf.best_estimator_.steps[0][1].inverse_transform(coef_)
coef_selection_ = coef_.reshape(size, size)
###############################################################################
# Inverse the transformation to plot the results on an image
plt.close('all')
plt.figure(figsize=(7.3, 2.7))
plt.subplot(1, 3, 1)
plt.imshow(coef, interpolation="nearest", cmap=plt.cm.RdBu_r)
plt.title("True weights")
plt.subplot(1, 3, 2)
plt.imshow(coef_selection_, interpolation="nearest", cmap=plt.cm.RdBu_r)
plt.title("Feature Selection")
plt.subplot(1, 3, 3)
plt.imshow(coef_agglomeration_, interpolation="nearest", cmap=plt.cm.RdBu_r)
plt.title("Feature Agglomeration")
plt.subplots_adjust(0.04, 0.0, 0.98, 0.94, 0.16, 0.26)
plt.show()
# Attempt to remove the temporary cachedir, but don't worry if it fails
shutil.rmtree(cachedir, ignore_errors=True)
| bsd-3-clause |
rcbops/quantum-buildpackage | quantum/plugins/cisco/client/cli.py | 2 | 6842 | """
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2011 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Initial structure and framework of this CLI has been borrowed from Quantum,
# written by the following authors
# @author: Somik Behera, Nicira Networks, Inc.
# @author: Brad Hall, Nicira Networks, Inc.
# @author: Salvatore Orlando, Citrix
#
# Cisco adaptation for extensions
# @author: Sumit Naiksatam, Cisco Systems, Inc.
# @author: Ying Liu, Cisco Systems, Inc.
#
"""
import gettext
import logging
import logging.handlers
import os
import sys
import subprocess
from optparse import OptionParser
sys.path.append(os.getcwd())
import quantum.client.cli as qcli
# If the CLI is run from a source checkout (a 'quantum' package next to the
# script), prefer that tree over any installed copy.
POSSIBLE_TOPDIR = os.path.normpath(os.path.join(os.path.abspath(sys.argv[0]),
                                   os.pardir,
                                   os.pardir))
if os.path.exists(os.path.join(POSSIBLE_TOPDIR, 'quantum', '__init__.py')):
    sys.path.insert(0, POSSIBLE_TOPDIR)
gettext.install('quantum', unicode=1)
from quantum.client import Client
from quantum.plugins.cisco.common import cisco_constants as const
LOG = logging.getLogger('quantum')
# Wire format and URL prefixes for the Cisco extension endpoints.
FORMAT = 'json'
ACTION_PREFIX_EXT = '/v1.0'
ACTION_PREFIX_CSCO = ACTION_PREFIX_EXT + \
    '/extensions/csco/tenants/{tenant_id}'
# Default tenant used for host-scheduling requests.
TENANT_ID = 'nova'
CSCO_EXT_NAME = 'Cisco Nova Tenant'
def help():
    """Print a usage line for each Cisco extension CLI command."""
    # NOTE: intentionally shadows the builtin help(); kept for CLI parity
    # with the base Quantum client.
    print("\nCisco Extension Commands:")
    for cmd_name in COMMANDS.keys():
        arg_hints = " ".join(["<%s>" % arg for arg in COMMANDS[cmd_name]["args"]])
        print(" %s %s" % (cmd_name, arg_hints))
def build_args(cmd, cmdargs, arglist):
    """Pop the values required by *cmd* from the supplied argument list.

    :param cmd: command name (used for error reporting only)
    :param cmdargs: names of the arguments the command expects
    :param arglist: positional CLI arguments; consumed (mutated) as values
        are taken
    :returns: a list with one value per entry in ``cmdargs``

    Prints a usage message and exits the process when too few or too many
    arguments were supplied.
    """
    args = []
    orig_arglist = arglist[:]
    try:
        for cmdarg in cmdargs:
            args.append(arglist[0])
            del arglist[0]
    except IndexError:
        # Ran out of supplied values.  (Previously a bare ``except:``,
        # which would also swallow KeyboardInterrupt/SystemExit.)
        LOG.error("Not enough arguments for \"%s\" (expected: %d, got: %d)" % (
            cmd, len(cmdargs), len(orig_arglist)))
        print("Usage:\n %s %s" % (cmd,
            " ".join(["<%s>" % y for y in COMMANDS[cmd]["args"]])))
        sys.exit()
    if len(arglist) > 0:
        LOG.error("Too many arguments for \"%s\" (expected: %d, got: %d)" % (
            cmd, len(cmdargs), len(orig_arglist)))
        print("Usage:\n %s %s" % (cmd,
            " ".join(["<%s>" % y for y in COMMANDS[cmd]["args"]])))
        sys.exit()
    return args
def list_extensions(*args):
    """Ask Quantum for the list of supported API extensions and print it."""
    api_client = Client(HOST, PORT, USE_SSL, format='json',
                        action_prefix=ACTION_PREFIX_EXT, tenant="dummy")
    data = api_client.do_request('GET', "/extensions")
    print("Obtained supported extensions from Quantum: %s" % data)
def schedule_host(tenant_id, instance_id, user_id=None):
    """Ask the Quantum service to pick a host for the given instance and
    print the result."""
    body = {
        'novatenant': {
            'instance_id': instance_id,
            'instance_desc': {
                'user_id': user_id,
                'project_id': tenant_id,
            },
        },
    }
    api_client = Client(HOST, PORT, USE_SSL, format='json', tenant=TENANT_ID,
                        action_prefix=ACTION_PREFIX_CSCO)
    data = api_client.do_request(
        'PUT', "/novatenants/" + tenant_id + "/schedule_host", body=body)
    hostname = data["host_list"]["host_1"]
    if not hostname:
        print("Scheduler was unable to locate a host"
              " for this request. Is the appropriate"
              " service running?")
    print("Quantum service returned host: %s" % hostname)
def create_multiport(tenant_id, net_id_list, *args):
    """Create one port per network (comma-separated network IDs) on a
    single host and print the response."""
    ports_info = {
        'multiport': {
            'status': 'ACTIVE',
            'net_id_list': net_id_list.split(","),
            'ports_desc': {'key': 'value'},
        },
    }
    api_client = Client(HOST, PORT, USE_SSL, format='json', tenant=tenant_id,
                        action_prefix=ACTION_PREFIX_CSCO)
    data = api_client.do_request('POST', "/multiport", body=ports_info)
    print("Created ports: %s" % data)
# Registry of Cisco-extension CLI commands: maps each command name to its
# handler function ("func") and the ordered argument names it expects
# ("args").  Consulted by help(), build_args() and main().
COMMANDS = {
    "create_multiport": {
        "func": create_multiport,
        "args": ["tenant-id",
                 "net-id-list (comma separated list of netword IDs)"]},
    "list_extensions": {
        "func": list_extensions,
        "args": []},
    "schedule_host": {
        "func": schedule_host,
        "args": ["tenant-id", "instance-id"]}, }
def main():
    """CLI entry point: parse options, configure logging and dispatch either
    to the standard Quantum CLI (qcli) or to a Cisco extension command.
    """
    # HOST/PORT/USE_SSL are read as module-level globals by the command
    # handlers (list_extensions, schedule_host, create_multiport).  Without
    # this declaration the assignments below would only create locals and
    # every handler would fail with NameError at dispatch time.
    global HOST, PORT, USE_SSL
    import cli  # legacy self-import, kept for backward compatibility
    usagestr = "Usage: %prog [OPTIONS] <command> [args]"
    PARSER = OptionParser(usage=usagestr)
    PARSER.add_option("-H", "--host", dest="host",
        type="string", default="127.0.0.1", help="ip address of api host")
    PARSER.add_option("-p", "--port", dest="port",
        type="int", default=9696, help="api port")
    PARSER.add_option("-s", "--ssl", dest="ssl",
        action="store_true", default=False, help="use ssl")
    PARSER.add_option("-v", "--verbose", dest="verbose",
        action="store_true", default=False, help="turn on verbose logging")
    PARSER.add_option("-f", "--logfile", dest="logfile",
        type="string", default="syslog", help="log file path")
    options, args = PARSER.parse_args()
    if options.verbose:
        LOG.setLevel(logging.DEBUG)
    else:
        LOG.setLevel(logging.WARN)
    if options.logfile == "syslog":
        LOG.addHandler(logging.handlers.SysLogHandler(address='/dev/log'))
    else:
        LOG.addHandler(logging.handlers.WatchedFileHandler(options.logfile))
        # Make the log world-readable (0o644 == old-style 0644 literal).
        os.chmod(options.logfile, 0o644)
    if len(args) < 1:
        PARSER.print_help()
        qcli.help()
        help()
        sys.exit(1)
    CMD = args[0]
    # Base Quantum commands are delegated to the stock CLI untouched.
    if CMD in qcli.commands.keys():
        qcli.main()
        sys.exit(1)
    if CMD not in COMMANDS.keys():
        LOG.error("Unknown command: %s" % CMD)
        qcli.help()
        help()
        sys.exit(1)
    args = build_args(CMD, COMMANDS[CMD]["args"], args[1:])
    LOG.info("Executing command \"%s\" with args: %s" % (CMD, args))
    HOST = options.host
    PORT = options.port
    USE_SSL = options.ssl
    COMMANDS[CMD]["func"](*args)
    LOG.info("Command execution completed")
    sys.exit(0)
if __name__ == "__main__":
    main()
| apache-2.0 |
p0psicles/SickRage | lib/github/StatsContributor.py | 74 | 4471 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.NamedUser
class StatsContributor(github.GithubObject.NonCompletableGithubObject):
    """
    Statistics for a single repository contributor.  Reference:
    http://developer.github.com/v3/repos/statistics/#get-contributors-list-with-additions-deletions-and-commit-counts
    """

    class Week(github.GithubObject.NonCompletableGithubObject):
        """
        One week of activity for the contributor.
        """

        @property
        def w(self):
            """
            :type: datetime.datetime
            """
            return self._w.value

        @property
        def a(self):
            """
            :type: int
            """
            return self._a.value

        @property
        def d(self):
            """
            :type: int
            """
            return self._d.value

        @property
        def c(self):
            """
            :type: int
            """
            return self._c.value

        def _initAttributes(self):
            # Every raw attribute starts unset until _useAttributes runs.
            self._w = self._a = self._d = self._c = github.GithubObject.NotSet

        def _useAttributes(self, attributes):
            # "w" is a timestamp; "a"/"d"/"c" are plain integer counters.
            if "w" in attributes:  # pragma no branch
                self._w = self._makeTimestampAttribute(attributes["w"])
            for key in ("a", "d", "c"):
                if key in attributes:  # pragma no branch
                    setattr(self, "_" + key, self._makeIntAttribute(attributes[key]))

    @property
    def author(self):
        """
        :type: :class:`github.NamedUser.NamedUser`
        """
        return self._author.value

    @property
    def total(self):
        """
        :type: int
        """
        return self._total.value

    @property
    def weeks(self):
        """
        :type: list of :class:`.Week`
        """
        return self._weeks.value

    def _initAttributes(self):
        # Raw attributes stay NotSet until the API payload is applied.
        self._author = self._total = self._weeks = github.GithubObject.NotSet

    def _useAttributes(self, attributes):
        if "author" in attributes:  # pragma no branch
            self._author = self._makeClassAttribute(github.NamedUser.NamedUser, attributes["author"])
        if "total" in attributes:  # pragma no branch
            self._total = self._makeIntAttribute(attributes["total"])
        if "weeks" in attributes:  # pragma no branch
            self._weeks = self._makeListOfClassesAttribute(self.Week, attributes["weeks"])
| gpl-3.0 |
zamattiac/osf.io | api_tests/nodes/views/test_node_list.py | 4 | 83065 | # -*- coding: utf-8 -*-
from nose.tools import * # flake8: noqa
from modularodm import Q
from framework.auth.core import Auth
from website.models import Node, NodeLog
from website.util import permissions
from website.util.sanitize import strip_html
from api.base.settings.defaults import API_BASE, MAX_PAGE_SIZE
from tests.base import ApiTestCase
from tests.factories import (
BookmarkCollectionFactory,
CollectionFactory,
ProjectFactory,
RegistrationFactory,
AuthUserFactory,
UserFactory,
)
class TestNodeList(ApiTestCase):
    """Tests for GET /v2/nodes/: visibility of public, private and deleted
    nodes for anonymous users, contributors and non-contributors.
    """
    def setUp(self):
        super(TestNodeList, self).setUp()
        self.user = AuthUserFactory()
        self.non_contrib = AuthUserFactory()
        self.deleted = ProjectFactory(is_deleted=True)
        self.private = ProjectFactory(is_public=False, creator=self.user)
        self.public = ProjectFactory(is_public=True, creator=self.user)
        self.url = '/{}nodes/'.format(API_BASE)
    def tearDown(self):
        super(TestNodeList, self).tearDown()
        Node.remove()
    def test_only_returns_non_deleted_public_projects(self):
        res = self.app.get(self.url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.public._id, ids)
        assert_not_in(self.deleted._id, ids)
        assert_not_in(self.private._id, ids)
    def test_return_public_node_list_logged_out_user(self):
        res = self.app.get(self.url, expect_errors=True)
        assert_equal(res.status_code, 200)
        assert_equal(res.content_type, 'application/vnd.api+json')
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.public._id, ids)
        assert_not_in(self.private._id, ids)
    def test_return_public_node_list_logged_in_user(self):
        # Fix: pass the credentials tuple (.auth), not the user object, so
        # the request is actually authenticated as the non-contributor —
        # consistent with every other test in this class.
        res = self.app.get(self.url, auth=self.non_contrib.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.content_type, 'application/vnd.api+json')
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.public._id, ids)
        assert_not_in(self.private._id, ids)
    def test_return_private_node_list_logged_out_user(self):
        res = self.app.get(self.url)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.public._id, ids)
        assert_not_in(self.private._id, ids)
    def test_return_private_node_list_logged_in_contributor(self):
        res = self.app.get(self.url, auth=self.user.auth)
        assert_equal(res.status_code, 200)
        assert_equal(res.content_type, 'application/vnd.api+json')
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.public._id, ids)
        assert_in(self.private._id, ids)
    def test_return_private_node_list_logged_in_non_contributor(self):
        res = self.app.get(self.url, auth=self.non_contrib.auth)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.public._id, ids)
        assert_not_in(self.private._id, ids)
    def test_node_list_does_not_returns_registrations(self):
        registration = RegistrationFactory(project=self.public, creator=self.user)
        res = self.app.get(self.url, auth=self.user.auth)
        ids = [each['id'] for each in res.json['data']]
        assert_not_in(registration._id, ids)
    def test_node_list_has_root(self):
        res = self.app.get(self.url, auth=self.user.auth)
        projects_with_root = 0
        for project in res.json['data']:
            if project['relationships'].get('root', None):
                projects_with_root += 1
        assert_not_equal(projects_with_root, 0)
        assert_true(
            all([each['relationships'].get(
                'root'
            ) is not None for each in res.json['data']])
        )
    def test_node_list_has_proper_root(self):
        project_one = ProjectFactory(title="Project One", is_public=True)
        ProjectFactory(parent=project_one, is_public=True)
        res = self.app.get(self.url+'?embed=root&embed=parent', auth=self.user.auth)
        for project_json in res.json['data']:
            project = Node.load(project_json['id'])
            assert_equal(project_json['embeds']['root']['data']['id'], project.root._id)
class TestNodeFiltering(ApiTestCase):
    """Tests for GET /v2/nodes/ query-string filtering: by id, category,
    public flag, tags, title/description substrings, root and parent, and
    the [ne] (not-equal) operator.
    """
    def setUp(self):
        super(TestNodeFiltering, self).setUp()
        self.user_one = AuthUserFactory()
        self.user_two = AuthUserFactory()
        self.project_one = ProjectFactory(title="Project One", is_public=True)
        self.project_two = ProjectFactory(title="Project Two", description="One Three", is_public=True)
        self.project_three = ProjectFactory(title="Three", is_public=True)
        self.private_project_user_one = ProjectFactory(title="Private Project User One",
                                                       is_public=False,
                                                       creator=self.user_one)
        self.private_project_user_two = ProjectFactory(title="Private Project User Two",
                                                       is_public=False,
                                                       creator=self.user_two)
        # Folders and bookmark collections must never appear in /nodes/.
        self.folder = CollectionFactory()
        self.bookmark_collection = BookmarkCollectionFactory()
        self.url = "/{}nodes/".format(API_BASE)
        # project_one carries both tags, project_two only tag1 — used by
        # the tag-filter tests below.
        self.tag1, self.tag2 = 'tag1', 'tag2'
        self.project_one.add_tag(self.tag1, Auth(self.project_one.creator), save=False)
        self.project_one.add_tag(self.tag2, Auth(self.project_one.creator), save=False)
        self.project_one.save()
        self.project_two.add_tag(self.tag1, Auth(self.project_two.creator), save=True)
    def tearDown(self):
        super(TestNodeFiltering, self).tearDown()
        Node.remove()
    def test_filtering_by_id(self):
        url = '/{}nodes/?filter[id]={}'.format(API_BASE, self.project_one._id)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.project_one._id, ids)
        assert_equal(len(ids), 1)
    def test_filtering_by_multiple_ids(self):
        url = '/{}nodes/?filter[id]={},{}'.format(API_BASE, self.project_one._id, self.project_two._id)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_equal(len(ids), 2)
    def test_filtering_by_multiple_ids_one_private(self):
        url = '/{}nodes/?filter[id]={},{}'.format(API_BASE, self.project_one._id, self.private_project_user_two._id)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.project_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_equal(len(ids), 1)
    def test_filtering_by_multiple_ids_brackets_in_query_params(self):
        url = '/{}nodes/?filter[id]=[{}, {}]'.format(API_BASE, self.project_one._id, self.project_two._id)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        ids = [each['id'] for each in res.json['data']]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_equal(len(ids), 2)
    def test_filtering_by_category(self):
        project = ProjectFactory(creator=self.user_one, category='hypothesis')
        project2 = ProjectFactory(creator=self.user_one, category='procedure')
        url = '/{}nodes/?filter[category]=hypothesis'.format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(project._id, ids)
        assert_not_in(project2._id, ids)
    def test_filtering_by_public(self):
        project = ProjectFactory(creator=self.user_one, is_public=True)
        project2 = ProjectFactory(creator=self.user_one, is_public=False)
        url = '/{}nodes/?filter[public]=false'.format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        # No public projects returned
        assert_false(
            any([each['attributes']['public'] for each in node_json])
        )
        ids = [each['id'] for each in node_json]
        assert_not_in(project._id, ids)
        assert_in(project2._id, ids)
        url = '/{}nodes/?filter[public]=true'.format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        # No private projects returned
        assert_true(
            all([each['attributes']['public'] for each in node_json])
        )
        ids = [each['id'] for each in node_json]
        assert_not_in(project2._id, ids)
        assert_in(project._id, ids)
    def test_filtering_tags(self):
        # both project_one and project_two have tag1
        url = '/{}nodes/?filter[tags]={}'.format(API_BASE, self.tag1)
        res = self.app.get(url, auth=self.project_one.creator.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        # filtering two tags
        # project_one has both tags; project_two only has one
        url = '/{}nodes/?filter[tags]={}&filter[tags]={}'.format(API_BASE, self.tag1, self.tag2)
        res = self.app.get(url, auth=self.project_one.creator.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_not_in(self.project_two._id, ids)
    def test_get_all_projects_with_no_filter_logged_in(self):
        res = self.app.get(self.url, auth=self.user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_in(self.project_three._id, ids)
        assert_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_all_projects_with_no_filter_not_logged_in(self):
        res = self.app.get(self.url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_one_project_with_exact_filter_logged_in(self):
        url = "/{}nodes/?filter[title]=Project%20One".format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_not_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_one_project_with_exact_filter_not_logged_in(self):
        url = "/{}nodes/?filter[title]=Project%20One".format(API_BASE)
        res = self.app.get(url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_not_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_some_projects_with_substring_logged_in(self):
        url = "/{}nodes/?filter[title]=Two".format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_not_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_some_projects_with_substring_not_logged_in(self):
        url = "/{}nodes/?filter[title]=Two".format(API_BASE)
        # Fix: this test exercises the anonymous path, so no auth may be
        # sent (previously copy-pasted auth=self.user_one.auth from the
        # logged-in variant above).
        res = self.app.get(url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_not_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_only_public_or_my_projects_with_filter_logged_in(self):
        url = "/{}nodes/?filter[title]=Project".format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_get_only_public_projects_with_filter_not_logged_in(self):
        url = "/{}nodes/?filter[title]=Project".format(API_BASE)
        res = self.app.get(url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_alternate_filtering_field_logged_in(self):
        url = "/{}nodes/?filter[description]=Three".format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_not_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_alternate_filtering_field_not_logged_in(self):
        url = "/{}nodes/?filter[description]=Three".format(API_BASE)
        res = self.app.get(url)
        node_json = res.json['data']
        ids = [each['id'] for each in node_json]
        assert_not_in(self.project_one._id, ids)
        assert_in(self.project_two._id, ids)
        assert_not_in(self.project_three._id, ids)
        assert_not_in(self.private_project_user_one._id, ids)
        assert_not_in(self.private_project_user_two._id, ids)
        assert_not_in(self.folder._id, ids)
        assert_not_in(self.bookmark_collection._id, ids)
    def test_incorrect_filtering_field_not_logged_in(self):
        url = '/{}nodes/?filter[notafield]=bogus'.format(API_BASE)
        res = self.app.get(url, expect_errors=True)
        assert_equal(res.status_code, 400)
        errors = res.json['errors']
        assert_equal(len(errors), 1)
        assert_equal(errors[0]['detail'], "'notafield' is not a valid field for this endpoint.")
    def test_filtering_on_root(self):
        root = ProjectFactory(is_public=True)
        child = ProjectFactory(parent=root, is_public=True)
        ProjectFactory(parent=root, is_public=True)
        ProjectFactory(parent=child, is_public=True)
        # create some unrelated projects
        ProjectFactory(title="Road Dogg Jesse James", is_public=True)
        ProjectFactory(title="Badd *** Billy Gunn", is_public=True)
        url = '/{}nodes/?filter[root]={}'.format(API_BASE, root._id)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        root_nodes = Node.find(Q('is_public', 'eq', True) & Q('root', 'eq', root._id))
        assert_equal(len(res.json['data']), root_nodes.count())
    def test_filtering_on_null_parent(self):
        # add some nodes TO be included
        new_user = AuthUserFactory()
        root = ProjectFactory(is_public=True)
        ProjectFactory(is_public=True)
        # Build up a some of nodes not to be included
        child = ProjectFactory(parent=root, is_public=True)
        ProjectFactory(parent=root, is_public=True)
        ProjectFactory(parent=child, is_public=True)
        url = '/{}nodes/?filter[parent]=null'.format(API_BASE)
        res = self.app.get(url, auth=new_user.auth)
        assert_equal(res.status_code, 200)
        public_root_nodes = Node.find(Q('is_public', 'eq', True) & Q('parent_node', 'eq', None))
        assert_equal(len(res.json['data']), public_root_nodes.count())
    def test_filtering_on_title_not_equal(self):
        url = '/{}nodes/?filter[title][ne]=Project%20One'.format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        data = res.json['data']
        assert_equal(len(data), 3)
        titles = [each['attributes']['title'] for each in data]
        assert_not_in(self.project_one.title, titles)
        assert_in(self.project_two.title, titles)
        assert_in(self.project_three.title, titles)
        assert_in(self.private_project_user_one.title, titles)
    def test_filtering_on_description_not_equal(self):
        url = '/{}nodes/?filter[description][ne]=One%20Three'.format(API_BASE)
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        data = res.json['data']
        assert_equal(len(data), 3)
        descriptions = [each['attributes']['description'] for each in data]
        assert_not_in(self.project_two.description, descriptions)
        assert_in(self.project_one.description, descriptions)
        assert_in(self.project_three.description, descriptions)
        assert_in(self.private_project_user_one.description, descriptions)
class TestNodeCreate(ApiTestCase):
    """Tests for creating a single node via POST /v2/nodes/.

    Covers authentication requirements, JSON-API payload validation
    (type / attributes structure), HTML sanitization of user input,
    creating a project from a template, and title-length limits.
    """

    def setUp(self):
        super(TestNodeCreate, self).setUp()
        self.user_one = AuthUserFactory()
        self.url = '/{}nodes/'.format(API_BASE)
        self.title = 'Cool Project'
        self.description = 'A Properly Cool Project'
        self.category = 'data'
        self.user_two = AuthUserFactory()
        # Well-formed JSON-API payload for a public node.
        self.public_project = {
            'data': {
                'type': 'nodes',
                'attributes':
                    {
                        'title': self.title,
                        'description': self.description,
                        'category': self.category,
                        'public': True,
                    }
            }
        }
        # Same payload shape, but the node is private.
        self.private_project = {
            'data': {
                'type': 'nodes',
                'attributes': {
                    'title': self.title,
                    'description': self.description,
                    'category': self.category,
                    'public': False
                }
            }
        }

    def test_node_create_invalid_data(self):
        """Non-object request bodies (string or list) are rejected as malformed."""
        res = self.app.post_json_api(self.url, "Incorrect data", auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], "Malformed request.")

        res = self.app.post_json_api(self.url, ["Incorrect data"], auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], "Malformed request.")

    def test_creates_public_project_logged_out(self):
        """Unauthenticated creation attempts get a 401 with a detail message."""
        res = self.app.post_json_api(self.url, self.public_project, expect_errors=True)
        assert_equal(res.status_code, 401)
        assert_in('detail', res.json['errors'][0])

    def test_creates_public_project_logged_in(self):
        """An authenticated user can create a public project; a log entry is added."""
        res = self.app.post_json_api(self.url, self.public_project, expect_errors=True, auth=self.user_one.auth)
        assert_equal(res.status_code, 201)
        assert_equal(res.json['data']['attributes']['title'], self.public_project['data']['attributes']['title'])
        assert_equal(res.json['data']['attributes']['description'], self.public_project['data']['attributes']['description'])
        assert_equal(res.json['data']['attributes']['category'], self.public_project['data']['attributes']['category'])
        assert_equal(res.content_type, 'application/vnd.api+json')
        pid = res.json['data']['id']
        project = Node.load(pid)
        # Creation is recorded as the most recent log action on the node.
        assert_equal(project.logs[-1].action, NodeLog.PROJECT_CREATED)

    def test_creates_private_project_logged_out(self):
        """Unauthenticated users cannot create private projects."""
        res = self.app.post_json_api(self.url, self.private_project, expect_errors=True)
        assert_equal(res.status_code, 401)
        assert_in('detail', res.json['errors'][0])

    def test_creates_private_project_logged_in_contributor(self):
        """The creator of a private project sees its attributes in the response."""
        res = self.app.post_json_api(self.url, self.private_project, auth=self.user_one.auth)
        assert_equal(res.status_code, 201)
        assert_equal(res.content_type, 'application/vnd.api+json')
        assert_equal(res.json['data']['attributes']['title'], self.private_project['data']['attributes']['title'])
        assert_equal(res.json['data']['attributes']['description'], self.private_project['data']['attributes']['description'])
        assert_equal(res.json['data']['attributes']['category'], self.private_project['data']['attributes']['category'])
        pid = res.json['data']['id']
        project = Node.load(pid)
        assert_equal(project.logs[-1].action, NodeLog.PROJECT_CREATED)

    def test_creates_project_from_template(self):
        """'template_from' clones structure (components) but not description/privacy."""
        template_from = ProjectFactory(creator=self.user_one, is_public=True)
        template_component = ProjectFactory(creator=self.user_one, is_public=True, parent=template_from)
        templated_project_title = 'Templated Project'
        templated_project_data = {
            'data': {
                'type': 'nodes',
                'attributes':
                    {
                        'title': templated_project_title,
                        'category': self.category,
                        'template_from': template_from._id,
                    }
            }
        }
        res = self.app.post_json_api(self.url, templated_project_data, auth=self.user_one.auth)
        assert_equal(res.status_code, 201)
        json_data = res.json['data']

        new_project_id = json_data['id']
        new_project = Node.load(new_project_id)
        assert_equal(new_project.title, templated_project_title)
        # Templated copies start with no description and are private by default.
        assert_equal(new_project.description, None)
        assert_false(new_project.is_public)
        assert_equal(len(new_project.nodes), len(template_from.nodes))
        assert_equal(new_project.nodes[0].title, template_component.title)

    def test_404_on_create_from_template_of_nonexistent_project(self):
        """Templating from a nonexistent guid yields a 404."""
        template_from_id = 'thisisnotavalidguid'
        templated_project_data = {
            'data': {
                'type': 'nodes',
                'attributes':
                    {
                        'title': 'No title',
                        'category': 'project',
                        'template_from': template_from_id,
                    }
            }
        }
        res = self.app.post_json_api(self.url, templated_project_data, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 404)

    def test_403_on_create_from_template_of_unauthorized_project(self):
        """Templating from another user's project is forbidden."""
        template_from = ProjectFactory(creator=self.user_two, is_public=True)
        templated_project_data = {
            'data': {
                'type': 'nodes',
                'attributes':
                    {
                        'title': 'No permission',
                        'category': 'project',
                        'template_from': template_from._id,
                    }
            }
        }
        res = self.app.post_json_api(self.url, templated_project_data, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 403)

    def test_creates_project_creates_project_and_sanitizes_html(self):
        """HTML in title/description is stripped before storage."""
        title = '<em>Cool</em> <strong>Project</strong>'
        description = 'An <script>alert("even cooler")</script> project'

        res = self.app.post_json_api(self.url, {
            'data': {
                'attributes': {
                    'title': title,
                    'description': description,
                    'category': self.category,
                    'public': True
                },
                'type': 'nodes'
            }
        }, auth=self.user_one.auth)
        project_id = res.json['data']['id']
        assert_equal(res.status_code, 201)
        assert_equal(res.content_type, 'application/vnd.api+json')
        url = '/{}nodes/{}/'.format(API_BASE, project_id)

        project = Node.load(project_id)
        assert_equal(project.logs[-1].action, NodeLog.PROJECT_CREATED)

        # Re-fetch the node to confirm what was actually persisted.
        res = self.app.get(url, auth=self.user_one.auth)
        assert_equal(res.json['data']['attributes']['title'], strip_html(title))
        assert_equal(res.json['data']['attributes']['description'], strip_html(description))
        assert_equal(res.json['data']['attributes']['category'], self.category)

    def test_creates_project_no_type(self):
        """Omitting the JSON-API 'type' member is a 400 pointing at /data/type."""
        project = {
            'data': {
                'attributes': {
                    'title': self.title,
                    'description': self.description,
                    'category': self.category,
                    'public': False
                }
            }
        }
        res = self.app.post_json_api(self.url, project, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'This field may not be null.')
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/type')

    def test_creates_project_incorrect_type(self):
        """A mismatched 'type' is a 409 Conflict per JSON-API semantics."""
        project = {
            'data': {
                'attributes': {
                    'title': self.title,
                    'description': self.description,
                    'category': self.category,
                    'public': False
                },
                'type': 'Wrong type.'
            }
        }
        res = self.app.post_json_api(self.url, project, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 409)
        assert_equal(res.json['errors'][0]['detail'], 'Resource identifier does not match server endpoint.')

    def test_creates_project_properties_not_nested(self):
        """Attributes must be nested under 'attributes', not placed on 'data'."""
        project = {
            'data': {
                'title': self.title,
                'description': self.description,
                'category': self.category,
                'public': False,
                'type': 'nodes'
            }
        }
        res = self.app.post_json_api(self.url, project, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Request must include /data/attributes.')
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/attributes')

    def test_create_project_invalid_title(self):
        """Titles longer than 200 characters are rejected."""
        project = {
            'data': {
                'type': 'nodes',
                'attributes': {
                    'title': 'A' * 201,
                    'description': self.description,
                    'category': self.category,
                    'public': False,
                }
            }
        }
        res = self.app.post_json_api(self.url, project, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Title cannot exceed 200 characters.')
class TestNodeBulkCreate(ApiTestCase):
    """Tests for bulk node creation via POST /v2/nodes/ (JSON-API bulk extension).

    Verifies authentication, all-or-nothing semantics, payload validation
    (type/attributes/title), the 100-item bulk limit, and error formatting.
    """

    def setUp(self):
        super(TestNodeBulkCreate, self).setUp()
        self.user_one = AuthUserFactory()
        self.url = '/{}nodes/'.format(API_BASE)
        self.title = 'Cool Project'
        self.description = 'A Properly Cool Project'
        self.category = 'data'
        self.user_two = AuthUserFactory()
        # Bulk payload members: resource objects without the outer 'data' wrapper.
        self.public_project = {
            'type': 'nodes',
            'attributes': {
                'title': self.title,
                'description': self.description,
                'category': self.category,
                'public': True
            }
        }
        self.private_project = {
            'type': 'nodes',
            'attributes': {
                'title': self.title,
                'description': self.description,
                'category': self.category,
                'public': False
            }
        }
        # Fails validation: title is required and may not be blank.
        self.empty_project = {'type': 'nodes', 'attributes': {'title': "", 'description': "", "category": ""}}

    def test_bulk_create_nodes_blank_request(self):
        """A bulk POST with no body is a 400."""
        res = self.app.post_json_api(self.url, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

    def test_bulk_create_all_or_nothing(self):
        """One invalid member rejects the whole batch; nothing is created."""
        res = self.app.post_json_api(self.url, {'data': [self.public_project, self.empty_project]}, bulk=True, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_logged_out(self):
        """Unauthenticated bulk creation is a 401 and creates nothing."""
        res = self.app.post_json_api(self.url, {'data': [self.public_project, self.private_project]}, bulk=True, expect_errors=True)
        assert_equal(res.status_code, 401)

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_error_formatting(self):
        """Each invalid member gets its own error with an indexed JSON pointer."""
        res = self.app.post_json_api(self.url, {'data': [self.empty_project, self.empty_project]}, bulk=True, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 400)
        assert_equal(len(res.json['errors']), 2)
        errors = res.json['errors']
        assert_items_equal([errors[0]['source'], errors[1]['source']],
                           [{'pointer': '/data/0/attributes/title'}, {'pointer': '/data/1/attributes/title'}])
        assert_items_equal([errors[0]['detail'], errors[1]['detail']],
                           ["This field may not be blank.", "This field may not be blank."])

    def test_bulk_create_limits(self):
        """More than 100 members in one bulk request is rejected."""
        node_create_list = {'data': [self.public_project] * 101}
        res = self.app.post_json_api(self.url, node_create_list, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.json['errors'][0]['detail'], 'Bulk operation limit is 100, got 101.')
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data')

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_no_type(self):
        """A member missing 'type' fails with an indexed pointer to it."""
        payload = {'data': [{"attributes": {'category': self.category, 'title': self.title}}]}
        res = self.app.post_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/0/type')

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_incorrect_type(self):
        """A member with a mismatched 'type' is a 409 Conflict."""
        payload = {'data': [self.public_project, {'type': 'Incorrect type.', "attributes": {'category': self.category, 'title': self.title}}]}
        res = self.app.post_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 409)

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_no_attributes(self):
        """A member missing 'attributes' entirely is a 400."""
        payload = {'data': [self.public_project, {'type': 'nodes', }]}
        res = self.app.post_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/attributes')

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_no_title(self):
        """A member with attributes but no title is a 400 at the indexed pointer."""
        payload = {'data': [self.public_project, {'type': 'nodes', "attributes": {'category': self.category}}]}
        res = self.app.post_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/1/attributes/title')

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_ugly_payload(self):
        """A non-JSON-API string body is a 400 and creates nothing."""
        payload = 'sdf;jlasfd'
        res = self.app.post_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 0)

    def test_bulk_create_logged_in(self):
        """A valid two-member bulk request creates both nodes and echoes their attributes."""
        res = self.app.post_json_api(self.url, {'data': [self.public_project, self.private_project]}, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 201)
        assert_equal(len(res.json['data']), 2)
        assert_equal(res.json['data'][0]['attributes']['title'], self.public_project['attributes']['title'])
        assert_equal(res.json['data'][0]['attributes']['category'], self.public_project['attributes']['category'])
        assert_equal(res.json['data'][0]['attributes']['description'], self.public_project['attributes']['description'])
        # Fixed copy-paste slip: data[1] is the private project, so compare
        # against self.private_project (the fixtures share the same values,
        # so behavior is unchanged, but the assertion now checks the right one).
        assert_equal(res.json['data'][1]['attributes']['title'], self.private_project['attributes']['title'])
        assert_equal(res.json['data'][1]['attributes']['category'], self.private_project['attributes']['category'])
        assert_equal(res.json['data'][1]['attributes']['description'], self.private_project['attributes']['description'])

        assert_equal(res.content_type, 'application/vnd.api+json')

        res = self.app.get(self.url, auth=self.user_one.auth)
        assert_equal(len(res.json['data']), 2)
        id_one = res.json['data'][0]['id']
        id_two = res.json['data'][1]['id']

        # Clean up the created nodes via bulk delete.
        res = self.app.delete_json_api(self.url, {'data': [{'id': id_one, 'type': 'nodes'},
                                                           {'id': id_two, 'type': 'nodes'}]},
                                       auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 204)
class TestNodeBulkUpdate(ApiTestCase):
    """Tests for bulk full update (PUT) of nodes via /v2/nodes/.

    Verifies authentication/permission handling, all-or-nothing semantics,
    payload validation (id/type required, list not dict), the 100-item
    bulk limit, and that failed requests leave the nodes unmodified.
    """

    def setUp(self):
        super(TestNodeBulkUpdate, self).setUp()
        self.user = AuthUserFactory()
        self.title = 'Cool Project'
        self.new_title = 'Super Cool Project'
        self.description = 'A Properly Cool Project'
        self.new_description = 'An even cooler project'
        self.category = 'data'
        self.new_category = 'project'
        self.user_two = AuthUserFactory()

        # Two public projects owned by self.user, plus a payload that
        # updates both titles/descriptions/categories at once.
        self.public_project = ProjectFactory(title=self.title,
                                             description=self.description,
                                             category=self.category,
                                             is_public=True,
                                             creator=self.user)
        self.public_project_two = ProjectFactory(title=self.title,
                                                 description=self.description,
                                                 category=self.category,
                                                 is_public=True,
                                                 creator=self.user)
        self.public_payload = {
            'data': [
                {
                    'id': self.public_project._id,
                    'type': 'nodes',
                    'attributes': {
                        'title': self.new_title,
                        'description': self.new_description,
                        'category': self.new_category,
                        'public': True
                    }
                },
                {
                    'id': self.public_project_two._id,
                    'type': 'nodes',
                    'attributes': {
                        'title': self.new_title,
                        'description': self.new_description,
                        'category': self.new_category,
                        'public': True
                    }
                }
            ]
        }

        self.url = '/{}nodes/'.format(API_BASE)

        # Private counterparts, used for permission tests.
        self.private_project = ProjectFactory(title=self.title,
                                              description=self.description,
                                              category=self.category,
                                              is_public=False,
                                              creator=self.user)
        self.private_project_two = ProjectFactory(title=self.title,
                                                  description=self.description,
                                                  category=self.category,
                                                  is_public=False,
                                                  creator=self.user)
        self.private_payload = {'data': [
            {
                'id': self.private_project._id,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title,
                    'description': self.new_description,
                    'category': self.new_category,
                    'public': False
                }
            },
            {
                'id': self.private_project_two._id,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title,
                    'description': self.new_description,
                    'category': self.new_category,
                    'public': False
                }
            }
        ]
        }

        # Invalid payload: blank title/description/category for both nodes.
        self.empty_payload = {'data': [
            {'id': self.public_project._id, 'type': 'nodes', 'attributes': {'title': "", 'description': "", "category": ""}},
            {'id': self.public_project_two._id, 'type': 'nodes', 'attributes': {'title': "", 'description': "", "category": ""}}
        ]}

    def test_bulk_update_nodes_blank_request(self):
        """A bulk PUT with no body is a 400."""
        res = self.app.put_json_api(self.url, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

    def test_bulk_update_blank_but_not_empty_title(self):
        """A whitespace-only title in one member fails the whole batch."""
        payload = {
            "data": [
                {
                    "id": self.public_project._id,
                    "type": "nodes",
                    "attributes": {
                        "title": "This shouldn't update.",
                        "category": "instrumentation"
                    }
                },
                {
                    "id": self.public_project_two._id,
                    "type": "nodes",
                    "attributes": {
                        "title": " ",
                        "category": "hypothesis"
                    }
                }
            ]
        }
        url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        res = self.app.put_json_api(self.url, payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

        # The first (valid) member must not have been applied either.
        res = self.app.get(url)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_with_tags(self):
        """Tags supplied in a bulk update are applied and echoed back."""
        new_payload = {'data': [{'id': self.public_project._id, 'type': 'nodes', 'attributes': {'title': 'New title', 'category': 'project', 'tags': ['new tag']}}]}

        res = self.app.put_json_api(self.url, new_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 200)
        assert_equal(res.json['data'][0]['attributes']['tags'], ['new tag'])

    def test_bulk_update_public_projects_one_not_found(self):
        """An unknown id in the batch aborts the whole update."""
        empty_payload = {'data': [
            {
                'id': 12345,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title,
                    'category': self.new_category
                }
            }, self.public_payload['data'][0]
        ]}
        res = self.app.put_json_api(self.url, empty_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Could not find all objects to update.')

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        res = self.app.get(url)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_public_projects_logged_out(self):
        """Unauthenticated bulk update is a 401 and changes nothing."""
        res = self.app.put_json_api(self.url, self.public_payload, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], "Authentication credentials were not provided.")

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.public_project_two._id)

        res = self.app.get(url)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_public_projects_logged_in(self):
        """The creator can bulk-update both public projects in one request."""
        res = self.app.put_json_api(self.url, self.public_payload, auth=self.user.auth, bulk=True)
        assert_equal(res.status_code, 200)
        assert_equal({self.public_project._id, self.public_project_two._id},
                     {res.json['data'][0]['id'], res.json['data'][1]['id']})
        assert_equal(res.json['data'][0]['attributes']['title'], self.new_title)
        assert_equal(res.json['data'][1]['attributes']['title'], self.new_title)

    def test_bulk_update_private_projects_logged_out(self):
        """Unauthenticated bulk update of private projects is a 401."""
        res = self.app.put_json_api(self.url, self.private_payload, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')

        url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.private_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_private_projects_logged_in_contrib(self):
        """A contributor can bulk-update both private projects."""
        res = self.app.put_json_api(self.url, self.private_payload, auth=self.user.auth, bulk=True)
        assert_equal(res.status_code, 200)
        assert_equal({self.private_project._id, self.private_project_two._id},
                     {res.json['data'][0]['id'], res.json['data'][1]['id']})
        assert_equal(res.json['data'][0]['attributes']['title'], self.new_title)
        assert_equal(res.json['data'][1]['attributes']['title'], self.new_title)

    def test_bulk_update_private_projects_logged_in_non_contrib(self):
        """A non-contributor gets a 403 and the projects are unchanged."""
        res = self.app.put_json_api(self.url, self.private_payload, auth=self.user_two.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.private_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_private_projects_logged_in_read_only_contrib(self):
        """Read-only contributors cannot bulk-update."""
        self.private_project.add_contributor(self.user_two, permissions=[permissions.READ], save=True)
        self.private_project_two.add_contributor(self.user_two, permissions=[permissions.READ], save=True)
        res = self.app.put_json_api(self.url, self.private_payload, auth=self.user_two.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.private_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_projects_send_dictionary_not_list(self):
        """A bulk PUT body whose 'data' is a dict (not a list) is a 400."""
        res = self.app.put_json_api(self.url, {'data': {'id': self.public_project._id, 'type': 'nodes',
                                                        'attributes': {'title': self.new_title, 'category': "project"}}},
                                    auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Expected a list of items but got type "dict".')

    def test_bulk_update_error_formatting(self):
        """Each invalid member gets its own error with an indexed JSON pointer."""
        res = self.app.put_json_api(self.url, self.empty_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(len(res.json['errors']), 2)
        errors = res.json['errors']
        assert_items_equal([errors[0]['source'], errors[1]['source']],
                           [{'pointer': '/data/0/attributes/title'}, {'pointer': '/data/1/attributes/title'}])
        assert_items_equal([errors[0]['detail'], errors[1]['detail']],
                           ['This field may not be blank.'] * 2)

    def test_bulk_update_id_not_supplied(self):
        """A member without an id fails the batch; valid members are not applied."""
        res = self.app.put_json_api(self.url, {'data': [self.public_payload['data'][1], {'type': 'nodes', 'attributes':
            {'title': self.new_title, 'category': self.new_category}}]}, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(len(res.json['errors']), 1)
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/1/id')
        assert_equal(res.json['errors'][0]['detail'], "This field may not be null.")

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_type_not_supplied(self):
        """A member without a type fails the batch; valid members are not applied."""
        res = self.app.put_json_api(self.url, {'data': [self.public_payload['data'][1], {'id': self.public_project._id, 'attributes':
            {'title': self.new_title, 'category': self.new_category}}]}, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(len(res.json['errors']), 1)
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data/1/type')
        assert_equal(res.json['errors'][0]['detail'], "This field may not be null.")

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_incorrect_type(self):
        """A member with a mismatched type is a 409; nothing is applied."""
        res = self.app.put_json_api(self.url, {'data': [self.public_payload['data'][1], {'id': self.public_project._id, 'type': 'Incorrect', 'attributes':
            {'title': self.new_title, 'category': self.new_category}}]}, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 409)

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_update_limits(self):
        """More than 100 members in one bulk update is rejected."""
        node_update_list = {'data': [self.public_payload['data'][0]] * 101}
        res = self.app.put_json_api(self.url, node_update_list, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.json['errors'][0]['detail'], 'Bulk operation limit is 100, got 101.')
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data')

    def test_bulk_update_no_title_or_category(self):
        """PUT requires title and category; a member with neither fails the batch."""
        new_payload = {'id': self.public_project._id, 'type': 'nodes', 'attributes': {}}
        res = self.app.put_json_api(self.url, {'data': [self.public_payload['data'][1], new_payload]}, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)
class TestNodeBulkPartialUpdate(ApiTestCase):
    """Tests for bulk partial update (PATCH) of nodes via /v2/nodes/.

    Unlike PUT, PATCH payloads may supply only the fields being changed.
    Verifies authentication/permission handling, validation, the bulk
    limit, and that privacy changes do not disturb tags.
    """

    def setUp(self):
        super(TestNodeBulkPartialUpdate, self).setUp()
        self.user = AuthUserFactory()
        self.title = 'Cool Project'
        self.new_title = 'Super Cool Project'
        self.description = 'A Properly Cool Project'
        self.new_description = 'An even cooler project'
        self.category = 'data'
        self.new_category = 'project'
        self.user_two = AuthUserFactory()

        # Two public projects owned by self.user, and a payload that
        # patches only their titles.
        self.public_project = ProjectFactory(title=self.title,
                                             description=self.description,
                                             category=self.category,
                                             is_public=True,
                                             creator=self.user)
        self.public_project_two = ProjectFactory(title=self.title,
                                                 description=self.description,
                                                 category=self.category,
                                                 is_public=True,
                                                 creator=self.user)
        self.public_payload = {'data': [
            {
                'id': self.public_project._id,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title
                }
            },
            {
                'id': self.public_project_two._id,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title
                }
            }
        ]}

        self.url = '/{}nodes/'.format(API_BASE)

        # Private counterparts, used for permission tests.
        self.private_project = ProjectFactory(title=self.title,
                                              description=self.description,
                                              category=self.category,
                                              is_public=False,
                                              creator=self.user)
        self.private_project_two = ProjectFactory(title=self.title,
                                                  description=self.description,
                                                  category=self.category,
                                                  is_public=False,
                                                  creator=self.user)
        self.private_payload = {'data': [
            {
                'id': self.private_project._id,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title
                }
            },
            {
                'id': self.private_project_two._id,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title
                }
            }
        ]}

        # Invalid payload: blank titles for both nodes.
        self.empty_payload = {'data': [
            {'id': self.public_project._id, 'type': 'nodes', 'attributes': {'title': ""}},
            {'id': self.public_project_two._id, 'type': 'nodes', 'attributes': {'title': ""}}
        ]
        }

    def test_bulk_patch_nodes_blank_request(self):
        """A bulk PATCH with no body is a 400."""
        res = self.app.patch_json_api(self.url, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

    def test_bulk_partial_update_public_projects_one_not_found(self):
        """An unknown id in the batch aborts the whole update."""
        empty_payload = {'data': [
            {
                'id': 12345,
                'type': 'nodes',
                'attributes': {
                    'title': self.new_title
                }
            },
            self.public_payload['data'][0]
        ]}

        res = self.app.patch_json_api(self.url, empty_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Could not find all objects to update.')

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        res = self.app.get(url)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_partial_update_public_projects_logged_out(self):
        """Unauthenticated bulk PATCH is a 401 and changes nothing."""
        res = self.app.patch_json_api(self.url, self.public_payload, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], "Authentication credentials were not provided.")

        url = '/{}nodes/{}/'.format(API_BASE, self.public_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.public_project_two._id)

        res = self.app.get(url)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_partial_update_public_projects_logged_in(self):
        """The creator can bulk-patch both public projects' titles."""
        res = self.app.patch_json_api(self.url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 200)
        assert_equal({self.public_project._id, self.public_project_two._id},
                     {res.json['data'][0]['id'], res.json['data'][1]['id']})
        assert_equal(res.json['data'][0]['attributes']['title'], self.new_title)
        assert_equal(res.json['data'][1]['attributes']['title'], self.new_title)

    def test_bulk_partial_update_private_projects_logged_out(self):
        """Unauthenticated bulk PATCH of private projects is a 401."""
        res = self.app.patch_json_api(self.url, self.private_payload, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')

        url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.private_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_partial_update_private_projects_logged_in_contrib(self):
        """A contributor can bulk-patch both private projects."""
        res = self.app.patch_json_api(self.url, self.private_payload, auth=self.user.auth, bulk=True)
        assert_equal(res.status_code, 200)
        assert_equal({self.private_project._id, self.private_project_two._id},
                     {res.json['data'][0]['id'], res.json['data'][1]['id']})
        assert_equal(res.json['data'][0]['attributes']['title'], self.new_title)
        assert_equal(res.json['data'][1]['attributes']['title'], self.new_title)

    def test_bulk_partial_update_private_projects_logged_in_non_contrib(self):
        """A non-contributor gets a 403 and the projects are unchanged."""
        res = self.app.patch_json_api(self.url, self.private_payload, auth=self.user_two.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.private_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_partial_update_private_projects_logged_in_read_only_contrib(self):
        """Read-only contributors cannot bulk-patch."""
        self.private_project.add_contributor(self.user_two, permissions=[permissions.READ], save=True)
        self.private_project_two.add_contributor(self.user_two, permissions=[permissions.READ], save=True)
        res = self.app.patch_json_api(self.url, self.private_payload, auth=self.user_two.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        url = '/{}nodes/{}/'.format(API_BASE, self.private_project._id)
        url_two = '/{}nodes/{}/'.format(API_BASE, self.private_project_two._id)

        res = self.app.get(url, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

        res = self.app.get(url_two, auth=self.user.auth)
        assert_equal(res.json['data']['attributes']['title'], self.title)

    def test_bulk_partial_update_projects_send_dictionary_not_list(self):
        """A bulk PATCH body whose 'data' is a dict (not a list) is a 400."""
        res = self.app.patch_json_api(self.url, {'data': {'id': self.public_project._id, 'attributes': {'title': self.new_title, 'category': "project"}}},
                                      auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Expected a list of items but got type "dict".')

    def test_bulk_partial_update_error_formatting(self):
        """Each invalid member gets its own error with an indexed JSON pointer."""
        res = self.app.patch_json_api(self.url, self.empty_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(len(res.json['errors']), 2)
        errors = res.json['errors']
        assert_items_equal([errors[0]['source'], errors[1]['source']],
                           [{'pointer': '/data/0/attributes/title'}, {'pointer': '/data/1/attributes/title'}])
        assert_items_equal([errors[0]['detail'], errors[1]['detail']],
                           ['This field may not be blank.'] * 2)

    def test_bulk_partial_update_id_not_supplied(self):
        """A member without an id is a 400 ('This field may not be null.')."""
        res = self.app.patch_json_api(self.url, {'data': [{'type': 'nodes', 'attributes': {'title': self.new_title}}]},
                                      auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(len(res.json['errors']), 1)
        assert_equal(res.json['errors'][0]['detail'], 'This field may not be null.')

    def test_bulk_partial_update_limits(self):
        """More than 100 members in one bulk PATCH is rejected."""
        node_update_list = {'data': [self.public_payload['data'][0]] * 101}
        res = self.app.patch_json_api(self.url, node_update_list, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.json['errors'][0]['detail'], 'Bulk operation limit is 100, got 101.')
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data')

    def test_bulk_partial_update_privacy_has_no_effect_on_tags(self):
        """Making a node private leaves its tags intact."""
        self.public_project.add_tag('tag1', Auth(self.public_project.creator), save=True)
        payload = {'id': self.public_project._id, 'type': 'nodes', 'attributes': {'public': False}}
        res = self.app.patch_json_api(self.url, {'data': [payload]}, auth=self.user.auth, bulk=True)
        assert_equal(res.status_code, 200)
        self.public_project.reload()
        assert_equal(self.public_project.tags, ['tag1'])
        assert_equal(self.public_project.is_public, False)
class TestNodeBulkUpdateSkipUneditable(ApiTestCase):
    """Bulk PUT/PATCH of nodes with the ``skip_uneditable`` query flag.

    Four public projects are created: two owned by ``self.user`` and two by
    ``self.user_two``.  When ``self.user`` submits a bulk update covering all
    four with ``skip_uneditable=True``, the two editable projects are updated
    and the two uneditable ones are reported back under ``errors`` instead of
    failing the whole request.
    """

    def setUp(self):
        """Create the four projects and a 4-item update payload."""
        super(TestNodeBulkUpdateSkipUneditable, self).setUp()
        self.user = AuthUserFactory()
        self.user_two = AuthUserFactory()

        self.title = 'Cool Project'
        self.new_title = 'Super Cool Project'
        self.description = 'A Properly Cool Project'
        self.new_description = 'An even cooler project'
        self.category = 'data'
        self.new_category = 'project'

        # Editable pair (created by self.user).
        self.public_project = ProjectFactory(title=self.title,
                                             description=self.description,
                                             category=self.category,
                                             is_public=True,
                                             creator=self.user)

        self.public_project_two = ProjectFactory(title=self.title,
                                                 description=self.description,
                                                 category=self.category,
                                                 is_public=True,
                                                 creator=self.user)

        # Uneditable pair (created by self.user_two).
        self.public_project_three = ProjectFactory(title=self.title,
                                                   description=self.description,
                                                   category=self.category,
                                                   is_public=True,
                                                   creator=self.user_two)

        self.public_project_four = ProjectFactory(title=self.title,
                                                  description=self.description,
                                                  category=self.category,
                                                  is_public=True,
                                                  creator=self.user_two)

        # One payload item per project, all requesting identical edits.
        self.public_payload = {
            'data': [
                {
                    'id': self.public_project._id,
                    'type': 'nodes',
                    'attributes': {
                        'title': self.new_title,
                        'description': self.new_description,
                        'category': self.new_category,
                        'public': True
                    }
                },
                {
                    'id': self.public_project_two._id,
                    'type': 'nodes',
                    'attributes': {
                        'title': self.new_title,
                        'description': self.new_description,
                        'category': self.new_category,
                        'public': True
                    }
                },
                {
                    'id': self.public_project_three._id,
                    'type': 'nodes',
                    'attributes': {
                        'title': self.new_title,
                        'description': self.new_description,
                        'category': self.new_category,
                        'public': True
                    }
                },
                {
                    'id': self.public_project_four._id,
                    'type': 'nodes',
                    'attributes': {
                        'title': self.new_title,
                        'description': self.new_description,
                        'category': self.new_category,
                        'public': True
                    }
                }
            ]
        }

        self.url = '/{}nodes/?skip_uneditable=True'.format(API_BASE)

    def test_skip_uneditable_bulk_update(self):
        """PUT updates the editable pair and lists the rest under ``errors``."""
        res = self.app.put_json_api(self.url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 200)
        edited = res.json['data']
        skipped = res.json['errors']
        assert_items_equal([edited[0]['id'], edited[1]['id']],
                           [self.public_project._id, self.public_project_two._id])
        assert_items_equal([skipped[0]['_id'], skipped[1]['_id']],
                           [self.public_project_three._id, self.public_project_four._id])

        self.public_project.reload()
        self.public_project_two.reload()
        self.public_project_three.reload()
        self.public_project_four.reload()

        # Only the user's own projects were actually retitled.
        assert_equal(self.public_project.title, self.new_title)
        assert_equal(self.public_project_two.title, self.new_title)
        assert_equal(self.public_project_three.title, self.title)
        assert_equal(self.public_project_four.title, self.title)

    def test_skip_uneditable_bulk_update_query_param_required(self):
        """Without ``skip_uneditable`` the mixed PUT is rejected atomically (403)."""
        url = '/{}nodes/'.format(API_BASE)
        res = self.app.put_json_api(url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)

        self.public_project.reload()
        self.public_project_two.reload()
        self.public_project_three.reload()
        self.public_project_four.reload()

        # Nothing was updated, including the projects the user could edit.
        assert_equal(self.public_project.title, self.title)
        assert_equal(self.public_project_two.title, self.title)
        assert_equal(self.public_project_three.title, self.title)
        assert_equal(self.public_project_four.title, self.title)

    def test_skip_uneditable_equals_false_bulk_update(self):
        """``skip_uneditable=False`` behaves like omitting the flag: 403, no edits."""
        url = '/{}nodes/?skip_uneditable=False'.format(API_BASE)
        res = self.app.put_json_api(url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)

        self.public_project.reload()
        self.public_project_two.reload()
        self.public_project_three.reload()
        self.public_project_four.reload()

        assert_equal(self.public_project.title, self.title)
        assert_equal(self.public_project_two.title, self.title)
        assert_equal(self.public_project_three.title, self.title)
        assert_equal(self.public_project_four.title, self.title)

    def test_skip_uneditable_bulk_partial_update(self):
        """PATCH mirrors the PUT behaviour: edit what you can, skip the rest."""
        res = self.app.patch_json_api(self.url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 200)
        edited = res.json['data']
        skipped = res.json['errors']
        assert_items_equal([edited[0]['id'], edited[1]['id']],
                           [self.public_project._id, self.public_project_two._id])
        assert_items_equal([skipped[0]['_id'], skipped[1]['_id']],
                           [self.public_project_three._id, self.public_project_four._id])

        self.public_project.reload()
        self.public_project_two.reload()
        self.public_project_three.reload()
        self.public_project_four.reload()

        assert_equal(self.public_project.title, self.new_title)
        assert_equal(self.public_project_two.title, self.new_title)
        assert_equal(self.public_project_three.title, self.title)
        assert_equal(self.public_project_four.title, self.title)

    def test_skip_uneditable_bulk_partial_update_query_param_required(self):
        """Without the flag a mixed PATCH is also rejected atomically (403)."""
        url = '/{}nodes/'.format(API_BASE)
        res = self.app.patch_json_api(url, self.public_payload, auth=self.user.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)

        self.public_project.reload()
        self.public_project_two.reload()
        self.public_project_three.reload()
        self.public_project_four.reload()

        assert_equal(self.public_project.title, self.title)
        assert_equal(self.public_project_two.title, self.title)
        assert_equal(self.public_project_three.title, self.title)
        assert_equal(self.public_project_four.title, self.title)
class TestNodeBulkDelete(ApiTestCase):
    """Bulk DELETE of nodes: payload validation, permissions, atomicity."""

    def setUp(self):
        """Two public projects for user_one plus one private project per user."""
        super(TestNodeBulkDelete, self).setUp()
        self.user_one = AuthUserFactory()
        self.user_two = AuthUserFactory()
        self.project_one = ProjectFactory(title="Project One", is_public=True, creator=self.user_one, category="project")
        self.project_two = ProjectFactory(title="Project Two", description="One Three", is_public=True, creator=self.user_one)
        self.private_project_user_one = ProjectFactory(title="Private Project User One",
                                                       is_public=False,
                                                       creator=self.user_one)
        self.private_project_user_two = ProjectFactory(title="Private Project User Two",
                                                       is_public=False,
                                                       creator=self.user_two)

        self.url = "/{}nodes/".format(API_BASE)
        self.project_one_url = '/{}nodes/{}/'.format(API_BASE, self.project_one._id)
        self.project_two_url = '/{}nodes/{}/'.format(API_BASE, self.project_two._id)
        self.private_project_url = "/{}nodes/{}/".format(API_BASE, self.private_project_user_one._id)

        self.public_payload = {'data': [{'id': self.project_one._id, 'type': 'nodes'}, {'id': self.project_two._id, 'type': 'nodes'}]}
        self.private_payload = {'data': [{'id': self.private_project_user_one._id, 'type': 'nodes'}]}

    def test_bulk_delete_nodes_blank_request(self):
        """A bulk DELETE with no body is a 400."""
        res = self.app.delete_json_api(self.url, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)

    def test_bulk_delete_no_type(self):
        """Every item in the payload must carry a ``type``."""
        payload = {'data': [
            {'id': self.project_one._id},
            {'id': self.project_two._id}
        ]}
        res = self.app.delete_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Request must include /type.')

    def test_bulk_delete_no_id(self):
        """Every item in the payload must carry an ``id``."""
        payload = {'data': [
            {'type': 'nodes'},
            {'id': 'nodes'}
        ]}
        res = self.app.delete_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Request must include /data/id.')

    def test_bulk_delete_dict_inside_data(self):
        """``data`` must be a list of items, not a single dict."""
        res = self.app.delete_json_api(self.url, {'data': {'id': self.project_one._id, 'type': 'nodes'}},
                                       auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Expected a list of items but got type "dict".')

    def test_bulk_delete_invalid_type(self):
        """A wrong resource ``type`` is a 409 Conflict."""
        res = self.app.delete_json_api(self.url, {'data': [{'type': 'Wrong type', 'id': self.project_one._id}]},
                                       auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 409)

    def test_bulk_delete_public_projects_logged_in(self):
        """The creator can bulk-delete their own public projects (204)."""
        res = self.app.delete_json_api(self.url, self.public_payload, auth=self.user_one.auth, bulk=True)
        assert_equal(res.status_code, 204)
        # Deleted nodes answer 410 Gone afterwards.
        res = self.app.get(self.project_one_url, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 410)
        # NOTE(review): project_two's deletion is never asserted here -- only
        # reloaded.  Consider also checking project_two_url returns 410.
        self.project_one.reload()
        self.project_two.reload()

    def test_bulk_delete_public_projects_logged_out(self):
        """Anonymous bulk DELETE is a 401 and deletes nothing."""
        res = self.app.delete_json_api(self.url, self.public_payload, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')

        res = self.app.get(self.project_one_url, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 200)

        res = self.app.get(self.project_two_url, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 200)

    def test_bulk_delete_private_projects_logged_out(self):
        """Anonymous bulk DELETE of a private project is a 401."""
        res = self.app.delete_json_api(self.url, self.private_payload, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 401)
        assert_equal(res.json['errors'][0]['detail'], 'Authentication credentials were not provided.')

    def test_bulk_delete_private_projects_logged_in_contributor(self):
        """An admin contributor may bulk-delete their private project (204)."""
        res = self.app.delete_json_api(self.url, self.private_payload,
                                       auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 204)

        res = self.app.get(self.private_project_url, auth=self.user_one.auth, expect_errors=True)
        assert_equal(res.status_code, 410)
        self.private_project_user_one.reload()

    def test_bulk_delete_private_projects_logged_in_non_contributor(self):
        """A non-contributor's bulk DELETE of a private project is a 403."""
        res = self.app.delete_json_api(self.url, self.private_payload,
                                       auth=self.user_two.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        res = self.app.get(self.private_project_url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)

    def test_bulk_delete_private_projects_logged_in_read_only_contributor(self):
        """Read-only access is not enough to bulk-delete (403)."""
        self.private_project_user_one.add_contributor(self.user_two, permissions=[permissions.READ], save=True)
        res = self.app.delete_json_api(self.url, self.private_payload,
                                       auth=self.user_two.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        res = self.app.get(self.private_project_url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)

    def test_bulk_delete_all_or_nothing(self):
        """Without skip_uneditable a single forbidden item rolls back the batch."""
        new_payload = {'data': [{'id': self.private_project_user_one._id, 'type': 'nodes'}, {'id': self.private_project_user_two._id, 'type': 'nodes'}]}
        res = self.app.delete_json_api(self.url, new_payload,
                                       auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
        assert_equal(res.json['errors'][0]['detail'], 'You do not have permission to perform this action.')

        res = self.app.get(self.private_project_url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)

        url = "/{}nodes/{}/".format(API_BASE, self.private_project_user_two._id)
        res = self.app.get(url, auth=self.user_two.auth)
        assert_equal(res.status_code, 200)

    def test_bulk_delete_limits(self):
        """Bulk DELETE payloads above the 100-item limit are a 400."""
        new_payload = {'data': [{'id': self.private_project_user_one._id, 'type':'nodes'}] * 101 }
        res = self.app.delete_json_api(self.url, new_payload,
                                       auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Bulk operation limit is 100, got 101.')
        assert_equal(res.json['errors'][0]['source']['pointer'], '/data')

    def test_bulk_delete_invalid_payload_one_not_found(self):
        """One unknown id fails the whole batch and deletes nothing."""
        new_payload = {'data': [self.public_payload['data'][0], {'id': '12345', 'type': 'nodes'}]}
        res = self.app.delete_json_api(self.url, new_payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
        assert_equal(res.json['errors'][0]['detail'], 'Could not find all objects to delete.')

        res = self.app.get(self.project_one_url, auth=self.user_one.auth)
        assert_equal(res.status_code, 200)

    def test_bulk_delete_no_payload(self):
        """A bodyless bulk DELETE is a 400."""
        res = self.app.delete_json_api(self.url, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 400)
class TestNodeBulkDeleteSkipUneditable(ApiTestCase):
    """Bulk DELETE with ``skip_uneditable=True``: delete what you can,
    report the rest under ``errors`` instead of failing the whole request.
    """

    def setUp(self):
        """Two public projects per user; payload targets all four."""
        super(TestNodeBulkDeleteSkipUneditable, self).setUp()
        self.user_one = AuthUserFactory()
        self.user_two = AuthUserFactory()
        self.project_one = ProjectFactory(title="Project One", is_public=True, creator=self.user_one)
        self.project_two = ProjectFactory(title="Project Two", is_public=True, creator=self.user_one)
        self.project_three = ProjectFactory(title="Project Three", is_public=True, creator=self.user_two)
        self.project_four = ProjectFactory(title="Project Four", is_public=True, creator=self.user_two)

        self.payload = {
            'data': [
                {
                    'id': self.project_one._id,
                    'type': 'nodes',
                },
                {
                    'id': self.project_two._id,
                    'type': 'nodes',
                },
                {
                    'id': self.project_three._id,
                    'type': 'nodes',
                },
                {
                    'id': self.project_four._id,
                    'type': 'nodes',
                }
            ]
        }

        self.url = "/{}nodes/?skip_uneditable=True".format(API_BASE)

    def tearDown(self):
        # Remove all nodes so deletions here don't leak into other tests.
        super(TestNodeBulkDeleteSkipUneditable, self).tearDown()
        Node.remove()

    def test_skip_uneditable_bulk_delete(self):
        """user_one's projects are deleted; user_two's are skipped (200)."""
        res = self.app.delete_json_api(self.url, self.payload, auth=self.user_one.auth, bulk=True)
        assert_equal(res.status_code, 200)
        skipped = res.json['errors']
        assert_items_equal([skipped[0]['id'], skipped[1]['id']],
                           [self.project_three._id, self.project_four._id])

        # Only the undeleted (skipped) projects remain in the node list.
        res = self.app.get('/{}nodes/'.format(API_BASE), auth=self.user_one.auth)
        assert_items_equal([res.json['data'][0]['id'], res.json['data'][1]['id']],
                           [self.project_three._id, self.project_four._id])

    def test_skip_uneditable_bulk_delete_query_param_required(self):
        """Without the flag the mixed DELETE is rejected and nothing is removed."""
        url = '/{}nodes/'.format(API_BASE)
        res = self.app.delete_json_api(url, self.payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)

        res = self.app.get('/{}nodes/'.format(API_BASE), auth=self.user_one.auth)
        assert_equal(res.status_code, 200)
        assert_equal(len(res.json['data']), 4)

    def test_skip_uneditable_has_admin_permission_for_all_nodes(self):
        """If every target is editable the response is a plain 204."""
        payload = {
            'data': [
                {
                    'id': self.project_one._id,
                    'type': 'nodes',
                },
                {
                    'id': self.project_two._id,
                    'type': 'nodes',
                }
            ]
        }

        res = self.app.delete_json_api(self.url, payload, auth=self.user_one.auth, bulk=True)
        assert_equal(res.status_code, 204)
        self.project_one.reload()
        self.project_two.reload()

        assert_equal(self.project_one.is_deleted, True)
        assert_equal(self.project_two.is_deleted, True)

    def test_skip_uneditable_does_not_have_admin_permission_for_any_nodes(self):
        """If no target is editable the request still fails with 403."""
        payload = {
            'data': [
                {
                    'id': self.project_three._id,
                    'type': 'nodes',
                },
                {
                    'id': self.project_four._id,
                    'type': 'nodes',
                }
            ]
        }

        res = self.app.delete_json_api(self.url, payload, auth=self.user_one.auth, expect_errors=True, bulk=True)
        assert_equal(res.status_code, 403)
class TestNodeListPagination(ApiTestCase):
    """Pagination of the node list: default size, max size, embeds."""

    def setUp(self):
        super(TestNodeListPagination, self).setUp()

        # Ordered by date modified: oldest first
        self.users = [UserFactory() for _ in range(11)]
        self.projects = [ProjectFactory(is_public=True, creator=self.users[0]) for _ in range(11)]

        self.url = '/{}nodes/'.format(API_BASE)

    def tearDown(self):
        # Clean out created nodes so counts don't leak into other tests.
        super(TestNodeListPagination, self).tearDown()
        Node.remove()

    def test_default_pagination_size(self):
        """Default page size is 10, so one of the 11 projects is cut off."""
        res = self.app.get(self.url, auth=Auth(self.users[0]))
        pids = [e['id'] for e in res.json['data']]
        # projects[0] (the oldest) is the one missing from the first page --
        # presumably results are newest-first; confirm against API ordering.
        for project in self.projects[1:]:
            assert_in(project._id, pids)
        assert_not_in(self.projects[0]._id, pids)
        assert_equal(res.json['links']['meta']['per_page'], 10)

    def test_max_page_size_enforced(self):
        """Requesting more than MAX_PAGE_SIZE is clamped to MAX_PAGE_SIZE."""
        url = '{}?page[size]={}'.format(self.url, MAX_PAGE_SIZE+1)
        res = self.app.get(url, auth=Auth(self.users[0]))
        pids = [e['id'] for e in res.json['data']]
        for project in self.projects:
            assert_in(project._id, pids)
        assert_equal(res.json['links']['meta']['per_page'], MAX_PAGE_SIZE)

    def test_embed_page_size_not_affected(self):
        """A large top-level page size does not change embedded pagination."""
        for user in self.users[1:]:
            self.projects[-1].add_contributor(user, auth=Auth(self.users[0]), save=True)

        url = '{}?page[size]={}&embed=contributors'.format(self.url, MAX_PAGE_SIZE+1)
        res = self.app.get(url, auth=Auth(self.users[0]))
        pids = [e['id'] for e in res.json['data']]
        for project in self.projects:
            assert_in(project._id, pids)
        assert_equal(res.json['links']['meta']['per_page'], MAX_PAGE_SIZE)

        # Embedded contributors still use the default page size of 10.
        uids = [e['id'] for e in res.json['data'][0]['embeds']['contributors']['data']]
        for user in self.users[:9]:
            assert_in(user._id, uids)
        assert_not_in(self.users[10]._id, uids)
        assert_equal(res.json['data'][0]['embeds']['contributors']['links']['meta']['per_page'], 10)
| apache-2.0 |
saumishr/django | tests/regressiontests/backends/tests.py | 24 | 26372 | # -*- coding: utf-8 -*-
# Unit and doctests for specific database backends.
from __future__ import with_statement, absolute_import
import datetime
import threading
from django.conf import settings
from django.core.management.color import no_style
from django.core.exceptions import ImproperlyConfigured
from django.db import (backend, connection, connections, DEFAULT_DB_ALIAS,
IntegrityError, transaction)
from django.db.backends.signals import connection_created
from django.db.backends.postgresql_psycopg2 import version as pg_version
from django.db.utils import ConnectionHandler, DatabaseError, load_backend
from django.test import TestCase, skipUnlessDBFeature, TransactionTestCase
from django.test.utils import override_settings
from django.utils import unittest
from . import models
class OracleChecks(unittest.TestCase):
    """Oracle-only checks of cursor and connection behaviour.

    Note: this module is Python 2 code (``unicode``, ``xrange``); each test
    is skipped unless the active connection's vendor is ``oracle``.
    """

    @unittest.skipUnless(connection.vendor == 'oracle',
                         "No need to check Oracle cursor semantics")
    def test_dbms_session(self):
        # If the backend is Oracle, test that we can call a standard
        # stored procedure through our cursor wrapper.
        convert_unicode = backend.convert_unicode
        cursor = connection.cursor()
        cursor.callproc(convert_unicode('DBMS_SESSION.SET_IDENTIFIER'),
                        [convert_unicode('_django_testing!'),])

    @unittest.skipUnless(connection.vendor == 'oracle',
                         "No need to check Oracle cursor semantics")
    def test_cursor_var(self):
        # If the backend is Oracle, test that we can pass cursor variables
        # as query parameters.
        cursor = connection.cursor()
        var = cursor.var(backend.Database.STRING)
        cursor.execute("BEGIN %s := 'X'; END; ", [var])
        self.assertEqual(var.getvalue(), 'X')

    @unittest.skipUnless(connection.vendor == 'oracle',
                         "No need to check Oracle cursor semantics")
    def test_long_string(self):
        # If the backend is Oracle, test that we can save a text longer
        # than 4000 chars and read it properly
        c = connection.cursor()
        c.execute('CREATE TABLE ltext ("TEXT" NCLOB)')
        long_str = ''.join([unicode(x) for x in xrange(4000)])
        c.execute('INSERT INTO ltext VALUES (%s)',[long_str])
        c.execute('SELECT text FROM ltext')
        row = c.fetchone()
        # NCLOB columns come back as LOB handles; .read() yields the text.
        self.assertEqual(long_str, row[0].read())
        c.execute('DROP TABLE ltext')

    @unittest.skipUnless(connection.vendor == 'oracle',
                         "No need to check Oracle connection semantics")
    def test_client_encoding(self):
        # If the backend is Oracle, test that the client encoding is set
        # correctly.  This was broken under Cygwin prior to r14781.
        c = connection.cursor()  # Ensure the connection is initialized.
        self.assertEqual(connection.connection.encoding, "UTF-8")
        self.assertEqual(connection.connection.nencoding, "UTF-8")
class MySQLTests(TestCase):
    """Checks that apply only to the MySQL backend."""

    @unittest.skipUnless(connection.vendor == 'mysql',
                         "Test valid only for MySQL")
    def test_server_version_connections(self):
        # Fetching the server version must not leave a connection open.
        connection.close()
        connection.get_server_version()
        self.assertIsNone(connection.connection)
class DateQuotingTest(TestCase):
    """Regression tests for #12818: date helpers vs. clashing field names."""

    def test_django_date_trunc(self):
        """``django_date_trunc`` copes with a model field literally named
        ``year`` (the same word passed as the truncation kind)."""
        modified = datetime.datetime(2010, 2, 20)
        models.SchoolClass.objects.create(year=2009, last_updated=modified)
        years = models.SchoolClass.objects.dates('last_updated', 'year')
        self.assertEqual(list(years), [datetime.datetime(2010, 1, 1, 0, 0)])

    def test_django_extract(self):
        """``django_extract`` copes with lookup names that clash with
        strings passed to it (e.g. 'day'); see #12818."""
        modified = datetime.datetime(2010, 2, 20)
        models.SchoolClass.objects.create(year=2009, last_updated=modified)
        matches = models.SchoolClass.objects.filter(last_updated__day=20)
        self.assertEqual(len(matches), 1)
class LastExecutedQueryTest(TestCase):
    """Check parameter quoting in ``connection.queries`` entries.

    Bug fixed here: both escaping tests were defined under the same name
    ``test_parameter_escaping``, so the second class-body definition
    silently shadowed the first and the oracle/postgresql variant never
    ran.  The MySQL variant is now named ``test_parameter_escaping_mysql``.
    """

    def setUp(self):
        # connection.queries will not be filled in without this
        settings.DEBUG = True

    def tearDown(self):
        settings.DEBUG = False

    # There are no tests for the sqlite backend because it does not
    # implement parameter escaping. See #14091.

    @unittest.skipUnless(connection.vendor in ('oracle', 'postgresql'),
                         "These backends use the standard parameter escaping rules")
    def test_parameter_escaping(self):
        # check that both numbers and string are properly quoted
        list(models.Tag.objects.filter(name="special:\\\"':", object_id=12))
        sql = connection.queries[-1]['sql']
        self.assertTrue("= 'special:\\\"'':' " in sql)
        self.assertTrue("= 12 " in sql)

    @unittest.skipUnless(connection.vendor == 'mysql',
                         "MySQL uses backslashes to escape parameters.")
    def test_parameter_escaping_mysql(self):
        list(models.Tag.objects.filter(name="special:\\\"':", object_id=12))
        sql = connection.queries[-1]['sql']
        # only the expected quoting differs from the test above
        self.assertTrue("= 'special:\\\\\\\"\\':' " in sql)
        self.assertTrue("= 12 " in sql)
class ParameterHandlingTest(TestCase):
    """``executemany`` must reject parameter tuples of the wrong arity."""

    def test_bad_parameter_count(self):
        "An executemany call with too many/not enough parameters will raise an exception (Refs #12612)"
        cursor = connection.cursor()
        table = connection.introspection.table_name_converter('backends_square')
        root_col = connection.ops.quote_name('root')
        square_col = connection.ops.quote_name('square')
        query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (table, root_col, square_col)
        # Three values for a two-column insert, then one value: both bad.
        self.assertRaises(Exception, cursor.executemany, query, [(1, 2, 3)])
        self.assertRaises(Exception, cursor.executemany, query, [(1,)])
# Unfortunately, the following tests would be a good test to run on all
# backends, but it breaks MySQL hard. Until #13711 is fixed, it can't be run
# everywhere (although it would be an effective test of #13711).
class LongNameTest(TestCase):
    """Long primary keys and model names can result in a sequence name
    that exceeds the database limits, which will result in truncation
    on certain databases (e.g., Postgres). The backend needs to use
    the correct sequence name in last_insert_id and other places, so
    check it is. Refs #8901.
    """

    @skipUnlessDBFeature('supports_long_model_names')
    def test_sequence_name_length_limits_create(self):
        """Test creation of model with long name and long pk name doesn't error. Ref #8901"""
        models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()

    @skipUnlessDBFeature('supports_long_model_names')
    def test_sequence_name_length_limits_m2m(self):
        """Test an m2m save of a model with a long name and a long m2m field name doesn't error as on Django >=1.2 this now uses object saves. Ref #8901"""
        obj = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ.objects.create()
        rel_obj = models.Person.objects.create(first_name='Django', last_name='Reinhardt')
        obj.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.add(rel_obj)

    @skipUnlessDBFeature('supports_long_model_names')
    def test_sequence_name_length_limits_flush(self):
        """Test that sequence resetting as part of a flush with model with long name and long pk name doesn't error. Ref #8901"""
        # A full flush is expensive to the full test, so we dig into the
        # internals to generate the likely offending SQL and run it manually

        # Some convenience aliases
        VLM = models.VeryLongModelNameZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZZ
        VLM_m2m = VLM.m2m_also_quite_long_zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz.through
        tables = [
            VLM._meta.db_table,
            VLM_m2m._meta.db_table,
        ]
        # sql_flush expects {'column', 'table'} dicts describing sequences.
        sequences = [
            {
                'column': VLM._meta.pk.column,
                'table': VLM._meta.db_table
            },
        ]
        cursor = connection.cursor()
        # Execute the generated TRUNCATE/reset statements directly.
        for statement in connection.ops.sql_flush(no_style(), tables, sequences):
            cursor.execute(statement)
class SequenceResetTest(TestCase):
    """Sequence-reset SQL must name the right sequence for generic relations."""

    def test_generic_relation(self):
        "Sequence names are correct when resetting generic relations (Ref #13941)"
        # Insert a row whose primary key is chosen explicitly, bypassing the
        # backend's sequence.
        models.Post.objects.create(id=10, name='1st post', text='hello world')

        # Replay the sequence-reset statements for the Post model.
        cursor = connection.cursor()
        reset_statements = connections[DEFAULT_DB_ALIAS].ops.sequence_reset_sql(
            no_style(), [models.Post])
        for statement in reset_statements:
            cursor.execute(statement)

        # A subsequently auto-assigned PK must come after the manual one.
        obj = models.Post.objects.create(name='New post', text='goodbye world')
        self.assertTrue(obj.pk > 10)
class PostgresVersionTest(TestCase):
    """Parsing and detection of the PostgreSQL server version."""

    def assert_parses(self, version_string, version):
        self.assertEqual(pg_version._parse_version(version_string), version)

    def test_parsing(self):
        """Test PostgreSQL version parsing from `SELECT version()` output"""
        cases = [
            ("PostgreSQL 8.3 beta4", 80300),
            ("PostgreSQL 8.3", 80300),
            ("EnterpriseDB 8.3", 80300),
            ("PostgreSQL 8.3.6", 80306),
            ("PostgreSQL 8.4beta1", 80400),
            ("PostgreSQL 8.3.1 on i386-apple-darwin9.2.2, compiled by GCC i686-apple-darwin9-gcc-4.0.1 (GCC) 4.0.1 (Apple Inc. build 5478)", 80301),
        ]
        for version_string, expected in cases:
            self.assert_parses(version_string, expected)

    def test_version_detection(self):
        """Test PostgreSQL version detection"""

        # Helper mocks
        class CursorMock(object):
            "Very simple mock of DB-API cursor"
            def execute(self, arg):
                pass

            def fetchone(self):
                return ["PostgreSQL 8.3"]

        class OlderConnectionMock(object):
            "Mock of psycopg2 (< 2.0.12) connection"
            def cursor(self):
                return CursorMock()

        # psycopg2 < 2.0.12 code path
        conn = OlderConnectionMock()
        self.assertEqual(pg_version.get_version(conn), 80300)
class PostgresNewConnectionTest(TestCase):
    """
    #17062: PostgreSQL shouldn't roll back SET TIME ZONE, even if the first
    transaction is rolled back.
    """

    @unittest.skipUnless(
        connection.vendor == 'postgresql' and connection.isolation_level > 0,
        "This test applies only to PostgreSQL without autocommit")
    def test_connect_and_rollback(self):
        """The session time zone set at connect time survives a rollback."""
        # Use a fresh handler so settings changes don't leak into the
        # module-level `connection`.
        new_connections = ConnectionHandler(settings.DATABASES)
        new_connection = new_connections[DEFAULT_DB_ALIAS]
        try:
            # Ensure the database default time zone is different than
            # the time zone in new_connection.settings_dict. We can
            # get the default time zone by reset & show.
            cursor = new_connection.cursor()
            cursor.execute("RESET TIMEZONE")
            cursor.execute("SHOW TIMEZONE")
            db_default_tz = cursor.fetchone()[0]
            new_tz = 'Europe/Paris' if db_default_tz == 'UTC' else 'UTC'
            new_connection.close()

            # Fetch a new connection with the new_tz as default
            # time zone, run a query and rollback.
            new_connection.settings_dict['TIME_ZONE'] = new_tz
            new_connection.enter_transaction_management()
            cursor = new_connection.cursor()
            new_connection.rollback()

            # Now let's see if the rollback rolled back the SET TIME ZONE.
            cursor.execute("SHOW TIMEZONE")
            tz = cursor.fetchone()[0]
            self.assertEqual(new_tz, tz)
        finally:
            # Close defensively: the connection may already be unusable.
            try:
                new_connection.close()
            except DatabaseError:
                pass
# Unfortunately with sqlite3 the in-memory test database cannot be
# closed, and so it cannot be re-opened during testing, and so we
# sadly disable this test for now.
class ConnectionCreatedSignalTest(TestCase):
    """``connection_created`` must fire exactly when a connection is opened."""

    @skipUnlessDBFeature('test_db_allows_multiple_connections')
    def test_signal(self):
        seen = {}

        def receiver(sender, connection, **kwargs):
            seen["connection"] = connection
        connection_created.connect(receiver)
        connection.close()
        cursor = connection.cursor()
        # The receiver saw the very connection that was just opened.
        self.assertTrue(seen["connection"].connection is connection.connection)

        connection_created.disconnect(receiver)
        seen.clear()
        cursor = connection.cursor()
        # Once disconnected, opening a cursor no longer fires the receiver.
        self.assertTrue(seen == {})
class EscapingChecks(TestCase):
    """'%%s' placeholders must survive sqlite3 parameter escaping (#13648)."""

    @unittest.skipUnless(connection.vendor == 'sqlite',
                         "This is a sqlite-specific issue")
    def test_parameter_escaping(self):
        #13648: '%s' escaping support for sqlite3
        cursor = connection.cursor()
        rows = cursor.execute(
            "select strftime('%%s', date('now'))").fetchall()
        value = rows[0][0]
        self.assertNotEqual(value, None)
        # strftime('%s') yields a Unix timestamp: a non-zero integer string.
        self.assertTrue(int(value))
class BackendTestCase(TestCase):
def create_squares_with_executemany(self, args):
cursor = connection.cursor()
opts = models.Square._meta
tbl = connection.introspection.table_name_converter(opts.db_table)
f1 = connection.ops.quote_name(opts.get_field('root').column)
f2 = connection.ops.quote_name(opts.get_field('square').column)
query = 'INSERT INTO %s (%s, %s) VALUES (%%s, %%s)' % (tbl, f1, f2)
cursor.executemany(query, args)
def test_cursor_executemany(self):
#4896: Test cursor.executemany
args = [(i, i**2) for i in range(-5, 6)]
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 11)
for i in range(-5, 6):
square = models.Square.objects.get(root=i)
self.assertEqual(square.square, i**2)
def test_cursor_executemany_with_empty_params_list(self):
#4765: executemany with params=[] does nothing
args = []
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 0)
def test_cursor_executemany_with_iterator(self):
#10320: executemany accepts iterators
args = iter((i, i**2) for i in range(-3, 2))
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 5)
args = iter((i, i**2) for i in range(3, 7))
with override_settings(DEBUG=True):
# same test for DebugCursorWrapper
self.create_squares_with_executemany(args)
self.assertEqual(models.Square.objects.count(), 9)
def test_unicode_fetches(self):
#6254: fetchone, fetchmany, fetchall return strings as unicode objects
qn = connection.ops.quote_name
models.Person(first_name="John", last_name="Doe").save()
models.Person(first_name="Jane", last_name="Doe").save()
models.Person(first_name="Mary", last_name="Agnelline").save()
models.Person(first_name="Peter", last_name="Parker").save()
models.Person(first_name="Clark", last_name="Kent").save()
opts2 = models.Person._meta
f3, f4 = opts2.get_field('first_name'), opts2.get_field('last_name')
query2 = ('SELECT %s, %s FROM %s ORDER BY %s'
% (qn(f3.column), qn(f4.column), connection.introspection.table_name_converter(opts2.db_table),
qn(f3.column)))
cursor = connection.cursor()
cursor.execute(query2)
self.assertEqual(cursor.fetchone(), (u'Clark', u'Kent'))
self.assertEqual(list(cursor.fetchmany(2)), [(u'Jane', u'Doe'), (u'John', u'Doe')])
self.assertEqual(list(cursor.fetchall()), [(u'Mary', u'Agnelline'), (u'Peter', u'Parker')])
    def test_database_operations_helper_class(self):
        # Regression test for #13630: the DatabaseOperations helper must keep
        # a reference back to the connection that created it.
        self.assertTrue(hasattr(connection, 'ops'))
        self.assertTrue(hasattr(connection.ops, 'connection'))
        self.assertEqual(connection, connection.ops.connection)
def test_duplicate_table_error(self):
""" Test that creating an existing table returns a DatabaseError """
cursor = connection.cursor()
query = 'CREATE TABLE %s (id INTEGER);' % models.Article._meta.db_table
with self.assertRaises(DatabaseError):
cursor.execute(query)
# These tests are not made conditional on backend capabilities, because that
# would require detecting and differentiating between:
# * MySQL+InnoDB and MySQL+MyISAM (something we currently can't do).
# * Whether sqlite3 (if/once #14204 is fixed) has referential integrity
#   turned on or not, something that would be controlled by runtime support
#   and user preference.
# Instead, each test simply skips itself when the backend does not raise
# IntegrityError for the violation.
class FkConstraintsTests(TransactionTestCase):
    """Checks that foreign-key constraint violations surface as IntegrityError.

    TransactionTestCase is required because constraint checking interacts
    with transaction handling (commit_manually is used below).
    """
    def setUp(self):
        # Create a Reporter that the valid Articles will reference.
        self.r = models.Reporter.objects.create(first_name='John', last_name='Smith')
    def test_integrity_checks_on_creation(self):
        """
        Try to create a model instance that violates a FK constraint. If it
        fails it should fail with IntegrityError.
        """
        # reporter_id=30 does not reference any existing Reporter row.
        a = models.Article(headline="This is a test", pub_date=datetime.datetime(2005, 7, 27), reporter_id=30)
        try:
            a.save()
        except IntegrityError:
            return
        self.skipTest("This backend does not support integrity checks.")
    def test_integrity_checks_on_update(self):
        """
        Try to update a model instance introducing a FK constraint violation.
        If it fails it should fail with IntegrityError.
        """
        # Create an Article.
        models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
        # Retrieve it from the DB
        a = models.Article.objects.get(headline="Test article")
        a.reporter_id = 30
        try:
            a.save()
        except IntegrityError:
            return
        self.skipTest("This backend does not support integrity checks.")
    def test_disable_constraint_checks_manually(self):
        """
        When constraint checks are disabled, should be able to write bad data without IntegrityErrors.
        """
        with transaction.commit_manually():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                # Explicitly toggle checking off and back on around the write.
                connection.disable_constraint_checking()
                a.save()
                connection.enable_constraint_checking()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            finally:
                # Roll back so the bad row never persists past this test.
                transaction.rollback()
    def test_disable_constraint_checks_context_manager(self):
        """
        When constraint checks are disabled (using context manager), should be able to write bad data without IntegrityErrors.
        """
        with transaction.commit_manually():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                with connection.constraint_checks_disabled():
                    a.save()
            except IntegrityError:
                self.fail("IntegrityError should not have occurred.")
            finally:
                transaction.rollback()
    def test_check_constraints(self):
        """
        Constraint checks should raise an IntegrityError when bad data is in the DB.
        """
        with transaction.commit_manually():
            # Create an Article.
            models.Article.objects.create(headline="Test article", pub_date=datetime.datetime(2010, 9, 4), reporter=self.r)
            # Retrieve it from the DB
            a = models.Article.objects.get(headline="Test article")
            a.reporter_id = 30
            try:
                # Write the bad row with checks off, then ask the backend to
                # re-check: it must now report the violation.
                with connection.constraint_checks_disabled():
                    a.save()
                    with self.assertRaises(IntegrityError):
                        connection.check_constraints()
            finally:
                transaction.rollback()
class ThreadTests(TestCase):
def test_default_connection_thread_local(self):
"""
Ensure that the default connection (i.e. django.db.connection) is
different for each thread.
Refs #17258.
"""
connections_set = set()
connection.cursor()
connections_set.add(connection.connection)
def runner():
from django.db import connection
connection.cursor()
connections_set.add(connection.connection)
for x in xrange(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEquals(len(connections_set), 3)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_set:
if conn != connection.connection:
conn.close()
def test_connections_thread_local(self):
"""
Ensure that the connections are different for each thread.
Refs #17258.
"""
connections_set = set()
for conn in connections.all():
connections_set.add(conn)
def runner():
from django.db import connections
for conn in connections.all():
# Allow thread sharing so the connection can be closed by the
# main thread.
conn.allow_thread_sharing = True
connections_set.add(conn)
for x in xrange(2):
t = threading.Thread(target=runner)
t.start()
t.join()
self.assertEquals(len(connections_set), 6)
# Finish by closing the connections opened by the other threads (the
# connection opened in the main thread will automatically be closed on
# teardown).
for conn in connections_set:
if conn != connection:
conn.close()
def test_pass_connection_between_threads(self):
"""
Ensure that a connection can be passed from one thread to the other.
Refs #17258.
"""
models.Person.objects.create(first_name="John", last_name="Doe")
def do_thread():
def runner(main_thread_connection):
from django.db import connections
connections['default'] = main_thread_connection
try:
models.Person.objects.get(first_name="John", last_name="Doe")
except DatabaseError, e:
exceptions.append(e)
t = threading.Thread(target=runner, args=[connections['default']])
t.start()
t.join()
# Without touching allow_thread_sharing, which should be False by default.
exceptions = []
do_thread()
# Forbidden!
self.assertTrue(isinstance(exceptions[0], DatabaseError))
# If explicitly setting allow_thread_sharing to False
connections['default'].allow_thread_sharing = False
exceptions = []
do_thread()
# Forbidden!
self.assertTrue(isinstance(exceptions[0], DatabaseError))
# If explicitly setting allow_thread_sharing to True
connections['default'].allow_thread_sharing = True
exceptions = []
do_thread()
# All good
self.assertEqual(len(exceptions), 0)
def test_closing_non_shared_connections(self):
"""
Ensure that a connection that is not explicitly shareable cannot be
closed by another thread.
Refs #17258.
"""
# First, without explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError, e:
exceptions.add(e)
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# The exception was raised
self.assertEqual(len(exceptions), 1)
# Then, with explicitly enabling the connection for sharing.
exceptions = set()
def runner1():
def runner2(other_thread_connection):
try:
other_thread_connection.close()
except DatabaseError, e:
exceptions.add(e)
# Enable thread sharing
connections['default'].allow_thread_sharing = True
t2 = threading.Thread(target=runner2, args=[connections['default']])
t2.start()
t2.join()
t1 = threading.Thread(target=runner1)
t1.start()
t1.join()
# No exception was raised
self.assertEqual(len(exceptions), 0)
class BackendLoadingTests(TestCase):
    def test_old_style_backends_raise_useful_exception(self):
        # Pre-1.0 backend names (e.g. 'sqlite3' instead of
        # 'django.db.backends.sqlite3') must raise ImproperlyConfigured with a
        # message pointing at the new dotted path.
        self.assertRaisesRegexp(ImproperlyConfigured,
                                "Try using django.db.backends.sqlite3 instead",
                                load_backend, 'sqlite3')
| bsd-3-clause |
adrianmugnoz/Documentacion-Divulgame | readthedocs/rtd_tests/tests/test_api.py | 22 | 4739 | from django.test import TestCase
import json
import base64
super_auth = base64.b64encode('super:test')
eric_auth = base64.b64encode('eric:test')
class APIBuildTests(TestCase):
    """Exercises the /api/v1/build/ endpoint."""
    fixtures = ['eric.json', 'test_data.json']
    def test_make_build(self):
        """A superuser can create a build through the API and read it back."""
        payload = {
            "project": "/api/v1/project/1/",
            "version": "/api/v1/version/1/",
            "success": True,
            "output": "Test Output",
            "error": "Test Error",
        }
        create_resp = self.client.post(
            '/api/v1/build/',
            data=json.dumps(payload),
            content_type='application/json',
            HTTP_AUTHORIZATION='Basic %s' % super_auth,
        )
        self.assertEqual(create_resp.status_code, 201)
        self.assertEqual(create_resp['location'],
                         'http://testserver/api/v1/build/1/')
        # Fetch the build back and confirm the stored output.
        read_resp = self.client.get(
            '/api/v1/build/1/',
            data={'format': 'json'},
            HTTP_AUTHORIZATION='Basic %s' % super_auth,
        )
        self.assertEqual(read_resp.status_code, 200)
        self.assertEqual(json.loads(read_resp.content)['output'], 'Test Output')
class APITests(TestCase):
    """Exercises the /api/v1/project/ and /api/v1/version/ endpoints."""
    fixtures = ['eric.json', 'test_data.json']
    def test_make_project(self):
        """
        Test that a superuser can use the API
        """
        post_data = {"name": "awesome-project",
                     "repo": "https://github.com/ericholscher/django-kong.git"}
        resp = self.client.post('/api/v1/project/',
                                data=json.dumps(post_data),
                                content_type='application/json',
                                HTTP_AUTHORIZATION='Basic %s' % super_auth)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp['location'],
                         'http://testserver/api/v1/project/24/')
        # Read the new project back as a regular user.
        resp = self.client.get('/api/v1/project/24/', data={'format': 'json'},
                               HTTP_AUTHORIZATION='Basic %s' % eric_auth)
        self.assertEqual(resp.status_code, 200)
        obj = json.loads(resp.content)
        self.assertEqual(obj['slug'], 'awesome-project')
    def test_invalid_make_project(self):
        """
        Test that the authentication is turned on.
        """
        post_data = {"user": "/api/v1/user/2/",
                     "name": "awesome-project-2",
                     "repo": "https://github.com/ericholscher/django-bob.git"
                     }
        # Wrong password -> the request must be rejected.
        resp = self.client.post(
            '/api/v1/project/', data=json.dumps(post_data),
            content_type='application/json',
            HTTP_AUTHORIZATION='Basic %s' % base64.b64encode('tester:notapass')
        )
        self.assertEqual(resp.status_code, 401)
    def test_make_project_dishonest_user(self):
        """
        Test that you can't create a project for another user
        """
        # represents dishonest data input, authentication happens for user 2
        post_data = {
            "users": ["/api/v1/user/1/"],
            "name": "awesome-project-2",
            "repo": "https://github.com/ericholscher/django-bob.git"
        }
        resp = self.client.post(
            '/api/v1/project/',
            data=json.dumps(post_data),
            content_type='application/json',
            HTTP_AUTHORIZATION='Basic %s' % base64.b64encode('tester:test')
        )
        self.assertEqual(resp.status_code, 401)
    def test_ensure_get_unauth(self):
        """
        Test that GET requests work without authenticating.
        """
        resp = self.client.get("/api/v1/project/", data={"format": "json"})
        self.assertEqual(resp.status_code, 200)
    def test_not_highest(self):
        # Version 0.2.1 must not be reported as the highest version.
        resp = self.client.get(
            "http://testserver/api/v1/version/read-the-docs/highest/0.2.1/",
            data={"format": "json"}
        )
        self.assertEqual(resp.status_code, 200)
        obj = json.loads(resp.content)
        self.assertEqual(obj['is_highest'], False)
    def test_latest_version_highest(self):
        # The "latest" pseudo-version must always report as highest.
        resp = self.client.get(
            "http://testserver/api/v1/version/read-the-docs/highest/latest/",
            data={"format": "json"}
        )
        self.assertEqual(resp.status_code, 200)
        obj = json.loads(resp.content)
        self.assertEqual(obj['is_highest'], True)
    def test_real_highest(self):
        # Version 0.2.2 must be reported as the highest version.
        resp = self.client.get(
            "http://testserver/api/v1/version/read-the-docs/highest/0.2.2/",
            data={"format": "json"}
        )
        self.assertEqual(resp.status_code, 200)
        obj = json.loads(resp.content)
        self.assertEqual(obj['is_highest'], True)
| mit |
ShiYw/Sigil | 3rdparty/python/Lib/plat-netbsd1/IN.py | 181 | 1167 | # Generated by h2py from /usr/include/netinet/in.h
# NOTE: this module is machine-generated (by h2py from NetBSD's
# <netinet/in.h>); do not edit the values by hand.
IPPROTO_IP = 0
IPPROTO_ICMP = 1
IPPROTO_IGMP = 2
IPPROTO_GGP = 3
IPPROTO_IPIP = 4
IPPROTO_TCP = 6
IPPROTO_EGP = 8
IPPROTO_PUP = 12
IPPROTO_UDP = 17
IPPROTO_IDP = 22
IPPROTO_TP = 29
IPPROTO_EON = 80
IPPROTO_ENCAP = 98
IPPROTO_RAW = 255
IPPROTO_MAX = 256
IPPORT_RESERVED = 1024
IPPORT_USERRESERVED = 5000
# NOTE(review): the functions below were translated mechanically from C
# macros and reference names that are not defined in this module
# (u_int32_t, IN_CLASSD, INADDR_ANY); calling them raises NameError.  They
# are kept verbatim for fidelity with the generated source.
def __IPADDR(x): return ((u_int32_t)(x))
IN_CLASSA_NSHIFT = 24
IN_CLASSA_MAX = 128
IN_CLASSB_NSHIFT = 16
IN_CLASSB_MAX = 65536
IN_CLASSC_NSHIFT = 8
IN_CLASSD_NSHIFT = 28
def IN_MULTICAST(i): return IN_CLASSD(i)
IN_LOOPBACKNET = 127
IP_OPTIONS = 1
IP_HDRINCL = 2
IP_TOS = 3
IP_TTL = 4
IP_RECVOPTS = 5
IP_RECVRETOPTS = 6
IP_RECVDSTADDR = 7
IP_RETOPTS = 8
IP_MULTICAST_IF = 9
IP_MULTICAST_TTL = 10
IP_MULTICAST_LOOP = 11
IP_ADD_MEMBERSHIP = 12
IP_DROP_MEMBERSHIP = 13
IP_RECVIF = 20
IP_DEFAULT_MULTICAST_TTL = 1
IP_DEFAULT_MULTICAST_LOOP = 1
IP_MAX_MEMBERSHIPS = 20
IPPROTO_MAXID = (IPPROTO_IDP + 1)
IPCTL_FORWARDING = 1
IPCTL_SENDREDIRECTS = 2
IPCTL_DEFTTL = 3
IPCTL_DEFMTU = 4
IPCTL_FORWSRCRT = 5
IPCTL_DIRECTEDBCAST = 6
IPCTL_ALLOWSRCRT = 7
IPCTL_MAXID = 8
def in_nullhost(x): return ((x).s_addr == INADDR_ANY)
| gpl-3.0 |
scotthuang1989/Python-3-Module-of-the-Week | networking/select_echo_multiclient.py | 2 | 1139 | import socket
import sys
# Demonstrates multiplexing: two client sockets send the same message parts
# to a local echo server and read back each response.
messages = [
    'This is the message. ',
    'It will be sent ',
    'in parts.',
]
server_address = ('localhost', 10000)

# Create a TCP/IP socket per simulated client.
socks = [
    socket.socket(socket.AF_INET, socket.SOCK_STREAM),
    socket.socket(socket.AF_INET, socket.SOCK_STREAM),
]

# Connect both sockets to the port where the server is listening.
print('connecting to {} port {}'.format(*server_address),
      file=sys.stderr)
for s in socks:
    s.connect(server_address)

for message in messages:
    outgoing_data = message.encode()

    # Send the current chunk on both sockets ...
    for s in socks:
        print('{}: sending {!r}'.format(s.getsockname(),
                                        outgoing_data),
              file=sys.stderr)
        s.send(outgoing_data)

    # ... then read the echoed response from both sockets.
    for s in socks:
        data = s.recv(1024)
        print('{}: received {!r}'.format(s.getsockname(),
                                         data),
              file=sys.stderr)
        if not data:
            # The server closed the connection; close our end too.
            # (Fixes stray non-Python text that was fused onto the final
            # line of the original file.)
            print('closing socket', s.getsockname(),
                  file=sys.stderr)
            s.close()
jesseward/discogstagger | discogstagger/discogswrapper.py | 1 | 2977 | #!/usr/bin/env python
from __future__ import unicode_literals, print_function
import os
from discogs_client.exceptions import HTTPError
from six.moves import input
import discogs_client as dc
USER_AGENT = "discogstagger +http://github.com/jesseward"
class DiscogsWrapper(object):
    """OAuth-authenticated wrapper around discogs_client.Client.

    On construction, either reuses an access token persisted in
    ``~<user>/.config/discogstagger/token`` or walks the user through the
    interactive OAuth verification flow and persists the resulting token.
    """

    # Application credentials registered with Discogs for discogstagger.
    consumer_key = 'sxOsKeryYGLwrSsHtRVA'
    consumer_secret = 'npfUDQEVDgjNLPIqpSvcGyLWqaMcUaeX'

    def __init__(self):
        # Resolve the real login user even when running under sudo, so the
        # token is stored in (and read from) that user's home directory.
        user = os.getenv("USER")
        if os.getenv("SUDO_USER") is not None:
            user = os.getenv("SUDO_USER")
        self.token_file = os.path.expanduser('~{0}/.config/discogstagger/token'.format(user))
        if self.is_authenticated:
            token, secret = self._get_access_token()
            self.discogs = dc.Client(USER_AGENT, consumer_key=self.consumer_key,
                                     consumer_secret=self.consumer_secret,
                                     token=token, secret=secret)
        # otherwise run the interactive authentication process.
        else:
            self.discogs = dc.Client(USER_AGENT)
            self._get_request_token()

    def _get_request_token(self):
        """Complete the OAuth handshake (request token, user verification,
        access token) and persist the access token to disk."""
        self.discogs.set_consumer_key(self.consumer_key, self.consumer_secret)
        token, secret, url = self.discogs.get_authorize_url()
        auth = False
        while not auth:
            # Loop until the user supplies a verification code Discogs accepts.
            print('=== ACTION REQUIRED ===')
            print('In order to fetch images from discogs, you\'re required to grant the discogs-banner application access to perform actions on behalf of your discogs account.')
            print('Please visit {url} and accept the authentication request'.format(url=url))
            verification_code = input('Verification code > ')
            try:
                access_token, access_secret = self.discogs.get_access_token(verification_code)
            except HTTPError as e:
                print('\nUnable to authenticate, please try again. error="{0}"\n'.format(e))
                continue
            if access_token:
                auth = True
                # persist token to disk ("token||secret" on a single line).
                with open(self.token_file, 'w') as fh:
                    fh.write('{token}||{secret}'.format(token=access_token, secret=access_secret))

    def _get_access_token(self):
        """Read the persisted token file.

        :return: two strings: the access token and the access token secret.
        """
        with open(self.token_file, 'r') as fh:
            token, secret = fh.read().split('||')
        return token, secret

    @property
    def is_authenticated(self):
        """Return True if an access token has been persisted to disk.

        Rudimentary check: it only tests that the token file exists on the
        local disk; the token contents are not validated.
        """
        # Bug fix: the original returned None (not False) when the file was
        # missing because the fall-through branch had no return statement.
        return os.path.isfile(self.token_file)
| mit |
LICEF/edx-platform | docs/en_us/ORA2/source/conf.py | 48 | 1200 | # -*- coding: utf-8 -*-
#
# Sphinx configuration for the "Creating a Peer Assessment" (ORA2) manual.
import sys, os
# on_rtd is whether we are on readthedocs.org, this line of code grabbed from docs.readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:  # only import and set the theme if we're building docs locally
    import sphinx_rtd_theme
    html_theme = 'sphinx_rtd_theme'
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# Make the repository root and docs directory importable for extensions.
sys.path.append(os.path.abspath('../../../'))
sys.path.append(os.path.abspath('../../'))
#from docs.shared.conf import *
sys.path.insert(0, os.path.abspath('.'))
master_doc = 'index'
# Add any paths that contain templates here, relative to this directory.
#templates_path.append('source/_templates')
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path.append('source/_static')
# General information about the project.
project = u'Creating a Peer Assessment'
copyright = u'2014, edX'
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
| agpl-3.0 |
tamasgal/km3pipe | examples/monitoring/pmt_rates.py | 1 | 4444 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ts=4 sw=4 et
"""
======================
Mean PMT Rates Monitor
======================
The following script calculates the mean PMT rates and updates the plot.
"""
# Author: Tamas Gal <tgal@km3net.de>
# License: MIT
from datetime import datetime
import io
from collections import defaultdict
import threading
import time
import km3pipe as kp
from km3pipe.io.daq import TMCHData
import numpy as np
import matplotlib
matplotlib.use("Agg") # noqa
import matplotlib.pyplot as plt
import km3pipe.style as kpst
kpst.use("km3pipe")
__author__ = "Tamas Gal"
__email__ = "tgal@km3net.de"
VERSION = "1.0"
log = kp.logger.get_logger("PMTrates")
class PMTRates(kp.Module):
    """Accumulates PMT rates for one DU and periodically renders a heatmap.

    A daemon thread wakes up every ``interval`` seconds, appends a column of
    per-PMT mean rates to a rolling matrix and saves the plot to
    ``plot_path``.  ``process`` runs on the pipeline thread and collects the
    incoming rates.
    """
    def configure(self):
        self.detector = self.require("detector")
        self.du = self.require("du")
        self.interval = self.get("interval") or 10
        self.plot_path = self.get("plot_path") or "km3web/plots/pmtrates.png"
        self.max_x = 800  # number of history columns kept in the plot
        self.index = 0
        # Maps (floor-1)*31 + channel_id -> list of rates seen this interval.
        self.rates = defaultdict(list)
        # 18 floors x 31 PMTs per DOM; NaN marks "no data yet".
        self.rates_matrix = np.full((18 * 31, self.max_x), np.nan)
        self.lock = threading.Lock()
        self.thread = threading.Thread(target=self.run, args=())
        self.thread.daemon = True
        self.thread.start()
    def run(self):
        """Plot loop running on the background thread."""
        interval = self.interval
        while True:
            time.sleep(interval)
            now = datetime.now()
            # NOTE(review): add_column() reads self.rates without holding
            # self.lock while process() may still be appending — confirm
            # whether this race is acceptable here.
            self.add_column()
            self.update_plot()
            with self.lock:
                self.rates = defaultdict(list)
            # Subtract the time spent plotting from the next sleep so the
            # columns stay (roughly) interval seconds apart.
            delta_t = (datetime.now() - now).total_seconds()
            remaining_t = self.interval - delta_t
            log.info(
                "Delta t: {} -> waiting for {}s".format(
                    delta_t, self.interval - delta_t
                )
            )
            if remaining_t < 0:
                log.error(
                    "Can't keep up with plot production. " "Increase the interval!"
                )
                interval = 1
            else:
                interval = remaining_t
    def add_column(self):
        """Shift the history matrix left and append the latest mean rates."""
        m = np.roll(self.rates_matrix, -1, 1)
        y_range = 18 * 31
        mean_rates = np.full(y_range, np.nan)
        for i in range(y_range):
            if i not in self.rates:
                continue
            mean_rates[i] = np.mean(self.rates[i])
        m[:, self.max_x - 1] = mean_rates
        self.rates_matrix = m
    def update_plot(self):
        """Render the current rates matrix to ``self.plot_path``."""
        print("Updating plot at {}".format(self.plot_path))
        now = time.time()
        max_x = self.max_x
        interval = self.interval
        def xlabel_func(timestamp):
            # Tick labels as HH:MM in UTC.
            return datetime.utcfromtimestamp(timestamp).strftime("%H:%M")
        m = self.rates_matrix
        # Clamp the colour range to 5-15 kHz.
        m[m > 15000] = 15000
        m[m < 5000] = 5000
        fig, ax = plt.subplots(figsize=(10, 6))
        ax.imshow(m, origin="lower")
        ax.set_title(
            "Mean PMT Rates for DU{} (colours from 5kHz to 15kHz)\n{}".format(
                self.du, datetime.utcnow()
            )
        )
        ax.set_xlabel("UTC time [{}s/px]".format(interval))
        plt.yticks(
            [i * 31 for i in range(18)], ["Floor {}".format(f) for f in range(1, 19)]
        )
        xtics_int = range(0, max_x, int(max_x / 10))
        plt.xticks(
            [i for i in xtics_int],
            [xlabel_func(now - (max_x - i) * interval) for i in xtics_int],
        )
        fig.tight_layout()
        plt.savefig(self.plot_path)
        plt.close("all")
    def process(self, blob):
        """Collect PMT rates from an incoming monitoring packet."""
        tmch_data = TMCHData(io.BytesIO(blob["CHData"]))
        dom_id = tmch_data.dom_id
        if dom_id not in self.detector.doms:
            return blob
        du, floor, _ = self.detector.doms[dom_id]
        # Only the configured DU is monitored.
        if du != self.du:
            return blob
        y_base = (floor - 1) * 31
        for channel_id, rate in enumerate(tmch_data.pmt_rates):
            idx = y_base + channel_id
            with self.lock:
                self.rates[idx].append(rate)
        return blob
def main():
    """Attach a CHPump feeding IO_MONIT packets into PMTRates and drain."""
    det = kp.hardware.Detector(det_id=29)
    pipeline = kp.Pipeline(timeit=True)
    pipeline.attach(
        kp.io.CHPump,
        host="192.168.0.110",
        port=5553,
        tags="IO_MONIT",
        timeout=60 * 60 * 24 * 7,  # one week
        max_queue=1000,
    )
    pipeline.attach(PMTRates, detector=det, du=2, interval=2)
    pipeline.drain()


if __name__ == "__main__":
    main()
| mit |
rafael-sanz/qsAPI | qsAPI/_controller.py | 1 | 11896 | # -*- coding: UTF-8 -*-
'''
@author: Rafael Sanz
@contact: rafael.sanz@selab.es
@Copyright: 2016 <Rafael Sanz - (R)SELAB>
# MIT License (see LICENSE or https://opensource.org/licenses/MIT)
'''
import sys, os.path
import requests as req
import urllib.parse as up
import random, string, json, re
import logging
class _Controller(object):
    """Low-level HTTP handler for the Qlik Sense QRS REST API.

    Owns a requests.Session, builds the Qlik-specific headers (Xrfkey,
    X-Qlik-User), optionally maps paths through a virtual proxy and follows
    redirects manually so headers/cookies/Xrfkey stay consistent.
    """
    _referer='Mozilla/5.0 (Windows NT 6.3; Win64; x64) qsAPI APIREST (QSense)'
    # NTLM support is optional; _ntlm stays None when requests_ntlm is absent.
    try:
        from requests_ntlm import HttpNtlmAuth as _ntlm
    except ImportError:
        _ntlm=None
    def __init__(self, schema, proxy, port, vproxy, certificate, verify, user, verbosity, logName):
        '''
        @Function setup: Setup the connection and initialize handlers
        @param schema: http/https
        @param proxy: hostname to connect
        @param port: port number
        @param vproxy: virtual proxy conf. {preffix:'proxy', path: '^/qrs/', template:'/{}/qrs/'})
        @param certificate: path to .pem client certificate
        @param verify: false to trust in self-signed certificates
        @param user: dict with keys {userDirectory:, userID:, password:} or tuple
        @param verbosity: debug level
        @param logger: logger instance name
        '''
        self.proxy = proxy
        self.port = str(port)
        # NOTE(review): duplicate assignment — self.proxy was already set two
        # lines above; one of the two can be removed.
        self.proxy = proxy;
        self.vproxy = None;
        self.baseurl = None
        self.request = None
        self.response = None
        self.session = None
        if vproxy:
            self.setVProxy(**vproxy)
        self.setUser(**user) if isinstance(user, dict) else self.setUser(*user)
        self.chunk_size = 512 #Kb
        self.log=logging.getLogger(logName)
        if not self.log.hasHandlers():
            self.log.addHandler(logging.StreamHandler(sys.stdout))
        self.log.setLevel(verbosity)
        self.baseurl= '{schema}://{host}:{port}'.format(schema=schema, host=proxy, port=str(port))
        # Client certificate: a single .pem path is expanded to the
        # (cert, key) pair convention "<base>.pem" / "<base>_key.pem".
        if isinstance(certificate, str):
            (base,ext)=os.path.splitext(certificate)
            self.cafile=(base+ext, base+'_key'+ext)
            self.log.debug('CERTKEY: %s%s', base, ext)
        elif isinstance(certificate, tuple):
            self.cafile=certificate
            self.log.debug('CERT: %s',certificate)
        else:
            self.cafile=False
        self._verify=bool(verify)
        if not self._verify:
            req.packages.urllib3.disable_warnings()
        self.session=req.Session()
        # Fall back to NTLM auth only when no client certificate is used.
        if self._ntlm and not self.cafile:
            self.log.debug('NTLM authentication enabled')
            self.session.auth = self._ntlm('{domain}\\{user}'.format(domain=self.UserDirectory, user=self.UserId), self.Password)
    def setVProxy(self, preffix, path, template):
        # Store the virtual-proxy mapping used by _params_update().
        self.vproxy={}
        self.vproxy['preffix'] =preffix                  # proxy
        self.vproxy['path'] =re.compile(path)            # ^/qrs/
        self.vproxy['template']=template                 # /{}/qrs/
        self.vproxy['pxpath'] =template.format(preffix)
    def setUser(self, userDirectory, userID, password=None):
        # Credentials used for the X-Qlik-User header (and NTLM, if enabled).
        self.UserDirectory=userDirectory
        self.UserId = userID
        self.Password=password
    @staticmethod
    def normalize(schema, proxy, port, certificate):
        # Split "schema://host:port" style inputs into their components and
        # default the port to 443 when no client certificate is in play.
        if '://' in proxy:
            schema, proxy = proxy.split('://')
        if not certificate and isinstance(port, int):
            port=443
        if ':' in proxy:
            proxy, port = proxy.split(':')
        return(schema, proxy, port)
    def _params_prepare(self, param, xhd={}):
        # Build the query parameters (always including a random Xrfkey) and
        # the Qlik headers; xhd entries override the defaults.
        par=dict({'Xrfkey': ''.join(random.choice(string.ascii_letters + string.digits) for _ in range(16))})
        if isinstance(param, dict):
            for p,v in param.items():
                if v is not None:
                    # Booleans are serialized lowercase as the API expects.
                    if isinstance(v, bool):
                        par[p]=str(v).lower()
                    else:
                        par[p]=str(v)
                    self.log.debug(" >> %s=>%s",p , par[p])
                else:
                    self.log.debug(" >> %s=>(default)", p)
        hd= { 'User-agent': self._referer,
              'Pragma': 'no-cache',
              'X-Qlik-User': 'UserDirectory={directory}; UserId={user}'.format(directory=self.UserDirectory, user=self.UserId),
              'x-Qlik-Xrfkey': par.get('Xrfkey'),
              'Accept': 'application/json',
              'Content-Type': 'application/json'}
        if self.vproxy:
            hd['X-Qlik-Virtual-Proxy-Prefix']=self.vproxy['preffix']
        hd.update(xhd)
        return(par, hd)
    def _params_update(self, url, par):
        # Rewrite the path through the virtual proxy (if configured) and
        # merge `par` into the existing query string.
        scheme, netloc, path, query, fragment=up.urlsplit(url)
        if self.vproxy:
            path= self.vproxy['path'].sub(self.vproxy['pxpath'], path)
        p=up.parse_qs(query)
        p.update(par)
        query=up.urlencode(p,doseq=True,quote_via=up.quote)
        return up.urlunsplit((scheme, netloc, path, query, fragment))
    def call(self, method, apipath, param=None, data=None, files=None):
        """Send one HTTP request, following redirects manually so that the
        Qlik headers, cookies and Xrfkey are re-applied on every hop."""
        if str(method).upper() not in ('GET', 'POST', 'PUT', 'DELETE'):
            raise ValueError('invalid method <{0}>'.format(method))
        self.log.info('API %s <%s>', method[:3], apipath)
        (par,hd)=self._params_prepare(param, {} if files is None else {'Content-Type': 'application/vnd.qlik.sense.app'})
        # Build the request
        self.response= None
        url=self._params_update(up.urljoin(self.baseurl,apipath), par)
        self.request=req.Request(method, url, headers=hd, data=data, files=files, auth=self.session.auth)
        pr=self.session.prepare_request(self.request)
        self.log.debug('SEND: %s', self.request.url)
        # Execute the HTTP request
        self.response = self.session.send(pr, cert=self.cafile, verify=self._verify, allow_redirects=False)
        rc=0
        while self.response.is_redirect:
            rc+=1
            if rc > self.session.max_redirects:
                raise req.HTTPError('Too many redirections')
            # Re-apply auth, headers, cookies and query params on each hop.
            self.session.rebuild_auth(self.response.next, self.response)
            self.response.next.prepare_headers(hd)
            self.response.next.prepare_cookies(self.response.cookies)
            self.response.next.url=self._params_update(self.response.next.url, par)
            self.log.debug('REDIR: %s', self.response.next.url)
            self.response = self.session.send(self.response.next, verify=self._verify, allow_redirects=False)
        self.log.debug('RECV: %s',self.response.text)
        return(self.response)
    def download(self, apipath, filename, param=None):
        """GET `apipath` and stream the body to `filename` in chunks."""
        self.log.info('API DOWN <%s>', apipath)
        (par,hd)=self._params_prepare(param)
        # Build the request
        self.response= None
        url=self._params_update(up.urljoin(self.baseurl,apipath), par)
        self.log.debug('__SEND: %s',url)
        # Execute the HTTP request
        self.request = self.session.get(url, headers=hd, cert=self.cafile, verify=self._verify, stream=True, auth=self.session.auth)
        with open(filename, 'wb') as f:
            self.log.info('__Downloading (in %sKb blocks): ', str(self.chunk_size))
            #download in 512Kb blocks
            for chunk in self.request.iter_content(chunk_size=self.chunk_size << 10):
                if chunk: # filter out keep-alive new chunks
                    f.write(chunk)
        self.log.info('__Saved: %s', os.path.abspath(filename))
        return(self.request)
    def upload(self, apipath, filename, param=None):
        """POST `filename` to `apipath`, streaming the body in chunks."""
        class upload_in_chunks(object):
            # File-like iterable with a known length, so requests can stream
            # the upload without loading the whole file into memory.
            def __init__(self, filename, chunksize=512):
                self.filename = filename
                self.chunksize = chunksize << 10
                self.totalsize = os.path.getsize(filename)
                self.readsofar = 0
            def __iter__(self):
                with open(self.filename, 'rb') as file:
                    while True:
                        data = file.read(self.chunksize)
                        if not data:
                            break
                        self.readsofar += len(data)
                        yield data
            def __len__(self):
                return self.totalsize
        self.log.info('API UPLO <%s>', apipath)
        (par,hd)=self._params_prepare(param, {'Content-Type': 'application/vnd.qlik.sense.app'})
        # Build the request
        self.response= None
        url=self._params_update(up.urljoin(self.baseurl,apipath), par)
        self.log.debug('__SEND: %s', url)
        # Execute the HTTP request
        self.log.info('__Uploading {:,} bytes'.format(os.path.getsize(filename)))
        self.request = self.session.post(url, headers=hd, cert=self.cafile, verify=self._verify, \
                                         data=upload_in_chunks(filename, self.chunk_size), auth=self.session.auth)
        self.log.info('__Done.')
        return(self.request)
    def get(self, apipath, param=None):
        '''
        @Function get: generic purpose call
        @param apipath: uri REST path
        @param param : whatever other param needed in form a dict
                      (example: {'filter': "name eq 'myApp'} )
        '''
        return self.call('GET', apipath, param)
    def post(self, apipath, param=None, data=None, files=None):
        '''
        @Function post: generic purpose call
        @param apipath: uri REST path
        @param param : whatever other param needed in form a dict
                      (example: {'filter': "name eq 'myApp'} )
        @param data : stream data input (native dict/list structures are json formated)
        @param files : metafile input
        '''
        if isinstance(data,dict) or isinstance(data,list):
            data=json.dumps(data)
        return self.call('POST', apipath, param, data, files)
    def put(self, apipath, param=None, data=None):
        '''
        @Function put: generic purpose call
        @param apipath: uri REST path
        @param param : whatever other param needed in form a dict
                      (example: {'filter': "name eq 'myApp'} )
        @param data : stream data input (native dict/list structures are json formated)
        '''
        if isinstance(data,dict) or isinstance(data,list):
            data=json.dumps(data)
        return self.call('PUT', apipath, param, data)
    def delete(self, apipath, param=None):
        '''
        @Function delete: generic purpose call
        @param apipath: uri REST path
        @param param : whatever other param needed in form a dict
                      (example: {'filter': "name eq 'myApp'} )
        '''
        return self.call('DELETE', apipath, param)
| gpl-3.0 |
saturday06/FrameworkBenchmarks | frameworks/Python/cherrypy/satool.py | 79 | 1643 | import cherrypy
__all__ = ['SATool']
class SATool(cherrypy.Tool):
    def __init__(self):
        """
        The SA tool is responsible for associating a SA session
        to the SA engine and attaching it to the current request.
        Since we are running in a multithreaded application,
        we use the scoped_session that will create a session
        on a per thread basis so that you don't worry about
        concurrency on the session object itself.
        This tools binds a session to the engine each time
        a requests starts and commits/rollbacks whenever
        the request terminates.
        """
        # Bind early (priority 20) so the session exists before most other
        # on_start_resource hooks run.
        cherrypy.Tool.__init__(self, 'on_start_resource',
                               self.bind_session,
                               priority=20)
    def _setup(self):
        cherrypy.Tool._setup(self)
        # Commit late (priority 80) so other on_end_resource hooks can still
        # use the session before it is released.
        cherrypy.request.hooks.attach('on_end_resource',
                                      self.commit_transaction,
                                      priority=80)
    def bind_session(self):
        """
        Attaches a session to the request's scope by requesting
        the SA plugin to bind a session to the SA engine.
        """
        session = cherrypy.engine.publish('bind-session').pop()
        cherrypy.request.db = session
    def commit_transaction(self):
        """
        Commits the current transaction or rolls back
        if an error occurs. Removes the session handle
        from the request's scope.
        """
        # Nothing to do when no session was bound for this request.
        if not hasattr(cherrypy.request, 'db'):
            return
        cherrypy.request.db = None
        cherrypy.engine.publish('commit-session')
| bsd-3-clause |
doismellburning/django | tests/get_object_or_404/models.py | 409 | 1133 | """
DB-API Shortcuts
``get_object_or_404()`` is a shortcut function to be used in view functions for
performing a ``get()`` lookup and raising a ``Http404`` exception if a
``DoesNotExist`` exception was raised during the ``get()`` call.
``get_list_or_404()`` is a shortcut function to be used in view functions for
performing a ``filter()`` lookup and raising a ``Http404`` exception if the
resulting list of objects is empty (``filter()`` itself never raises
``DoesNotExist``).
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Author(models.Model):
    # Minimal author model used by the get_object_or_404 tests.
    name = models.CharField(max_length=50)
    def __str__(self):
        return self.name
class ArticleManager(models.Manager):
    """Manager restricted to articles by authors whose name contains 'sir'."""
    def get_queryset(self):
        base_qs = super(ArticleManager, self).get_queryset()
        return base_qs.filter(authors__name__icontains='sir')
@python_2_unicode_compatible
class Article(models.Model):
    authors = models.ManyToManyField(Author)
    title = models.CharField(max_length=50)
    # `objects` is the plain default manager; `by_a_sir` only yields articles
    # written by an author whose name contains "sir" (see ArticleManager).
    objects = models.Manager()
    by_a_sir = ArticleManager()
    def __str__(self):
        return self.title
| bsd-3-clause |
boundarydevices/android_external_chromium_org | tools/perf/page_sets/page_cycler/dom.py | 10 | 1433 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class DomPage(page_module.Page):
  """One DOM page_cycler benchmark page identified by its file:// URL."""

  def __init__(self, url, page_set):
    super(DomPage, self).__init__(page_set=page_set, url=url)
class DomPageSet(page_set_module.PageSet):
  """ DOM page_cycler benchmark """

  def __init__(self):
    super(DomPageSet, self).__init__(
        # pylint: disable=C0301
        serving_dirs=set(['../../../../data/page_cycler/dom']))

    # Every benchmark page lives under the same data directory; build the
    # file:// URLs from the shared prefix plus the benchmark name.
    base = 'file://../../../../data/page_cycler/dom/'
    for benchmark in ('HTMLDocument_write',
                      'Document_getElementById',
                      'DOMWindow_document',
                      'DOMWindow_window',
                      'Element_getAttribute',
                      'HTMLCollection_length',
                      'HTMLElement_className',
                      'HTMLElement_id',
                      'NodeList_length'):
      self.AddPage(DomPage(base + benchmark + '/', self))
| bsd-3-clause |
lixiangning888/whole_project | modules/signatures/virus.py | 3 | 3898 | # -*- coding: utf-8 -*-
# Copyright (C) 2014 Accuvant, Inc. (bspengler@accuvant.com)
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
from lib.cuckoo.common.abstracts import Signature
import struct
class Virus(Signature):
    """Flags virus-like file infection: the sample reads an existing .exe
    and later writes that same binary back, either through an open handle
    or at the end of a CopyFile chain."""
    name = "virus"
    description = "已存在的系统二进制文件可能被病毒感染"
    severity = 3
    categories = ["virus"]
    authors = ["Accuvant"]
    minimum = "1.2"
    evented = True

    def __init__(self, *args, **kwargs):
        Signature.__init__(self, *args, **kwargs)
        # Handle map is per-process; reset whenever the process changes.
        self.lastprocess = 0
        self.handles = dict()           # open file handle -> .exe path
        self.copydests = set()          # destinations of CopyFile* calls
        self.readcopyfiles = dict()     # copy destination -> copy source
        self.readfiles = set()          # .exe paths the sample has read
        self.infected_files = set()     # .exe paths judged infected
        self.invalidated_files = set()  # .exe paths recreated from scratch
        self.saw_virus = False

    filter_apinames = set(["NtCreateFile", "NtDuplicateObject", "NtOpenFile", "NtClose", "NtWriteFile", "CopyFileA", "CopyFileW", "CopyFileExA", "CopyFileExW"])

    def on_call(self, call, process):
        # Handles are process-local: start a fresh map on process switch.
        if process is not self.lastprocess:
            self.handles = dict()
            self.lastprocess = process

        if call["api"] == "NtDuplicateObject" and call["status"]:
            # A duplicated handle tracks the same file as its source handle.
            tgtarg = self.get_argument(call, "TargetHandle")
            if tgtarg:
                srchandle = int(self.get_argument(call, "SourceHandle"), 16)
                tgthandle = int(tgtarg, 16)
                if srchandle in self.handles:
                    self.handles[tgthandle] = self.handles[srchandle]
        elif call["api"].startswith("CopyFile"):
            srcname = self.get_argument(call, "ExistingFileName").lower()
            dstname = self.get_argument(call, "NewFileName").lower()
            self.copydests.add(dstname)
            # Remember the chain so a later write to the copy can be traced
            # back to the original file.
            self.readcopyfiles[dstname] = srcname
            if srcname not in self.invalidated_files and srcname not in self.copydests:
                self.readfiles.add(srcname)
            # Copying over a file the sample previously read counts as
            # infection of that file.
            if dstname in self.readfiles:
                self.infected_files.add(dstname)
                self.saw_virus = True
        elif call["api"] == "NtClose":
            handle = int(self.get_argument(call, "Handle"), 16)
            self.handles.pop(handle, None)
        elif call["api"] == "NtCreateFile" and call["status"]:
            filename = self.get_argument(call, "FileName").lower()
            handle = int(self.get_argument(call, "FileHandle"), 16)
            createdisp = int(self.get_argument(call, "CreateDisposition"), 16)
            if filename and filename.endswith(".exe"):
                # Disposition 1 (FILE_OPEN: open an existing file) keeps the
                # file eligible for infection tracking; any creating/
                # overwriting disposition means the file's contents are the
                # sample's own, so it can no longer be "infected".
                if createdisp == 1:
                    if handle not in self.handles and filename not in self.invalidated_files:
                        self.handles[handle] = filename
                else:
                    self.invalidated_files.add(filename)
        elif call["api"] == "NtOpenFile" and call["status"]:
            filename = self.get_argument(call, "FileName").lower()
            handle = int(self.get_argument(call, "FileHandle"), 16)
            if filename and filename.endswith(".exe"):
                if handle not in self.handles and filename not in self.invalidated_files:
                    self.handles[handle] = filename
                self.readfiles.add(filename)
        elif call["api"] == "NtWriteFile":
            handle = int(self.get_argument(call, "FileHandle"), 16)
            # Writing through a handle to a tracked existing .exe marks it
            # (or, for a copy, the origin of the copy chain) as infected.
            if handle in self.handles:
                key = self.handles[handle]
                if key in self.copydests:
                    # Walk the copy chain back to the original source.
                    # NOTE(review): if a file were ever recorded as copied
                    # onto itself this walk would not terminate — confirm
                    # CopyFile* can't produce src == dst here.
                    while key in self.readcopyfiles:
                        key = self.readcopyfiles[key]
                self.infected_files.add(key)
                self.saw_virus = True

        return None

    def on_complete(self):
        # Report each infected file and whether any infection was seen.
        for infected in self.infected_files:
            self.data.append({"file" : infected})

        return self.saw_virus
| lgpl-3.0 |
asandyz/oppia | extensions/rules/graph_test.py | 9 | 11506 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for classification of Graph."""
__author__ = 'Zhan Xiong Chin'
from extensions.rules import graph
import test_utils
import random
def _emptyGraph():
return {
'vertices': [],
'edges': [],
'isDirected': False,
'isWeighted': False,
'isLabeled': False
}
def _nullGraph(n):
    """Return a graph with n isolated, unlabeled vertices at the origin."""
    ret = _emptyGraph()
    # range (not xrange) keeps the module importable under Python 3;
    # iteration behaviour is identical under Python 2.
    for _ in range(n):
        ret['vertices'].append({
            'label': '',
            'x': 0.0,
            'y': 0.0
        })
    return ret
def _cycleGraph(n):
    """Return the undirected cycle on n vertices.

    A single vertex (n == 1) yields no edges rather than a self-loop.
    """
    ret = _nullGraph(n)
    if n == 1:
        return ret
    # range (not xrange) for Python 3 compatibility; same iteration in Py2.
    for i in range(n):
        ret['edges'].append({
            'src': i,
            'dst': (i + 1) % n,
            'weight': 1
        })
    return ret
def _completeGraph(n):
    """Return the complete undirected graph K_n (one edge per vertex pair)."""
    ret = _nullGraph(n)
    # range (not xrange) for Python 3 compatibility; same iteration in Py2.
    for i in range(n):
        for j in range(i + 1, n):
            ret['edges'].append({
                'src': i,
                'dst': j,
                'weight': 1
            })
    return ret
class GraphRuleUnitTests(test_utils.GenericTestBase):
    """Tests for rules operating on Graph objects."""

    def test_isisomorphic_rule(self):
        """IsIsomorphicTo must accept relabeled/permuted equivalents and
        reject graphs differing in structure, weights or labels."""
        # Trivial and structural isomorphisms.
        self.assertFuzzyTrue(
            graph.IsIsomorphicTo(_emptyGraph()).eval(_emptyGraph()))
        self.assertFuzzyTrue(
            graph.IsIsomorphicTo(_cycleGraph(5)).eval(_cycleGraph(5)))
        # A 5-cycle with its vertices listed in a different order.
        self.assertFuzzyTrue(graph.IsIsomorphicTo(_cycleGraph(5)).eval({
            'vertices': [{'label': '', 'x': 1.0, 'y': 1.0} for i in xrange(5)],
            'edges': [
                {'src': i, 'dst': j, 'weight': 1} for i, j in
                [(0, 2), (2, 4), (4, 1), (1, 3), (3, 0)]
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }))
        # Labeled graphs: same labels/edges under a vertex permutation.
        self.assertFuzzyTrue(graph.IsIsomorphicTo({
            'vertices': [
                {'label': 'a', 'x': 1.0, 'y': 1.0},
                {'label': 'b', 'x': 2.0, 'y': 2.0},
                {'label': 'c', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': True
        }).eval({
            'vertices': [
                {'label': 'c', 'x': 1.0, 'y': 1.0},
                {'label': 'a', 'x': 2.0, 'y': 2.0},
                {'label': 'b', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 2, 'dst': 1, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': True
        }))
        # Weighted + labeled: weights must match edge-for-edge.
        self.assertFuzzyTrue(graph.IsIsomorphicTo({
            'vertices': [
                {'label': 'a', 'x': 1.0, 'y': 1.0},
                {'label': 'b', 'x': 2.0, 'y': 2.0},
                {'label': 'c', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 2},
                {'src': 1, 'dst': 2, 'weight': 1}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': True
        }).eval({
            'vertices': [
                {'label': 'b', 'x': 1.0, 'y': 1.0},
                {'label': 'a', 'x': 2.0, 'y': 2.0},
                {'label': 'c', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 2, 'dst': 0, 'weight': 1},
                {'src': 1, 'dst': 0, 'weight': 2}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': True
        }))
        # An undirected edge is equivalent to a directed edge in both
        # directions.
        self.assertFuzzyTrue(graph.IsIsomorphicTo({
            'vertices': [
                {'label': '', 'x': 1.0, 'y': 1.0},
                {'label': '', 'x': 2.0, 'y': 2.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1}
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }).eval({
            'vertices': [
                {'label': '', 'x': 1.0, 'y': 1.0},
                {'label': '', 'x': 2.0, 'y': 2.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 0, 'weight': 1}
            ],
            'isDirected': True,
            'isWeighted': False,
            'isLabeled': False
        }))
        # Structurally different graphs must not match.
        self.assertFuzzyFalse(
            graph.IsIsomorphicTo(_cycleGraph(5)).eval(_nullGraph(5)))
        self.assertFuzzyFalse(
            graph.IsIsomorphicTo(_nullGraph(5)).eval(_cycleGraph(5)))
        self.assertFuzzyFalse(
            graph.IsIsomorphicTo(_nullGraph(5)).eval(_nullGraph(6)))
        self.assertFuzzyFalse(
            graph.IsIsomorphicTo(_completeGraph(5)).eval(_cycleGraph(5)))
        self.assertFuzzyFalse(
            graph.IsIsomorphicTo(_cycleGraph(5)).eval(_completeGraph(5)))
        # Same shape but weights attached to the wrong labeled edges.
        self.assertFuzzyFalse(graph.IsIsomorphicTo({
            'vertices': [
                {'label': 'a', 'x': 1.0, 'y': 1.0},
                {'label': 'b', 'x': 2.0, 'y': 2.0},
                {'label': 'c', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 2, 'weight': 2}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': True
        }).eval({
            'vertices': [
                {'label': 'b', 'x': 1.0, 'y': 1.0},
                {'label': 'a', 'x': 2.0, 'y': 2.0},
                {'label': 'c', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 2, 'weight': 2}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': True
        }))
        # Differing edge weight on an otherwise identical graph.
        self.assertFuzzyFalse(graph.IsIsomorphicTo({
            'vertices': [
                {'label': '', 'x': 1.0, 'y': 1.0},
                {'label': '', 'x': 2.0, 'y': 2.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': False
        }).eval({
            'vertices': [
                {'label': '', 'x': 1.0, 'y': 1.0},
                {'label': '', 'x': 2.0, 'y': 2.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 2}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': False
        }))
        # Differing vertex label on an otherwise identical graph.
        self.assertFuzzyFalse(graph.IsIsomorphicTo({
            'vertices': [
                {'label': 'a', 'x': 1.0, 'y': 1.0},
                {'label': 'b', 'x': 2.0, 'y': 2.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 2}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': True
        }).eval({
            'vertices': [
                {'label': 'a', 'x': 1.0, 'y': 1.0},
                {'label': 'c', 'x': 2.0, 'y': 2.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 2}
            ],
            'isDirected': False,
            'isWeighted': True,
            'isLabeled': True
        }))

    def test_fuzzy_matches_rule(self):
        """FuzzyMatches must accept any graph isomorphic to one of its
        training graphs (a triangle and a 4-cycle) and reject others."""
        rule = graph.FuzzyMatches([{
            'vertices': [
                {'label': '', 'x': 1.0, 'y': 1.0},
                {'label': '', 'x': 2.0, 'y': 2.0},
                {'label': '', 'x': 3.0, 'y': 3.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 2, 'weight': 1},
                {'src': 2, 'dst': 0, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }, {
            'vertices': [
                {'label': '', 'x': 1.0, 'y': 1.0},
                {'label': '', 'x': 2.0, 'y': 2.0},
                {'label': '', 'x': 3.0, 'y': 3.0},
                {'label': '', 'x': 4.0, 'y': 4.0}
            ],
            'edges': [
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 1, 'dst': 2, 'weight': 1},
                {'src': 2, 'dst': 3, 'weight': 1},
                {'src': 3, 'dst': 0, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }
        ])
        # An isomorphic graph should match.
        self.assertFuzzyTrue(rule.eval({
            'vertices': [
                {'label': '', 'x': 4.0, 'y': 4.0},
                {'label': '', 'x': 5.0, 'y': 5.0},
                {'label': '', 'x': 6.0, 'y': 6.0}
            ],
            'edges': [
                {'src': 2, 'dst': 0, 'weight': 1},
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 2, 'dst': 1, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }))
        # An isomorphic graph should match.
        # NOTE(review): this assertion is an exact duplicate of the one
        # above — probably a copy/paste; consider replacing it with a
        # distinct isomorphic variant.
        self.assertFuzzyTrue(rule.eval({
            'vertices': [
                {'label': '', 'x': 4.0, 'y': 4.0},
                {'label': '', 'x': 5.0, 'y': 5.0},
                {'label': '', 'x': 6.0, 'y': 6.0}
            ],
            'edges': [
                {'src': 2, 'dst': 0, 'weight': 1},
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 2, 'dst': 1, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }))
        # If this is isomorphic to another graph in the training data, it
        # should match.
        self.assertFuzzyTrue(rule.eval({
            'vertices': [
                {'label': '', 'x': 4.0, 'y': 4.0},
                {'label': '', 'x': 5.0, 'y': 5.0},
                {'label': '', 'x': 6.0, 'y': 6.0},
                {'label': '', 'x': 7.0, 'y': 7.0}
            ],
            'edges': [
                {'src': 3, 'dst': 0, 'weight': 1},
                {'src': 0, 'dst': 1, 'weight': 1},
                {'src': 2, 'dst': 1, 'weight': 1},
                {'src': 3, 'dst': 2, 'weight': 1}
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }))
        # A completely different graph should not match.
        self.assertFuzzyFalse(rule.eval({
            'vertices': [
                {'label': '', 'x': 4.0, 'y': 4.0},
                {'label': '', 'x': 5.0, 'y': 5.0},
            ],
            'edges': [
                {'src': 1, 'dst': 0, 'weight': 1},
            ],
            'isDirected': False,
            'isWeighted': False,
            'isLabeled': False
        }))
| apache-2.0 |
ruyang/ironic | ironic/tests/unit/api/v1/test_ramdisk.py | 6 | 8162 | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests for the API /lookup/ methods.
"""
import mock
from oslo_config import cfg
from oslo_utils import uuidutils
from six.moves import http_client
from ironic.api.controllers import base as api_base
from ironic.api.controllers import v1 as api_v1
from ironic.api.controllers.v1 import ramdisk
from ironic.conductor import rpcapi
from ironic.tests.unit.api import base as test_api_base
from ironic.tests.unit.objects import utils as obj_utils
CONF = cfg.CONF
class TestLookup(test_api_base.BaseApiTest):
    """Tests for the agent /lookup endpoint."""

    # MAC addresses used as lookup keys throughout the tests.
    addresses = ['11:22:33:44:55:66', '66:55:44:33:22:11']

    def setUp(self):
        super(TestLookup, self).setUp()
        # One node in a lookup-able provision state and one that is not.
        self.node = obj_utils.create_test_node(self.context,
                                               uuid=uuidutils.generate_uuid(),
                                               provision_state='deploying')
        self.node2 = obj_utils.create_test_node(self.context,
                                                uuid=uuidutils.generate_uuid(),
                                                provision_state='available')
        CONF.set_override('agent_backend', 'statsd', 'metrics')

    def _check_config(self, data):
        """Assert the lookup response carries the expected agent config,
        built from the current oslo.config values."""
        expected_metrics = {
            'metrics': {
                'backend': 'statsd',
                'prepend_host': CONF.metrics.agent_prepend_host,
                'prepend_uuid': CONF.metrics.agent_prepend_uuid,
                'prepend_host_reverse':
                    CONF.metrics.agent_prepend_host_reverse,
                'global_prefix': CONF.metrics.agent_global_prefix
            },
            'metrics_statsd': {
                'statsd_host': CONF.metrics_statsd.agent_statsd_host,
                'statsd_port': CONF.metrics_statsd.agent_statsd_port
            },
            'heartbeat_timeout': CONF.api.ramdisk_heartbeat_timeout
        }
        self.assertEqual(expected_metrics, data['config'])

    def test_nothing_provided(self):
        # Neither addresses nor node_uuid supplied -> 400.
        response = self.get_json(
            '/lookup',
            headers={api_base.Version.string: str(api_v1.MAX_VER)},
            expect_errors=True)
        self.assertEqual(http_client.BAD_REQUEST, response.status_int)

    def test_not_found(self):
        # No node owns these addresses -> 404.
        response = self.get_json(
            '/lookup?addresses=%s' % ','.join(self.addresses),
            headers={api_base.Version.string: str(api_v1.MAX_VER)},
            expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_old_api_version(self):
        # The endpoint does not exist before its introducing microversion.
        obj_utils.create_test_port(self.context,
                                   node_id=self.node.id,
                                   address=self.addresses[1])
        response = self.get_json(
            '/lookup?addresses=%s' % ','.join(self.addresses),
            headers={api_base.Version.string: str(api_v1.MIN_VER)},
            expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_found_by_addresses(self):
        obj_utils.create_test_port(self.context,
                                   node_id=self.node.id,
                                   address=self.addresses[1])
        data = self.get_json(
            '/lookup?addresses=%s' % ','.join(self.addresses),
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(self.node.uuid, data['node']['uuid'])
        # Only the whitelisted fields (plus links) are exposed.
        self.assertEqual(set(ramdisk._LOOKUP_RETURN_FIELDS) | {'links'},
                         set(data['node']))
        self._check_config(data)

    @mock.patch.object(ramdisk.LOG, 'warning', autospec=True)
    def test_ignore_malformed_address(self, mock_log):
        # Invalid MACs in the list are skipped (with a warning), not fatal.
        obj_utils.create_test_port(self.context,
                                   node_id=self.node.id,
                                   address=self.addresses[1])
        addresses = ('not-a-valid-address,80:00:02:48:fe:80:00:00:00:00:00:00'
                     ':f4:52:14:03:00:54:06:c2,' + ','.join(self.addresses))
        data = self.get_json(
            '/lookup?addresses=%s' % addresses,
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(self.node.uuid, data['node']['uuid'])
        self.assertEqual(set(ramdisk._LOOKUP_RETURN_FIELDS) | {'links'},
                         set(data['node']))
        self._check_config(data)
        self.assertTrue(mock_log.called)

    def test_found_by_uuid(self):
        # node_uuid wins even though the addresses match no port.
        data = self.get_json(
            '/lookup?addresses=%s&node_uuid=%s' %
            (','.join(self.addresses), self.node.uuid),
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(self.node.uuid, data['node']['uuid'])
        self.assertEqual(set(ramdisk._LOOKUP_RETURN_FIELDS) | {'links'},
                         set(data['node']))
        self._check_config(data)

    def test_found_by_only_uuid(self):
        data = self.get_json(
            '/lookup?node_uuid=%s' % self.node.uuid,
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(self.node.uuid, data['node']['uuid'])
        self.assertEqual(set(ramdisk._LOOKUP_RETURN_FIELDS) | {'links'},
                         set(data['node']))
        self._check_config(data)

    def test_restrict_lookup(self):
        # With the default restrict_lookup=True, nodes in non-lookup-able
        # provision states ('available') are hidden.
        response = self.get_json(
            '/lookup?addresses=%s&node_uuid=%s' %
            (','.join(self.addresses), self.node2.uuid),
            headers={api_base.Version.string: str(api_v1.MAX_VER)},
            expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_no_restrict_lookup(self):
        # Disabling restrict_lookup exposes nodes in any provision state.
        CONF.set_override('restrict_lookup', False, 'api')
        data = self.get_json(
            '/lookup?addresses=%s&node_uuid=%s' %
            (','.join(self.addresses), self.node2.uuid),
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(self.node2.uuid, data['node']['uuid'])
        self.assertEqual(set(ramdisk._LOOKUP_RETURN_FIELDS) | {'links'},
                         set(data['node']))
        self._check_config(data)
@mock.patch.object(rpcapi.ConductorAPI, 'get_topic_for',
                   lambda *n: 'test-topic')
class TestHeartbeat(test_api_base.BaseApiTest):
    """Tests for the agent /heartbeat/<uuid> endpoint."""

    def test_old_api_version(self):
        # The endpoint does not exist before its introducing microversion.
        response = self.post_json(
            '/heartbeat/%s' % uuidutils.generate_uuid(),
            {'callback_url': 'url'},
            headers={api_base.Version.string: str(api_v1.MIN_VER)},
            expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    def test_node_not_found(self):
        response = self.post_json(
            '/heartbeat/%s' % uuidutils.generate_uuid(),
            {'callback_url': 'url'},
            headers={api_base.Version.string: str(api_v1.MAX_VER)},
            expect_errors=True)
        self.assertEqual(http_client.NOT_FOUND, response.status_int)

    @mock.patch.object(rpcapi.ConductorAPI, 'heartbeat', autospec=True)
    def test_ok(self, mock_heartbeat):
        # A valid heartbeat returns 202 with an empty body and is relayed
        # to the conductor over RPC.
        node = obj_utils.create_test_node(self.context)
        response = self.post_json(
            '/heartbeat/%s' % node.uuid,
            {'callback_url': 'url'},
            headers={api_base.Version.string: str(api_v1.MAX_VER)})
        self.assertEqual(http_client.ACCEPTED, response.status_int)
        self.assertEqual(b'', response.body)
        mock_heartbeat.assert_called_once_with(mock.ANY, mock.ANY,
                                               node.uuid, 'url',
                                               topic='test-topic')
| apache-2.0 |
mmardini/django | django/template/debug.py | 73 | 3754 | from django.template.base import Lexer, Parser, tag_re, NodeList, VariableNode, TemplateSyntaxError
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.safestring import SafeData, EscapeData
from django.utils.formats import localize
from django.utils.timezone import template_localtime
class DebugLexer(Lexer):
    """Lexer that attaches each token's (origin, (start, end)) source span
    so template errors can point at the exact offending characters."""
    def __init__(self, template_string, origin):
        super(DebugLexer, self).__init__(template_string, origin)

    def tokenize(self):
        "Return a list of tokens from a given template_string"
        result, upto = [], 0
        for match in tag_re.finditer(self.template_string):
            start, end = match.span()
            if start > upto:
                # Literal text between the previous tag and this one.
                result.append(self.create_token(self.template_string[upto:start], (upto, start), False))
                upto = start
            # The tag/variable/comment token itself.
            result.append(self.create_token(self.template_string[start:end], (start, end), True))
            upto = end
        last_bit = self.template_string[upto:]
        if last_bit:
            # Trailing literal text after the final tag.
            result.append(self.create_token(last_bit, (upto, upto + len(last_bit)), False))
        return result

    def create_token(self, token_string, source, in_tag):
        # Delegate to the base Lexer, then record where the token came from.
        token = super(DebugLexer, self).create_token(token_string, in_tag)
        token.source = self.origin, source
        return token
class DebugParser(Parser):
    """Parser that tracks block-tag nesting and annotates syntax errors
    and compiled nodes with their template source location."""
    def __init__(self, lexer):
        super(DebugParser, self).__init__(lexer)
        # Stack of (command, source) for currently-open block tags.
        self.command_stack = []

    def enter_command(self, command, token):
        self.command_stack.append((command, token.source))

    def exit_command(self):
        self.command_stack.pop()

    def error(self, token, msg):
        return self.source_error(token.source, msg)

    def source_error(self, source, msg):
        # Build (not raise) a TemplateSyntaxError carrying its source span.
        e = TemplateSyntaxError(msg)
        e.django_template_source = source
        return e

    def create_nodelist(self):
        return DebugNodeList()

    def create_variable_node(self, contents):
        return DebugVariableNode(contents)

    def extend_nodelist(self, nodelist, node, token):
        # Tag every compiled node with its source before appending it.
        node.source = token.source
        super(DebugParser, self).extend_nodelist(nodelist, node, token)

    def unclosed_block_tag(self, parse_until):
        # The innermost open block tag is the unclosed one.
        command, source = self.command_stack.pop()
        msg = "Unclosed tag '%s'. Looking for one of: %s " % (command, ', '.join(parse_until))
        raise self.source_error(source, msg)

    def compile_filter_error(self, token, e):
        if not hasattr(e, 'django_template_source'):
            e.django_template_source = token.source

    def compile_function_error(self, token, e):
        if not hasattr(e, 'django_template_source'):
            e.django_template_source = token.source
class DebugNodeList(NodeList):
    """NodeList that tags exceptions raised while rendering a node with
    that node's template source before re-raising them."""
    def render_node(self, node, context):
        try:
            return node.render(context)
        except Exception as e:
            # Only attach the source once — the innermost node wins.
            if not hasattr(e, 'django_template_source'):
                e.django_template_source = node.source
            raise
class DebugVariableNode(VariableNode):
    """VariableNode that resolves/localizes its expression and annotates
    any rendering exception with the node's template source."""
    def render(self, context):
        try:
            output = self.filter_expression.resolve(context)
            # Convert aware datetimes to the template's timezone, localize
            # numbers/dates, then coerce to text.
            output = template_localtime(output, use_tz=context.use_tz)
            output = localize(output, use_l10n=context.use_l10n)
            output = force_text(output)
        except UnicodeDecodeError:
            # Undecodable bytes render as the empty string.
            return ''
        except Exception as e:
            if not hasattr(e, 'django_template_source'):
                e.django_template_source = self.source
            raise
        if (context.autoescape and not isinstance(output, SafeData)) or isinstance(output, EscapeData):
            return escape(output)
        else:
            return output
| bsd-3-clause |
prjw/pomodorino | pomodorino/data.py | 1 | 9949 | import os
import time
import threading
import datetime
import calendar
import sqlite3
class PomoData():
    """SQLite-backed store for pomodoro tasks and completed pomos.

    Keeps a local cache (``self.tasks``) of ``[id, name, count, last]``
    rows mirroring the database so lookups don't need to hit SQLite.
    All SQL uses parameterized queries, so task names may contain quotes
    and other special characters safely.
    """

    def __init__(self, pomo):
        """Connect to the DB, create the schema if needed, load the cache."""
        super(PomoData, self).__init__()
        self.pomo = pomo
        self.connected = False
        self.initDB()
        self.firstPomo = self.getEarliest()

    def initDB(self):
        """
        Connects to the DB (creating its folder if necessary) and prepares
        the connection for further use.
        """
        dbPath = os.path.expanduser("~/.local/share/pomodorino/")
        # Create a folder for the DB if necessary
        if not os.path.exists(dbPath):
            os.makedirs(dbPath)
        try:
            self.conn = sqlite3.connect(dbPath + "pomo.db",
                                        check_same_thread=False)
            self.c = self.conn.cursor()
            # Cheap query to verify the connection actually works.
            self.c.execute('SELECT SQLITE_VERSION()')
            self.c.fetchone()
            self.connected = True
            self.createDB()
            self.readDB()
        except sqlite3.Error:
            raise RuntimeError("No DB connection available.")

    def createDB(self):
        """
        Creates the database layout (idempotent).
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        self.c.execute("CREATE TABLE IF NOT EXISTS Tasks(ID INTEGER PRIMARY KEY AUTOINCREMENT, Name TEXT)")
        self.c.execute("CREATE TABLE IF NOT EXISTS Pomos(Timestamp INT, TaskID INT)")
        self.conn.commit()

    def readDB(self):
        """
        Reads the entire DB into the local ``self.tasks`` cache.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        self.c.execute("SELECT * FROM Tasks ORDER BY Name COLLATE NOCASE ASC")
        tasks = self.c.fetchall()
        self.tasks = list()
        for taskID, taskName in tasks:
            # Per-task pomo count and most recent pomo timestamp.
            self.c.execute(
                "SELECT count(TaskID), MAX(Timestamp) FROM Pomos "
                "WHERE TaskID = ?", (taskID,))
            pomoCount, pomoLast = self.c.fetchone()
            self.tasks.append([taskID, taskName, pomoCount, pomoLast])

    def getEarliest(self):
        """Return the timestamp of the earliest pomo, or 0 if none exist."""
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        self.c.execute("SELECT Timestamp FROM Pomos ORDER BY Timestamp ASC")
        timestamp = self.c.fetchone()
        if timestamp is not None:
            return timestamp[0]
        return 0

    def addPomo(self, taskName, pomos):
        """
        Adds ``pomos`` finished pomodoros for ``taskName``; returns whether
        a new task had to be added to the DB.

        Each pomo is backdated by 25 minutes per slot so its timestamp
        marks the beginning of the pomo.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        newTask = False
        try:
            taskID = self.getTaskID(taskName)
        except KeyError:
            # taskName was not found, insert new task.
            taskID = self.insertTask(taskName)
            newTask = True
        pomoTime = 0
        for i in range(pomos):
            pomoTime = int(time.time()) - ((i + 1) * 25 * 60)
            self.c.execute(
                "INSERT INTO Pomos(Timestamp, TaskID) VALUES(?, ?)",
                (pomoTime, taskID))
        # We need to update our local cache as well.
        tasks = list()
        for tID, tskName, pomoCount, pomoLast in self.tasks:
            if tskName == taskName:
                pomoCount += pomos
                pomoLast = pomoTime
            tasks.append([tID, tskName, pomoCount, pomoLast])
        self.tasks = tasks
        self.conn.commit()
        return newTask

    def getTaskID(self, name):
        """
        Returns the ID of a given task or raises a KeyError.
        """
        for taskID, taskName, _pomoCount, _pomoLast in self.tasks:
            if taskName == name:
                return taskID
        raise KeyError(name)

    def getPomoCount(self, timeInt, taskID=0):
        """
        Returns the number of pomos [of a task] done in a certain time
        interval ``timeInt = (start, end)``, both bounds inclusive.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        statement = "SELECT count(TaskID) FROM Pomos WHERE "
        params = []
        if taskID > 0:
            statement += "TaskID = ? AND "
            params.append(taskID)
        statement += "Timestamp BETWEEN ? AND ?"
        params.extend([timeInt[0], timeInt[1]])
        self.c.execute(statement, params)
        return self.c.fetchone()[0]

    def getHighestPomoCountDaily(self):
        """
        Returns the highest number of pomodoros done on a single day.
        """
        first = self.getEarliest()
        if first == 0:
            return 0
        temp = datetime.date.fromtimestamp(first)
        begin = datetime.datetime(temp.year, temp.month, temp.day)
        end = datetime.datetime(temp.year, temp.month, temp.day, 23, 59, 59)
        delta = datetime.timedelta(days=1)
        todayStamp = datetime.datetime.now().timestamp()
        pomoCount = 0
        # Scan day by day from the first pomo up to today.
        while begin.timestamp() <= todayStamp:
            val = self.getPomoCount([begin.timestamp(), end.timestamp()])
            if val > pomoCount:
                pomoCount = val
            begin += delta
            end += delta
        return pomoCount

    def getHighestPomoCountWeekly(self):
        """
        Returns the highest number of pomodoros done in a single week.
        """
        first = self.getEarliest()
        if first == 0:
            return 0
        temp = datetime.date.fromtimestamp(first)
        begin = datetime.datetime(temp.year, temp.month, temp.day)
        # Rewind to Monday of the week containing the first pomo.
        begin = begin - datetime.timedelta(days=begin.weekday())
        temp = begin + datetime.timedelta(days=6)
        end = datetime.datetime(temp.year, temp.month, temp.day, 23, 59, 59)
        delta = datetime.timedelta(days=7)
        todayStamp = datetime.datetime.now().timestamp()
        pomoCount = 0
        while begin.timestamp() <= todayStamp:
            val = self.getPomoCount([begin.timestamp(), end.timestamp()])
            if val > pomoCount:
                pomoCount = val
            begin += delta
            end += delta
        return pomoCount

    def getHighestPomoCountMonthly(self):
        """
        Returns the highest number of pomodoros done in a single month.
        """
        first = self.getEarliest()
        if first == 0:
            return 0
        temp = datetime.date.fromtimestamp(first)
        begin = datetime.datetime(temp.year, temp.month, 1)
        lastDay = calendar.monthrange(begin.year, begin.month)[1]
        end = datetime.datetime(begin.year, begin.month, lastDay, 23, 59, 59)
        todayStamp = datetime.datetime.now().timestamp()
        pomoCount = 0
        while begin.timestamp() <= todayStamp:
            val = self.getPomoCount([begin.timestamp(), end.timestamp()])
            if val > pomoCount:
                pomoCount = val
            # Advance to the first/last second of the next month.
            month = begin.month + 1
            year = begin.year
            if month == 13:
                month = 1
                year += 1
            begin = datetime.datetime(year, month, 1)
            lastDay = calendar.monthrange(year, month)[1]
            end = datetime.datetime(year, month, lastDay, 23, 59, 59)
        return pomoCount

    def insertTask(self, taskName):
        """
        Inserts a new task into the database and our local cache; returns
        the new task's ID.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        # Parameterized to avoid SQL injection / breakage on quotes.
        self.c.execute("INSERT INTO Tasks(Name) VALUES(?)", (taskName,))
        self.conn.commit()
        taskID = self.c.lastrowid
        self.tasks.append((taskID, taskName, 0, 0))
        return taskID

    def renameTask(self, taskID, newName):
        """
        Renames a task in the db and updates the local cache.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        # Update local cache
        tasks = list()
        for tID, taskName, pomoCount, pomoLast in self.tasks:
            if tID == taskID:
                taskName = newName
            tasks.append([tID, taskName, pomoCount, pomoLast])
        self.tasks = tasks
        # Update DB
        self.c.execute("UPDATE Tasks SET Name = ? WHERE ID = ?",
                       (newName, taskID))
        self.conn.commit()

    def delTask(self, taskID):
        """
        Deletes a task with all its pomos from the db and updates the
        local cache.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        tasks = list()
        for tID, taskName, pomoCount, pomoLast in self.tasks:
            if tID != taskID:
                tasks.append([tID, taskName, pomoCount, pomoLast])
        self.tasks = tasks
        self.c.execute("DELETE FROM Tasks WHERE ID = ?", (taskID,))
        self.c.execute("DELETE FROM Pomos WHERE TaskID = ?", (taskID,))
        self.conn.commit()

    def closeDB(self):
        """
        Closes the database connection.
        """
        if not self.connected:
            raise RuntimeError("No DB connection available.")
        self.conn.close()
def initData(pomo):
    """Create the PomoData store and attach it to the given pomo object."""
    pomo.pomoData = PomoData(pomo)
| gpl-2.0 |
Yannig/ansible | lib/ansible/modules/system/beadm.py | 56 | 11657 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2016, Adam Števko <adam.stevko@gmail.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: beadm
short_description: Manage ZFS boot environments on FreeBSD/Solaris/illumos systems.
description:
- Create, delete or activate ZFS boot environments.
- Mount and unmount ZFS boot environments.
version_added: "2.3"
author: Adam Števko (@xen0l)
options:
name:
description:
- ZFS boot environment name.
aliases: [ "be" ]
required: True
snapshot:
description:
- If specified, the new boot environment will be cloned from the given
snapshot or inactive boot environment.
required: false
default: false
description:
description:
- Associate a description with a new boot environment. This option is
available only on Solarish platforms.
required: false
default: false
options:
description:
- Create the datasets for new BE with specific ZFS properties. Multiple
options can be specified. This option is available only on
Solarish platforms.
required: false
default: false
mountpoint:
description:
- Path where to mount the ZFS boot environment
required: false
default: false
state:
description:
- Create or delete ZFS boot environment.
required: false
default: "present"
choices: [ "present", "absent", "activated", "mounted", "unmounted" ]
force:
description:
- Specifies if the unmount should be forced.
required: false
default: false
choices: [ "true", "false" ]
'''
EXAMPLES = '''
- name: Create ZFS boot environment
beadm:
name: upgrade-be
state: present
- name: Create ZFS boot environment from existing inactive boot environment
beadm:
name: upgrade-be
snapshot: be@old
state: present
- name: Create ZFS boot environment with compression enabled and description "upgrade"
beadm:
name: upgrade-be
options: "compression=on"
description: upgrade
state: present
- name: Delete ZFS boot environment
beadm:
name: old-be
state: absent
- name: Mount ZFS boot environment on /tmp/be
beadm:
name: BE
mountpoint: /tmp/be
state: mounted
- name: Unmount ZFS boot environment
beadm:
name: BE
state: unmounted
- name: Activate ZFS boot environment
beadm:
name: upgrade-be
state: activated
'''
RETURN = '''
name:
description: BE name
returned: always
type: string
sample: pre-upgrade
snapshot:
description: ZFS snapshot to create BE from
returned: always
type: string
sample: rpool/ROOT/oi-hipster@fresh
description:
description: BE description
returned: always
type: string
sample: Upgrade from 9.0 to 10.0
options:
description: BE additional options
returned: always
type: string
sample: compression=on
mountpoint:
description: BE mountpoint
returned: always
type: string
sample: /mnt/be
state:
description: state of the target
returned: always
type: string
sample: present
force:
description: if forced action is wanted
returned: always
type: boolean
sample: False
'''
import os
from ansible.module_utils.basic import AnsibleModule
class BE(object):
    """Thin wrapper around the ``beadm`` utility for managing ZFS boot
    environments on FreeBSD and Solarish (illumos/Solaris) systems.
    """

    def __init__(self, module):
        self.module = module
        self.name = module.params['name']
        self.snapshot = module.params['snapshot']
        self.description = module.params['description']
        self.options = module.params['options']
        self.mountpoint = module.params['mountpoint']
        self.state = module.params['state']
        self.force = module.params['force']
        # FreeBSD's beadm differs from the Solarish one in output format and
        # supported flags; remember which flavor we are driving.
        self.is_freebsd = os.uname()[0] == 'FreeBSD'

    def _beadm_list(self):
        """Run ``beadm list -H`` and return ``(rc, stdout, stderr)``.

        On Solarish systems the BE name is passed so the listing is already
        filtered; FreeBSD's beadm lists every BE.
        """
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('list')
        cmd.append('-H')
        if not self.is_freebsd:
            cmd.append(self.name)
        return self.module.run_command(cmd)

    def _find_be_by_name(self, out):
        """Return the tab-separated listing line for this BE, or None."""
        for line in out.splitlines():
            if line.split('\t')[0] == self.name:
                return line
        return None

    def exists(self):
        """Return True if the boot environment exists."""
        (rc, out, _) = self._beadm_list()
        if rc != 0:
            return False
        if self.is_freebsd:
            # FreeBSD listed every BE; search the output for ours.
            # (The original returned None instead of False here; both are
            # falsy, but a real bool is cleaner.)
            return self._find_be_by_name(out) is not None
        # Solarish: the name was passed to 'beadm list', so rc == 0 means
        # the BE exists.
        return True

    def is_activated(self):
        """Return True if the BE is flagged active on reboot ('R')."""
        (rc, out, _) = self._beadm_list()
        if rc == 0:
            if self.is_freebsd:
                line = self._find_be_by_name(out)
                if line is not None and 'R' in line.split('\t')[1]:
                    return True
            else:
                # Solarish -H output is semicolon separated; field 2 holds
                # the active flags.
                if 'R' in out.split(';')[2]:
                    return True
        return False

    def activate_be(self):
        """Activate (make default on next boot) this BE."""
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('activate')
        cmd.append(self.name)
        return self.module.run_command(cmd)

    def create_be(self):
        """Create the BE, optionally from a snapshot.

        Description and ZFS property options are only supported by the
        Solarish beadm.
        """
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('create')
        if self.snapshot:
            cmd.append('-e')
            cmd.append(self.snapshot)
        if not self.is_freebsd:
            if self.description:
                cmd.append('-d')
                cmd.append(self.description)
            if self.options:
                cmd.append('-o')
                cmd.append(self.options)
        cmd.append(self.name)
        return self.module.run_command(cmd)

    def destroy_be(self):
        """Destroy the BE without interactive confirmation (-F)."""
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('destroy')
        cmd.append('-F')
        cmd.append(self.name)
        return self.module.run_command(cmd)

    def is_mounted(self):
        """Return True if the BE is mounted somewhere other than '/'."""
        (rc, out, _) = self._beadm_list()
        if rc == 0:
            if self.is_freebsd:
                line = self._find_be_by_name(out)
                # On FreeBSD, we exclude currently mounted BE on /, as it is
                # special and can be activated even if it is mounted. That is
                # not possible with non-root BEs.
                # BUGFIX: the original compared strings with 'is not'
                # (identity, not equality) and dereferenced 'line' without
                # checking it was found at all.
                if line is not None and \
                        line.split('\t')[2] != '-' and \
                        line.split('\t')[2] != '/':
                    return True
            else:
                # Solarish -H output: field 3 is the mountpoint (empty when
                # not mounted).
                if out.split(';')[3]:
                    return True
        return False

    def mount_be(self):
        """Mount the BE, at self.mountpoint if one was given."""
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('mount')
        cmd.append(self.name)
        if self.mountpoint:
            cmd.append(self.mountpoint)
        return self.module.run_command(cmd)

    def unmount_be(self):
        """Unmount the BE, forcibly (-f) when requested."""
        cmd = [self.module.get_bin_path('beadm')]
        cmd.append('unmount')
        if self.force:
            cmd.append('-f')
        cmd.append(self.name)
        return self.module.run_command(cmd)
def main():
    """Ansible module entry point: dispatch on the requested BE state."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True, aliases=['be'], type='str'),
            snapshot=dict(type='str'),
            description=dict(type='str'),
            options=dict(type='str'),
            # NOTE(review): default=False for a 'path' option looks like it
            # means "not supplied"; confirm against the module documentation.
            mountpoint=dict(default=False, type='path'),
            state=dict(
                default='present',
                choices=['present', 'absent', 'activated',
                         'mounted', 'unmounted']),
            force=dict(default=False, type='bool'),
        ),
        supports_check_mode=True
    )

    be = BE(module)

    # rc stays None when no command was run; see the changed-flag logic below.
    rc = None
    out = ''
    err = ''
    result = {}
    result['name'] = be.name
    result['state'] = be.state

    # Echo back only the options that were actually supplied.
    if be.snapshot:
        result['snapshot'] = be.snapshot
    if be.description:
        result['description'] = be.description
    if be.options:
        result['options'] = be.options
    if be.mountpoint:
        result['mountpoint'] = be.mountpoint

    if be.state == 'absent':
        # beadm on FreeBSD and Solarish systems differs in delete behaviour in
        # that we are not allowed to delete activated BE on FreeBSD while on
        # Solarish systems we cannot delete BE if it is mounted. We add mount
        # check for both platforms as BE should be explicitly unmounted before
        # being deleted. On FreeBSD, we also check if the BE is activated.
        if be.exists():
            if not be.is_mounted():
                if module.check_mode:
                    module.exit_json(changed=True)
                if be.is_freebsd:
                    if be.is_activated():
                        module.fail_json(msg='Unable to remove active BE!')
                (rc, out, err) = be.destroy_be()
                if rc != 0:
                    module.fail_json(msg='Error while destroying BE: "%s"' % err,
                                     name=be.name,
                                     stderr=err,
                                     rc=rc)
            else:
                module.fail_json(msg='Unable to remove BE as it is mounted!')
    elif be.state == 'present':
        # Create only when missing (idempotent).
        if not be.exists():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = be.create_be()
            if rc != 0:
                module.fail_json(msg='Error while creating BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'activated':
        if not be.is_activated():
            if module.check_mode:
                module.exit_json(changed=True)
            # On FreeBSD, beadm is unable to activate mounted BEs, so we add
            # an explicit check for that case.
            if be.is_freebsd:
                if be.is_mounted():
                    module.fail_json(msg='Unable to activate mounted BE!')
            (rc, out, err) = be.activate_be()
            if rc != 0:
                module.fail_json(msg='Error while activating BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'mounted':
        if not be.is_mounted():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = be.mount_be()
            if rc != 0:
                module.fail_json(msg='Error while mounting BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)
    elif be.state == 'unmounted':
        if be.is_mounted():
            if module.check_mode:
                module.exit_json(changed=True)
            (rc, out, err) = be.unmount_be()
            if rc != 0:
                module.fail_json(msg='Error while unmounting BE: "%s"' % err,
                                 name=be.name,
                                 stderr=err,
                                 rc=rc)

    # A command was executed iff rc was set above.
    if rc is None:
        result['changed'] = False
    else:
        result['changed'] = True
    if out:
        result['stdout'] = out
    if err:
        result['stderr'] = err

    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
diagramsoftware/odoo | addons/crm_partner_assign/crm_lead.py | 221 | 3039 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
from openerp.tools.translate import _
from openerp.tools.safe_eval import safe_eval as eval
class crm_lead(osv.osv):
    """Extend ``crm.lead`` with partner-assignment (channel) helpers."""
    _inherit = 'crm.lead'

    def get_interested_action(self, cr, uid, interested, context=None):
        """Return the 'channel interested' window action with the given
        ``interested`` flag injected into the action context.

        :raises osv.except_osv: if the XML-ID of the action is missing.
        """
        try:
            model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'crm_lead_channel_interested_act')
        except ValueError:
            raise osv.except_osv(_('Error!'), _("The CRM Channel Interested Action is missing"))
        action = self.pool[model].read(cr, uid, [action_id], context=context)[0]
        # 'eval' is openerp's safe_eval (aliased in the imports), not the builtin.
        action_context = eval(action['context'])
        action_context['interested'] = interested
        # Clients expect the context serialized back to a string.
        action['context'] = str(action_context)
        return action

    def case_interested(self, cr, uid, ids, context=None):
        """Open the channel-interested wizard with interested=True."""
        return self.get_interested_action(cr, uid, True, context=context)

    def case_disinterested(self, cr, uid, ids, context=None):
        """Open the channel-interested wizard with interested=False."""
        return self.get_interested_action(cr, uid, False, context=context)

    def assign_salesman_of_assigned_partner(self, cr, uid, ids, context=None):
        """For open leads, hand the lead over to the salesman of the
        assigned partner (and that salesman's sales team).
        """
        # Map salesman id -> list of lead ids to reassign in one write.
        salesmans_leads = {}
        for lead in self.browse(cr, uid, ids, context=context):
            # Only leads still in progress (probability strictly between 0
            # and 100) or in the first stage are reassigned.
            if (lead.stage_id.probability > 0 and lead.stage_id.probability < 100) or lead.stage_id.sequence == 1:
                if lead.partner_assigned_id and lead.partner_assigned_id.user_id and lead.partner_assigned_id.user_id != lead.user_id:
                    salesman_id = lead.partner_assigned_id.user_id.id
                    if salesmans_leads.get(salesman_id):
                        salesmans_leads[salesman_id].append(lead.id)
                    else:
                        salesmans_leads[salesman_id] = [lead.id]
        for salesman_id, lead_ids in salesmans_leads.items():
            # Derive the sales team from the salesman via the onchange helper.
            salesteam_id = self.on_change_user(cr, uid, lead_ids, salesman_id, context=None)['value'].get('section_id')
            self.write(cr, uid, lead_ids, {'user_id': salesman_id, 'section_id': salesteam_id}, context=context)
| agpl-3.0 |
xbianonpi/Sick-Beard-TPB | lib/rtorrent/__init__.py | 9 | 25246 | # Copyright (c) 2013 Chris Lucas, <chris@chrisjlucas.com>
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
import urllib
import os.path
import time
import xmlrpclib
from rtorrent.common import find_torrent, \
is_valid_port, convert_version_tuple_to_str
from rtorrent.lib.torrentparser import TorrentParser
from rtorrent.lib.xmlrpc.http import HTTPServerProxy
from rtorrent.lib.xmlrpc.scgi import SCGIServerProxy
from rtorrent.rpc import Method
from rtorrent.lib.xmlrpc.basic_auth import BasicAuthTransport
from rtorrent.torrent import Torrent
from rtorrent.group import Group
import rtorrent.rpc # @UnresolvedImport
# Package metadata.
__version__ = "0.2.9"
__author__ = "Chris Lucas"
__contact__ = "chris@chrisjlucas.com"
__license__ = "MIT"

# Oldest rTorrent release this library supports (checked in _verify_conn).
MIN_RTORRENT_VERSION = (0, 8, 1)
MIN_RTORRENT_VERSION_STR = convert_version_tuple_to_str(MIN_RTORRENT_VERSION)
class RTorrent:

    """ Create a new rTorrent connection """
    rpc_prefix = None

    def __init__(self, uri, username=None, password=None,
                 verify=False, sp=None, sp_kwargs=None):
        self.uri = uri  # : From X{__init__(self, url)}

        self.username = username
        self.password = password

        self.schema = urllib.splittype(uri)[0]

        # Pick the ServerProxy implementation from the URI scheme unless the
        # caller supplied one explicitly.
        if sp:
            self.sp = sp
        elif self.schema in ['http', 'https']:
            self.sp = HTTPServerProxy
        elif self.schema == 'scgi':
            self.sp = SCGIServerProxy
        else:
            raise NotImplementedError()

        self.sp_kwargs = sp_kwargs or {}

        self.torrents = []  # : List of L{Torrent} instances
        self._rpc_methods = []  # : List of rTorrent RPC methods
        self._torrent_cache = []
        self._client_version_tuple = ()

        if verify is True:
            self._verify_conn()

    def _get_conn(self):
        """Get ServerProxy instance"""
        if self.username is not None and self.password is not None:
            # Basic auth is only implemented for the HTTP transport.
            if self.schema == 'scgi':
                raise NotImplementedError()

            return self.sp(
                self.uri,
                transport=BasicAuthTransport(self.username, self.password),
                **self.sp_kwargs
            )

        return self.sp(self.uri, **self.sp_kwargs)

    def _verify_conn(self):
        """Assert the server speaks the RPC methods and version we need."""
        # check for rpc methods that should be available
        assert "system.client_version" in self._get_rpc_methods(), "Required RPC method not available."
        assert "system.library_version" in self._get_rpc_methods(), "Required RPC method not available."

        # minimum rTorrent version check
        assert self._meets_version_requirement() is True,\
            "Error: Minimum rTorrent version required is {0}".format(
                MIN_RTORRENT_VERSION_STR)

    def _meets_version_requirement(self):
        """Return True if the connected rTorrent is new enough."""
        return self._get_client_version_tuple() >= MIN_RTORRENT_VERSION

    def _get_client_version_tuple(self):
        """Fetch (and cache) the client version as a tuple of ints."""
        conn = self._get_conn()

        if not self._client_version_tuple:
            if not hasattr(self, "client_version"):
                setattr(self, "client_version",
                        conn.system.client_version())

            rtver = getattr(self, "client_version")
            self._client_version_tuple = tuple([int(i) for i in
                                                rtver.split(".")])

        return self._client_version_tuple

    def _update_rpc_methods(self):
        """Refresh and return the server's raw RPC method list."""
        self._rpc_methods = self._get_conn().system.listMethods()

        return self._rpc_methods

    def _get_rpc_methods(self):
        """ Get list of raw RPC commands

        @return: raw RPC commands
        @rtype: list
        """

        return(self._rpc_methods or self._update_rpc_methods())

    def get_torrents(self, view="main"):
        """Get list of all torrents in specified view

        @return: list of L{Torrent} instances

        @rtype: list

        @todo: add validity check for specified view
        """
        self.torrents = []
        methods = rtorrent.torrent.methods
        retriever_methods = [m for m in methods
                             if m.is_retriever() and m.is_available(self)]

        # Single d.multicall fetches every retriever field for every torrent.
        m = rtorrent.rpc.Multicall(self)
        m.add("d.multicall", view, "d.get_hash=",
              *[method.rpc_call + "=" for method in retriever_methods])

        results = m.call()[0]  # only sent one call, only need first result

        for result in results:
            results_dict = {}
            # build results_dict
            for m, r in zip(retriever_methods, result[1:]):  # result[0] is the info_hash
                results_dict[m.varname] = rtorrent.rpc.process_result(m, r)

            self.torrents.append(
                Torrent(self, info_hash=result[0], **results_dict)
            )

        self._manage_torrent_cache()
        return(self.torrents)

    def _manage_torrent_cache(self):
        """Carry tracker/peer/file lists over to new torrent list"""
        for torrent in self._torrent_cache:
            new_torrent = rtorrent.common.find_torrent(torrent.info_hash,
                                                       self.torrents)
            if new_torrent is not None:
                new_torrent.files = torrent.files
                new_torrent.peers = torrent.peers
                new_torrent.trackers = torrent.trackers

        self._torrent_cache = self.torrents

    def _get_load_function(self, file_type, start, verbose):
        """Determine correct "load torrent" RPC method"""
        func_name = None
        if file_type == "url":
            # url strings can be input directly
            if start and verbose:
                func_name = "load_start_verbose"
            elif start:
                func_name = "load_start"
            elif verbose:
                func_name = "load_verbose"
            else:
                func_name = "load"
        elif file_type in ["file", "raw"]:
            if start and verbose:
                func_name = "load_raw_start_verbose"
            elif start:
                func_name = "load_raw_start"
            elif verbose:
                func_name = "load_raw_verbose"
            else:
                func_name = "load_raw"

        return(func_name)

    def load_magnet(self, magneturl, info_hash, start=False, verbose=False, verify_load=True):
        """Load a magnet link into rTorrent.

        @param magneturl: the magnet URI
        @type magneturl: str
        @param info_hash: expected info hash of the torrent (hex string)
        @type info_hash: str
        @param start: start torrent when loaded
        @type start: bool
        @param verbose: print error messages to rTorrent log
        @type verbose: bool
        @param verify_load: wait until the torrent appears and the magnet is
            resolved into a real torrent
        @type verify_load: bool
        @return: L{Torrent} instance if verify_load is True, else None
        """
        p = self._get_conn()
        info_hash = info_hash.upper()

        func_name = self._get_load_function("url", start, verbose)

        # load magnet
        getattr(p, func_name)(magneturl)

        if verify_load:
            MAX_RETRIES = 3

            # BUGFIX: the original loop broke on the first *non*-matching
            # torrent and could leave 'torrent' unbound (or bound to an
            # unrelated torrent) before calling start() on it. Poll until
            # our info hash shows up, like load_torrent() does.
            i = 0
            while i < MAX_RETRIES:
                self.get_torrents()
                if info_hash in [t.info_hash for t in self.torrents]:
                    break
                time.sleep(1)
                i += 1

            assert info_hash in [t.info_hash for t in self.torrents],\
                "Adding torrent was unsuccessful."

            # Start the stub torrent so rTorrent resolves the magnet into a
            # full torrent (until then its name is just the info hash).
            torrent = find_torrent(info_hash, self.torrents)
            torrent.start()

            # BUGFIX: the original resolution-wait loop only advanced its
            # counter when the name still contained the hash, so it could
            # spin forever. Bound the wait unconditionally.
            i = 0
            while i < MAX_RETRIES:
                torrent = find_torrent(info_hash, self.get_torrents())
                if str(info_hash) not in str(torrent.name):
                    break
                time.sleep(1)
                i += 1

            return(torrent)

    def load_torrent(self, torrent, start=False, verbose=False, verify_load=True):
        """
        Loads torrent into rTorrent (with various enhancements)

        @param torrent: can be a url, a path to a local file, or the raw data
        of a torrent file
        @type torrent: str

        @param start: start torrent when loaded
        @type start: bool

        @param verbose: print error messages to rTorrent log
        @type verbose: bool

        @param verify_load: verify that torrent was added to rTorrent successfully
        @type verify_load: bool

        @return: Depends on verify_load:
                 - if verify_load is True, (and the torrent was
                 loaded successfully), it'll return a L{Torrent} instance
                 - if verify_load is False, it'll return None

        @rtype: L{Torrent} instance or None

        @raise AssertionError: If the torrent wasn't successfully added to rTorrent
                               - Check L{TorrentParser} for the AssertionError's
                               it raises

        @note: Because this function includes url verification (if a url was input)
        as well as verification as to whether the torrent was successfully added,
        this function doesn't execute instantaneously. If that's what you're
        looking for, use load_torrent_simple() instead.
        """
        p = self._get_conn()
        tp = TorrentParser(torrent)
        torrent = xmlrpclib.Binary(tp._raw_torrent)
        info_hash = tp.info_hash

        func_name = self._get_load_function("raw", start, verbose)

        # load torrent
        getattr(p, func_name)(torrent)

        if verify_load:
            MAX_RETRIES = 3
            i = 0
            while i < MAX_RETRIES:
                self.get_torrents()
                if info_hash in [t.info_hash for t in self.torrents]:
                    break

                # was still getting AssertionErrors, delay should help
                time.sleep(1)
                i += 1

            assert info_hash in [t.info_hash for t in self.torrents],\
                "Adding torrent was unsuccessful."

            return(find_torrent(info_hash, self.torrents))

    def load_torrent_simple(self, torrent, file_type,
                            start=False, verbose=False):
        """Loads torrent into rTorrent

        @param torrent: can be a url, a path to a local file, or the raw data
        of a torrent file
        @type torrent: str

        @param file_type: valid options: "url", "file", or "raw"
        @type file_type: str

        @param start: start torrent when loaded
        @type start: bool

        @param verbose: print error messages to rTorrent log
        @type verbose: bool

        @return: None

        @raise AssertionError: if incorrect file_type is specified

        @note: This function was written for speed, it includes no enhancements.
        If you input a url, it won't check if it's valid. You also can't get
        verification that the torrent was successfully added to rTorrent.
        Use load_torrent() if you would like these features.
        """
        p = self._get_conn()

        assert file_type in ["raw", "file", "url"], \
            "Invalid file_type, options are: 'url', 'file', 'raw'."
        func_name = self._get_load_function(file_type, start, verbose)

        if file_type == "file":
            # since we have to assume we're connected to a remote rTorrent
            # client, we have to read the file and send it to rT as raw
            assert os.path.isfile(torrent), \
                "Invalid path: \"{0}\"".format(torrent)
            # BUGFIX: close the file handle (original leaked it).
            f = open(torrent, "rb")
            try:
                torrent = f.read()
            finally:
                f.close()

        if file_type in ["raw", "file"]:
            finput = xmlrpclib.Binary(torrent)
        elif file_type == "url":
            finput = torrent

        getattr(p, func_name)(finput)

    def get_views(self):
        """Return the list of views defined on the server."""
        p = self._get_conn()
        return p.view_list()

    def create_group(self, name, persistent=True, view=None):
        """Create a ratio group; non-persistent groups need a view."""
        p = self._get_conn()

        if persistent is True:
            p.group.insert_persistent_view('', name)
        else:
            assert view is not None, "view parameter required on non-persistent groups"
            p.group.insert('', name, view)

        self._update_rpc_methods()

    def get_group(self, name):
        """Return a refreshed L{Group} instance for *name*."""
        assert name is not None, "group name required"

        group = Group(self, name)
        group.update()
        return group

    def set_dht_port(self, port):
        """Set DHT port

        @param port: port
        @type port: int

        @raise AssertionError: if invalid port is given
        """
        assert is_valid_port(port), "Valid port range is 0-65535"
        # BUGFIX: the original referenced the undefined attribute self._p.
        self.dht_port = self._get_conn().set_dht_port(port)

    def enable_check_hash(self):
        """Alias for set_check_hash(True)"""
        self.set_check_hash(True)

    def disable_check_hash(self):
        """Alias for set_check_hash(False)"""
        self.set_check_hash(False)

    def find_torrent(self, info_hash):
        """Frontend for rtorrent.common.find_torrent"""
        return(rtorrent.common.find_torrent(info_hash, self.get_torrents()))

    def poll(self):
        """ poll rTorrent to get latest torrent/peer/tracker/file information

        @note: This essentially refreshes every aspect of the rTorrent
        connection, so it can be very slow if working with a remote
        connection that has a lot of torrents loaded.

        @return: None
        """
        self.update()
        torrents = self.get_torrents()
        for t in torrents:
            t.poll()

    def update(self):
        """Refresh rTorrent client info

        @note: All fields are stored as attributes to self.

        @return: None
        """
        multicall = rtorrent.rpc.Multicall(self)
        retriever_methods = [m for m in methods
                             if m.is_retriever() and m.is_available(self)]
        for method in retriever_methods:
            multicall.add(method)

        multicall.call()
def _build_class_methods(class_obj):
    """Attach a ``multicall_add`` convenience method to *class_obj*."""
    # multicall add class
    caller = lambda self, multicall, method, *args:\
        multicall.add(method, self.rpc_id, *args)

    # Documentation is attached at runtime so each class exposes it.
    caller.__doc__ = """Same as Multicall.add(), but with automatic inclusion
                        of the rpc_id

                        @param multicall: A L{Multicall} instance
                        @type: multicall: Multicall

                        @param method: L{Method} instance or raw rpc method
                        @type: Method or str

                        @param args: optional arguments to pass
                        """
    setattr(class_obj, "multicall_add", caller)
def __compare_rpc_methods(rt_new, rt_old):
    """Debug helper: print the RPC-method diff between two connections
    (e.g. two different rTorrent versions)."""
    from pprint import pprint
    rt_new_methods = set(rt_new._get_rpc_methods())
    rt_old_methods = set(rt_old._get_rpc_methods())
    print("New Methods:")
    pprint(rt_new_methods - rt_old_methods)
    print("Methods not in new rTorrent:")
    pprint(rt_old_methods - rt_new_methods)
def __check_supported_methods(rt):
    """Debug helper: print which server RPC methods this library does and
    does not wrap."""
    from pprint import pprint
    supported_methods = set([m.rpc_call for m in
                             methods +
                             rtorrent.file.methods +
                             rtorrent.torrent.methods +
                             rtorrent.tracker.methods +
                             rtorrent.peer.methods])
    all_methods = set(rt._get_rpc_methods())

    print("Methods NOT in supported methods")
    pprint(all_methods - supported_methods)
    print("Supported methods NOT in all methods")
    pprint(supported_methods - all_methods)
# Declarative table of RTorrent-level RPC wrappers; _build_rpc_methods (below)
# turns each entry into a real method on the class.
methods = [
    # RETRIEVERS
    Method(RTorrent, 'get_xmlrpc_size_limit', 'get_xmlrpc_size_limit'),
    Method(RTorrent, 'get_proxy_address', 'get_proxy_address'),
    Method(RTorrent, 'get_split_suffix', 'get_split_suffix'),
    Method(RTorrent, 'get_up_limit', 'get_upload_rate'),
    Method(RTorrent, 'get_max_memory_usage', 'get_max_memory_usage'),
    Method(RTorrent, 'get_max_open_files', 'get_max_open_files'),
    Method(RTorrent, 'get_min_peers_seed', 'get_min_peers_seed'),
    Method(RTorrent, 'get_use_udp_trackers', 'get_use_udp_trackers'),
    Method(RTorrent, 'get_preload_min_size', 'get_preload_min_size'),
    Method(RTorrent, 'get_max_uploads', 'get_max_uploads'),
    Method(RTorrent, 'get_max_peers', 'get_max_peers'),
    Method(RTorrent, 'get_timeout_sync', 'get_timeout_sync'),
    Method(RTorrent, 'get_receive_buffer_size', 'get_receive_buffer_size'),
    Method(RTorrent, 'get_split_file_size', 'get_split_file_size'),
    Method(RTorrent, 'get_dht_throttle', 'get_dht_throttle'),
    Method(RTorrent, 'get_max_peers_seed', 'get_max_peers_seed'),
    Method(RTorrent, 'get_min_peers', 'get_min_peers'),
    Method(RTorrent, 'get_tracker_numwant', 'get_tracker_numwant'),
    Method(RTorrent, 'get_max_open_sockets', 'get_max_open_sockets'),
    Method(RTorrent, 'get_session', 'get_session'),
    Method(RTorrent, 'get_ip', 'get_ip'),
    Method(RTorrent, 'get_scgi_dont_route', 'get_scgi_dont_route'),
    Method(RTorrent, 'get_hash_read_ahead', 'get_hash_read_ahead'),
    Method(RTorrent, 'get_http_cacert', 'get_http_cacert'),
    Method(RTorrent, 'get_dht_port', 'get_dht_port'),
    Method(RTorrent, 'get_handshake_log', 'get_handshake_log'),
    Method(RTorrent, 'get_preload_type', 'get_preload_type'),
    Method(RTorrent, 'get_max_open_http', 'get_max_open_http'),
    Method(RTorrent, 'get_http_capath', 'get_http_capath'),
    Method(RTorrent, 'get_max_downloads_global', 'get_max_downloads_global'),
    Method(RTorrent, 'get_name', 'get_name'),
    Method(RTorrent, 'get_session_on_completion', 'get_session_on_completion'),
    Method(RTorrent, 'get_down_limit', 'get_download_rate'),
    Method(RTorrent, 'get_down_total', 'get_down_total'),
    Method(RTorrent, 'get_up_rate', 'get_up_rate'),
    Method(RTorrent, 'get_hash_max_tries', 'get_hash_max_tries'),
    Method(RTorrent, 'get_peer_exchange', 'get_peer_exchange'),
    Method(RTorrent, 'get_down_rate', 'get_down_rate'),
    Method(RTorrent, 'get_connection_seed', 'get_connection_seed'),
    Method(RTorrent, 'get_http_proxy', 'get_http_proxy'),
    Method(RTorrent, 'get_stats_preloaded', 'get_stats_preloaded'),
    Method(RTorrent, 'get_timeout_safe_sync', 'get_timeout_safe_sync'),
    Method(RTorrent, 'get_hash_interval', 'get_hash_interval'),
    Method(RTorrent, 'get_port_random', 'get_port_random'),
    Method(RTorrent, 'get_directory', 'get_directory'),
    Method(RTorrent, 'get_port_open', 'get_port_open'),
    Method(RTorrent, 'get_max_file_size', 'get_max_file_size'),
    Method(RTorrent, 'get_stats_not_preloaded', 'get_stats_not_preloaded'),
    Method(RTorrent, 'get_memory_usage', 'get_memory_usage'),
    Method(RTorrent, 'get_connection_leech', 'get_connection_leech'),
    Method(RTorrent, 'get_check_hash', 'get_check_hash',
           boolean=True,
           ),
    Method(RTorrent, 'get_session_lock', 'get_session_lock'),
    Method(RTorrent, 'get_preload_required_rate', 'get_preload_required_rate'),
    Method(RTorrent, 'get_max_uploads_global', 'get_max_uploads_global'),
    Method(RTorrent, 'get_send_buffer_size', 'get_send_buffer_size'),
    Method(RTorrent, 'get_port_range', 'get_port_range'),
    Method(RTorrent, 'get_max_downloads_div', 'get_max_downloads_div'),
    Method(RTorrent, 'get_max_uploads_div', 'get_max_uploads_div'),
    Method(RTorrent, 'get_safe_sync', 'get_safe_sync'),
    Method(RTorrent, 'get_bind', 'get_bind'),
    Method(RTorrent, 'get_up_total', 'get_up_total'),
    Method(RTorrent, 'get_client_version', 'system.client_version'),
    Method(RTorrent, 'get_library_version', 'system.library_version'),
    Method(RTorrent, 'get_api_version', 'system.api_version',
           min_version=(0, 9, 1)
           ),
    Method(RTorrent, "get_system_time", "system.time",
           docstring="""Get the current time of the system rTorrent is running on
           @return: time (posix)
           @rtype: int""",
           ),

    # MODIFIERS
    Method(RTorrent, 'set_http_proxy', 'set_http_proxy'),
    Method(RTorrent, 'set_max_memory_usage', 'set_max_memory_usage'),
    Method(RTorrent, 'set_max_file_size', 'set_max_file_size'),
    Method(RTorrent, 'set_bind', 'set_bind',
           docstring="""Set address bind
           @param arg: ip address
           @type arg: str
           """,
           ),
    Method(RTorrent, 'set_up_limit', 'set_upload_rate',
           docstring="""Set global upload limit (in bytes)
           @param arg: speed limit
           @type arg: int
           """,
           ),
    Method(RTorrent, 'set_port_random', 'set_port_random'),
    Method(RTorrent, 'set_connection_leech', 'set_connection_leech'),
    Method(RTorrent, 'set_tracker_numwant', 'set_tracker_numwant'),
    Method(RTorrent, 'set_max_peers', 'set_max_peers'),
    Method(RTorrent, 'set_min_peers', 'set_min_peers'),
    Method(RTorrent, 'set_max_uploads_div', 'set_max_uploads_div'),
    Method(RTorrent, 'set_max_open_files', 'set_max_open_files'),
    Method(RTorrent, 'set_max_downloads_global', 'set_max_downloads_global'),
    Method(RTorrent, 'set_session_lock', 'set_session_lock'),
    Method(RTorrent, 'set_session', 'set_session'),
    Method(RTorrent, 'set_split_suffix', 'set_split_suffix'),
    Method(RTorrent, 'set_hash_interval', 'set_hash_interval'),
    Method(RTorrent, 'set_handshake_log', 'set_handshake_log'),
    Method(RTorrent, 'set_port_range', 'set_port_range'),
    Method(RTorrent, 'set_min_peers_seed', 'set_min_peers_seed'),
    Method(RTorrent, 'set_scgi_dont_route', 'set_scgi_dont_route'),
    Method(RTorrent, 'set_preload_min_size', 'set_preload_min_size'),
    Method(RTorrent, 'set_log.tracker', 'set_log.tracker'),
    Method(RTorrent, 'set_max_uploads_global', 'set_max_uploads_global'),
    Method(RTorrent, 'set_down_limit', 'set_download_rate',
           docstring="""Set global download limit (in bytes)
           @param arg: speed limit
           @type arg: int
           """,
           ),
    Method(RTorrent, 'set_preload_required_rate', 'set_preload_required_rate'),
    Method(RTorrent, 'set_hash_read_ahead', 'set_hash_read_ahead'),
    Method(RTorrent, 'set_max_peers_seed', 'set_max_peers_seed'),
    Method(RTorrent, 'set_max_uploads', 'set_max_uploads'),
    Method(RTorrent, 'set_session_on_completion', 'set_session_on_completion'),
    Method(RTorrent, 'set_max_open_http', 'set_max_open_http'),
    Method(RTorrent, 'set_directory', 'set_directory'),
    Method(RTorrent, 'set_http_cacert', 'set_http_cacert'),
    Method(RTorrent, 'set_dht_throttle', 'set_dht_throttle'),
    Method(RTorrent, 'set_hash_max_tries', 'set_hash_max_tries'),
    Method(RTorrent, 'set_proxy_address', 'set_proxy_address'),
    Method(RTorrent, 'set_split_file_size', 'set_split_file_size'),
    Method(RTorrent, 'set_receive_buffer_size', 'set_receive_buffer_size'),
    Method(RTorrent, 'set_use_udp_trackers', 'set_use_udp_trackers'),
    Method(RTorrent, 'set_connection_seed', 'set_connection_seed'),
    Method(RTorrent, 'set_xmlrpc_size_limit', 'set_xmlrpc_size_limit'),
    Method(RTorrent, 'set_xmlrpc_dialect', 'set_xmlrpc_dialect'),
    Method(RTorrent, 'set_safe_sync', 'set_safe_sync'),
    Method(RTorrent, 'set_http_capath', 'set_http_capath'),
    Method(RTorrent, 'set_send_buffer_size', 'set_send_buffer_size'),
    Method(RTorrent, 'set_max_downloads_div', 'set_max_downloads_div'),
    Method(RTorrent, 'set_name', 'set_name'),
    Method(RTorrent, 'set_port_open', 'set_port_open'),
    Method(RTorrent, 'set_timeout_sync', 'set_timeout_sync'),
    Method(RTorrent, 'set_peer_exchange', 'set_peer_exchange'),
    Method(RTorrent, 'set_ip', 'set_ip',
           docstring="""Set IP
           @param arg: ip address
           @type arg: str
           """,
           ),
    Method(RTorrent, 'set_timeout_safe_sync', 'set_timeout_safe_sync'),
    Method(RTorrent, 'set_preload_type', 'set_preload_type'),
    Method(RTorrent, 'set_check_hash', 'set_check_hash',
           docstring="""Enable/Disable hash checking on finished torrents
           @param arg: True to enable, False to disable
           @type arg: bool
           """,
           boolean=True,
           ),
]

_all_methods_list = [methods,
                     rtorrent.file.methods,
                     rtorrent.torrent.methods,
                     rtorrent.tracker.methods,
                     rtorrent.peer.methods,
                     ]

class_methods_pair = {
    RTorrent: methods,
    rtorrent.file.File: rtorrent.file.methods,
    rtorrent.torrent.Torrent: rtorrent.torrent.methods,
    rtorrent.tracker.Tracker: rtorrent.tracker.methods,
    rtorrent.peer.Peer: rtorrent.peer.methods,
}
# Generate the real RPC wrapper methods and multicall_add helper on each class.
for c in class_methods_pair.keys():
    rtorrent.rpc._build_rpc_methods(c, class_methods_pair[c])
    _build_class_methods(c)
| gpl-3.0 |
ashcrow/smizmar | src/smizmar/package.py | 2 | 1161 | # Copyright (C) 2013 Steve Milner
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Common package class.
"""
class Package(object):
    """Immutable description of an installed package.

    Values are stored in name-mangled attributes and exposed through
    read-only properties.
    """

    def __init__(self, name, version, scope):
        self.__name = name
        self.__version = version
        self.__scope = scope

    def __repr__(self):
        return 'Package(name="{0}", version="{1}", scope="{2}")'.format(
            self.__name, self.__version, self.__scope)

    # Read-only properties.
    # BUGFIX: the original assigned bare lambdas (``name = lambda s: s.__name``)
    # which made these *methods*, so ``pkg.name`` returned a bound method
    # object rather than the value the "Read-only properties" comment promises.
    name = property(lambda s: s.__name)
    version = property(lambda s: s.__version)
    scope = property(lambda s: s.__scope)
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.