repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
wonder-sk/QGIS | python/ext-libs/nose2/tests/unit/test_loader.py | 11 | 3081 | from nose2 import events, loader, session
from nose2.tests._common import TestCase
class TestPluggableTestLoader(TestCase):
    """Verify that PluggableTestLoader routes loading through session hooks."""

    def setUp(self):
        """Create a fresh session and a loader bound to it."""
        self.session = session.Session()
        self.loader = loader.PluggableTestLoader(self.session)

    def test_load_from_module_calls_hook(self):
        """Firing loadTestsFromModule reaches a registered plugin."""
        self.session.hooks.register('loadTestsFromModule', FakePlugin())
        event = events.LoadFromModuleEvent(self.loader, 'some_module')
        self.session.hooks.loadTestsFromModule(event)
        self.assertTrue(
            event.fakeLoadFromModule,
            "FakePlugin.loadTestsFromModule() was not called")

    def test_load_from_name_calls_hook(self):
        """Firing loadTestsFromName reaches a registered plugin."""
        self.session.hooks.register('loadTestsFromName', FakePlugin())
        event = events.LoadFromNameEvent(
            self.loader, 'some_name', 'some_module')
        self.session.hooks.loadTestsFromName(event)
        self.assertTrue(
            event.fakeLoadFromName,
            "FakePlugin.fakeLoadFromName() was not called")

    def test_load_from_names_calls_hook(self):
        """Firing loadTestsFromNames reaches a registered plugin."""
        self.session.hooks.register('loadTestsFromNames', FakePlugin())
        event = events.LoadFromNamesEvent(
            self.loader, ['some_name'], 'some_module')
        self.session.hooks.loadTestsFromNames(event)
        self.assertTrue(
            event.fakeLoadFromNames,
            "FakePlugin.fakeLoadFromNames() was not called")

    def test_loader_from_names_calls_module_hook(self):
        """An empty name list still triggers the module-level hook."""
        fake_plugin = FakePlugin()
        self.session.hooks.register('loadTestsFromModule', fake_plugin)
        self.loader.loadTestsFromNames([], 'some_module')
        self.assertTrue(
            fake_plugin.fakeLoadFromModule,
            "FakePlugin.loadTestsFromModule() was not called")

    def test_loader_from_names_calls_name_hook(self):
        """A single name triggers the per-name hook."""
        fake_plugin = FakePlugin()
        self.session.hooks.register('loadTestsFromName', fake_plugin)
        self.loader.loadTestsFromNames(['some_name'])
        self.assertTrue(
            fake_plugin.fakeLoadFromName,
            "FakePlugin.loadTestsFromName() was not called")

    def test_loader_from_names_calls_names_hook(self):
        """A name list triggers the batch hook."""
        fake_plugin = FakePlugin()
        self.session.hooks.register('loadTestsFromNames', fake_plugin)
        self.loader.loadTestsFromNames(['some_name'])
        self.assertTrue(
            fake_plugin.fakeLoadFromNames,
            "FakePlugin.loadTestsFromNames() was not called")
class FakePlugin(object):
    """Test double recording which loader hooks were invoked.

    Each hook marks both the plugin instance and the event it received,
    so tests can assert on either side of the dispatch.
    """

    def __init__(self):
        # Flags flipped to True when the corresponding hook fires.
        self.fakeLoadFromModule = False
        self.fakeLoadFromName = False
        self.fakeLoadFromNames = False

    def loadTestsFromModule(self, event):
        """Record that the module-level hook fired."""
        event.fakeLoadFromModule = True
        self.fakeLoadFromModule = True

    def loadTestsFromName(self, event):
        """Record that the per-name hook fired."""
        event.fakeLoadFromName = True
        self.fakeLoadFromName = True

    def loadTestsFromNames(self, event):
        """Record that the batch-of-names hook fired."""
        event.fakeLoadFromNames = True
        self.fakeLoadFromNames = True
liyitest/rr | openstack_dashboard/dashboards/project/access_and_security/keypairs/views.py | 17 | 4596 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing keypairs.
"""
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django import http
from django.template.defaultfilters import slugify # noqa
from django.utils.translation import ugettext_lazy as _
from django.views.generic import View # noqa
from horizon import exceptions
from horizon import forms
from horizon.utils import memoized
from horizon import views
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.access_and_security.keypairs \
import forms as project_forms
class CreateView(forms.ModalFormView):
    """Modal workflow for generating a brand-new key pair."""

    form_class = project_forms.CreateKeypair
    form_id = "create_keypair_form"
    modal_header = _("Create Key Pair")
    template_name = 'project/access_and_security/keypairs/create.html'
    submit_label = _("Create Key Pair")
    submit_url = reverse_lazy(
        "horizon:project:access_and_security:keypairs:create")
    success_url = 'horizon:project:access_and_security:keypairs:download'
    page_title = _("Create Key Pair")

    def get_success_url(self):
        """Send the user to the download page for the key pair just made."""
        keypair_name = self.request.POST['name']
        return reverse(self.success_url,
                       kwargs={"keypair_name": keypair_name})
class ImportView(forms.ModalFormView):
    """Modal workflow for importing an existing public key."""

    form_class = project_forms.ImportKeypair
    form_id = "import_keypair_form"
    modal_header = _("Import Key Pair")
    template_name = 'project/access_and_security/keypairs/import.html'
    submit_label = _("Import Key Pair")
    submit_url = reverse_lazy(
        "horizon:project:access_and_security:keypairs:import")
    success_url = reverse_lazy('horizon:project:access_and_security:index')
    page_title = _("Import Key Pair")

    def get_object_id(self, keypair):
        """Key pairs are identified by name rather than by a numeric id."""
        return keypair.name
class DetailView(views.HorizonTemplateView):
    """Read-only detail page for a single key pair."""
    template_name = 'project/access_and_security/keypairs/detail.html'
    page_title = _("Key Pair Details")

    @memoized.memoized_method
    def _get_data(self):
        # Fetch the key pair once per request; memoized so multiple callers
        # (context, template) don't trigger repeated nova API hits.
        try:
            keypair = api.nova.keypair_get(self.request,
                                           self.kwargs['keypair_name'])
        except Exception:
            redirect = reverse('horizon:project:access_and_security:index')
            msg = _('Unable to retrieve details for keypair "%s".')\
                % (self.kwargs['keypair_name'])
            # NOTE(review): this relies on exceptions.handle() raising a
            # redirect exception when redirect= is given; otherwise the
            # 'return keypair' below would hit an unbound local — confirm.
            exceptions.handle(self.request, msg,
                              redirect=redirect)
        return keypair

    def get_context_data(self, **kwargs):
        """Add the key pair object to the template context."""
        context = super(DetailView, self).get_context_data(**kwargs)
        context['keypair'] = self._get_data()
        return context
class DownloadView(views.HorizonTemplateView):
    """Page offering the one-time download of a generated private key."""

    template_name = 'project/access_and_security/keypairs/download.html'
    page_title = _("Download Key Pair")

    def get_context_data(self, keypair_name=None):
        """Expose only the key pair name to the template."""
        context = {'keypair_name': keypair_name}
        return context
class GenerateView(View):
    """Create (or re-create) a key pair and stream its private key.

    Nova only exposes the private key at creation time, so the response
    is served immediately as a ``.pem`` attachment.
    """

    def get(self, request, keypair_name=None, optional=None):
        """Generate ``keypair_name`` and return its private key as a file.

        :param optional: when ``"regenerate"``, the existing key pair of the
            same name is deleted first so the name can be reused.
        """
        try:
            if optional == "regenerate":
                api.nova.keypair_delete(request, keypair_name)
            keypair = api.nova.keypair_create(request, keypair_name)
        except Exception as exc:
            redirect = reverse('horizon:project:access_and_security:index')
            # Bug fix: the '%(exc)s' placeholder was never interpolated, so
            # users saw the literal placeholder instead of the real error.
            exceptions.handle(self.request,
                              _('Unable to create key pair: %(exc)s')
                              % {'exc': exc},
                              redirect=redirect)
        response = http.HttpResponse(content_type='application/binary')
        response['Content-Disposition'] = ('attachment; filename=%s.pem'
                                           % slugify(keypair.name))
        response.write(keypair.private_key)
        response['Content-Length'] = str(len(response.content))
        return response
| apache-2.0 |
dbbhattacharya/kitsune | vendor/packages/setuptools/setuptools/command/build_ext.py | 32 | 11520 | from distutils.command.build_ext import build_ext as _du_build_ext
try:
# Attempt to use Pyrex for building extensions, if available
from Pyrex.Distutils.build_ext import build_ext as _build_ext
except ImportError:
_build_ext = _du_build_ext
import os, sys
from distutils.file_util import copy_file
from setuptools.extension import Library
from distutils.ccompiler import new_compiler
from distutils.sysconfig import customize_compiler, get_config_var
get_config_var("LDSHARED") # make sure _config_vars is initialized
from distutils.sysconfig import _config_vars
from distutils import log
from distutils.errors import *
# Platform capability probing: decide whether extension "libraries" can be
# built as true shared objects with stub loaders, or must fall back to
# static archives (see the link_shared_object definitions further down).
have_rtld = False
use_stubs = False
libtype = 'shared'

if sys.platform == "darwin":
    use_stubs = True
elif os.name != 'nt':
    try:
        # The 'dl' module only exists on some POSIX Pythons; RTLD_NOW lets
        # generated stub loaders resolve symbols eagerly at import time.
        from dl import RTLD_NOW
        have_rtld = True
        use_stubs = True
    except ImportError:
        pass


def if_dl(s):
    # Return ``s`` only when the 'dl' module is available; used to splice
    # optional lines into the generated stub loader source.
    if have_rtld:
        return s
    return ''
class build_ext(_build_ext):
    """setuptools build_ext: extends distutils/Pyrex build_ext with support
    for shared ``Library`` extensions, ``--inplace`` copying, and stub
    loaders for extensions that link against those shared libraries.
    """

    def run(self):
        """Build extensions in build directory, then copy if --inplace"""
        # Temporarily disable --inplace so the parent class builds into
        # build_lib; the copy back to the source tree is done afterwards.
        old_inplace, self.inplace = self.inplace, 0
        _build_ext.run(self)
        self.inplace = old_inplace
        if old_inplace:
            self.copy_extensions_to_source()

    def copy_extensions_to_source(self):
        # Mirror each built extension (and its stub loader, if any) back
        # into the source tree for --inplace builds.
        build_py = self.get_finalized_command('build_py')
        for ext in self.extensions:
            fullname = self.get_ext_fullname(ext.name)
            filename = self.get_ext_filename(fullname)
            modpath = fullname.split('.')
            package = '.'.join(modpath[:-1])
            package_dir = build_py.get_package_dir(package)
            dest_filename = os.path.join(package_dir,os.path.basename(filename))
            src_filename = os.path.join(self.build_lib,filename)

            # Always copy, even if source is older than destination, to ensure
            # that the right extensions for the current Python/platform are
            # used.
            copy_file(
                src_filename, dest_filename, verbose=self.verbose,
                dry_run=self.dry_run
            )
            if ext._needs_stub:
                self.write_stub(package_dir or os.curdir, ext, True)

    if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'):
        # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
        def swig_sources(self, sources, *otherargs):
            # first do any Pyrex processing
            sources = _build_ext.swig_sources(self, sources) or sources
            # Then do any actual SWIG stuff on the remainder
            return _du_build_ext.swig_sources(self, sources, *otherargs)

    def get_ext_filename(self, fullname):
        # Shared Library objects get the platform library filename; modules
        # that link to them get a 'dl-' prefix when stubs are in use.
        filename = _build_ext.get_ext_filename(self,fullname)
        if fullname in self.ext_map:
            ext = self.ext_map[fullname]
            if isinstance(ext,Library):
                fn, ext = os.path.splitext(filename)
                return self.shlib_compiler.library_filename(fn,libtype)
            elif use_stubs and ext._links_to_dynamic:
                d,fn = os.path.split(filename)
                return os.path.join(d,'dl-'+fn)
        return filename

    def initialize_options(self):
        _build_ext.initialize_options(self)
        # shlib_compiler is created lazily in setup_shlib_compiler() and
        # only when shared Library extensions are present.
        self.shlib_compiler = None
        self.shlibs = []
        self.ext_map = {}

    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.extensions = self.extensions or []
        self.check_extensions_list(self.extensions)
        self.shlibs = [ext for ext in self.extensions
                       if isinstance(ext,Library)]
        if self.shlibs:
            self.setup_shlib_compiler()
        # First pass: compute full dotted names for every extension.
        for ext in self.extensions:
            ext._full_name = self.get_ext_fullname(ext.name)
        # Second pass: record linkage metadata and adjust library search
        # paths for extensions that link against in-tree shared libraries.
        for ext in self.extensions:
            fullname = ext._full_name
            self.ext_map[fullname] = ext
            ltd = ext._links_to_dynamic = \
                self.shlibs and self.links_to_dynamic(ext) or False
            ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
            filename = ext._file_name = self.get_ext_filename(fullname)
            libdir = os.path.dirname(os.path.join(self.build_lib,filename))
            if ltd and libdir not in ext.library_dirs:
                ext.library_dirs.append(libdir)
            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
                ext.runtime_library_dirs.append(os.curdir)

    def setup_shlib_compiler(self):
        # Build a second compiler, configured to emit shared (or static)
        # libraries instead of Python extension modules.
        compiler = self.shlib_compiler = new_compiler(
            compiler=self.compiler, dry_run=self.dry_run, force=self.force
        )
        if sys.platform == "darwin":
            # Temporarily patch the global config vars so customize_compiler
            # picks up dylib-producing flags, then restore them.
            tmp = _config_vars.copy()
            try:
                # XXX Help! I don't have any idea whether these are right...
                _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
                _config_vars['CCSHARED'] = " -dynamiclib"
                _config_vars['SO'] = ".dylib"
                customize_compiler(compiler)
            finally:
                _config_vars.clear()
                _config_vars.update(tmp)
        else:
            customize_compiler(compiler)

        # Propagate the command-line/include options onto the new compiler.
        if self.include_dirs is not None:
            compiler.set_include_dirs(self.include_dirs)
        if self.define is not None:
            # 'define' option is a list of (name,value) tuples
            for (name,value) in self.define:
                compiler.define_macro(name, value)
        if self.undef is not None:
            for macro in self.undef:
                compiler.undefine_macro(macro)
        if self.libraries is not None:
            compiler.set_libraries(self.libraries)
        if self.library_dirs is not None:
            compiler.set_library_dirs(self.library_dirs)
        if self.rpath is not None:
            compiler.set_runtime_library_dirs(self.rpath)
        if self.link_objects is not None:
            compiler.set_link_objects(self.link_objects)

        # hack so distutils' build_extension() builds a library instead
        compiler.link_shared_object = link_shared_object.__get__(compiler)

    def get_export_symbols(self, ext):
        # Library objects manage their own export list; everything else
        # gets the default (init<module>) symbol from distutils.
        if isinstance(ext,Library):
            return ext.export_symbols
        return _build_ext.get_export_symbols(self,ext)

    def build_extension(self, ext):
        # Swap in the shared-library compiler for Library objects, and
        # always restore the original compiler afterwards.
        _compiler = self.compiler
        try:
            if isinstance(ext,Library):
                self.compiler = self.shlib_compiler
            _build_ext.build_extension(self,ext)
            if ext._needs_stub:
                self.write_stub(
                    self.get_finalized_command('build_py').build_lib, ext
                )
        finally:
            self.compiler = _compiler

    def links_to_dynamic(self, ext):
        """Return true if 'ext' links to a dynamic lib in the same package"""
        # XXX this should check to ensure the lib is actually being built
        # XXX as dynamic, and not just using a locally-found version or a
        # XXX static-compiled version
        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
        pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
        for libname in ext.libraries:
            if pkg+libname in libnames: return True
        return False

    def get_outputs(self):
        # In addition to the compiled extensions, report the stub loader
        # source and bytecode files that will be generated.
        outputs = _build_ext.get_outputs(self)
        optimize = self.get_finalized_command('build_py').optimize
        for ext in self.extensions:
            if ext._needs_stub:
                base = os.path.join(self.build_lib, *ext._full_name.split('.'))
                outputs.append(base+'.py')
                outputs.append(base+'.pyc')
                if optimize:
                    outputs.append(base+'.pyo')
        return outputs

    def write_stub(self, output_dir, ext, compile=False):
        # Generate a pure-Python loader that locates and imports the real
        # 'dl-' prefixed extension at runtime via pkg_resources.
        # NOTE(review): whitespace inside the generated source lines below
        # appears collapsed in this copy of the file — verify the stub
        # bodies against upstream setuptools before relying on them.
        log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
        stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
        if compile and os.path.exists(stub_file):
            raise DistutilsError(stub_file+" already exists! Please delete.")
        if not self.dry_run:
            f = open(stub_file,'w')
            f.write('\n'.join([
                "def __bootstrap__():",
                " global __bootstrap__, __file__, __loader__",
                " import sys, os, pkg_resources, imp"+if_dl(", dl"),
                " __file__ = pkg_resources.resource_filename(__name__,%r)"
                % os.path.basename(ext._file_name),
                " del __bootstrap__",
                " if '__loader__' in globals():",
                " del __loader__",
                if_dl(" old_flags = sys.getdlopenflags()"),
                " old_dir = os.getcwd()",
                " try:",
                " os.chdir(os.path.dirname(__file__))",
                if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"),
                " imp.load_dynamic(__name__,__file__)",
                " finally:",
                if_dl(" sys.setdlopenflags(old_flags)"),
                " os.chdir(old_dir)",
                "__bootstrap__()",
                "" # terminal \n
            ]))
            f.close()
        if compile:
            # Byte-compile the stub, then remove the .py so only the
            # compiled form ships.
            from distutils.util import byte_compile
            byte_compile([stub_file], optimize=0,
                         force=True, dry_run=self.dry_run)
            optimize = self.get_finalized_command('install_lib').optimize
            if optimize > 0:
                byte_compile([stub_file], optimize=optimize,
                             force=True, dry_run=self.dry_run)
            if os.path.exists(stub_file) and not self.dry_run:
                os.unlink(stub_file)
# Pick the implementation of link_shared_object that will be grafted onto
# the shlib compiler (see setup_shlib_compiler): a real shared link when
# stubs are usable (or on Windows), otherwise a static archive.
if use_stubs or os.name=='nt':
    # Build shared libraries
    #
    def link_shared_object(self, objects, output_libname, output_dir=None,
        libraries=None, library_dirs=None, runtime_library_dirs=None,
        export_symbols=None, debug=0, extra_preargs=None,
        extra_postargs=None, build_temp=None, target_lang=None
    ):
        # Delegate straight to the compiler's generic link step.
        self.link(
            self.SHARED_LIBRARY, objects, output_libname,
            output_dir, libraries, library_dirs, runtime_library_dirs,
            export_symbols, debug, extra_preargs, extra_postargs,
            build_temp, target_lang
        )
else:
    # Build static libraries everywhere else
    libtype = 'static'

    def link_shared_object(self, objects, output_libname, output_dir=None,
        libraries=None, library_dirs=None, runtime_library_dirs=None,
        export_symbols=None, debug=0, extra_preargs=None,
        extra_postargs=None, build_temp=None, target_lang=None
    ):
        # XXX we need to either disallow these attrs on Library instances,
        # or warn/abort here if set, or something...
        #libraries=None, library_dirs=None, runtime_library_dirs=None,
        #export_symbols=None, extra_preargs=None, extra_postargs=None,
        #build_temp=None

        assert output_dir is None   # distutils build_ext doesn't pass this
        output_dir,filename = os.path.split(output_libname)
        basename, ext = os.path.splitext(filename)
        if self.library_filename("x").startswith('lib'):
            # strip 'lib' prefix; this is kludgy if some platform uses
            # a different prefix
            basename = basename[3:]

        self.create_static_lib(
            objects, basename, output_dir, debug, target_lang
        )
| bsd-3-clause |
Gitlab11/odoo | addons/account/res_currency.py | 340 | 2267 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2010 OpenERP s.a. (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
"""Inherit res.currency to handle accounting date values when converting currencies"""
class res_currency_account(osv.osv):
    """Currency conversion that honours the accounting context.

    When an account configured with the 'average' outgoing currency rate
    method is present in the context, the conversion rate is derived from
    the account's own move lines instead of the rate table.
    """
    _inherit = "res.currency"

    def _get_conversion_rate(self, cr, uid, from_currency, to_currency, context=None):
        if context is None:
            context = {}
        # Start from the standard at-date rate computed by the base class.
        rate = super(res_currency_account, self)._get_conversion_rate(cr, uid, from_currency, to_currency, context=context)
        #process the case where the account doesn't work with an outgoing currency rate method 'at date' but 'average'
        account = context.get('res.currency.compute.account')
        account_invert = context.get('res.currency.compute.account_invert')
        if account and account.currency_mode == 'average' and account.currency_id:
            # Average rate = company-currency balance / foreign-currency
            # balance over the move lines selected by _query_get.
            # NOTE(review): 'query' is an SQL fragment produced internally
            # by _query_get, not user input — confirm before reuse.
            query = self.pool.get('account.move.line')._query_get(cr, uid, context=context)
            cr.execute('select sum(debit-credit),sum(amount_currency) from account_move_line l ' \
                       'where l.currency_id=%s and l.account_id=%s and '+query, (account.currency_id.id,account.id,))
            tot1,tot2 = cr.fetchone()
            # account_invert flips the direction of the conversion; guard
            # against division by zero when either total is empty/NULL.
            if tot2 and not account_invert:
                rate = float(tot1)/float(tot2)
            elif tot1 and account_invert:
                rate = float(tot2)/float(tot1)
        return rate
| agpl-3.0 |
nazo/ansible | contrib/inventory/nsot.py | 51 | 9824 | #!/usr/bin/env python
'''
nsot
====
Ansible Dynamic Inventory to pull hosts from NSoT, a flexible CMDB by Dropbox
Features
--------
* Define host groups in form of NSoT device attribute criteria
* All parameters defined by the spec as of 2015-09-05 are supported.
+ ``--list``: Returns JSON hash of host groups -> hosts and top-level
``_meta`` -> ``hostvars`` which correspond to all device attributes.
Group vars can be specified in the YAML configuration, noted below.
+ ``--host <hostname>``: Returns JSON hash where every item is a device
attribute.
* In addition to all attributes assigned to resource being returned, script
will also append ``site_id`` and ``id`` as facts to utilize.
Configuration
-------------
Since it'd be annoying and failure-prone to guess where your configuration
file is, use ``NSOT_INVENTORY_CONFIG`` to specify the path to it.
This file should adhere to the YAML spec. Each top-level variable must be a
desired Ansible group name hashed with a single 'query' item to define the
NSoT attribute query.
Queries follow the normal NSoT query syntax, `shown here`_
.. _shown here: https://github.com/dropbox/pynsot#set-queries
.. code:: yaml
routers:
query: 'deviceType=ROUTER'
vars:
a: b
c: d
juniper_fw:
query: 'deviceType=FIREWALL manufacturer=JUNIPER'
not_f10:
query: '-manufacturer=FORCE10'
The inventory will automatically use your ``.pynsotrc`` like normal pynsot from
cli would, so make sure that's configured appropriately.
.. note::
Attributes I'm showing above are influenced from ones that the Trigger
project likes. As is the spirit of NSoT, use whichever attributes work best
for your workflow.
If config file is blank or absent, the following default groups will be
created:
* ``routers``: deviceType=ROUTER
* ``switches``: deviceType=SWITCH
* ``firewalls``: deviceType=FIREWALL
These are likely not useful for everyone so please use the configuration. :)
.. note::
By default, resources will only be returned for what your default
site is set for in your ``~/.pynsotrc``.
If you want to specify, add an extra key under the group for ``site: n``.
Output Examples
---------------
Here are some examples shown from just calling the command directly::
$ NSOT_INVENTORY_CONFIG=$PWD/test.yaml ansible_nsot --list | jq '.'
{
"routers": {
"hosts": [
"test1.example.com"
],
"vars": {
"cool_level": "very",
"group": "routers"
}
},
"firewalls": {
"hosts": [
"test2.example.com"
],
"vars": {
"cool_level": "enough",
"group": "firewalls"
}
},
"_meta": {
"hostvars": {
"test2.example.com": {
"make": "SRX",
"site_id": 1,
"id": 108
},
"test1.example.com": {
"make": "MX80",
"site_id": 1,
"id": 107
}
}
},
"rtr_and_fw": {
"hosts": [
"test1.example.com",
"test2.example.com"
],
"vars": {}
}
}
$ NSOT_INVENTORY_CONFIG=$PWD/test.yaml ansible_nsot --host test1 | jq '.'
{
"make": "MX80",
"site_id": 1,
"id": 107
}
'''
from __future__ import print_function
import sys
import os
import pkg_resources
import argparse
import json
import yaml
from textwrap import dedent
from pynsot.client import get_api_client
from pynsot.app import HttpServerError
from click.exceptions import UsageError
from six import string_types
def warning(*objs):
    """Print *objs to stderr, prefixed with ``WARNING:``."""
    print("WARNING: ", *objs, file=sys.stderr)
class NSoTInventory(object):
    '''NSoT client object for gathering inventory.

    Group definitions are read from the YAML file named by the
    ``NSOT_INVENTORY_CONFIG`` environment variable. If the variable is
    unset, or the file is missing or empty, a built-in default set of
    groups is used instead.
    '''

    def __init__(self):
        self.config = dict()
        config_env = os.environ.get('NSOT_INVENTORY_CONFIG')
        if config_env:
            try:
                # Bug fix: open() used to sit outside this try block, so a
                # missing config file crashed with an unhandled IOError
                # instead of falling back to the defaults
                # (os.path.abspath never raises IOError itself).
                config_file = os.path.abspath(config_env)
                with open(config_file) as f:
                    try:
                        self.config.update(yaml.safe_load(f))
                    except TypeError:  # Empty file: safe_load returns None
                        warning('Empty config file')
                        self._config_default()
            except IOError:  # File non-existent: use default config
                self._config_default()
            except Exception as e:
                sys.exit('%s\n' % e)
        else:  # Use defaults if env var missing
            self._config_default()
        self.groups = self.config.keys()
        self.client = get_api_client()
        # Accumulates per-host attributes while groups are processed.
        self._meta = {'hostvars': dict()}

    def _config_default(self):
        '''Populate self.config with the built-in default groups.'''
        default_yaml = '''
        ---
        routers:
          query: deviceType=ROUTER
        switches:
          query: deviceType=SWITCH
        firewalls:
          query: deviceType=FIREWALL
        '''
        self.config = yaml.safe_load(dedent(default_yaml))

    def do_list(self):
        '''Direct callback for when ``--list`` is provided

        Relies on the configuration generated from init to run
        _inventory_group()
        '''
        inventory = dict()
        for group, contents in self.config.items():
            group_response = self._inventory_group(group, contents)
            inventory.update(group_response)
        inventory.update({'_meta': self._meta})
        return json.dumps(inventory)

    def do_host(self, host):
        '''Direct callback for ``--host <host>``: JSON hostvars.'''
        return json.dumps(self._hostvars(host))

    def _hostvars(self, host):
        '''Return dictionary of all device attributes

        Depending on number of devices in NSoT, could be rather slow since this
        has to request every device resource to filter through
        '''
        # NOTE(review): substring match, and raises IndexError when no
        # device matches 'host' — confirm desired behavior with callers.
        device = [i for i in self.client.devices.get()
                  if host in i['hostname']][0]
        attributes = device['attributes']
        attributes.update({'site_id': device['site_id'], 'id': device['id']})
        return attributes

    def _inventory_group(self, group, contents):
        '''Takes a group and returns inventory for it as dict

        :param group: Group name
        :type group: str
        :param contents: The contents of the group's YAML config
        :type contents: dict

        contents param should look like::

            {
                'query': 'xx',
                'vars':
                    'a': 'b'
            }

        Will return something like::

            { group: {
                hosts: [],
                vars: {},
            }
        '''
        query = contents.get('query')
        hostvars = contents.get('vars', dict())
        site = contents.get('site', dict())
        obj = {group: dict()}
        obj[group]['hosts'] = []
        obj[group]['vars'] = hostvars
        # Bug fix: was 'try: assert isinstance(...) except:' — asserts are
        # stripped under -O and the bare except masked unrelated errors.
        if not isinstance(query, string_types):
            sys.exit('ERR: Group queries must be a single string\n'
                     ' Group: %s\n'
                     ' Query: %s\n' % (group, query)
                     )
        try:
            if site:
                # Scope the query to an explicit site when configured.
                site = self.client.sites(site)
                devices = site.devices.query.get(query=query)
            else:
                devices = self.client.devices.query.get(query=query)
        except HttpServerError as e:
            if '500' in str(e.response):
                _site = 'Correct site id?'
                _attr = 'Queried attributes actually exist?'
                questions = _site + '\n' + _attr
                sys.exit('ERR: 500 from server.\n%s' % questions)
            else:
                raise
        except UsageError:
            sys.exit('ERR: Could not connect to server. Running?')

        # Would do a list comprehension here, but would like to save code/time
        # and also acquire attributes in this step
        for host in devices:
            # Iterate through each device that matches query, assign hostname
            # to the group's hosts array and then use this single iteration as
            # a chance to update self._meta which will be used in the final
            # return
            hostname = host['hostname']
            obj[group]['hosts'].append(hostname)
            attributes = host['attributes']
            attributes.update({'site_id': host['site_id'], 'id': host['id']})
            self._meta['hostvars'].update({hostname: attributes})

        return obj
def parse_args():
    """Build the CLI parser, parse sys.argv, and enforce exactly one action."""
    # Line 4 of the module docstring doubles as the parser description.
    description = __doc__.splitlines()[4]

    arg_parser = argparse.ArgumentParser(
        conflict_handler='resolve',
        description=description,
    )

    # --list and --host are mutually exclusive, and one of them is required.
    arg_parser.add_argument(
        '--list', '-l',
        action='store_true',
        dest='list_',  # 'list' would collide with the builtin name
        help='Print JSON object containing hosts to STDOUT',
    )
    arg_parser.add_argument(
        '--host', '-h',
        action='store',
        help='Print JSON object containing hostvars for <host>',
    )

    parsed = arg_parser.parse_args()
    if not parsed.list_ and not parsed.host:
        arg_parser.exit(status=1, message='No action requested')
    if parsed.list_ and parsed.host:
        arg_parser.exit(status=1, message='Too many actions requested')
    return parsed
def main():
    '''Entry point: parse arguments and dispatch to the chosen action.'''
    args = parse_args()
    inventory = NSoTInventory()

    # parse_args() guarantees exactly one of these is set.
    if args.list_:
        print(inventory.do_list())
    elif args.host:
        print(inventory.do_host(args.host))


if __name__ == '__main__':
    main()
| gpl-3.0 |
berendkleinhaneveld/VTK | ThirdParty/Twisted/twisted/web/proxy.py | 34 | 9574 | # -*- test-case-name: twisted.web.test.test_proxy -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Simplistic HTTP proxy support.
This comes in two main variants - the Proxy and the ReverseProxy.
When a Proxy is in use, a browser trying to connect to a server (say,
www.yahoo.com) will be intercepted by the Proxy, and the proxy will covertly
connect to the server, and return the result.
When a ReverseProxy is in use, the client connects directly to the ReverseProxy
(say, www.yahoo.com) which farms off the request to one of a pool of servers,
and returns the result.
Normally, a Proxy is used on the client end of an Internet connection, while a
ReverseProxy is used on the server end.
"""
import urlparse
from urllib import quote as urlquote
from twisted.internet import reactor
from twisted.internet.protocol import ClientFactory
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from twisted.web.http import HTTPClient, Request, HTTPChannel
class ProxyClient(HTTPClient):
    """
    Used by ProxyClientFactory to implement a simple web proxy.

    @ivar _finished: A flag which indicates whether or not the original request
        has been finished yet.
    """
    _finished = False

    def __init__(self, command, rest, version, headers, data, father):
        # 'father' is the incoming Request this client answers on behalf of.
        self.father = father
        self.command = command
        self.rest = rest
        # Strip the hop-by-hop proxy headers and force the upstream
        # connection closed so handleResponseEnd fires when the body ends.
        if "proxy-connection" in headers:
            del headers["proxy-connection"]
        headers["connection"] = "close"
        headers.pop('keep-alive', None)
        self.headers = headers
        self.data = data

    def connectionMade(self):
        # Replay the original request line, headers and body upstream.
        self.sendCommand(self.command, self.rest)
        for header, value in self.headers.items():
            self.sendHeader(header, value)
        self.endHeaders()
        self.transport.write(self.data)

    def handleStatus(self, version, code, message):
        # Mirror the upstream status onto the original request.
        self.father.setResponseCode(int(code), message)

    def handleHeader(self, key, value):
        # t.web.server.Request sets default values for these headers in its
        # 'process' method. When these headers are received from the remote
        # server, they ought to override the defaults, rather than append to
        # them.
        if key.lower() in ['server', 'date', 'content-type']:
            self.father.responseHeaders.setRawHeaders(key, [value])
        else:
            self.father.responseHeaders.addRawHeader(key, value)

    def handleResponsePart(self, buffer):
        # Stream the upstream body straight through to the client.
        self.father.write(buffer)

    def handleResponseEnd(self):
        """
        Finish the original request, indicating that the response has been
        completely written to it, and disconnect the outgoing transport.
        """
        # _finished guards against finishing the request twice (this method
        # can be invoked again when the connection is lost).
        if not self._finished:
            self._finished = True
            self.father.finish()
            self.transport.loseConnection()
class ProxyClientFactory(ClientFactory):
    """
    Factory producing a L{ProxyClient} on behalf of a single proxied request.
    """

    protocol = ProxyClient

    def __init__(self, command, rest, version, headers, data, father):
        self.command = command
        self.rest = rest
        self.version = version
        self.headers = headers
        self.data = data
        self.father = father

    def buildProtocol(self, addr):
        """Instantiate the client with the captured request details."""
        client = self.protocol(self.command, self.rest, self.version,
                               self.headers, self.data, self.father)
        return client

    def clientConnectionFailed(self, connector, reason):
        """
        Report a connection failure in a response to the incoming request as
        an error.
        """
        request = self.father
        request.setResponseCode(501, "Gateway error")
        request.responseHeaders.addRawHeader("Content-Type", "text/html")
        request.write("<H1>Could not connect</H1>")
        request.finish()
class ProxyRequest(Request):
    """
    Used by Proxy to implement a simple web proxy.

    @ivar reactor: the reactor used to create connections.
    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
    """
    # Maps URI scheme -> client factory class / default port.
    protocols = {'http': ProxyClientFactory}
    ports = {'http': 80}

    def __init__(self, channel, queued, reactor=reactor):
        Request.__init__(self, channel, queued)
        self.reactor = reactor

    def process(self):
        # Proxy requests carry an absolute URI; split it into
        # scheme / host[:port] / path components.
        parsed = urlparse.urlparse(self.uri)
        protocol = parsed[0]
        host = parsed[1]
        port = self.ports[protocol]
        if ':' in host:
            # An explicit port in the netloc overrides the scheme default.
            host, port = host.split(':')
            port = int(port)
        # Rebuild the origin-form request target (path + query etc.).
        rest = urlparse.urlunparse(('', '') + parsed[2:])
        if not rest:
            rest = rest + '/'
        class_ = self.protocols[protocol]
        headers = self.getAllHeaders().copy()
        if 'host' not in headers:
            # Synthesize a Host header when the client didn't send one.
            headers['host'] = host
        self.content.seek(0, 0)
        s = self.content.read()
        clientFactory = class_(self.method, rest, self.clientproto, headers,
                               s, self)
        self.reactor.connectTCP(host, port, clientFactory)
class Proxy(HTTPChannel):
    """
    This class implements a simple web proxy.

    Since it inherits from L{twisted.web.http.HTTPChannel}, to use it you
    should do something like this::

        from twisted.web import http
        f = http.HTTPFactory()
        f.protocol = Proxy

    Make the HTTPFactory a listener on a port as per usual, and you have
    a fully-functioning web proxy!
    """
    # Each incoming request on this channel is handled as a ProxyRequest.
    requestFactory = ProxyRequest
class ReverseProxyRequest(Request):
    """
    Used by ReverseProxy to implement a simple reverse proxy.

    @ivar proxyClientFactoryClass: a proxy client factory class, used to create
        new connections.
    @type proxyClientFactoryClass: L{ClientFactory}

    @ivar reactor: the reactor used to create connections.
    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
    """
    proxyClientFactoryClass = ProxyClientFactory

    def __init__(self, channel, queued, reactor=reactor):
        Request.__init__(self, channel, queued)
        self.reactor = reactor

    def process(self):
        """
        Handle this request by connecting to the proxied server and forwarding
        it there, then forwarding the response back as the response to this
        request.
        """
        # Rewrite the Host header to the proxied backend before forwarding.
        self.requestHeaders.setRawHeaders(b"host", [self.factory.host])
        clientFactory = self.proxyClientFactoryClass(
            self.method, self.uri, self.clientproto, self.getAllHeaders(),
            self.content.read(), self)
        self.reactor.connectTCP(self.factory.host, self.factory.port,
                                clientFactory)
class ReverseProxy(HTTPChannel):
    """
    Implements a simple reverse proxy.

    For details of usage, see the file examples/reverse-proxy.py.
    """
    # Each incoming request is forwarded via ReverseProxyRequest.
    requestFactory = ReverseProxyRequest
class ReverseProxyResource(Resource):
    """
    Resource that renders the results gotten from another server

    Put this resource in the tree to cause everything below it to be relayed
    to a different server.

    @ivar proxyClientFactoryClass: a proxy client factory class, used to create
        new connections.
    @type proxyClientFactoryClass: L{ClientFactory}

    @ivar reactor: the reactor used to create connections.
    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
    """
    proxyClientFactoryClass = ProxyClientFactory

    def __init__(self, host, port, path, reactor=reactor):
        """
        @param host: the host of the web server to proxy.
        @type host: C{str}

        @param port: the port of the web server to proxy.
        @type port: C{port}

        @param path: the base path to fetch data from. Note that you shouldn't
            put any trailing slashes in it, it will be added automatically in
            request. For example, if you put B{/foo}, a request on B{/bar} will
            be proxied to B{/foo/bar}. Any required encoding of special
            characters (such as " " or "/") should have been done already.
        @type path: C{str}
        """
        Resource.__init__(self)
        self.host = host
        self.port = port
        self.path = path
        self.reactor = reactor

    def getChild(self, path, request):
        """
        Create and return a proxy resource with the same proxy configuration
        as this one, except that its path also contains the segment given by
        C{path} at the end.
        """
        # Child segments are URL-quoted (with '/' escaped too) before being
        # appended to the base path.
        return ReverseProxyResource(
            self.host, self.port, self.path + '/' + urlquote(path, safe=""),
            self.reactor)

    def render(self, request):
        """
        Render a request by forwarding it to the proxied server.
        """
        # RFC 2616 tells us that we can omit the port if it's the default port,
        # but we have to provide it otherwise
        if self.port == 80:
            host = self.host
        else:
            host = "%s:%d" % (self.host, self.port)
        request.requestHeaders.setRawHeaders(b"host", [host])
        request.content.seek(0, 0)
        # Preserve the original query string when forwarding.
        qs = urlparse.urlparse(request.uri)[4]
        if qs:
            rest = self.path + '?' + qs
        else:
            rest = self.path
        clientFactory = self.proxyClientFactoryClass(
            request.method, rest, request.clientproto,
            request.getAllHeaders(), request.content.read(), request)
        self.reactor.connectTCP(self.host, self.port, clientFactory)
        # Twisted will finish the response asynchronously via ProxyClient.
        return NOT_DONE_YET
| bsd-3-clause |
lemire/PeachPy | test/x86_64/encoding/test_crypto.py | 6 | 7315 | # This file is auto-generated by /codegen/x86_64_test_encoding.py
# Reference opcodes are generated by:
# GNU assembler (GNU Binutils) 2.25
from peachpy.x86_64 import *
import unittest
# Each test checks the byte-exact encoding of one instruction in both its
# register-register and register-memory operand forms (expected bytes were
# produced by GNU binutils, per the file header).
class TestAESDEC(unittest.TestCase):
    """Encoding of AESDEC (one round of AES decryption)."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDE, 0xCE]), AESDEC(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDE, 0x4C, 0xC2, 0xB3]), AESDEC(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestAESDECLAST(unittest.TestCase):
    """Encoding of AESDECLAST (final round of AES decryption)."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDF, 0xCE]), AESDECLAST(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDF, 0x4C, 0xC2, 0xB3]), AESDECLAST(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestAESENC(unittest.TestCase):
    """Encoding of AESENC (one round of AES encryption)."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDC, 0xCE]), AESENC(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDC, 0x4C, 0xC2, 0xB3]), AESENC(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestAESENCLAST(unittest.TestCase):
    """Encoding of AESENCLAST (final round of AES encryption)."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDD, 0xCE]), AESENCLAST(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDD, 0x4C, 0xC2, 0xB3]), AESENCLAST(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestAESIMC(unittest.TestCase):
    """Encoding of AESIMC (InvMixColumn transformation)."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDB, 0xCE]), AESIMC(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x38, 0xDB, 0x4C, 0xC2, 0xB3]), AESIMC(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestAESKEYGENASSIST(unittest.TestCase):
    """Encoding of AESKEYGENASSIST with an immediate round constant."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x3A, 0xDF, 0xCE, 0x02]), AESKEYGENASSIST(xmm1, xmm14, 2).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x3A, 0xDF, 0x4C, 0xC2, 0xB3, 0x02]), AESKEYGENASSIST(xmm1, oword[r10 + rax*8 - 77], 2).encode())
# VEX-encoded (AVX) forms of the AES instructions; three-operand variants
# take a separate destination register.
class TestVAESDEC(unittest.TestCase):
    """Encoding of VAESDEC (VEX form of AESDEC)."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xE2, 0x09, 0xDE, 0xCB]), VAESDEC(xmm1, xmm14, xmm3).encode())
        self.assertEqual(bytearray([0xC4, 0xC2, 0x09, 0xDE, 0x4C, 0xC2, 0xB3]), VAESDEC(xmm1, xmm14, oword[r10 + rax*8 - 77]).encode())
class TestVAESDECLAST(unittest.TestCase):
    """Encoding of VAESDECLAST (VEX form of AESDECLAST)."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xE2, 0x09, 0xDF, 0xCB]), VAESDECLAST(xmm1, xmm14, xmm3).encode())
        self.assertEqual(bytearray([0xC4, 0xC2, 0x09, 0xDF, 0x4C, 0xC2, 0xB3]), VAESDECLAST(xmm1, xmm14, oword[r10 + rax*8 - 77]).encode())
class TestVAESENC(unittest.TestCase):
    """Encoding of VAESENC (VEX form of AESENC)."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xE2, 0x09, 0xDC, 0xCB]), VAESENC(xmm1, xmm14, xmm3).encode())
        self.assertEqual(bytearray([0xC4, 0xC2, 0x09, 0xDC, 0x4C, 0xC2, 0xB3]), VAESENC(xmm1, xmm14, oword[r10 + rax*8 - 77]).encode())
class TestVAESENCLAST(unittest.TestCase):
    """Encoding of VAESENCLAST (VEX form of AESENCLAST)."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xE2, 0x09, 0xDD, 0xCB]), VAESENCLAST(xmm1, xmm14, xmm3).encode())
        self.assertEqual(bytearray([0xC4, 0xC2, 0x09, 0xDD, 0x4C, 0xC2, 0xB3]), VAESENCLAST(xmm1, xmm14, oword[r10 + rax*8 - 77]).encode())
class TestVAESIMC(unittest.TestCase):
    """Encoding of VAESIMC (VEX form of AESIMC; two operands only)."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xC2, 0x79, 0xDB, 0xCE]), VAESIMC(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0xC4, 0xC2, 0x79, 0xDB, 0x4C, 0xC2, 0xB3]), VAESIMC(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestVAESKEYGENASSIST(unittest.TestCase):
    """Encoding of VAESKEYGENASSIST with an immediate round constant."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xC3, 0x79, 0xDF, 0xCE, 0x02]), VAESKEYGENASSIST(xmm1, xmm14, 2).encode())
        self.assertEqual(bytearray([0xC4, 0xC3, 0x79, 0xDF, 0x4C, 0xC2, 0xB3, 0x02]), VAESKEYGENASSIST(xmm1, oword[r10 + rax*8 - 77], 2).encode())
# Intel SHA extension instructions (legacy 0F 38 / 0F 3A encodings).
class TestSHA1MSG1(unittest.TestCase):
    """Encoding of SHA1MSG1 (SHA-1 message schedule, part 1)."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xC9, 0xCE]), SHA1MSG1(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xC9, 0x4C, 0xC2, 0xB3]), SHA1MSG1(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestSHA1MSG2(unittest.TestCase):
    """Encoding of SHA1MSG2 (SHA-1 message schedule, part 2)."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCA, 0xCE]), SHA1MSG2(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCA, 0x4C, 0xC2, 0xB3]), SHA1MSG2(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestSHA1NEXTE(unittest.TestCase):
    """Encoding of SHA1NEXTE (calculate SHA-1 state variable E)."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xC8, 0xCE]), SHA1NEXTE(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xC8, 0x4C, 0xC2, 0xB3]), SHA1NEXTE(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestSHA1RNDS4(unittest.TestCase):
    """Encoding of SHA1RNDS4 with an immediate round-function selector."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x3A, 0xCC, 0xCE, 0x02]), SHA1RNDS4(xmm1, xmm14, 2).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x3A, 0xCC, 0x4C, 0xC2, 0xB3, 0x02]), SHA1RNDS4(xmm1, oword[r10 + rax*8 - 77], 2).encode())
class TestSHA256MSG1(unittest.TestCase):
    """Encoding of SHA256MSG1 (SHA-256 message schedule, part 1)."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCC, 0xCE]), SHA256MSG1(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCC, 0x4C, 0xC2, 0xB3]), SHA256MSG1(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestSHA256MSG2(unittest.TestCase):
    """Encoding of SHA256MSG2 (SHA-256 message schedule, part 2)."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCD, 0xCE]), SHA256MSG2(xmm1, xmm14).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCD, 0x4C, 0xC2, 0xB3]), SHA256MSG2(xmm1, oword[r10 + rax*8 - 77]).encode())
class TestSHA256RNDS2(unittest.TestCase):
    """Encoding of SHA256RNDS2 (two SHA-256 rounds; xmm0 is implicit)."""
    def runTest(self):
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCB, 0xCE]), SHA256RNDS2(xmm1, xmm14, xmm0).encode())
        self.assertEqual(bytearray([0x41, 0x0F, 0x38, 0xCB, 0x4C, 0xC2, 0xB3]), SHA256RNDS2(xmm1, oword[r10 + rax*8 - 77], xmm0).encode())
class TestPCLMULQDQ(unittest.TestCase):
    """Encoding of PCLMULQDQ (carry-less multiply, immediate selects halves)."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x3A, 0x44, 0xCE, 0x02]), PCLMULQDQ(xmm1, xmm14, 2).encode())
        self.assertEqual(bytearray([0x66, 0x41, 0x0F, 0x3A, 0x44, 0x4C, 0xC2, 0xB3, 0x02]), PCLMULQDQ(xmm1, oword[r10 + rax*8 - 77], 2).encode())
class TestVPCLMULQDQ(unittest.TestCase):
    """Encoding of VPCLMULQDQ (VEX, three-operand carry-less multiply)."""
    def runTest(self):
        self.assertEqual(bytearray([0xC4, 0xE3, 0x09, 0x44, 0xCB, 0x02]), VPCLMULQDQ(xmm1, xmm14, xmm3, 2).encode())
        self.assertEqual(bytearray([0xC4, 0xC3, 0x09, 0x44, 0x4C, 0xC2, 0xB3, 0x02]), VPCLMULQDQ(xmm1, xmm14, oword[r10 + rax*8 - 77], 2).encode())
class TestRDRAND(unittest.TestCase):
    """Encoding of RDRAND into 16-, 32-, and 64-bit registers."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x0F, 0xC7, 0xF6]), RDRAND(si).encode())
        self.assertEqual(bytearray([0x0F, 0xC7, 0xF5]), RDRAND(ebp).encode())
        self.assertEqual(bytearray([0x48, 0x0F, 0xC7, 0xF1]), RDRAND(rcx).encode())
class TestRDSEED(unittest.TestCase):
    """Encoding of RDSEED into 16-, 32-, and 64-bit registers."""
    def runTest(self):
        self.assertEqual(bytearray([0x66, 0x0F, 0xC7, 0xFE]), RDSEED(si).encode())
        self.assertEqual(bytearray([0x0F, 0xC7, 0xFD]), RDSEED(ebp).encode())
        self.assertEqual(bytearray([0x48, 0x0F, 0xC7, 0xF9]), RDSEED(rcx).encode())
| bsd-2-clause |
NixaSoftware/CVis | venv/lib/python2.7/site-packages/pygments/styles/friendly.py | 31 | 2515 | # -*- coding: utf-8 -*-
"""
pygments.styles.friendly
~~~~~~~~~~~~~~~~~~~~~~~~
A modern style based on the VIM pyte theme.
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class FriendlyStyle(Style):
    """
    A modern style based on the VIM pyte theme.
    """
    # Overall page colors.
    background_color = "#f0f0f0"
    default_style = ""
    # Token-type -> style-string mapping consumed by pygments formatters.
    # Style strings combine modifiers (bold/italic/...) with hex colors.
    styles = {
        Whitespace:                "#bbbbbb",
        # Comments: muted blue; preprocessor lines get the keyword green.
        Comment:                   "italic #60a0b0",
        Comment.Preproc:           "noitalic #007020",
        Comment.Special:           "noitalic bg:#fff0f0",
        # Keywords and operators.
        Keyword:                   "bold #007020",
        Keyword.Pseudo:            "nobold",
        Keyword.Type:              "nobold #902000",
        Operator:                  "#666666",
        Operator.Word:             "bold #007020",
        # Identifiers.
        Name.Builtin:              "#007020",
        Name.Function:             "#06287e",
        Name.Class:                "bold #0e84b5",
        Name.Namespace:            "bold #0e84b5",
        Name.Exception:            "#007020",
        Name.Variable:             "#bb60d5",
        Name.Constant:             "#60add5",
        Name.Label:                "bold #002070",
        Name.Entity:               "bold #d55537",
        Name.Attribute:            "#4070a0",
        Name.Tag:                  "bold #062873",
        Name.Decorator:            "bold #555555",
        # Literals.
        String:                    "#4070a0",
        String.Doc:                "italic",
        String.Interpol:           "italic #70a0d0",
        String.Escape:             "bold #4070a0",
        String.Regex:              "#235388",
        String.Symbol:             "#517918",
        String.Other:              "#c65d09",
        Number:                    "#40a070",
        # Generic tokens (diffs, tracebacks, console sessions, markup).
        Generic.Heading:           "bold #000080",
        Generic.Subheading:        "bold #800080",
        Generic.Deleted:           "#A00000",
        Generic.Inserted:          "#00A000",
        Generic.Error:             "#FF0000",
        Generic.Emph:              "italic",
        Generic.Strong:            "bold",
        Generic.Prompt:            "bold #c65d09",
        Generic.Output:            "#888",
        Generic.Traceback:         "#04D",
        Error:                     "border:#FF0000"
    }
| apache-2.0 |
syphar/django | tests/gis_tests/utils.py | 124 | 1965 | import unittest
from functools import wraps
from django.conf import settings
from django.db import DEFAULT_DB_ALIAS, connection
def skipUnlessGISLookup(*gis_lookups):
    """
    Skip a test unless a database supports all of gis_lookups.
    """
    def decorator(test_func):
        @wraps(test_func)
        def skip_wrapper(*args, **kwargs):
            supported = connection.ops.gis_operators
            if not all(key in supported for key in gis_lookups):
                raise unittest.SkipTest(
                    "Database doesn't support all the lookups: %s" % ", ".join(gis_lookups)
                )
            return test_func(*args, **kwargs)
        return skip_wrapper
    return decorator
def no_backend(test_func, backend):
    "Use this decorator to disable test on specified backend."
    engine = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1]
    if engine != backend:
        # Not the targeted backend: leave the test untouched.
        return test_func

    @unittest.skip("This test is skipped on '%s' backend" % backend)
    def inner():
        pass
    return inner
# Decorators to disable entire test functions for specific
# spatial backends.
def no_oracle(func):
    """Disable the decorated test function on the Oracle backend."""
    return no_backend(func, 'oracle')
# Shortcut booleans to omit only portions of tests.
# The backend name is the last dotted component of the configured ENGINE.
_default_db = settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE'].rsplit('.')[-1]
oracle = _default_db == 'oracle'
postgis = _default_db == 'postgis'
mysql = _default_db == 'mysql'
spatialite = _default_db == 'spatialite'
# MySQL spatial indices can't handle NULL geometries.
gisfield_may_be_null = not mysql
# Import the SpatialRefSys model matching the active spatial backend under a
# common alias; SpatialRefSys is None when no supported backend is configured.
if oracle and 'gis' in settings.DATABASES[DEFAULT_DB_ALIAS]['ENGINE']:
    from django.contrib.gis.db.backends.oracle.models import OracleSpatialRefSys as SpatialRefSys
elif postgis:
    from django.contrib.gis.db.backends.postgis.models import PostGISSpatialRefSys as SpatialRefSys
elif spatialite:
    from django.contrib.gis.db.backends.spatialite.models import SpatialiteSpatialRefSys as SpatialRefSys
else:
    SpatialRefSys = None
| bsd-3-clause |
HybridF5/jacket | jacket/tests/compute/unit/fake_ldap.py | 1 | 9268 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Fake LDAP server for test harness.
This class does very little error checking, and knows nothing about ldap
class definitions. It implements the minimum emulation of the python ldap
library to work with compute.
"""
import fnmatch
from oslo_serialization import jsonutils
import six
from six.moves import range
class Store(object):
    """Process-wide singleton access point for the fake LDAP backing store.

    NOTE(review): instance() caches a _StorageDict (not a Store) on the
    class, so __init__'s guard only triggers once instance() has been used.
    """
    def __init__(self):
        # Direct instantiation is rejected after the singleton exists;
        # callers should use Store.instance().
        if hasattr(self.__class__, '_instance'):
            raise Exception('Attempted to instantiate singleton')
    @classmethod
    def instance(cls):
        # Lazily create the shared storage dict on first access.
        if not hasattr(cls, '_instance'):
            cls._instance = _StorageDict()
        return cls._instance
class _StorageDict(dict):
def keys(self, pat=None):
ret = super(_StorageDict, self).keys()
if pat is not None:
ret = fnmatch.filter(ret, pat)
return ret
def delete(self, key):
try:
del self[key]
except KeyError:
pass
def flushdb(self):
self.clear()
def hgetall(self, key):
"""Returns the hash for the given key
Creates the hash if the key doesn't exist.
"""
try:
return self[key]
except KeyError:
self[key] = {}
return self[key]
def hget(self, key, field):
hashdict = self.hgetall(key)
try:
return hashdict[field]
except KeyError:
hashdict[field] = {}
return hashdict[field]
def hset(self, key, field, val):
hashdict = self.hgetall(key)
hashdict[field] = val
def hmset(self, key, value_dict):
hashdict = self.hgetall(key)
for field, val in value_dict.items():
hashdict[field] = val
# Search-scope constants mirroring the values in the real python-ldap module.
SCOPE_BASE = 0
SCOPE_ONELEVEL = 1  # Not implemented
SCOPE_SUBTREE = 2
# Modify operations accepted by FakeLDAP.modify_s(), mirroring python-ldap.
MOD_ADD = 0
MOD_DELETE = 1
MOD_REPLACE = 2
class NO_SUCH_OBJECT(Exception):
    """Stand-in for the exception of the same name in the real LDAP module."""
class OBJECT_CLASS_VIOLATION(Exception):
    """Stand-in for the exception of the same name in the real LDAP module."""
class SERVER_DOWN(Exception):
    """Stand-in for the exception of the same name in the real LDAP module."""
def initialize(_uri):
    """Opens a fake connection with an LDAP server."""
    # The URI is ignored: every FakeLDAP shares the same global Store.
    return FakeLDAP()
def _match_query(query, attrs):
    """Match an ldap query to an attribute dictionary.

    The characters &, |, and ! are supported in the query. No syntax checking
    is performed, so malformed queries will not work correctly.
    """
    # cut off the parentheses
    inner = query[1:-1]
    if inner.startswith('&'):
        # cut off the &; an AND node has exactly two parenthesized operands
        l, r = _paren_groups(inner[1:])
        return _match_query(l, attrs) and _match_query(r, attrs)
    if inner.startswith('|'):
        # cut off the |; an OR node has exactly two parenthesized operands
        l, r = _paren_groups(inner[1:])
        return _match_query(l, attrs) or _match_query(r, attrs)
    if inner.startswith('!'):
        # cut off the ! and the nested parentheses
        return not _match_query(query[2:-1], attrs)
    # Base case: a simple "key=value" comparison.
    (k, _sep, v) = inner.partition('=')
    return _match(k, v, attrs)
def _paren_groups(source):
"""Split a string into parenthesized groups."""
count = 0
start = 0
result = []
for pos in range(len(source)):
if source[pos] == '(':
if count == 0:
start = pos
count += 1
if source[pos] == ')':
count -= 1
if count == 0:
result.append(source[start:pos + 1])
return result
def _match(key, value, attrs):
"""Match a given key and value against an attribute list."""
if key not in attrs:
return False
# This is a wild card search. Implemented as all or nothing for now.
if value == "*":
return True
if key != "objectclass":
return value in attrs[key]
# it is an objectclass check, so check subclasses
values = _subs(value)
for v in values:
if v in attrs[key]:
return True
return False
def _subs(value):
"""Returns a list of subclass strings.
The strings represent the ldap object class plus any subclasses that
inherit from it. Fakeldap doesn't know about the ldap object structure,
so subclasses need to be defined manually in the dictionary below.
"""
subs = {'groupOfNames': ['novaProject']}
if value in subs:
return [value] + subs[value]
return [value]
def _from_json(encoded):
    """Convert attribute values from json representation.

    Args:
    encoded -- a json encoded string

    Returns a list of strings
    """
    decoded = jsonutils.loads(encoded)
    return [str(item) for item in decoded]
def _to_json(unencoded):
    """Convert attribute values into json representation.

    Args:
    unencoded -- an unencoded string or list of strings. If it
                 is a single string, it will be converted into a list.

    Returns a json string
    """
    values = list(unencoded)
    return jsonutils.dumps(values)
# When a test sets this to True, every FakeLDAP method raises SERVER_DOWN.
server_fail = False
class FakeLDAP(object):
    """Fake LDAP connection.

    All state lives in the shared Store singleton, so every FakeLDAP
    instance in the process sees the same directory contents.  Each method
    raises SERVER_DOWN when the module-level server_fail flag is set.
    """
    def simple_bind_s(self, dn, password):
        """This method is ignored, but provided for compatibility."""
        if server_fail:
            raise SERVER_DOWN()
        pass
    def unbind_s(self):
        """This method is ignored, but provided for compatibility."""
        if server_fail:
            raise SERVER_DOWN()
        pass
    def add_s(self, dn, attr):
        """Add an object with the specified attributes at dn."""
        if server_fail:
            raise SERVER_DOWN()
        key = "%s%s" % (self.__prefix, dn)
        # Attribute values are stored json-encoded, one hash field per type.
        value_dict = {k: _to_json(v) for k, v in attr}
        Store.instance().hmset(key, value_dict)
    def delete_s(self, dn):
        """Remove the ldap object at specified dn."""
        if server_fail:
            raise SERVER_DOWN()
        Store.instance().delete("%s%s" % (self.__prefix, dn))
    def modify_s(self, dn, attrs):
        """Modify the object at dn using the attribute list.

        :param dn: a dn
        :param attrs: a list of tuples in the following form::

            ([MOD_ADD | MOD_DELETE | MOD_REPACE], attribute, value)
        """
        if server_fail:
            raise SERVER_DOWN()
        store = Store.instance()
        key = "%s%s" % (self.__prefix, dn)
        for cmd, k, v in attrs:
            values = _from_json(store.hget(key, k))
            if cmd == MOD_ADD:
                values.append(v)
            elif cmd == MOD_REPLACE:
                values = [v]
            else:
                # MOD_DELETE: drop the single matching value.
                values.remove(v)
            store.hset(key, k, _to_json(values))
    def modrdn_s(self, dn, newrdn):
        # Rename by copying every attribute to the new dn and deleting the
        # old entry; the fake store has no atomic rename.
        oldobj = self.search_s(dn, SCOPE_BASE)
        if not oldobj:
            raise NO_SUCH_OBJECT()
        newdn = "%s,%s" % (newrdn, dn.partition(',')[2])
        newattrs = oldobj[0][1]
        modlist = []
        for attrtype in newattrs.keys():
            modlist.append((attrtype, newattrs[attrtype]))
        self.add_s(newdn, modlist)
        self.delete_s(dn)
    def search_s(self, dn, scope, query=None, fields=None):
        """Search for all matching objects under dn using the query.

        Args:
        dn -- dn to search under
        scope -- only SCOPE_BASE and SCOPE_SUBTREE are supported
        query -- query to filter objects by
        fields -- fields to return. Returns all fields if not specified
        """
        if server_fail:
            raise SERVER_DOWN()
        if scope != SCOPE_BASE and scope != SCOPE_SUBTREE:
            raise NotImplementedError(str(scope))
        store = Store.instance()
        if scope == SCOPE_BASE:
            pattern = "%s%s" % (self.__prefix, dn)
            keys = store.keys(pattern)
        else:
            # Subtree search: any key whose dn suffix matches.
            keys = store.keys("%s*%s" % (self.__prefix, dn))
        if not keys:
            raise NO_SUCH_OBJECT()
        objects = []
        for key in keys:
            # get the attributes from the store
            attrs = store.hgetall(key)
            # turn the values from the store into lists
            attrs = {k: _from_json(v) for k, v in six.iteritems(attrs)}
            # filter the objects by query
            if not query or _match_query(query, attrs):
                # filter the attributes by fields
                attrs = {k: v for k, v in six.iteritems(attrs)
                         if not fields or k in fields}
                objects.append((key[len(self.__prefix):], attrs))
        return objects
    @property
    def __prefix(self):
        """Get the prefix to use for all keys."""
        return 'ldap:'
| apache-2.0 |
salv-orlando/MyRepo | nova/tests/api/ec2/test_admin.py | 1 | 19772 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Tests for api.ec2.admin"""
import datetime
from nova import context
from nova import db
from nova import exception
from nova import flags
from nova import test
from nova import utils
from nova.api.ec2 import admin
from nova.api.ec2 import ec2utils
from nova.cloudpipe import pipelib
from nova.compute import vm_states
class AdminTestCase(test.TestCase):
    """Unit tests for the dict-building helpers in nova.api.ec2.admin."""
    def setUp(self):
        super(AdminTestCase, self).setUp()
        # Stub vpn_ping so 127.0.0.1 reports "up" and any other address "down".
        self.stubs.Set(utils, 'vpn_ping',
                       lambda address, port: address == '127.0.0.1')
    def test_user_dict(self):
        user = type('User', (object,),
                    {'id': 'bob', 'access': 'foo', 'secret': 'bar'})
        expected_user_dict = {'username': 'bob',
                              'accesskey': 'foo',
                              'secretkey': 'bar',
                              'file': 'filename'}
        self.assertEqual(expected_user_dict, admin.user_dict(user, 'filename'))
    def test_user_dict_no_file(self):
        user = type('User', (object,),
                    {'id': 'bob', 'access': 'foo', 'secret': 'bar'})
        expected_user_dict = {'username': 'bob',
                              'accesskey': 'foo',
                              'secretkey': 'bar',
                              'file': None}
        self.assertEqual(expected_user_dict, admin.user_dict(user))
    def test_user_dict_no_user(self):
        self.assertEqual({}, admin.user_dict(None))
    def test_project_dict(self):
        project = type('Project', (object,), {'id': 'project',
                                              'project_manager_id': 'foo',
                                              'description': 'bar'})
        expected_project_dict = {'projectname': 'project',
                                 'project_manager_id': 'foo',
                                 'description': 'bar'}
        self.assertEqual(expected_project_dict, admin.project_dict(project))
    def test_project_dict_no_project(self):
        self.assertEqual({}, admin.project_dict(None))
    def test_host_dict_using_updated_at(self):
        # instances and volumes only used for count
        instances = range(2)
        volumes = range(3)
        now = datetime.datetime.now()
        updated_at = now - datetime.timedelta(seconds=10)
        compute_service = {'updated_at': updated_at}
        volume_service = {'updated_at': updated_at}
        expected_host_dict = {'hostname': 'server',
                              'instance_count': 2,
                              'volume_count': 3,
                              'compute': 'up',
                              'volume': 'up'}
        self.assertEqual(expected_host_dict,
                         admin.host_dict('server', compute_service, instances,
                                         volume_service, volumes, now))
    def test_host_dict_service_down_using_created_at(self):
        # instances and volumes only used for count
        instances = range(2)
        volumes = range(3)
        # service_down_time is 60 by default so we set to 70 to simulate
        # services being down
        now = datetime.datetime.now()
        created_at = now - datetime.timedelta(seconds=70)
        compute_service = {'created_at': created_at, 'updated_at': None}
        volume_service = {'created_at': created_at, 'updated_at': None}
        expected_host_dict = {'hostname': 'server',
                              'instance_count': 2,
                              'volume_count': 3,
                              'compute': 'down',
                              'volume': 'down'}
        self.assertEqual(expected_host_dict,
                         admin.host_dict('server', compute_service, instances,
                                         volume_service, volumes, now))
    def test_instance_dict(self):
        inst = {'name': 'this_inst',
                'memory_mb': 1024,
                'vcpus': 2,
                'local_gb': 500,
                'flavorid': 1}
        expected_inst_dict = {'name': 'this_inst',
                              'memory_mb': 1024,
                              'vcpus': 2,
                              'disk_gb': 500,
                              'flavor_id': 1}
        self.assertEqual(expected_inst_dict, admin.instance_dict(inst))
    def test_vpn_dict_state_running(self):
        isonow = datetime.datetime.utcnow()
        vpn_instance = {'id': 1,
                        'created_at': isonow,
                        'fixed_ip': {'address': '127.0.0.1'}}
        project = type('Project', (object,), {'id': 'project',
                                              'vpn_ip': '127.0.0.1',
                                              'vpn_port': 1234})
        # Returns state running for 127.0.0.1 - look at class setup
        expected_vpn_dict = {'project_id': 'project',
                             'public_ip': '127.0.0.1',
                             'public_port': 1234,
                             'internal_ip': '127.0.0.1',
                             'instance_id': ec2utils.id_to_ec2_id(1),
                             'created_at': utils.isotime(isonow),
                             'state': 'running'}
        self.assertEqual(expected_vpn_dict,
                         admin.vpn_dict(project, vpn_instance))
    def test_vpn_dict_state_down(self):
        isonow = datetime.datetime.utcnow()
        vpn_instance = {'id': 1,
                        'created_at': isonow,
                        'fixed_ip': {'address': '127.0.0.1'}}
        project = type('Project', (object,), {'id': 'project',
                                              'vpn_ip': '127.0.0.2',
                                              'vpn_port': 1234})
        # Returns state down for 127.0.0.2 - look at class setup
        vpn_dict = admin.vpn_dict(project, vpn_instance)
        self.assertEqual('down', vpn_dict['state'])
    def test_vpn_dict_invalid_project_vpn_config(self):
        isonow = datetime.datetime.utcnow()
        vpn_instance = {'id': 1,
                        'created_at': isonow,
                        'fixed_ip': {'address': '127.0.0.1'}}
        # Inline project object - vpn_port of None to make it invalid
        project = type('Project', (object,), {'id': 'project',
                                              'vpn_ip': '127.0.0.2',
                                              'vpn_port': None})
        # Returns state down for 127.0.0.2 - look at class setup
        vpn_dict = admin.vpn_dict(project, vpn_instance)
        self.assertEqual('down - invalid project vpn config',
                         vpn_dict['state'])
    def test_vpn_dict_non_vpn_instance(self):
        # No instance at all: the vpn is reported as still pending.
        project = type('Project', (object,), {'id': 'project',
                                              'vpn_ip': '127.0.0.1',
                                              'vpn_port': '1234'})
        expected_vpn_dict = {'project_id': 'project',
                             'public_ip': '127.0.0.1',
                             'public_port': '1234',
                             'state': 'pending'}
        self.assertEqual(expected_vpn_dict, admin.vpn_dict(project, None))
class AdminControllerTestCase(test.TestCase):
@classmethod
def setUpClass(cls):
cls._c = context.get_admin_context()
cls._ac = admin.AdminController()
def test_admin_controller_to_str(self):
self.assertEqual('AdminController', str(admin.AdminController()))
def test_describe_instance_types(self):
insts = self._ac.describe_instance_types(self._c)['instanceTypeSet']
for inst_name in ('m1.medium', 'm1.large', 'm1.tiny', 'm1.xlarge',
'm1.small',):
self.assertIn(inst_name, [i['name'] for i in insts])
def test_register_user(self):
registered_user = self._ac.register_user(self._c, 'bob')
self.assertEqual('bob', registered_user['username'])
def test_describe_user(self):
self._ac.register_user(self._c, 'bob')
self.assertEqual('bob',
self._ac.describe_user(self._c, 'bob')['username'])
def test_describe_users(self):
self._ac.register_user(self._c, 'bob')
users = self._ac.describe_users(self._c)
self.assertIn('userSet', users)
self.assertEqual('bob', users['userSet'][0]['username'])
def test_deregister_user(self):
self._ac.register_user(self._c, 'bob')
self._ac.deregister_user(self._c, 'bob')
self.assertRaises(exception.UserNotFound,
self._ac.describe_user,
self._c, 'bob')
def test_register_project(self):
self._ac.register_user(self._c, 'bob')
self.assertEqual('bobs_project',
self._ac.register_project(self._c,
'bobs_project',
'bob')['projectname'])
def test_describe_projects(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
projects = self._ac.describe_projects(self._c)
self.assertIn('projectSet', projects)
self.assertEqual('bobs_project',
projects['projectSet'][0]['projectname'])
def test_deregister_project(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.deregister_project(self._c, 'bobs_project')
self.assertRaises(exception.ProjectNotFound,
self._ac.describe_project,
self._c, 'bobs_project')
def test_describe_project_members(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
members = self._ac.describe_project_members(self._c, 'bobs_project')
self.assertIn('members', members)
self.assertEqual('bob', members['members'][0]['member'])
def test_modify_project(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_project(self._c, 'bobs_project', 'bob',
description='I like cake')
project = self._ac.describe_project(self._c, 'bobs_project')
self.assertEqual('I like cake', project['description'])
def test_modify_project_member_add(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_user(self._c, 'mary')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_project_member(self._c, 'mary', 'bobs_project', 'add')
members = self._ac.describe_project_members(self._c, 'bobs_project')
self.assertIn('mary', [m['member'] for m in members['members']])
def test_modify_project_member_remove(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_project_member(self._c, 'bob', 'bobs_project',
'remove')
members = self._ac.describe_project_members(self._c, 'bobs_project')
self.assertNotIn('bob', [m['member'] for m in members['members']])
def test_modify_project_member_invalid_operation(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self.assertRaises(exception.ApiError,
self._ac.modify_project_member,
self._c, 'bob', 'bobs_project', 'invalid_operation')
def test_describe_roles(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
roles = self._ac.describe_roles(self._c, 'bobs_project')
# Default roles ('sysadmin', 'netadmin', 'developer') should be in here
roles = [r['role'] for r in roles['roles']]
for role in ('sysadmin', 'netadmin', 'developer'):
self.assertIn('sysadmin', roles)
def test_modify_user_role_add(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_user_role(self._c, 'bob', 'itsec')
user_roles = self._ac.describe_user_roles(self._c, 'bob')
self.assertIn('itsec', [r['role'] for r in user_roles['roles']])
def test_modify_user_role_project_add(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_user_role(self._c, 'bob', 'developer', 'bobs_project')
user_roles = self._ac.describe_user_roles(self._c, 'bob',
'bobs_project')
self.assertIn('developer', [r['role'] for r in user_roles['roles']])
def test_modify_user_role_remove(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_user_role(self._c, 'bob', 'itsec')
self._ac.modify_user_role(self._c, 'bob', 'itsec', operation='remove')
user_roles = self._ac.describe_user_roles(self._c, 'bob')
self.assertNotIn('itsec', [r['role'] for r in user_roles['roles']])
def test_modify_user_role_project_remove(self):
self._ac.register_user(self._c, 'bob')
self._ac.register_project(self._c, 'bobs_project', 'bob')
self._ac.modify_user_role(self._c, 'bob', 'developer', 'bobs_project')
self._ac.modify_user_role(self._c, 'bob', 'developer', 'bobs_project',
'remove')
user_roles = self._ac.describe_user_roles(self._c, 'bob',
'bobs_project')
self.assertNotIn('developer', [r['role'] for r in user_roles['roles']])
def test_modify_user_role_invalid(self):
self.assertRaises(exception.ApiError,
self._ac.modify_user_role,
self._c, 'bob', 'itsec',
operation='invalid_operation')
def test_describe_hosts_compute(self):
db.service_create(self._c, {'host': 'host1',
'binary': "nova-compute",
'topic': 'compute',
'report_count': 0,
'availability_zone': "zone1"})
hosts = self._ac.describe_hosts(self._c)['hosts']
self.assertEqual('host1', hosts[0]['hostname'])
def test_describe_hosts_volume(self):
db.service_create(self._c, {'host': 'volume1',
'binary': "nova-volume",
'topic': 'volume',
'report_count': 0,
'availability_zone': "zone1"})
hosts = self._ac.describe_hosts(self._c)['hosts']
self.assertEqual('volume1', hosts[0]['hostname'])
def test_block_external_addresses(self):
result = self._ac.block_external_addresses(self._c, '192.168.100.1/24')
self.assertEqual('OK', result['status'])
self.assertEqual('Added 3 rules', result['message'])
    def test_block_external_addresses_already_existent_rule(self):
        """Blocking the same CIDR a second time raises ApiError."""
        self._ac.block_external_addresses(self._c, '192.168.100.1/24')
        self.assertRaises(exception.ApiError,
                          self._ac.block_external_addresses,
                          self._c, '192.168.100.1/24')
    def test_describe_external_address_blocks(self):
        """A blocked CIDR shows up in the external address block listing."""
        self._ac.block_external_addresses(self._c, '192.168.100.1/24')
        self.assertEqual(
            {'externalIpBlockInfo': [{'cidr': u'192.168.100.1/24'}]},
            self._ac.describe_external_address_blocks(self._c))
    def test_remove_external_address_block(self):
        """Removing a blocked CIDR deletes its rules and empties the list."""
        self._ac.block_external_addresses(self._c, '192.168.100.1/24')
        result = self._ac.remove_external_address_block(self._c,
                                                        '192.168.100.1/24')
        self.assertEqual('OK', result['status'])
        self.assertEqual('Deleted 3 rules', result['message'])
        result = self._ac.describe_external_address_blocks(self._c)
        self.assertEqual([], result['externalIpBlockInfo'])
    def test_start_vpn(self):
        """start_vpn launches a cloudpipe instance and returns its ec2 id."""
        def fake_launch_vpn_instance(self, *args):
            # The real launch would boot an instance; a no-op is enough here.
            pass
        def get_fake_instance_func():
            # Closure state: the first lookup (the pre-launch "is a vpn
            # already running?" check) must find nothing; later lookups must
            # see the launched vpn instance.
            first_call = [True]
            def fake_instance_get_all_by_project(self, *args):
                if first_call[0]:
                    first_call[0] = False
                    return []
                else:
                    return [{'id': 1,
                             'user_id': 'bob',
                             'image_id': str(flags.FLAGS.vpn_image_id),
                             'project_id': 'bobs_project',
                             'instance_type_id': '1',
                             'os_type': 'linux',
                             'architecture': 'x86-64',
                             'state_description': 'running',
                             'vm_state': vm_states.ACTIVE,
                             'image_ref': '3'}]
            return fake_instance_get_all_by_project
        self.stubs.Set(pipelib.CloudPipe, 'launch_vpn_instance',
                       fake_launch_vpn_instance)
        self.stubs.Set(db, 'instance_get_all_by_project',
                       get_fake_instance_func())
        self._ac.register_user(self._c, 'bob')
        self._ac.register_project(self._c, 'bobs_project', 'bob')
        # Database id 1 maps to the ec2-style id 'i-00000001'.
        self.assertEqual('i-00000001',
                self._ac.start_vpn(self._c, 'bobs_project')['instance_id'])
    def test_describe_vpns(self):
        """describe_vpns reports each project's vpn instance and its state."""
        def fake_instance_get_all_by_project(self, *args):
            # Created over a minute ago, so describe_vpns treats the
            # instance as fully started rather than still booting.
            now = datetime.datetime.now()
            created_at = now - datetime.timedelta(seconds=70)
            return [{'id': 1,
                     'user_id': 'bob',
                     'image_id': str(flags.FLAGS.vpn_image_id),
                     'project_id': 'bobs_project',
                     'instance_type_id': '1',
                     'os_type': 'linux',
                     'architecture': 'x86-64',
                     'state_description': 'running',
                     'created_at': created_at,
                     'vm_state': vm_states.ACTIVE,
                     'image_ref': '3'}]
        self.stubs.Set(db, 'instance_get_all_by_project',
                       fake_instance_get_all_by_project)
        self._ac.register_user(self._c, 'bob')
        self._ac.register_project(self._c, 'bobs_project', 'bob')
        vpns = self._ac.describe_vpns(self._c)
        self.assertIn('items', vpns)
        item = vpns['items'][0]
        self.assertEqual('i-00000001', item['instance_id'])
        # No vpn config was set up for the project, so no public endpoint.
        self.assertEqual(None, item['public_port'])
        self.assertEqual(None, item['public_ip'])
        self.assertEqual('down - invalid project vpn config', item['state'])
        self.assertEqual(u'bobs_project', item['project_id'])
| apache-2.0 |
shikhardb/scikit-learn | examples/cluster/plot_digits_linkage.py | 369 | 2959 | """
=============================================================================
Various Agglomerative Clustering on a 2D embedding of digits
=============================================================================
An illustration of various linkage option for agglomerative clustering on
a 2D embedding of the digits dataset.
The goal of this example is to show intuitively how the metrics behave, and
not to find good clusters for the digits. This is why the example works on a
2D embedding.
What this example shows us is the behavior "rich getting richer" of
agglomerative clustering that tends to create uneven cluster sizes.
This behavior is especially pronounced for the average linkage strategy,
that ends up with a couple of singleton clusters.
"""
# Authors: Gael Varoquaux
# License: BSD 3 clause (C) INRIA 2014
print(__doc__)
from time import time
import numpy as np
from scipy import ndimage
from matplotlib import pyplot as plt
from sklearn import manifold, datasets
# 1797 8x8 digit images, flattened to 64-feature rows.
digits = datasets.load_digits(n_class=10)
X = digits.data
y = digits.target
n_samples, n_features = X.shape
# Fix the RNG so the random image nudges below are reproducible.
np.random.seed(0)
def nudge_images(X, y):
    """Double the dataset by appending a randomly shifted copy of each image.

    A larger dataset shows the clustering behaviour more clearly, but the
    hierarchical methods are strongly super-linear in n_samples, so the
    size is only doubled.  Each 64-feature row is viewed as an 8x8 image
    and translated by a small random offset (zero-filled at the border).
    """
    def _random_shift(row):
        # One small random translation per image.
        offset = .3 * np.random.normal(size=2)
        return ndimage.shift(row.reshape((8, 8)), offset,
                             mode='constant').ravel()
    shifted = np.apply_along_axis(_random_shift, 1, X)
    return np.concatenate([X, shifted]), np.concatenate([y, y], axis=0)
# Augment the data: every image plus one randomly shifted copy.
X, y = nudge_images(X, y)
#----------------------------------------------------------------------
# Visualize the clustering
def plot_clustering(X_red, X, labels, title=None):
    """Scatter-plot the 2D embedding, drawing each sample as its digit.

    Each point is rendered as its true digit (the module-level ``y``),
    coloured by the cluster label assigned to it, so cluster/class
    agreement can be judged visually.

    Parameters:
        X_red: (n_samples, 2) embedding coordinates.
        X: unused here; kept for signature compatibility with callers.
        labels: per-sample cluster label, used only for the colour.
        title: optional figure title.
    """
    # Rescale the embedding to the unit square before plotting.
    x_min, x_max = np.min(X_red, axis=0), np.max(X_red, axis=0)
    X_red = (X_red - x_min) / (x_max - x_min)
    plt.figure(figsize=(6, 4))
    for i in range(X_red.shape[0]):
        plt.text(X_red[i, 0], X_red[i, 1], str(y[i]),
                 # NOTE(review): plt.cm.spectral was removed in matplotlib
                 # >= 2.2; modern versions need plt.cm.nipy_spectral.
                 color=plt.cm.spectral(labels[i] / 10.),
                 fontdict={'weight': 'bold', 'size': 9})
    plt.xticks([])
    plt.yticks([])
    if title is not None:
        plt.title(title, size=17)
    plt.axis('off')
    plt.tight_layout()
#----------------------------------------------------------------------
# 2D embedding of the digits dataset
print("Computing embedding")
# Spectral embedding provides the common 2D layout on which the three
# linkage strategies are compared.
X_red = manifold.SpectralEmbedding(n_components=2).fit_transform(X)
print("Done.")
from sklearn.cluster import AgglomerativeClustering
# Fit and plot each linkage strategy on the same embedding, timing each.
for linkage in ('ward', 'average', 'complete'):
    clustering = AgglomerativeClustering(linkage=linkage, n_clusters=10)
    t0 = time()
    clustering.fit(X_red)
    print("%s : %.2fs" % (linkage, time() - t0))
    plot_clustering(X_red, X, clustering.labels_, "%s linkage" % linkage)
plt.show()
| bsd-3-clause |
travcunn/kissync-python | app/sync/upload.py | 1 | 7233 | import logging
import os
import threading
import time
from fs.osfs import OSFS
from smartfile.errors import RequestError, ResponseError
import common
from definitions import FileDefinition, LocalDefinitionHelper
from errors import FileNotAvailableError, FileDeletedError, MaxTriesError
from errors import UploadError
from worker import Worker
log = logging.getLogger(__name__)
class Uploader(Worker):
    """Uploads one local file or folder to SmartFile per task.

    A task (path or FileDefinition) is resolved against the local sync
    directory, pushed through the SmartFile API, and then annotated with
    checksum/modified attributes so later syncs can detect changes.
    NOTE(review): tasks appear to enter through ``process_task`` /
    ``cancel_task`` (called by UploadWorker below), presumably provided by
    the ``Worker`` base class and dispatching to ``_process_task`` --
    confirm in ``worker.py``.
    """
    def __init__(self, api, sync_dir):
        self._api = api
        self._sync_dir = sync_dir
        # Offset between the local clock and the server clock.
        self._timeoffset = common.calculate_time_offset()
        self._syncFS = OSFS(sync_dir)
    def _process_task(self, task):
        """Upload ``task``; return its FileDefinition.

        Raises FileDeletedError, FileNotAvailableError, MaxTriesError or
        UploadError so the calling worker can retry or drop the task.
        """
        # Check if the task is already a file definition
        if not isinstance(task, FileDefinition):
            helper = LocalDefinitionHelper(task.path, self._syncFS)
            try:
                task = helper.create_definition()
            except WindowsError, err:
                # The file vanished between being queued and processed.
                raise FileDeletedError(err)
        # Create a system specific path relative to the sync dir
        basepath = os.path.normpath(task.path)
        if basepath.startswith("/"):
            basepath = basepath.strip("/")
        if basepath.startswith('\\'):
            basepath = basepath.lstrip('\\')
        # Full system path
        absolute_path = os.path.join(self._sync_dir, basepath)
        # If the task is a file
        if not os.path.isdir(absolute_path):
            # Normalize to a forward-slash, root-anchored remote path.
            basepath = basepath.replace('\\', '/')
            if not basepath.startswith("/"):
                basepath = os.path.join("/", basepath)
            task_directory = os.path.dirname(basepath)
            api_path = "/path/data%s" % basepath
            api_path_base = os.path.dirname(api_path)
            try:
                # create the directory to make sure it exists
                self._api.post('/path/oper/mkdir/', path=task_directory)
                # upload the file
                self._api.post(api_path_base, file=file(absolute_path, 'rb'))
                # set the new attributes
            except IOError, err:
                # errno 2 == ENOENT: local file disappeared before the read.
                if err.errno == 2:
                    raise FileNotAvailableError(err)
            except ResponseError, err:
                if err.status_code == 404:
                    # If the file becomes suddenly not available, just ignore
                    # trying to set its attributes.
                    pass
                elif err.status_code == 409:
                    # Conflict - Can only upload to an existing directory.
                    raise UploadError(err)
            except RequestError, err:
                if err.detail.startswith('HTTPConnectionPool'):
                    raise MaxTriesError(err)
            else:
                # Only annotate the remote file after a clean upload.
                self._set_attributes(task)
        else:
            # If the task path is a folder
            task_directory = basepath
            if not task_directory.startswith("/"):
                task_directory = os.path.join("/", task_directory)
            task_directory = task_directory.replace('\\', '/')
            try:
                self._api.post('/path/oper/mkdir/', path=task_directory)
            except RequestError, err:
                raise MaxTriesError(err)
            except Exception, err:
                raise UploadError(err)
        return task
    def _set_attributes(self, task):
        """Attach checksum/modified attributes to the uploaded file."""
        checksum = task.checksum
        # Drop sub-second precision; the attribute is compared whole-second.
        modified = task.modified.replace(microsecond=0)
        checksum_string = "checksum=%s" % checksum
        modified_string = "modified=%s" % modified
        apiPath = "/path/info%s" % task.path
        try:
            self.__set_attributes(apiPath, checksum_string, modified_string)
        except ResponseError, err:
            if err.status_code == 404:
                """
                If we try setting attributes to a file too soon, SmartFile
                gives us an error, so sleep the thread for a bit.
                """
                time.sleep(1)
                # Now try setting the attributes again
                self.__set_attributes(apiPath, checksum_string,
                                      modified_string)
            elif err.status_code == 500:
                self.__set_attributes(apiPath, checksum_string,
                                      modified_string)
    def __set_attributes(self, api_path, checksum_string, modified_string):
        # Low-level attribute POST; best-effort on 404/500 responses.
        request_properties = [checksum_string, modified_string]
        try:
            self._api.post(api_path, attributes=request_properties)
        except ResponseError, err:
            if err.status_code == 404:
                # If the file becomes suddenly not available, just ignore
                # trying to set its attributes
                pass
            if err.status_code == 500:
                # Ignore server errors since they shouldnt happen anyways
                pass
        except RequestError, err:
            if err.detail.startswith('HTTPConnectionPool'):
                raise MaxTriesError(err)
class UploadWorker(threading.Thread):
    """Thread that drains the upload queue through an Uploader.

    Recoverable failures re-queue the task; successes update the shared
    remote-files map and, optionally, notify the realtime messaging layer.
    """
    def __init__(self, queue, api, sync_dir, remote_files, realtime=False):
        threading.Thread.__init__(self)
        self._uploader = Uploader(api, sync_dir)
        self._queue = queue
        # Shared map of remote path -> FileDefinition, updated on success.
        self._remote_files = remote_files
        self._realtime = realtime
    def run(self):
        # Loop forever, one queued task at a time.
        while True:
            log.debug("Getting a new task.")
            self._current_task = None
            self._current_task = self._queue.get()
            try:
                log.debug("Processing: " + self._current_task.path)
                result = self._uploader.process_task(self._current_task)
                # Update the remote files dictionary to reflect the new file
                self._remote_files[result.path] = result
            except FileNotAvailableError:
                # The file was not available when uploading it
                log.warning("File is not yet available: " +
                            self._current_task.path)
                self.try_task_later()
            except MaxTriesError:
                log.warning("Connection error occured while uploading: " +
                            self._current_task.path)
                self.try_task_later()
            except UploadError:
                log.warning("Folders were not created properly for: " +
                            self._current_task.path)
                self.try_task_later()
            except FileDeletedError:
                # Deleted files are simply dropped, not retried.
                log.warning("The file was deleted before trying to upload:" +
                            self._current_task.path)
            else:
                # Notify the realtime messaging system of the upload
                if self._realtime:
                    log.debug("Sending an update message about: " +
                              self._current_task.path)
                    self._realtime.update(self._current_task)
            log.debug("Task complete.")
            self._queue.task_done()
    def try_task_later(self):
        # Push the task back so it is retried on a later pass.
        self._queue.put(self._current_task)
    def cancel(self):
        log.debug("Task cancelled: " + self._current_task.path)
        self._uploader.cancel_task()
    @property
    def current_task(self):
        return self._current_task
| mit |
mrunge/horizon | openstack_dashboard/api/swift.py | 1 | 11669 | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from oslo.utils import timeutils
import six.moves.urllib.parse as urlparse
import swiftclient
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon.utils.memoized import memoized # noqa
from openstack_dashboard.api import base
LOG = logging.getLogger(__name__)
# Separator used to emulate folders inside flat Swift containers.
FOLDER_DELIMITER = "/"
# Swift ACL
GLOBAL_READ_ACL = ".r:*"  # any referrer may read objects
LIST_CONTENTS_ACL = ".rlistings"  # anyone may list the container
class Container(base.APIDictWrapper):
    """Dict-wrapper for a Swift container as returned by the API."""
    pass
class StorageObject(base.APIDictWrapper):
    """Dict-wrapper for a Swift object, optionally carrying its content."""
    def __init__(self, apidict, container_name, orig_name=None, data=None):
        super(StorageObject, self).__init__(apidict)
        self.container_name = container_name
        # Original upload filename (x-object-meta-orig-filename header).
        self.orig_name = orig_name
        self.data = data
    @property
    def id(self):
        # Object names are unique within a container.
        return self.name
class PseudoFolder(base.APIDictWrapper):
    """Dict-wrapper for a delimiter-derived 'subdir' entry in a listing."""
    def __init__(self, apidict, container_name):
        super(PseudoFolder, self).__init__(apidict)
        self.container_name = container_name
    @property
    def id(self):
        # Qualify with the container so ids are unique across containers.
        return '%s/%s' % (self.container_name, self.name)
    @property
    def name(self):
        # Swift reports subdirs with a trailing delimiter; strip it.
        return self.subdir.rstrip(FOLDER_DELIMITER)
    @property
    def bytes(self):
        # Folders are virtual; they have no size.
        return None
    @property
    def content_type(self):
        return "application/pseudo-folder"
def _objectify(items, container_name):
    """Wrap raw listing entries in their appropriate wrapper classes."""
    def _wrapper_for(item):
        # Swift marks pseudo-folders in a listing with a "subdir" key.
        if item.get("subdir") is not None:
            return PseudoFolder
        return StorageObject
    return [_wrapper_for(item)(item, container_name) for item in items]
def _metadata_to_header(metadata):
headers = {}
public = metadata.get('is_public')
if public is True:
public_container_acls = [GLOBAL_READ_ACL, LIST_CONTENTS_ACL]
headers['x-container-read'] = ",".join(public_container_acls)
elif public is False:
headers['x-container-read'] = ""
return headers
@memoized
def swift_api(request):
    """Build (and memoize per-request) a swiftclient Connection.

    Authentication reuses the keystone token already attached to the
    Django request, so no username/password round-trip is performed.
    """
    endpoint = base.url_for(request, 'object-store')
    cacert = getattr(settings, 'OPENSTACK_SSL_CACERT', None)
    insecure = getattr(settings, 'OPENSTACK_SSL_NO_VERIFY', False)
    LOG.debug('Swift connection created using token "%s" and url "%s"'
              % (request.user.token.id, endpoint))
    return swiftclient.client.Connection(None,
                                         request.user.username,
                                         None,
                                         preauthtoken=request.user.token.id,
                                         preauthurl=endpoint,
                                         cacert=cacert,
                                         insecure=insecure,
                                         auth_version="2.0")
def swift_container_exists(request, container_name):
    """Return True if a HEAD on the container succeeds, False otherwise."""
    try:
        swift_api(request).head_container(container_name)
    except swiftclient.client.ClientException:
        return False
    return True
def swift_object_exists(request, container_name, object_name):
    """Return True if a HEAD on the object succeeds, False otherwise."""
    try:
        swift_api(request).head_object(container_name, object_name)
    except swiftclient.client.ClientException:
        return False
    return True
def swift_get_containers(request, marker=None):
    """List the account's containers.

    Returns (containers, has_more): one extra row is requested beyond the
    configured page size so has_more can be derived; that sentinel row is
    dropped from the returned page.
    """
    page_size = getattr(settings, 'API_RESULT_LIMIT', 1000)
    _headers, raw = swift_api(request).get_account(limit=page_size + 1,
                                                   marker=marker,
                                                   full_listing=True)
    wrapped = [Container(item) for item in raw]
    has_more = len(wrapped) > page_size
    if has_more:
        wrapped = wrapped[:-1]
    return (wrapped, has_more)
def swift_get_container(request, container_name, with_data=True):
    """Fetch a single container's metadata (and optionally its data).

    With ``with_data`` the zero-length root object is downloaded alongside
    the headers; otherwise only a HEAD is issued.  Timestamp parsing and
    public-URL construction are best-effort: any failure leaves those
    fields as None.
    """
    if with_data:
        headers, data = swift_api(request).get_object(container_name, "")
    else:
        data = None
        headers = swift_api(request).head_container(container_name)
    timestamp = None
    is_public = False
    public_url = None
    try:
        # Public means the world-read ACL is present on the container.
        is_public = GLOBAL_READ_ACL in headers.get('x-container-read', '')
        if is_public:
            swift_endpoint = base.url_for(request,
                                          'object-store',
                                          endpoint_type='publicURL')
            public_url = swift_endpoint + '/' + urlparse.quote(container_name)
        ts_float = float(headers.get('x-timestamp'))
        timestamp = timeutils.iso8601_from_timestamp(ts_float)
    except Exception:
        # Best-effort: missing/malformed headers simply leave the defaults.
        pass
    container_info = {
        'name': container_name,
        'container_object_count': headers.get('x-container-object-count'),
        'container_bytes_used': headers.get('x-container-bytes-used'),
        'timestamp': timestamp,
        'data': data,
        'is_public': is_public,
        'public_url': public_url,
    }
    return Container(container_info)
def swift_create_container(request, name, metadata=({})):
    """Create a container; raise AlreadyExists when the name is taken."""
    if swift_container_exists(request, name):
        raise exceptions.AlreadyExists(name, 'container')
    swift_api(request).put_container(name,
                                     headers=_metadata_to_header(metadata))
    return Container({'name': name})
def swift_update_container(request, name, metadata=({})):
    """Update a container's metadata (currently only its public ACL)."""
    swift_api(request).post_container(name,
                                      headers=_metadata_to_header(metadata))
    return Container({'name': name})
def swift_delete_container(request, name):
    """Delete an empty container; raise Conflict if it still has objects."""
    # It cannot be deleted if it's not empty. The batch removal of objects
    # must be done in swiftclient instead of Horizon.
    objects, more = swift_get_objects(request, name)
    if objects:
        # NOTE(review): `unicode` is a Python 2 builtin; this module
        # predates the Python 3 port.
        error_msg = unicode(_("The container cannot be deleted "
                              "since it's not empty."))
        exc = exceptions.Conflict(error_msg)
        # Mark the message as safe to display to the end user verbatim.
        exc._safe_message = error_msg
        raise exc
    swift_api(request).delete_container(name)
    return True
def swift_get_objects(request, container_name, prefix=None, marker=None,
                      limit=None):
    """List objects in a container, delimited into pseudo-folders.

    Returns (objects, has_more); one extra row is fetched past the page
    size so has_more can be detected, then discarded.
    """
    page_size = limit or getattr(settings, 'API_RESULT_LIMIT', 1000)
    _headers, raw = swift_api(request).get_container(
        container_name,
        prefix=prefix,
        marker=marker,
        limit=page_size + 1,
        delimiter=FOLDER_DELIMITER,
        full_listing=True)
    wrapped = _objectify(raw, container_name)
    if len(wrapped) > page_size:
        return (wrapped[:-1], True)
    return (wrapped, False)
def swift_filter_objects(request, filter_string, container_name, prefix=None,
                         marker=None):
    """Client-side filtering of a container listing.

    Keeps objects whose lower-cased name matches *every* whitespace-
    separated term in ``filter_string`` (terms may contain ``*``
    wildcards, see ``wildcard_search``).

    Bug fixed: the previous ``matches_filter`` returned unconditionally on
    the first loop iteration, so only the first term was ever applied.
    """
    # FIXME(kewu): Swift currently has no real filtering API, thus the marker
    # parameter here won't actually help the pagination. For now I am just
    # getting the largest number of objects from a container and filtering
    # based on those objects.
    limit = 9999
    objects = swift_get_objects(request,
                                container_name,
                                prefix=prefix,
                                marker=marker,
                                limit=limit)
    filter_string_list = filter_string.lower().strip().split(' ')

    def matches_filter(obj):
        # Every search term must match the object name (AND semantics).
        return all(wildcard_search(obj.name.lower(), q)
                   for q in filter_string_list)
    return filter(matches_filter, objects[0])
def wildcard_search(string, q):
    """Return True if pattern ``q`` (with ``*`` wildcards) matches ``string``.

    Matching is unanchored: each literal segment of ``q`` must occur in
    ``string``, in order, after the first occurrence of the previous
    segment.  An empty or all-wildcard pattern matches anything.
    """
    segments = q.split('*')
    remaining = string
    for index, segment in enumerate(segments):
        # A pattern tail consisting purely of wildcards always matches.
        if all(s == '' for s in segments[index:]):
            return True
        if segment not in remaining:
            return False
        if segment:
            # Consume up to and including the first occurrence.
            remaining = remaining.partition(segment)[2]
    return True
def swift_copy_object(request, orig_container_name, orig_object_name,
                      new_container_name, new_object_name):
    """Server-side copy of an object; raise AlreadyExists on a name clash."""
    if swift_object_exists(request, new_container_name, new_object_name):
        raise exceptions.AlreadyExists(new_object_name, 'object')
    # Swift performs the copy itself when given an X-Copy-From header.
    source = FOLDER_DELIMITER.join([orig_container_name, orig_object_name])
    return swift_api(request).put_object(new_container_name,
                                         new_object_name,
                                         None,
                                         headers={"X-Copy-From": source})
def swift_upload_object(request, container_name, object_name,
                        object_file=None):
    """Upload ``object_file`` (or create an empty object) and wrap it."""
    headers = {}
    size = 0
    if object_file:
        # Remember the client-side filename for later downloads.
        headers['X-Object-Meta-Orig-Filename'] = object_file.name
        size = object_file.size
    etag = swift_api(request).put_object(container_name,
                                         object_name,
                                         object_file,
                                         headers=headers)
    return StorageObject({'name': object_name, 'bytes': size, 'etag': etag},
                         container_name)
def swift_create_pseudo_folder(request, container_name, pseudo_folder_name):
    """Create a zero-byte object that acts as a folder placeholder."""
    etag = swift_api(request).put_object(container_name,
                                         pseudo_folder_name,
                                         None,
                                         headers={})
    return PseudoFolder({'name': pseudo_folder_name, 'etag': etag},
                        container_name)
def swift_delete_object(request, container_name, object_name):
    """Delete a single object; returns True (errors propagate as raised)."""
    swift_api(request).delete_object(container_name, object_name)
    return True
def swift_get_object(request, container_name, object_name, with_data=True):
    """Fetch one object's metadata and, optionally, its content.

    With ``with_data`` a GET downloads the body; otherwise only a HEAD is
    issued.  Timestamp parsing is best-effort and leaves None on failure.
    """
    if with_data:
        headers, data = swift_api(request).get_object(container_name,
                                                      object_name)
    else:
        data = None
        headers = swift_api(request).head_object(container_name,
                                                 object_name)
    # Original client-side filename recorded at upload time, if any.
    orig_name = headers.get("x-object-meta-orig-filename")
    timestamp = None
    try:
        ts_float = float(headers.get('x-timestamp'))
        timestamp = timeutils.iso8601_from_timestamp(ts_float)
    except Exception:
        # Missing/malformed timestamp header: leave timestamp as None.
        pass
    obj_info = {
        'name': object_name,
        'bytes': headers.get('content-length'),
        'content_type': headers.get('content-type'),
        'etag': headers.get('etag'),
        'timestamp': timestamp,
    }
    return StorageObject(obj_info,
                         container_name,
                         orig_name=orig_name,
                         data=data)
| apache-2.0 |
abdellatifkarroum/odoo | openerp/addons/base/ir/ir_attachment.py | 183 | 16487 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hashlib
import itertools
import logging
import os
import re
from openerp import tools
from openerp.tools.translate import _
from openerp.exceptions import AccessError
from openerp.osv import fields,osv
from openerp import SUPERUSER_ID
from openerp.osv.orm import except_orm
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class ir_attachment(osv.osv):
"""Attachments are used to link binary files or url to any openerp document.
External attachment storage
---------------------------
The 'data' function field (_data_get,data_set) is implemented using
_file_read, _file_write and _file_delete which can be overridden to
implement other storage engines, shuch methods should check for other
location pseudo uri (example: hdfs://hadoppserver)
The default implementation is the file:dirname location that stores files
on the local filesystem using name based on their sha1 hash
"""
_order = 'id desc'
def _name_get_resname(self, cr, uid, ids, object, method, context):
data = {}
for attachment in self.browse(cr, uid, ids, context=context):
model_object = attachment.res_model
res_id = attachment.res_id
if model_object and res_id:
model_pool = self.pool[model_object]
res = model_pool.name_get(cr,uid,[res_id],context)
res_name = res and res[0][1] or None
if res_name:
field = self._columns.get('res_name',False)
if field and len(res_name) > field.size:
res_name = res_name[:30] + '...'
data[attachment.id] = res_name or False
else:
data[attachment.id] = False
return data
def _storage(self, cr, uid, context=None):
return self.pool['ir.config_parameter'].get_param(cr, SUPERUSER_ID, 'ir_attachment.location', 'file')
def _filestore(self, cr, uid, context=None):
return tools.config.filestore(cr.dbname)
def force_storage(self, cr, uid, context=None):
"""Force all attachments to be stored in the currently configured storage"""
if not self.pool['res.users'].has_group(cr, uid, 'base.group_erp_manager'):
raise AccessError(_('Only administrators can execute this action.'))
location = self._storage(cr, uid, context)
domain = {
'db': [('store_fname', '!=', False)],
'file': [('db_datas', '!=', False)],
}[location]
ids = self.search(cr, uid, domain, context=context)
for attach in self.browse(cr, uid, ids, context=context):
attach.write({'datas': attach.datas})
return True
# 'data' field implementation
def _full_path(self, cr, uid, path):
# sanitize ath
path = re.sub('[.]', '', path)
path = path.strip('/\\')
return os.path.join(self._filestore(cr, uid), path)
def _get_path(self, cr, uid, bin_data):
sha = hashlib.sha1(bin_data).hexdigest()
# retro compatibility
fname = sha[:3] + '/' + sha
full_path = self._full_path(cr, uid, fname)
if os.path.isfile(full_path):
return fname, full_path # keep existing path
# scatter files across 256 dirs
# we use '/' in the db (even on windows)
fname = sha[:2] + '/' + sha
full_path = self._full_path(cr, uid, fname)
dirname = os.path.dirname(full_path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
return fname, full_path
def _file_read(self, cr, uid, fname, bin_size=False):
full_path = self._full_path(cr, uid, fname)
r = ''
try:
if bin_size:
r = os.path.getsize(full_path)
else:
r = open(full_path,'rb').read().encode('base64')
except IOError:
_logger.exception("_read_file reading %s", full_path)
return r
def _file_write(self, cr, uid, value):
bin_value = value.decode('base64')
fname, full_path = self._get_path(cr, uid, bin_value)
if not os.path.exists(full_path):
try:
with open(full_path, 'wb') as fp:
fp.write(bin_value)
except IOError:
_logger.exception("_file_write writing %s", full_path)
return fname
def _file_delete(self, cr, uid, fname):
# using SQL to include files hidden through unlink or due to record rules
cr.execute("SELECT COUNT(*) FROM ir_attachment WHERE store_fname = %s", (fname,))
count = cr.fetchone()[0]
full_path = self._full_path(cr, uid, fname)
if not count and os.path.exists(full_path):
try:
os.unlink(full_path)
except OSError:
_logger.exception("_file_delete could not unlink %s", full_path)
except IOError:
# Harmless and needed for race conditions
_logger.exception("_file_delete could not unlink %s", full_path)
def _data_get(self, cr, uid, ids, name, arg, context=None):
if context is None:
context = {}
result = {}
bin_size = context.get('bin_size')
for attach in self.browse(cr, uid, ids, context=context):
if attach.store_fname:
result[attach.id] = self._file_read(cr, uid, attach.store_fname, bin_size)
else:
result[attach.id] = attach.db_datas
return result
def _data_set(self, cr, uid, id, name, value, arg, context=None):
# We dont handle setting data to null
if not value:
return True
if context is None:
context = {}
location = self._storage(cr, uid, context)
file_size = len(value.decode('base64'))
attach = self.browse(cr, uid, id, context=context)
fname_to_delete = attach.store_fname
if location != 'db':
fname = self._file_write(cr, uid, value)
# SUPERUSER_ID as probably don't have write access, trigger during create
super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], {'store_fname': fname, 'file_size': file_size, 'db_datas': False}, context=context)
else:
super(ir_attachment, self).write(cr, SUPERUSER_ID, [id], {'db_datas': value, 'file_size': file_size, 'store_fname': False}, context=context)
# After de-referencing the file in the database, check whether we need
# to garbage-collect it on the filesystem
if fname_to_delete:
self._file_delete(cr, uid, fname_to_delete)
return True
_name = 'ir.attachment'
_columns = {
'name': fields.char('Attachment Name', required=True),
'datas_fname': fields.char('File Name'),
'description': fields.text('Description'),
'res_name': fields.function(_name_get_resname, type='char', string='Resource Name', store=True),
'res_model': fields.char('Resource Model', readonly=True, help="The database object this attachment will be attached to"),
'res_id': fields.integer('Resource ID', readonly=True, help="The record id this is attached to"),
'create_date': fields.datetime('Date Created', readonly=True),
'create_uid': fields.many2one('res.users', 'Owner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', change_default=True),
'type': fields.selection( [ ('url','URL'), ('binary','Binary'), ],
'Type', help="Binary File or URL", required=True, change_default=True),
'url': fields.char('Url', size=1024),
# al: We keep shitty field names for backward compatibility with document
'datas': fields.function(_data_get, fnct_inv=_data_set, string='File Content', type="binary", nodrop=True),
'store_fname': fields.char('Stored Filename'),
'db_datas': fields.binary('Database Data'),
'file_size': fields.integer('File Size'),
}
_defaults = {
'type': 'binary',
'file_size': 0,
'company_id': lambda s,cr,uid,c: s.pool.get('res.company')._company_default_get(cr, uid, 'ir.attachment', context=c),
}
def _auto_init(self, cr, context=None):
super(ir_attachment, self)._auto_init(cr, context)
cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('ir_attachment_res_idx',))
if not cr.fetchone():
cr.execute('CREATE INDEX ir_attachment_res_idx ON ir_attachment (res_model, res_id)')
cr.commit()
def check(self, cr, uid, ids, mode, context=None, values=None):
"""Restricts the access to an ir.attachment, according to referred model
In the 'document' module, it is overriden to relax this hard rule, since
more complex ones apply there.
"""
res_ids = {}
require_employee = False
if ids:
if isinstance(ids, (int, long)):
ids = [ids]
cr.execute('SELECT DISTINCT res_model, res_id, create_uid FROM ir_attachment WHERE id = ANY (%s)', (ids,))
for rmod, rid, create_uid in cr.fetchall():
if not (rmod and rid):
if create_uid != uid:
require_employee = True
continue
res_ids.setdefault(rmod,set()).add(rid)
if values:
if values.get('res_model') and values.get('res_id'):
res_ids.setdefault(values['res_model'],set()).add(values['res_id'])
ima = self.pool.get('ir.model.access')
for model, mids in res_ids.items():
# ignore attachments that are not attached to a resource anymore when checking access rights
# (resource was deleted but attachment was not)
if not self.pool.get(model):
require_employee = True
continue
existing_ids = self.pool[model].exists(cr, uid, mids)
if len(existing_ids) != len(mids):
require_employee = True
ima.check(cr, uid, model, mode)
self.pool[model].check_access_rule(cr, uid, existing_ids, mode, context=context)
if require_employee:
if not uid == SUPERUSER_ID and not self.pool['res.users'].has_group(cr, uid, 'base.group_user'):
raise except_orm(_('Access Denied'), _("Sorry, you are not allowed to access this document."))
def _search(self, cr, uid, args, offset=0, limit=None, order=None, context=None, count=False, access_rights_uid=None):
ids = super(ir_attachment, self)._search(cr, uid, args, offset=offset,
limit=limit, order=order,
context=context, count=False,
access_rights_uid=access_rights_uid)
if not ids:
if count:
return 0
return []
# Work with a set, as list.remove() is prohibitive for large lists of documents
# (takes 20+ seconds on a db with 100k docs during search_count()!)
orig_ids = ids
ids = set(ids)
# For attachments, the permissions of the document they are attached to
# apply, so we must remove attachments for which the user cannot access
# the linked document.
# Use pure SQL rather than read() as it is about 50% faster for large dbs (100k+ docs),
# and the permissions are checked in super() and below anyway.
cr.execute("""SELECT id, res_model, res_id FROM ir_attachment WHERE id = ANY(%s)""", (list(ids),))
targets = cr.dictfetchall()
model_attachments = {}
for target_dict in targets:
if not target_dict['res_model']:
continue
# model_attachments = { 'model': { 'res_id': [id1,id2] } }
model_attachments.setdefault(target_dict['res_model'],{}).setdefault(target_dict['res_id'] or 0, set()).add(target_dict['id'])
# To avoid multiple queries for each attachment found, checks are
# performed in batch as much as possible.
ima = self.pool.get('ir.model.access')
for model, targets in model_attachments.iteritems():
if model not in self.pool:
continue
if not ima.check(cr, uid, model, 'read', False):
# remove all corresponding attachment ids
for attach_id in itertools.chain(*targets.values()):
ids.remove(attach_id)
continue # skip ir.rule processing, these ones are out already
# filter ids according to what access rules permit
target_ids = targets.keys()
allowed_ids = [0] + self.pool[model].search(cr, uid, [('id', 'in', target_ids)], context=context)
disallowed_ids = set(target_ids).difference(allowed_ids)
for res_id in disallowed_ids:
for attach_id in targets[res_id]:
ids.remove(attach_id)
# sort result according to the original sort ordering
result = [id for id in orig_ids if id in ids]
return len(result) if count else list(result)
def read(self, cr, uid, ids, fields_to_read=None, context=None, load='_classic_read'):
if isinstance(ids, (int, long)):
ids = [ids]
self.check(cr, uid, ids, 'read', context=context)
return super(ir_attachment, self).read(cr, uid, ids, fields_to_read, context=context, load=load)
def write(self, cr, uid, ids, vals, context=None):
    """Override of the ORM write(): check 'write' access, drop file_size.

    Any caller-supplied 'file_size' is discarded rather than written —
    NOTE(review): presumably because it is maintained from the stored
    data itself; confirm against the field definition.
    """
    if isinstance(ids, (int, long)):
        ids = [ids]
    self.check(cr, uid, ids, 'write', context=context, values=vals)
    if 'file_size' in vals:
        del vals['file_size']
    return super(ir_attachment, self).write(cr, uid, ids, vals, context)
def copy(self, cr, uid, id, default=None, context=None):
    """Override of the ORM copy(): duplicating requires 'write' access
    on the source attachment (not just 'read')."""
    self.check(cr, uid, [id], 'write', context=context)
    return super(ir_attachment, self).copy(cr, uid, id, default, context)
def unlink(self, cr, uid, ids, context=None):
    """Override of the ORM unlink(): check access, delete DB rows, then
    remove the backing files for file-system-stored attachments.

    The store_fname values are collected *before* the DB delete because
    the records are gone afterwards.
    """
    if isinstance(ids, (int, long)):
        ids = [ids]
    self.check(cr, uid, ids, 'unlink', context=context)
    # First delete in the database, *then* in the filesystem if the
    # database allowed it. Helps avoid errors when concurrent transactions
    # are deleting the same file, and some of the transactions are
    # rolled back by PostgreSQL (due to concurrent updates detection).
    to_delete = [a.store_fname
                 for a in self.browse(cr, uid, ids, context=context)
                 if a.store_fname]
    res = super(ir_attachment, self).unlink(cr, uid, ids, context)
    for file_path in to_delete:
        self._file_delete(cr, uid, file_path)
    return res
def create(self, cr, uid, values, context=None):
    """Override of the ORM create(): check access and drop file_size.

    mode='write' with an empty ids list: check() can only inspect the
    incoming `values` here — creating an attachment is treated as a
    write on the document it will be linked to.
    """
    self.check(cr, uid, [], mode='write', context=context, values=values)
    # Discard caller-supplied 'file_size' (same policy as write()).
    if 'file_size' in values:
        del values['file_size']
    return super(ir_attachment, self).create(cr, uid, values, context)
def action_get(self, cr, uid, context=None):
    """Return the standard 'action_attachment' act_window (base module),
    i.e. the window action that opens the attachment list view."""
    return self.pool.get('ir.actions.act_window').for_xml_id(
        cr, uid, 'base', 'action_attachment', context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
halberom/ansible | lib/ansible/modules/files/lineinfile.py | 25 | 16101 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# (c) 2014, Ahti Kitsik <ak@ahtik.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = """
---
module: lineinfile
author:
- "Daniel Hokka Zakrisson (@dhozac)"
- "Ahti Kitsik (@ahtik)"
extends_documentation_fragment:
- files
- validate
short_description: Ensure a particular line is in a file, or replace an
existing line using a back-referenced regular expression.
description:
- This module will search a file for a line, and ensure that it is present or absent.
- This is primarily useful when you want to change a single line in
a file only. See the M(replace) module if you want to change
multiple, similar lines or check M(blockinfile) if you want to insert/update/remove a block of lines in a file.
For other cases, see the M(copy) or M(template) modules.
version_added: "0.7"
options:
path:
required: true
aliases: [ 'dest', 'destfile', 'name' ]
description:
- The file to modify.
- Before 2.3 this option was only usable as I(dest), I(destfile) and I(name).
regexp:
required: false
version_added: 1.7
description:
- The regular expression to look for in every line of the file. For
C(state=present), the pattern to replace if found; only the last line
found will be replaced. For C(state=absent), the pattern of the line
to remove. Uses Python regular expressions; see
U(http://docs.python.org/2/library/re.html).
state:
required: false
choices: [ present, absent ]
default: "present"
aliases: []
description:
- Whether the line should be there or not.
line:
required: false
description:
- Required for C(state=present). The line to insert/replace into the
file. If C(backrefs) is set, may contain backreferences that will get
expanded with the C(regexp) capture groups if the regexp matches.
backrefs:
required: false
default: "no"
choices: [ "yes", "no" ]
version_added: "1.1"
description:
- Used with C(state=present). If set, line can contain backreferences
(both positional and named) that will get populated if the C(regexp)
matches. This flag changes the operation of the module slightly;
C(insertbefore) and C(insertafter) will be ignored, and if the C(regexp)
doesn't match anywhere in the file, the file will be left unchanged.
If the C(regexp) does match, the last matching line will be replaced by
the expanded line parameter.
insertafter:
required: false
default: EOF
description:
- Used with C(state=present). If specified, the line will be inserted
after the last match of specified regular expression. A special value is
available; C(EOF) for inserting the line at the end of the file.
If specified regular expression has no matches, EOF will be used instead.
May not be used with C(backrefs).
choices: [ 'EOF', '*regex*' ]
insertbefore:
required: false
version_added: "1.1"
description:
- Used with C(state=present). If specified, the line will be inserted
before the last match of specified regular expression. A value is
available; C(BOF) for inserting the line at the beginning of the file.
If specified regular expression has no matches, the line will be
inserted at the end of the file. May not be used with C(backrefs).
choices: [ 'BOF', '*regex*' ]
create:
required: false
choices: [ "yes", "no" ]
default: "no"
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. By default it will fail if the file
is missing.
backup:
required: false
default: "no"
choices: [ "yes", "no" ]
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
others:
description:
- All arguments accepted by the M(file) module also work here.
required: false
notes:
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well.
"""
EXAMPLES = r"""
# Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path'
- lineinfile:
path: /etc/selinux/config
regexp: '^SELINUX='
line: 'SELINUX=enforcing'
- lineinfile:
path: /etc/sudoers
state: absent
regexp: '^%wheel'
- lineinfile:
path: /etc/hosts
regexp: '^127\.0\.0\.1'
line: '127.0.0.1 localhost'
owner: root
group: root
mode: 0644
- lineinfile:
path: /etc/httpd/conf/httpd.conf
regexp: '^Listen '
insertafter: '^#Listen '
line: 'Listen 8080'
- lineinfile:
path: /etc/services
regexp: '^# port for http'
insertbefore: '^www.*80/tcp'
line: '# port for http by default'
# Add a line to a file if it does not exist, without passing regexp
- lineinfile:
path: /tmp/testfile
line: '192.168.1.99 foo.lab.net foo'
# Fully quoted because of the ': ' on the line. See the Gotchas in the YAML docs.
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%wheel\s'
line: '%wheel ALL=(ALL) NOPASSWD: ALL'
# Yaml requires escaping backslashes in double quotes but not in single quotes
- lineinfile:
path: /opt/jboss-as/bin/standalone.conf
regexp: '^(.*)Xms(\\d+)m(.*)$'
line: '\1Xms${xms}m\3'
backrefs: yes
# Validate the sudoers file before saving
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%ADMIN ALL='
line: '%ADMIN ALL=(ALL) NOPASSWD: ALL'
validate: 'visudo -cf %s'
"""
import re
import os
import tempfile
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b
from ansible.module_utils._text import to_bytes, to_native
def write_changes(module, b_lines, dest):
    """Write b_lines to a temp file, optionally run the user's validate
    command on it, and atomically move it over `dest` if everything is OK.

    Validation failures are reported through module.fail_json().
    """
    tmp_fd, tmp_path = tempfile.mkstemp()
    tmp_file = os.fdopen(tmp_fd, 'wb')
    tmp_file.writelines(b_lines)
    tmp_file.close()

    validate = module.params.get('validate', None)
    valid = not validate
    if validate:
        # The command template must contain %s as the placeholder for the
        # temp file path.
        if "%s" not in validate:
            module.fail_json(msg="validate must contain %%s: %s" % (validate))
        (rc, out, err) = module.run_command(to_bytes(validate % tmp_path, errors='surrogate_or_strict'))
        valid = rc == 0
        if rc != 0:
            module.fail_json(msg='failed to validate: '
                                 'rc:%s error:%s' % (rc, err))
    if valid:
        # Resolve symlinks so we replace the real target, not the link.
        real_dest = to_native(
            os.path.realpath(to_bytes(dest, errors='surrogate_or_strict')),
            errors='surrogate_or_strict')
        module.atomic_move(tmp_path, real_dest,
                           unsafe_writes=module.params['unsafe_writes'])
def check_file_attrs(module, changed, message, diff):
    """Apply the common file attributes (owner, perms, SE context) and
    fold the outcome into the running (message, changed) pair."""
    file_args = module.load_file_common_arguments(module.params)
    attrs_changed = module.set_fs_attributes_if_different(file_args, False, diff=diff)
    if attrs_changed:
        if changed:
            message += " and "
        message += "ownership, perms or SE linux context changed"
        changed = True
    return message, changed
def present(module, dest, regexp, line, insertafter, insertbefore, create,
            backup, backrefs):
    """Ensure `line` is present in `dest`.

    Scans the file once, remembering the *last* line matching `regexp`
    (index[0]) and the position derived from the *last* line matching the
    insertafter/insertbefore pattern (index[1]), then replaces or inserts
    accordingly, applies file attributes, and reports via
    module.exit_json().
    """
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}

    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        # Missing parent directories are created (except in check mode).
        b_destpath = os.path.dirname(b_dest)
        if not os.path.exists(b_destpath) and not module.check_mode:
            os.makedirs(b_destpath)
        b_lines = []
    else:
        f = open(b_dest, 'rb')
        b_lines = f.readlines()
        f.close()

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    if regexp is not None:
        bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))

    # Compile the insertion-anchor pattern, if any. insertafter/insertbefore
    # are mutually exclusive (enforced in main()).
    if insertafter not in (None, 'BOF', 'EOF'):
        bre_ins = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        bre_ins = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        bre_ins = None

    # index[0] is the line num where regexp has been found
    # index[1] is the line num where insertafter/insertbefore has been found
    index = [-1, -1]
    m = None
    b_line = to_bytes(line, errors='surrogate_or_strict')
    for lineno, b_cur_line in enumerate(b_lines):
        if regexp is not None:
            match_found = bre_m.search(b_cur_line)
        else:
            # Without a regexp, match on exact line content (EOL-insensitive).
            match_found = b_line == b_cur_line.rstrip(b('\r\n'))
        if match_found:
            index[0] = lineno
            m = match_found
        elif bre_ins is not None and bre_ins.search(b_cur_line):
            if insertafter:
                # + 1 for the next line
                index[1] = lineno + 1
            if insertbefore:
                # + 1 for the previous line
                index[1] = lineno

    msg = ''
    changed = False
    # Regexp matched a line in the file
    b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict')
    if index[0] != -1:
        if backrefs:
            # Expand \1 / \g<name> references using the last regexp match.
            b_new_line = m.expand(b_line)
        else:
            # Don't do backref expansion if not asked.
            b_new_line = b_line

        if not b_new_line.endswith(b_linesep):
            b_new_line += b_linesep

        if b_lines[index[0]] != b_new_line:
            b_lines[index[0]] = b_new_line
            msg = 'line replaced'
            changed = True
    elif backrefs:
        # Do absolutely nothing, since it's not safe generating the line
        # without the regexp matching to populate the backrefs.
        pass
    # Add it to the beginning of the file
    elif insertbefore == 'BOF' or insertafter == 'BOF':
        b_lines.insert(0, b_line + b_linesep)
        msg = 'line added'
        changed = True
    # Add it to the end of the file if requested or
    # if insertafter/insertbefore didn't match anything
    # (so default behaviour is to add at the end)
    elif insertafter == 'EOF' or index[1] == -1:
        # If the file is not empty then ensure there's a newline before the added line
        if len(b_lines) > 0 and not b_lines[-1][-1:] in (b('\n'), b('\r')):
            b_lines.append(b_linesep)

        b_lines.append(b_line + b_linesep)
        msg = 'line added'
        changed = True
    # insert* matched, but not the regexp
    else:
        b_lines.insert(index[1], b_line + b_linesep)
        msg = 'line added'
        changed = True

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))

    backupdest = ""
    if changed and not module.check_mode:
        if backup and os.path.exists(b_dest):
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)

    # In check mode against a not-yet-created file there is nothing to
    # stat, so report now without the file-attribute pass below.
    if module.check_mode and not os.path.exists(b_dest):
        module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
def absent(module, dest, regexp, line, backup):
    """Remove every line of `dest` matching `regexp` (or, without a
    regexp, equal to `line` ignoring the trailing EOL) and report via
    module.exit_json()."""
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        module.exit_json(changed=False, msg="file not present")

    msg = ''
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}

    with open(b_dest, 'rb') as f:
        b_lines = f.readlines()

    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))

    if regexp is not None:
        bre_c = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
    b_line = to_bytes(line, errors='surrogate_or_strict')

    # Partition the file into removed (found) and surviving (kept) lines.
    found = []
    kept = []
    for b_cur_line in b_lines:
        if regexp is not None:
            hit = bre_c.search(b_cur_line)
        else:
            hit = b_line == b_cur_line.rstrip(b('\r\n'))
        if hit:
            found.append(b_cur_line)
        else:
            kept.append(b_cur_line)
    b_lines = kept
    changed = len(found) > 0

    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))

    backupdest = ""
    if changed and not module.check_mode:
        if backup:
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)

    if changed:
        msg = "%s line(s) removed" % len(found)

    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)

    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest

    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, found=len(found), msg=msg, backup=backupdest, diff=difflist)
def main():
    """Module entry point: parse arguments, validate the parameter
    combinations, and dispatch to present() or absent()."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(required=True, aliases=['dest', 'destfile', 'name'], type='path'),
            state=dict(default='present', choices=['absent', 'present']),
            regexp=dict(default=None),
            line=dict(aliases=['value']),
            insertafter=dict(default=None),
            insertbefore=dict(default=None),
            backrefs=dict(default=False, type='bool'),
            create=dict(default=False, type='bool'),
            backup=dict(default=False, type='bool'),
            validate=dict(default=None, type='str'),
        ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True
    )

    params = module.params
    path = params['path']

    b_path = to_bytes(path, errors='surrogate_or_strict')
    if os.path.isdir(b_path):
        module.fail_json(rc=256, msg='Path %s is a directory !' % path)

    if params['state'] == 'present':
        if params['backrefs'] and params['regexp'] is None:
            module.fail_json(msg='regexp= is required with backrefs=true')
        if params.get('line', None) is None:
            module.fail_json(msg='line= is required with state=present')

        # Resolve the insertafter default here instead of in the argument
        # spec, so the mutually_exclusive check is not tripped by a default.
        ins_bef, ins_aft = params['insertbefore'], params['insertafter']
        if ins_bef is None and ins_aft is None:
            ins_aft = 'EOF'

        present(module, path, params['regexp'], params['line'],
                ins_aft, ins_bef, params['create'], params['backup'],
                params['backrefs'])
    else:
        if params['regexp'] is None and params.get('line', None) is None:
            module.fail_json(msg='one of line= or regexp= is required with state=absent')

        absent(module, path, params['regexp'], params.get('line', None), params['backup'])
# Standard Ansible module entry guard: run only when executed directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
sgerhart/ansible | lib/ansible/modules/network/nso/nso_action.py | 12 | 5754 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'
}
DOCUMENTATION = '''
---
module: nso_action
extends_documentation_fragment: nso
short_description: Executes Cisco NSO actions and verifies output.
description:
- This module provides support for executing Cisco NSO actions and then
verifying that the output is as expected.
requirements:
- Cisco NSO version 3.4 or higher.
author: "Claes Nästén (@cnasten)"
options:
path:
description: Path to NSO action.
required: true
input:
description: >
NSO action parameters.
output_required:
description: >
Required output parameters.
output_invalid:
description: >
List of result parameter names that will cause the task to fail if they
are present.
validate_strict:
description: >
If set to true, the task will fail if any output parameters not in
output_required is present in the output.
version_added: "2.5"
'''
EXAMPLES = '''
- name: Sync NSO device
nso_action:
url: http://localhost:8080/jsonrpc
username: username
password: password
path: /ncs:devices/device{ce0}/sync-from
input: {}
'''
RETURN = '''
output:
description: Action output
returned: success
type: dict
sample:
result: true
'''
from ansible.module_utils.network.nso.nso import connect, verify_version, nso_argument_spec
from ansible.module_utils.network.nso.nso import normalize_value
from ansible.module_utils.network.nso.nso import ModuleFailException, NsoException
from ansible.module_utils.basic import AnsibleModule
class NsoAction(object):
    """Run a Cisco NSO action over JSON-RPC and verify its output.

    The action's input parameter names are validated against the NSO
    schema before execution; afterwards the output is checked against
    the required/invalid parameter maps supplied by the playbook.
    """

    # Minimum NSO version required by the JSON-RPC calls used here.
    REQUIRED_VERSIONS = [
        (3, 4)
    ]

    def __init__(self, check_mode, client,
                 path, input,
                 output_required, output_invalid, validate_strict):
        self._check_mode = check_mode
        self._client = client
        self._path = path
        self._input = input
        self._output_required = output_required
        self._output_invalid = output_invalid
        self._validate_strict = validate_strict

    def main(self):
        """Validate the path and input names, then run the action.

        Returns the (verified) action output dict; in check mode the
        action is not executed and an empty dict is returned.
        Raises ModuleFailException on any validation failure.
        """
        schema = self._client.get_schema(path=self._path)
        if schema['data']['kind'] != 'action':
            raise ModuleFailException('{0} is not an action'.format(self._path))

        input_schema = [c for c in schema['data']['children']
                        if c.get('is_action_input', False)]

        # Only the parameter *names* are checked against the schema
        # (the former loop unpacked the unused value as well);
        # value/type validation may be implemented in the future.
        for key in self._input:
            child = next((c for c in input_schema if c['name'] == key), None)
            if child is None:
                raise ModuleFailException('no parameter {0}'.format(key))

        if self._check_mode:
            return {}
        else:
            return self._run_and_verify()

    def _run_and_verify(self):
        """Execute the action and enforce the output contracts."""
        output = self._client.run_action(None, self._path, self._input)

        # Every required key must be present and (after normalization)
        # equal to the expected value.
        for key, value in self._output_required.items():
            if key not in output:
                raise ModuleFailException('{0} not in result'.format(key))

            n_value = normalize_value(value, output[key], key)
            if value != n_value:
                msg = '{0} value mismatch. expected {1} got {2}'.format(
                    key, value, n_value)
                raise ModuleFailException(msg)

        # Forbidden keys must be absent.
        for key in self._output_invalid.keys():
            if key in output:
                raise ModuleFailException('{0} not allowed in result'.format(key))

        # In strict mode no keys beyond the required set are tolerated.
        if self._validate_strict:
            for name in output.keys():
                if name not in self._output_required:
                    raise ModuleFailException('{0} not allowed in result'.format(name))

        return output
def main():
    """Module entry point: connect to NSO, run the action, report results.

    The client session is always logged out, on success and on failure.
    """
    argument_spec = dict(
        path=dict(required=True),
        input=dict(required=False, type='dict', default={}),
        output_required=dict(required=False, type='dict', default={}),
        output_invalid=dict(required=False, type='dict', default={}),
        validate_strict=dict(required=False, type='bool', default=False)
    )
    argument_spec.update(nso_argument_spec)

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True
    )
    p = module.params

    client = connect(p)
    action = NsoAction(
        module.check_mode, client,
        p['path'],
        p['input'],
        p['output_required'],
        p['output_invalid'],
        p['validate_strict'])
    try:
        verify_version(client, NsoAction.REQUIRED_VERSIONS)

        output = action.main()
        client.logout()
        module.exit_json(changed=True, output=output)
    except NsoException as ex:
        client.logout()
        module.fail_json(msg=ex.message)
    except ModuleFailException as ex:
        client.logout()
        module.fail_json(msg=ex.message)
# Standard Ansible module entry guard: run only when executed directly.
if __name__ == '__main__':
    main()
| mit |
lidiamcfreitas/FenixScheduleMaker | ScheduleMaker/brython/www/src/Lib/unittest/result.py | 727 | 6397 | """Test result object"""
import io
import sys
import traceback
from . import util
from functools import wraps
# Module-level flag so traceback filtering can recognise frames that
# belong to unittest internals (see _is_relevant_tb_level below).
__unittest = True
def failfast(method):
    """Decorator for TestResult reporting methods.

    If the result object has a truthy ``failfast`` attribute, request a
    stop of the test run before delegating to the wrapped method.
    """
    @wraps(method)
    def wrapper(self, *args, **kwargs):
        if getattr(self, 'failfast', False):
            self.stop()
        return method(self, *args, **kwargs)
    return wrapper
# Templates used to append captured stdout/stderr to failure reports.
STDOUT_LINE = '\nStdout:\n%s'
STDERR_LINE = '\nStderr:\n%s'
class TestResult(object):
    """Holder for test result information.

    Test results are automatically managed by the TestCase and TestSuite
    classes, and do not need to be explicitly manipulated by writers of tests.

    Each instance holds the total number of tests run, and collections of
    failures and errors that occurred among those test runs. The collections
    contain tuples of (testcase, exceptioninfo), where exceptioninfo is the
    formatted traceback of the error that occurred.
    """
    # Bookkeeping used by the test runner across test classes/modules.
    _previousTestClass = None
    _testRunEntered = False
    _moduleSetUpFailed = False

    def __init__(self, stream=None, descriptions=None, verbosity=None):
        # stream/descriptions/verbosity are accepted for interface
        # compatibility with TextTestResult subclasses but unused here.
        self.failfast = False
        self.failures = []
        self.errors = []
        self.testsRun = 0
        self.skipped = []
        self.expectedFailures = []
        self.unexpectedSuccesses = []
        self.shouldStop = False
        # When True, stdout/stderr are captured per-test (see _setupStdout).
        self.buffer = False
        self._stdout_buffer = None
        self._stderr_buffer = None
        self._original_stdout = sys.stdout
        self._original_stderr = sys.stderr
        self._mirrorOutput = False

    def printErrors(self):
        "Called by TestRunner after test run"
        #fixme brython
        pass

    def startTest(self, test):
        "Called when the given test is about to be run"
        self.testsRun += 1
        self._mirrorOutput = False
        self._setupStdout()

    def _setupStdout(self):
        # Redirect stdout/stderr into StringIO buffers while buffering is on;
        # the buffers are created lazily and reused across tests.
        if self.buffer:
            if self._stderr_buffer is None:
                self._stderr_buffer = io.StringIO()
                self._stdout_buffer = io.StringIO()
            sys.stdout = self._stdout_buffer
            sys.stderr = self._stderr_buffer

    def startTestRun(self):
        """Called once before any tests are executed.

        See startTest for a method called before each test.
        """

    def stopTest(self, test):
        """Called when the given test has been run"""
        self._restoreStdout()
        self._mirrorOutput = False

    def _restoreStdout(self):
        # Undo _setupStdout(): optionally replay captured output to the real
        # streams (when the test failed/errored), then reset the buffers.
        if self.buffer:
            if self._mirrorOutput:
                output = sys.stdout.getvalue()
                error = sys.stderr.getvalue()
                if output:
                    if not output.endswith('\n'):
                        output += '\n'
                    self._original_stdout.write(STDOUT_LINE % output)
                if error:
                    if not error.endswith('\n'):
                        error += '\n'
                    self._original_stderr.write(STDERR_LINE % error)

            sys.stdout = self._original_stdout
            sys.stderr = self._original_stderr
            self._stdout_buffer.seek(0)
            self._stdout_buffer.truncate()
            self._stderr_buffer.seek(0)
            self._stderr_buffer.truncate()

    def stopTestRun(self):
        """Called once after all tests are executed.

        See stopTest for a method called after each test.
        """

    @failfast
    def addError(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info().
        """
        self.errors.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    @failfast
    def addFailure(self, test, err):
        """Called when an error has occurred. 'err' is a tuple of values as
        returned by sys.exc_info()."""
        self.failures.append((test, self._exc_info_to_string(err, test)))
        self._mirrorOutput = True

    def addSuccess(self, test):
        "Called when a test has completed successfully"
        pass

    def addSkip(self, test, reason):
        """Called when a test is skipped."""
        self.skipped.append((test, reason))

    def addExpectedFailure(self, test, err):
        """Called when an expected failure/error occured."""
        self.expectedFailures.append(
            (test, self._exc_info_to_string(err, test)))

    @failfast
    def addUnexpectedSuccess(self, test):
        """Called when a test was expected to fail, but succeed."""
        self.unexpectedSuccesses.append(test)

    def wasSuccessful(self):
        "Tells whether or not this result was a success"
        return len(self.failures) == len(self.errors) == 0

    def stop(self):
        "Indicates that the tests should be aborted"
        self.shouldStop = True

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string."""
        exctype, value, tb = err
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next

        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            msgLines = traceback.format_exception(exctype, value, tb, length)
        else:
            msgLines = traceback.format_exception(exctype, value, tb)

        if self.buffer:
            # Append any captured output to the formatted traceback.
            output = sys.stdout.getvalue()
            error = sys.stderr.getvalue()
            if output:
                if not output.endswith('\n'):
                    output += '\n'
                msgLines.append(STDOUT_LINE % output)
            if error:
                if not error.endswith('\n'):
                    error += '\n'
                msgLines.append(STDERR_LINE % error)
        return ''.join(msgLines)

    def _is_relevant_tb_level(self, tb):
        # fix me brython
        # return '__unittest' in tb.tb_frame.f_globals
        # NOTE(review): CPython inspects the frame globals here; this port
        # unconditionally returns True (the comment below says "return
        # False" but the code returns True — intentional? confirm).
        return True #for now, lets just return False

    def _count_relevant_tb_levels(self, tb):
        length = 0
        while tb and not self._is_relevant_tb_level(tb):
            length += 1
            tb = tb.tb_next
        return length

    def __repr__(self):
        return ("<%s run=%i errors=%i failures=%i>" %
               (util.strclass(self.__class__), self.testsRun, len(self.errors),
                len(self.failures)))
| bsd-2-clause |
pitch-sands/i-MPI | flask/Lib/site-packages/pytz/exceptions.py | 657 | 1333 | '''
Custom exceptions raised by pytz.
'''
# Public names re-exported by ``from pytz.exceptions import *``.
__all__ = [
    'UnknownTimeZoneError', 'InvalidTimeError', 'AmbiguousTimeError',
    'NonExistentTimeError',
]
class UnknownTimeZoneError(KeyError):
    '''Raised when pytz is asked for a timezone it does not know about.

    >>> isinstance(UnknownTimeZoneError(), LookupError)
    True

    Subclassing KeyError (rather than LookupError directly) preserves
    backwards compatibility with code that relied on the undocumented
    behaviour of earlier pytz releases.

    >>> isinstance(UnknownTimeZoneError(), KeyError)
    True
    '''
    pass
class InvalidTimeError(Exception):
    '''Common base class for the exceptions about impossible or
    ambiguous wallclock times.'''
class AmbiguousTimeError(InvalidTimeError):
    '''Raised when attempting to create an ambiguous wallclock time.

    At the end of a DST transition period a given wallclock time occurs
    twice — once before the clocks are set back and once after — so
    either interpretation may be correct unless further information
    (e.g. an is_dst hint) is supplied.

    See DstTzInfo.normalize() for more info
    '''
class NonExistentTimeError(InvalidTimeError):
    '''Raised when attempting to create a wallclock time that cannot exist.

    At the start of a DST transition period the wallclock jumps forward,
    so the skipped instants never occur.
    '''
| bsd-3-clause |
hazrpg/calibre | src/calibre/ebooks/markdown/extensions/tables.py | 8 | 5602 | """
Tables Extension for Python-Markdown
====================================
Added parsing of tables to Python-Markdown.
See <https://pythonhosted.org/Markdown/extensions/tables.html>
for documentation.
Original code Copyright 2009 [Waylan Limberg](http://achinghead.com)
All changes Copyright 2008-2014 The Python Markdown Project
License: [BSD](http://www.opensource.org/licenses/bsd-license.php)
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..blockprocessors import BlockProcessor
from ..inlinepatterns import BacktickPattern, BACKTICK_RE
from ..util import etree
class TableProcessor(BlockProcessor):
    """ Process Tables.

    Recognises pipe-delimited table blocks (header row, separator row,
    then data rows) and builds the corresponding <table> element tree.
    NOTE: this code targets Python 2 — it references the `unicode`
    builtin in _build_row.
    """

    def test(self, parent, block):
        # A candidate table needs at least two rows; both must contain a
        # pipe, the second (separator) row must contain '-' and start
        # with '|', ':' or '-'.
        rows = block.split('\n')
        return (len(rows) > 1 and '|' in rows[0] and
                '|' in rows[1] and '-' in rows[1] and
                rows[1].strip()[0] in ['|', ':', '-'])

    def run(self, parent, blocks):
        """ Parse a table block and build table. """
        block = blocks.pop(0).split('\n')
        header = block[0].strip()
        seperator = block[1].strip()
        rows = [] if len(block) < 3 else block[2:]
        # Get format type (bordered by pipes or not)
        border = False
        if header.startswith('|'):
            border = True
        # Get alignment of columns from the separator row:
        # ':---:' center, ':---' left, '---:' right, '---' unspecified.
        align = []
        for c in self._split_row(seperator, border):
            if c.startswith(':') and c.endswith(':'):
                align.append('center')
            elif c.startswith(':'):
                align.append('left')
            elif c.endswith(':'):
                align.append('right')
            else:
                align.append(None)
        # Build table
        table = etree.SubElement(parent, 'table')
        thead = etree.SubElement(table, 'thead')
        self._build_row(header, thead, align, border)
        tbody = etree.SubElement(table, 'tbody')
        for row in rows:
            self._build_row(row.strip(), tbody, align, border)

    def _build_row(self, row, parent, align, border):
        """ Given a row of text, build table cells. """
        tr = etree.SubElement(parent, 'tr')
        tag = 'td'
        if parent.tag == 'thead':
            tag = 'th'
        cells = self._split_row(row, border)
        # We use align here rather than cells to ensure every row
        # contains the same number of columns.
        for i, a in enumerate(align):
            c = etree.SubElement(tr, tag)
            try:
                if isinstance(cells[i], str) or isinstance(cells[i], unicode):
                    c.text = cells[i].strip()
                else:
                    # we've already inserted a code element
                    c.append(cells[i])
            except IndexError: # pragma: no cover
                # Short row: pad the missing cell with empty text.
                c.text = ""
            if a:
                c.set('align', a)

    def _split_row(self, row, border):
        """ split a row of text into list of cells. """
        if border:
            # Strip the outer pipes before splitting on the inner ones.
            if row.startswith('|'):
                row = row[1:]
            if row.endswith('|'):
                row = row[:-1]
        return self._split(row, '|')

    def _split(self, row, marker):
        """ split a row of text with some code into a list of cells.

        Pipes inside backtick code spans must not split cells, so the row
        is scanned character by character, skipping over code spans.
        """
        if self._row_has_unpaired_backticks(row):
            # fallback on old behaviour
            return row.split(marker)
        # modify the backtick pattern to only match at the beginning of the search string
        backtick_pattern = BacktickPattern('^' + BACKTICK_RE)
        elements = []
        current = ''
        i = 0
        while i < len(row):
            letter = row[i]
            if letter == marker:
                if current != '' or len(elements) == 0:
                    # Don't append empty string unless it is the first element
                    # The border is already removed when we get the row, then the line is strip()'d
                    # If the first element is a marker, then we have an empty first cell
                    elements.append(current)
                    current = ''
            else:
                match = backtick_pattern.getCompiledRegExp().match(row[i:])
                if not match:
                    current += letter
                else:
                    groups = match.groups()
                    delim = groups[1] # the code block delimiter (ie 1 or more backticks)
                    row_contents = groups[2] # the text contained inside the code block
                    i += match.start(4) # jump pointer to the beginning of the rest of the text (group #4)
                    element = delim + row_contents + delim # reinsert backticks
                    current += element
            i += 1
        elements.append(current)
        return elements

    def _row_has_unpaired_backticks(self, row):
        # '\`' below is a two-character string (backslash + backtick):
        # it counts escaped backticks, which are excluded from pairing.
        count_total_backtick = row.count('`')
        count_escaped_backtick = row.count('\`')
        count_backtick = count_total_backtick - count_escaped_backtick
        # odd number of backticks,
        # we won't be able to build correct code blocks
        return count_backtick & 1
class TableExtension(Extension):
    """ Add tables to Markdown. """

    def extendMarkdown(self, md, md_globals):
        """ Register a TableProcessor with the block parser, just before
        the hash-header processor. """
        processor = TableProcessor(md.parser)
        md.parser.blockprocessors.add('table', processor, '<hashheader')
def makeExtension(*args, **kwargs):
    """Entry point used by Markdown to instantiate this extension."""
    return TableExtension(*args, **kwargs)
| gpl-3.0 |
kans/birgo | tools/gyp/test/small/gyptest-small.py | 89 | 1405 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Runs small tests.
"""
import imp
import os
import sys
import unittest
import TestGyp
# Driver script: collects gyp's "small" unit-test modules into one suite
# and runs them, reporting pass/fail through the TestGyp harness.
test = TestGyp.TestGyp()

# Add pylib to the import path (so tests can import their dependencies).
# This is consistent with the path.append done in the top file "gyp".
sys.path.append(os.path.join(test._cwd, 'pylib'))

# Add new test suites here.
files_to_test = [
  'pylib/gyp/MSVSSettings_test.py',
  'pylib/gyp/easy_xml_test.py',
  'pylib/gyp/generator/msvs_test.py',
  'pylib/gyp/generator/ninja_test.py',
  'pylib/gyp/common_test.py',
]

# Collect all the suites from the above files.
suites = []
for filename in files_to_test:
  # Carve the module name out of the path.
  name = os.path.splitext(os.path.split(filename)[1])[0]
  # Find the complete module path.
  full_filename = os.path.join(test._cwd, filename)
  # Load the module.
  module = imp.load_source(name, full_filename)
  # Add it to the list of test suites.
  suites.append(unittest.defaultTestLoader.loadTestsFromModule(module))

# Create combined suite.
all_tests = unittest.TestSuite(suites)

# Run all the tests.
result = unittest.TextTestRunner(verbosity=2).run(all_tests)
if result.failures or result.errors:
  test.fail_test()

test.pass_test()
| apache-2.0 |
Kamik423/uni_plan | plan/plan/lib/python3.4/site-packages/pip/_vendor/requests/packages/chardet/euckrfreq.py | 3121 | 45978 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials include literature and computer technology
# 128 --> 0.79
# 256 --> 0.92
# 512 --> 0.986
# 1024 --> 0.99944
# 2048 --> 0.99999
#
# Idea Distribution Ratio = 0.98653 / (1-0.98653) = 73.24
# Random Distribution Ration = 512 / (2350-512) = 0.279.
#
# Typical Distribution Ratio
# Threshold ratio of typical-vs-atypical character frequency; presumably
# consumed by the EUC-KR distribution analyser — see the distribution
# ratio discussion in the header comments above. TODO confirm consumer.
EUCKR_TYPICAL_DISTRIBUTION_RATIO = 6.0
# Number of meaningful entries at the start of the frequency-order table
# defined below (entries past this index are marked "of no interest").
EUCKR_TABLE_SIZE = 2352
# Char to FreqOrder table ,
EUCKRCharToFreqOrder = ( \
13, 130, 120,1396, 481,1719,1720, 328, 609, 212,1721, 707, 400, 299,1722, 87,
1397,1723, 104, 536,1117,1203,1724,1267, 685,1268, 508,1725,1726,1727,1728,1398,
1399,1729,1730,1731, 141, 621, 326,1057, 368,1732, 267, 488, 20,1733,1269,1734,
945,1400,1735, 47, 904,1270,1736,1737, 773, 248,1738, 409, 313, 786, 429,1739,
116, 987, 813,1401, 683, 75,1204, 145,1740,1741,1742,1743, 16, 847, 667, 622,
708,1744,1745,1746, 966, 787, 304, 129,1747, 60, 820, 123, 676,1748,1749,1750,
1751, 617,1752, 626,1753,1754,1755,1756, 653,1757,1758,1759,1760,1761,1762, 856,
344,1763,1764,1765,1766, 89, 401, 418, 806, 905, 848,1767,1768,1769, 946,1205,
709,1770,1118,1771, 241,1772,1773,1774,1271,1775, 569,1776, 999,1777,1778,1779,
1780, 337, 751,1058, 28, 628, 254,1781, 177, 906, 270, 349, 891,1079,1782, 19,
1783, 379,1784, 315,1785, 629, 754,1402, 559,1786, 636, 203,1206,1787, 710, 567,
1788, 935, 814,1789,1790,1207, 766, 528,1791,1792,1208,1793,1794,1795,1796,1797,
1403,1798,1799, 533,1059,1404,1405,1156,1406, 936, 884,1080,1800, 351,1801,1802,
1803,1804,1805, 801,1806,1807,1808,1119,1809,1157, 714, 474,1407,1810, 298, 899,
885,1811,1120, 802,1158,1812, 892,1813,1814,1408, 659,1815,1816,1121,1817,1818,
1819,1820,1821,1822, 319,1823, 594, 545,1824, 815, 937,1209,1825,1826, 573,1409,
1022,1827,1210,1828,1829,1830,1831,1832,1833, 556, 722, 807,1122,1060,1834, 697,
1835, 900, 557, 715,1836,1410, 540,1411, 752,1159, 294, 597,1211, 976, 803, 770,
1412,1837,1838, 39, 794,1413, 358,1839, 371, 925,1840, 453, 661, 788, 531, 723,
544,1023,1081, 869, 91,1841, 392, 430, 790, 602,1414, 677,1082, 457,1415,1416,
1842,1843, 475, 327,1024,1417, 795, 121,1844, 733, 403,1418,1845,1846,1847, 300,
119, 711,1212, 627,1848,1272, 207,1849,1850, 796,1213, 382,1851, 519,1852,1083,
893,1853,1854,1855, 367, 809, 487, 671,1856, 663,1857,1858, 956, 471, 306, 857,
1859,1860,1160,1084,1861,1862,1863,1864,1865,1061,1866,1867,1868,1869,1870,1871,
282, 96, 574,1872, 502,1085,1873,1214,1874, 907,1875,1876, 827, 977,1419,1420,
1421, 268,1877,1422,1878,1879,1880, 308,1881, 2, 537,1882,1883,1215,1884,1885,
127, 791,1886,1273,1423,1887, 34, 336, 404, 643,1888, 571, 654, 894, 840,1889,
0, 886,1274, 122, 575, 260, 908, 938,1890,1275, 410, 316,1891,1892, 100,1893,
1894,1123, 48,1161,1124,1025,1895, 633, 901,1276,1896,1897, 115, 816,1898, 317,
1899, 694,1900, 909, 734,1424, 572, 866,1425, 691, 85, 524,1010, 543, 394, 841,
1901,1902,1903,1026,1904,1905,1906,1907,1908,1909, 30, 451, 651, 988, 310,1910,
1911,1426, 810,1216, 93,1912,1913,1277,1217,1914, 858, 759, 45, 58, 181, 610,
269,1915,1916, 131,1062, 551, 443,1000, 821,1427, 957, 895,1086,1917,1918, 375,
1919, 359,1920, 687,1921, 822,1922, 293,1923,1924, 40, 662, 118, 692, 29, 939,
887, 640, 482, 174,1925, 69,1162, 728,1428, 910,1926,1278,1218,1279, 386, 870,
217, 854,1163, 823,1927,1928,1929,1930, 834,1931, 78,1932, 859,1933,1063,1934,
1935,1936,1937, 438,1164, 208, 595,1938,1939,1940,1941,1219,1125,1942, 280, 888,
1429,1430,1220,1431,1943,1944,1945,1946,1947,1280, 150, 510,1432,1948,1949,1950,
1951,1952,1953,1954,1011,1087,1955,1433,1043,1956, 881,1957, 614, 958,1064,1065,
1221,1958, 638,1001, 860, 967, 896,1434, 989, 492, 553,1281,1165,1959,1282,1002,
1283,1222,1960,1961,1962,1963, 36, 383, 228, 753, 247, 454,1964, 876, 678,1965,
1966,1284, 126, 464, 490, 835, 136, 672, 529, 940,1088,1435, 473,1967,1968, 467,
50, 390, 227, 587, 279, 378, 598, 792, 968, 240, 151, 160, 849, 882,1126,1285,
639,1044, 133, 140, 288, 360, 811, 563,1027, 561, 142, 523,1969,1970,1971, 7,
103, 296, 439, 407, 506, 634, 990,1972,1973,1974,1975, 645,1976,1977,1978,1979,
1980,1981, 236,1982,1436,1983,1984,1089, 192, 828, 618, 518,1166, 333,1127,1985,
818,1223,1986,1987,1988,1989,1990,1991,1992,1993, 342,1128,1286, 746, 842,1994,
1995, 560, 223,1287, 98, 8, 189, 650, 978,1288,1996,1437,1997, 17, 345, 250,
423, 277, 234, 512, 226, 97, 289, 42, 167,1998, 201,1999,2000, 843, 836, 824,
532, 338, 783,1090, 182, 576, 436,1438,1439, 527, 500,2001, 947, 889,2002,2003,
2004,2005, 262, 600, 314, 447,2006, 547,2007, 693, 738,1129,2008, 71,1440, 745,
619, 688,2009, 829,2010,2011, 147,2012, 33, 948,2013,2014, 74, 224,2015, 61,
191, 918, 399, 637,2016,1028,1130, 257, 902,2017,2018,2019,2020,2021,2022,2023,
2024,2025,2026, 837,2027,2028,2029,2030, 179, 874, 591, 52, 724, 246,2031,2032,
2033,2034,1167, 969,2035,1289, 630, 605, 911,1091,1168,2036,2037,2038,1441, 912,
2039, 623,2040,2041, 253,1169,1290,2042,1442, 146, 620, 611, 577, 433,2043,1224,
719,1170, 959, 440, 437, 534, 84, 388, 480,1131, 159, 220, 198, 679,2044,1012,
819,1066,1443, 113,1225, 194, 318,1003,1029,2045,2046,2047,2048,1067,2049,2050,
2051,2052,2053, 59, 913, 112,2054, 632,2055, 455, 144, 739,1291,2056, 273, 681,
499,2057, 448,2058,2059, 760,2060,2061, 970, 384, 169, 245,1132,2062,2063, 414,
1444,2064,2065, 41, 235,2066, 157, 252, 877, 568, 919, 789, 580,2067, 725,2068,
2069,1292,2070,2071,1445,2072,1446,2073,2074, 55, 588, 66,1447, 271,1092,2075,
1226,2076, 960,1013, 372,2077,2078,2079,2080,2081,1293,2082,2083,2084,2085, 850,
2086,2087,2088,2089,2090, 186,2091,1068, 180,2092,2093,2094, 109,1227, 522, 606,
2095, 867,1448,1093, 991,1171, 926, 353,1133,2096, 581,2097,2098,2099,1294,1449,
1450,2100, 596,1172,1014,1228,2101,1451,1295,1173,1229,2102,2103,1296,1134,1452,
949,1135,2104,2105,1094,1453,1454,1455,2106,1095,2107,2108,2109,2110,2111,2112,
2113,2114,2115,2116,2117, 804,2118,2119,1230,1231, 805,1456, 405,1136,2120,2121,
2122,2123,2124, 720, 701,1297, 992,1457, 927,1004,2125,2126,2127,2128,2129,2130,
22, 417,2131, 303,2132, 385,2133, 971, 520, 513,2134,1174, 73,1096, 231, 274,
962,1458, 673,2135,1459,2136, 152,1137,2137,2138,2139,2140,1005,1138,1460,1139,
2141,2142,2143,2144, 11, 374, 844,2145, 154,1232, 46,1461,2146, 838, 830, 721,
1233, 106,2147, 90, 428, 462, 578, 566,1175, 352,2148,2149, 538,1234, 124,1298,
2150,1462, 761, 565,2151, 686,2152, 649,2153, 72, 173,2154, 460, 415,2155,1463,
2156,1235, 305,2157,2158,2159,2160,2161,2162, 579,2163,2164,2165,2166,2167, 747,
2168,2169,2170,2171,1464, 669,2172,2173,2174,2175,2176,1465,2177, 23, 530, 285,
2178, 335, 729,2179, 397,2180,2181,2182,1030,2183,2184, 698,2185,2186, 325,2187,
2188, 369,2189, 799,1097,1015, 348,2190,1069, 680,2191, 851,1466,2192,2193, 10,
2194, 613, 424,2195, 979, 108, 449, 589, 27, 172, 81,1031, 80, 774, 281, 350,
1032, 525, 301, 582,1176,2196, 674,1045,2197,2198,1467, 730, 762,2199,2200,2201,
2202,1468,2203, 993,2204,2205, 266,1070, 963,1140,2206,2207,2208, 664,1098, 972,
2209,2210,2211,1177,1469,1470, 871,2212,2213,2214,2215,2216,1471,2217,2218,2219,
2220,2221,2222,2223,2224,2225,2226,2227,1472,1236,2228,2229,2230,2231,2232,2233,
2234,2235,1299,2236,2237, 200,2238, 477, 373,2239,2240, 731, 825, 777,2241,2242,
2243, 521, 486, 548,2244,2245,2246,1473,1300, 53, 549, 137, 875, 76, 158,2247,
1301,1474, 469, 396,1016, 278, 712,2248, 321, 442, 503, 767, 744, 941,1237,1178,
1475,2249, 82, 178,1141,1179, 973,2250,1302,2251, 297,2252,2253, 570,2254,2255,
2256, 18, 450, 206,2257, 290, 292,1142,2258, 511, 162, 99, 346, 164, 735,2259,
1476,1477, 4, 554, 343, 798,1099,2260,1100,2261, 43, 171,1303, 139, 215,2262,
2263, 717, 775,2264,1033, 322, 216,2265, 831,2266, 149,2267,1304,2268,2269, 702,
1238, 135, 845, 347, 309,2270, 484,2271, 878, 655, 238,1006,1478,2272, 67,2273,
295,2274,2275, 461,2276, 478, 942, 412,2277,1034,2278,2279,2280, 265,2281, 541,
2282,2283,2284,2285,2286, 70, 852,1071,2287,2288,2289,2290, 21, 56, 509, 117,
432,2291,2292, 331, 980, 552,1101, 148, 284, 105, 393,1180,1239, 755,2293, 187,
2294,1046,1479,2295, 340,2296, 63,1047, 230,2297,2298,1305, 763,1306, 101, 800,
808, 494,2299,2300,2301, 903,2302, 37,1072, 14, 5,2303, 79, 675,2304, 312,
2305,2306,2307,2308,2309,1480, 6,1307,2310,2311,2312, 1, 470, 35, 24, 229,
2313, 695, 210, 86, 778, 15, 784, 592, 779, 32, 77, 855, 964,2314, 259,2315,
501, 380,2316,2317, 83, 981, 153, 689,1308,1481,1482,1483,2318,2319, 716,1484,
2320,2321,2322,2323,2324,2325,1485,2326,2327, 128, 57, 68, 261,1048, 211, 170,
1240, 31,2328, 51, 435, 742,2329,2330,2331, 635,2332, 264, 456,2333,2334,2335,
425,2336,1486, 143, 507, 263, 943,2337, 363, 920,1487, 256,1488,1102, 243, 601,
1489,2338,2339,2340,2341,2342,2343,2344, 861,2345,2346,2347,2348,2349,2350, 395,
2351,1490,1491, 62, 535, 166, 225,2352,2353, 668, 419,1241, 138, 604, 928,2354,
1181,2355,1492,1493,2356,2357,2358,1143,2359, 696,2360, 387, 307,1309, 682, 476,
2361,2362, 332, 12, 222, 156,2363, 232,2364, 641, 276, 656, 517,1494,1495,1035,
416, 736,1496,2365,1017, 586,2366,2367,2368,1497,2369, 242,2370,2371,2372,1498,
2373, 965, 713,2374,2375,2376,2377, 740, 982,1499, 944,1500,1007,2378,2379,1310,
1501,2380,2381,2382, 785, 329,2383,2384,1502,2385,2386,2387, 932,2388,1503,2389,
2390,2391,2392,1242,2393,2394,2395,2396,2397, 994, 950,2398,2399,2400,2401,1504,
1311,2402,2403,2404,2405,1049, 749,2406,2407, 853, 718,1144,1312,2408,1182,1505,
2409,2410, 255, 516, 479, 564, 550, 214,1506,1507,1313, 413, 239, 444, 339,1145,
1036,1508,1509,1314,1037,1510,1315,2411,1511,2412,2413,2414, 176, 703, 497, 624,
593, 921, 302,2415, 341, 165,1103,1512,2416,1513,2417,2418,2419, 376,2420, 700,
2421,2422,2423, 258, 768,1316,2424,1183,2425, 995, 608,2426,2427,2428,2429, 221,
2430,2431,2432,2433,2434,2435,2436,2437, 195, 323, 726, 188, 897, 983,1317, 377,
644,1050, 879,2438, 452,2439,2440,2441,2442,2443,2444, 914,2445,2446,2447,2448,
915, 489,2449,1514,1184,2450,2451, 515, 64, 427, 495,2452, 583,2453, 483, 485,
1038, 562, 213,1515, 748, 666,2454,2455,2456,2457, 334,2458, 780, 996,1008, 705,
1243,2459,2460,2461,2462,2463, 114,2464, 493,1146, 366, 163,1516, 961,1104,2465,
291,2466,1318,1105,2467,1517, 365,2468, 355, 951,1244,2469,1319,2470, 631,2471,
2472, 218,1320, 364, 320, 756,1518,1519,1321,1520,1322,2473,2474,2475,2476, 997,
2477,2478,2479,2480, 665,1185,2481, 916,1521,2482,2483,2484, 584, 684,2485,2486,
797,2487,1051,1186,2488,2489,2490,1522,2491,2492, 370,2493,1039,1187, 65,2494,
434, 205, 463,1188,2495, 125, 812, 391, 402, 826, 699, 286, 398, 155, 781, 771,
585,2496, 590, 505,1073,2497, 599, 244, 219, 917,1018, 952, 646,1523,2498,1323,
2499,2500, 49, 984, 354, 741,2501, 625,2502,1324,2503,1019, 190, 357, 757, 491,
95, 782, 868,2504,2505,2506,2507,2508,2509, 134,1524,1074, 422,1525, 898,2510,
161,2511,2512,2513,2514, 769,2515,1526,2516,2517, 411,1325,2518, 472,1527,2519,
2520,2521,2522,2523,2524, 985,2525,2526,2527,2528,2529,2530, 764,2531,1245,2532,
2533, 25, 204, 311,2534, 496,2535,1052,2536,2537,2538,2539,2540,2541,2542, 199,
704, 504, 468, 758, 657,1528, 196, 44, 839,1246, 272, 750,2543, 765, 862,2544,
2545,1326,2546, 132, 615, 933,2547, 732,2548,2549,2550,1189,1529,2551, 283,1247,
1053, 607, 929,2552,2553,2554, 930, 183, 872, 616,1040,1147,2555,1148,1020, 441,
249,1075,2556,2557,2558, 466, 743,2559,2560,2561, 92, 514, 426, 420, 526,2562,
2563,2564,2565,2566,2567,2568, 185,2569,2570,2571,2572, 776,1530, 658,2573, 362,
2574, 361, 922,1076, 793,2575,2576,2577,2578,2579,2580,1531, 251,2581,2582,2583,
2584,1532, 54, 612, 237,1327,2585,2586, 275, 408, 647, 111,2587,1533,1106, 465,
3, 458, 9, 38,2588, 107, 110, 890, 209, 26, 737, 498,2589,1534,2590, 431,
202, 88,1535, 356, 287,1107, 660,1149,2591, 381,1536, 986,1150, 445,1248,1151,
974,2592,2593, 846,2594, 446, 953, 184,1249,1250, 727,2595, 923, 193, 883,2596,
2597,2598, 102, 324, 539, 817,2599, 421,1041,2600, 832,2601, 94, 175, 197, 406,
2602, 459,2603,2604,2605,2606,2607, 330, 555,2608,2609,2610, 706,1108, 389,2611,
2612,2613,2614, 233,2615, 833, 558, 931, 954,1251,2616,2617,1537, 546,2618,2619,
1009,2620,2621,2622,1538, 690,1328,2623, 955,2624,1539,2625,2626, 772,2627,2628,
2629,2630,2631, 924, 648, 863, 603,2632,2633, 934,1540, 864, 865,2634, 642,1042,
670,1190,2635,2636,2637,2638, 168,2639, 652, 873, 542,1054,1541,2640,2641,2642, # 512, 256
#Everything below is of no interest for detection purpose
2643,2644,2645,2646,2647,2648,2649,2650,2651,2652,2653,2654,2655,2656,2657,2658,
2659,2660,2661,2662,2663,2664,2665,2666,2667,2668,2669,2670,2671,2672,2673,2674,
2675,2676,2677,2678,2679,2680,2681,2682,2683,2684,2685,2686,2687,2688,2689,2690,
2691,2692,2693,2694,2695,2696,2697,2698,2699,1542, 880,2700,2701,2702,2703,2704,
2705,2706,2707,2708,2709,2710,2711,2712,2713,2714,2715,2716,2717,2718,2719,2720,
2721,2722,2723,2724,2725,1543,2726,2727,2728,2729,2730,2731,2732,1544,2733,2734,
2735,2736,2737,2738,2739,2740,2741,2742,2743,2744,2745,2746,2747,2748,2749,2750,
2751,2752,2753,2754,1545,2755,2756,2757,2758,2759,2760,2761,2762,2763,2764,2765,
2766,1546,2767,1547,2768,2769,2770,2771,2772,2773,2774,2775,2776,2777,2778,2779,
2780,2781,2782,2783,2784,2785,2786,1548,2787,2788,2789,1109,2790,2791,2792,2793,
2794,2795,2796,2797,2798,2799,2800,2801,2802,2803,2804,2805,2806,2807,2808,2809,
2810,2811,2812,1329,2813,2814,2815,2816,2817,2818,2819,2820,2821,2822,2823,2824,
2825,2826,2827,2828,2829,2830,2831,2832,2833,2834,2835,2836,2837,2838,2839,2840,
2841,2842,2843,2844,2845,2846,2847,2848,2849,2850,2851,2852,2853,2854,2855,2856,
1549,2857,2858,2859,2860,1550,2861,2862,1551,2863,2864,2865,2866,2867,2868,2869,
2870,2871,2872,2873,2874,1110,1330,2875,2876,2877,2878,2879,2880,2881,2882,2883,
2884,2885,2886,2887,2888,2889,2890,2891,2892,2893,2894,2895,2896,2897,2898,2899,
2900,2901,2902,2903,2904,2905,2906,2907,2908,2909,2910,2911,2912,2913,2914,2915,
2916,2917,2918,2919,2920,2921,2922,2923,2924,2925,2926,2927,2928,2929,2930,1331,
2931,2932,2933,2934,2935,2936,2937,2938,2939,2940,2941,2942,2943,1552,2944,2945,
2946,2947,2948,2949,2950,2951,2952,2953,2954,2955,2956,2957,2958,2959,2960,2961,
2962,2963,2964,1252,2965,2966,2967,2968,2969,2970,2971,2972,2973,2974,2975,2976,
2977,2978,2979,2980,2981,2982,2983,2984,2985,2986,2987,2988,2989,2990,2991,2992,
2993,2994,2995,2996,2997,2998,2999,3000,3001,3002,3003,3004,3005,3006,3007,3008,
3009,3010,3011,3012,1553,3013,3014,3015,3016,3017,1554,3018,1332,3019,3020,3021,
3022,3023,3024,3025,3026,3027,3028,3029,3030,3031,3032,3033,3034,3035,3036,3037,
3038,3039,3040,3041,3042,3043,3044,3045,3046,3047,3048,3049,3050,1555,3051,3052,
3053,1556,1557,3054,3055,3056,3057,3058,3059,3060,3061,3062,3063,3064,3065,3066,
3067,1558,3068,3069,3070,3071,3072,3073,3074,3075,3076,1559,3077,3078,3079,3080,
3081,3082,3083,1253,3084,3085,3086,3087,3088,3089,3090,3091,3092,3093,3094,3095,
3096,3097,3098,3099,3100,3101,3102,3103,3104,3105,3106,3107,3108,1152,3109,3110,
3111,3112,3113,1560,3114,3115,3116,3117,1111,3118,3119,3120,3121,3122,3123,3124,
3125,3126,3127,3128,3129,3130,3131,3132,3133,3134,3135,3136,3137,3138,3139,3140,
3141,3142,3143,3144,3145,3146,3147,3148,3149,3150,3151,3152,3153,3154,3155,3156,
3157,3158,3159,3160,3161,3162,3163,3164,3165,3166,3167,3168,3169,3170,3171,3172,
3173,3174,3175,3176,1333,3177,3178,3179,3180,3181,3182,3183,3184,3185,3186,3187,
3188,3189,1561,3190,3191,1334,3192,3193,3194,3195,3196,3197,3198,3199,3200,3201,
3202,3203,3204,3205,3206,3207,3208,3209,3210,3211,3212,3213,3214,3215,3216,3217,
3218,3219,3220,3221,3222,3223,3224,3225,3226,3227,3228,3229,3230,3231,3232,3233,
3234,1562,3235,3236,3237,3238,3239,3240,3241,3242,3243,3244,3245,3246,3247,3248,
3249,3250,3251,3252,3253,3254,3255,3256,3257,3258,3259,3260,3261,3262,3263,3264,
3265,3266,3267,3268,3269,3270,3271,3272,3273,3274,3275,3276,3277,1563,3278,3279,
3280,3281,3282,3283,3284,3285,3286,3287,3288,3289,3290,3291,3292,3293,3294,3295,
3296,3297,3298,3299,3300,3301,3302,3303,3304,3305,3306,3307,3308,3309,3310,3311,
3312,3313,3314,3315,3316,3317,3318,3319,3320,3321,3322,3323,3324,3325,3326,3327,
3328,3329,3330,3331,3332,3333,3334,3335,3336,3337,3338,3339,3340,3341,3342,3343,
3344,3345,3346,3347,3348,3349,3350,3351,3352,3353,3354,3355,3356,3357,3358,3359,
3360,3361,3362,3363,3364,1335,3365,3366,3367,3368,3369,3370,3371,3372,3373,3374,
3375,3376,3377,3378,3379,3380,3381,3382,3383,3384,3385,3386,3387,1336,3388,3389,
3390,3391,3392,3393,3394,3395,3396,3397,3398,3399,3400,3401,3402,3403,3404,3405,
3406,3407,3408,3409,3410,3411,3412,3413,3414,1337,3415,3416,3417,3418,3419,1338,
3420,3421,3422,1564,1565,3423,3424,3425,3426,3427,3428,3429,3430,3431,1254,3432,
3433,3434,1339,3435,3436,3437,3438,3439,1566,3440,3441,3442,3443,3444,3445,3446,
3447,3448,3449,3450,3451,3452,3453,3454,1255,3455,3456,3457,3458,3459,1567,1191,
3460,1568,1569,3461,3462,3463,1570,3464,3465,3466,3467,3468,1571,3469,3470,3471,
3472,3473,1572,3474,3475,3476,3477,3478,3479,3480,3481,3482,3483,3484,3485,3486,
1340,3487,3488,3489,3490,3491,3492,1021,3493,3494,3495,3496,3497,3498,1573,3499,
1341,3500,3501,3502,3503,3504,3505,3506,3507,3508,3509,3510,3511,1342,3512,3513,
3514,3515,3516,1574,1343,3517,3518,3519,1575,3520,1576,3521,3522,3523,3524,3525,
3526,3527,3528,3529,3530,3531,3532,3533,3534,3535,3536,3537,3538,3539,3540,3541,
3542,3543,3544,3545,3546,3547,3548,3549,3550,3551,3552,3553,3554,3555,3556,3557,
3558,3559,3560,3561,3562,3563,3564,3565,3566,3567,3568,3569,3570,3571,3572,3573,
3574,3575,3576,3577,3578,3579,3580,1577,3581,3582,1578,3583,3584,3585,3586,3587,
3588,3589,3590,3591,3592,3593,3594,3595,3596,3597,3598,3599,3600,3601,3602,3603,
3604,1579,3605,3606,3607,3608,3609,3610,3611,3612,3613,3614,3615,3616,3617,3618,
3619,3620,3621,3622,3623,3624,3625,3626,3627,3628,3629,1580,3630,3631,1581,3632,
3633,3634,3635,3636,3637,3638,3639,3640,3641,3642,3643,3644,3645,3646,3647,3648,
3649,3650,3651,3652,3653,3654,3655,3656,1582,3657,3658,3659,3660,3661,3662,3663,
3664,3665,3666,3667,3668,3669,3670,3671,3672,3673,3674,3675,3676,3677,3678,3679,
3680,3681,3682,3683,3684,3685,3686,3687,3688,3689,3690,3691,3692,3693,3694,3695,
3696,3697,3698,3699,3700,1192,3701,3702,3703,3704,1256,3705,3706,3707,3708,1583,
1257,3709,3710,3711,3712,3713,3714,3715,3716,1584,3717,3718,3719,3720,3721,3722,
3723,3724,3725,3726,3727,3728,3729,3730,3731,3732,3733,3734,3735,3736,3737,3738,
3739,3740,3741,3742,3743,3744,3745,1344,3746,3747,3748,3749,3750,3751,3752,3753,
3754,3755,3756,1585,3757,3758,3759,3760,3761,3762,3763,3764,3765,3766,1586,3767,
3768,3769,3770,3771,3772,3773,3774,3775,3776,3777,3778,1345,3779,3780,3781,3782,
3783,3784,3785,3786,3787,3788,3789,3790,3791,3792,3793,3794,3795,1346,1587,3796,
3797,1588,3798,3799,3800,3801,3802,3803,3804,3805,3806,1347,3807,3808,3809,3810,
3811,1589,3812,3813,3814,3815,3816,3817,3818,3819,3820,3821,1590,3822,3823,1591,
1348,3824,3825,3826,3827,3828,3829,3830,1592,3831,3832,1593,3833,3834,3835,3836,
3837,3838,3839,3840,3841,3842,3843,3844,1349,3845,3846,3847,3848,3849,3850,3851,
3852,3853,3854,3855,3856,3857,3858,1594,3859,3860,3861,3862,3863,3864,3865,3866,
3867,3868,3869,1595,3870,3871,3872,3873,1596,3874,3875,3876,3877,3878,3879,3880,
3881,3882,3883,3884,3885,3886,1597,3887,3888,3889,3890,3891,3892,3893,3894,3895,
1598,3896,3897,3898,1599,1600,3899,1350,3900,1351,3901,3902,1352,3903,3904,3905,
3906,3907,3908,3909,3910,3911,3912,3913,3914,3915,3916,3917,3918,3919,3920,3921,
3922,3923,3924,1258,3925,3926,3927,3928,3929,3930,3931,1193,3932,1601,3933,3934,
3935,3936,3937,3938,3939,3940,3941,3942,3943,1602,3944,3945,3946,3947,3948,1603,
3949,3950,3951,3952,3953,3954,3955,3956,3957,3958,3959,3960,3961,3962,3963,3964,
3965,1604,3966,3967,3968,3969,3970,3971,3972,3973,3974,3975,3976,3977,1353,3978,
3979,3980,3981,3982,3983,3984,3985,3986,3987,3988,3989,3990,3991,1354,3992,3993,
3994,3995,3996,3997,3998,3999,4000,4001,4002,4003,4004,4005,4006,4007,4008,4009,
4010,4011,4012,4013,4014,4015,4016,4017,4018,4019,4020,4021,4022,4023,1355,4024,
4025,4026,4027,4028,4029,4030,4031,4032,4033,4034,4035,4036,4037,4038,4039,4040,
1605,4041,4042,4043,4044,4045,4046,4047,4048,4049,4050,4051,4052,4053,4054,4055,
4056,4057,4058,4059,4060,1606,4061,4062,4063,4064,1607,4065,4066,4067,4068,4069,
4070,4071,4072,4073,4074,4075,4076,1194,4077,4078,1608,4079,4080,4081,4082,4083,
4084,4085,4086,4087,1609,4088,4089,4090,4091,4092,4093,4094,4095,4096,4097,4098,
4099,4100,4101,4102,4103,4104,4105,4106,4107,4108,1259,4109,4110,4111,4112,4113,
4114,4115,4116,4117,4118,4119,4120,4121,4122,4123,4124,1195,4125,4126,4127,1610,
4128,4129,4130,4131,4132,4133,4134,4135,4136,4137,1356,4138,4139,4140,4141,4142,
4143,4144,1611,4145,4146,4147,4148,4149,4150,4151,4152,4153,4154,4155,4156,4157,
4158,4159,4160,4161,4162,4163,4164,4165,4166,4167,4168,4169,4170,4171,4172,4173,
4174,4175,4176,4177,4178,4179,4180,4181,4182,4183,4184,4185,4186,4187,4188,4189,
4190,4191,4192,4193,4194,4195,4196,4197,4198,4199,4200,4201,4202,4203,4204,4205,
4206,4207,4208,4209,4210,4211,4212,4213,4214,4215,4216,4217,4218,4219,1612,4220,
4221,4222,4223,4224,4225,4226,4227,1357,4228,1613,4229,4230,4231,4232,4233,4234,
4235,4236,4237,4238,4239,4240,4241,4242,4243,1614,4244,4245,4246,4247,4248,4249,
4250,4251,4252,4253,4254,4255,4256,4257,4258,4259,4260,4261,4262,4263,4264,4265,
4266,4267,4268,4269,4270,1196,1358,4271,4272,4273,4274,4275,4276,4277,4278,4279,
4280,4281,4282,4283,4284,4285,4286,4287,1615,4288,4289,4290,4291,4292,4293,4294,
4295,4296,4297,4298,4299,4300,4301,4302,4303,4304,4305,4306,4307,4308,4309,4310,
4311,4312,4313,4314,4315,4316,4317,4318,4319,4320,4321,4322,4323,4324,4325,4326,
4327,4328,4329,4330,4331,4332,4333,4334,1616,4335,4336,4337,4338,4339,4340,4341,
4342,4343,4344,4345,4346,4347,4348,4349,4350,4351,4352,4353,4354,4355,4356,4357,
4358,4359,4360,1617,4361,4362,4363,4364,4365,1618,4366,4367,4368,4369,4370,4371,
4372,4373,4374,4375,4376,4377,4378,4379,4380,4381,4382,4383,4384,4385,4386,4387,
4388,4389,4390,4391,4392,4393,4394,4395,4396,4397,4398,4399,4400,4401,4402,4403,
4404,4405,4406,4407,4408,4409,4410,4411,4412,4413,4414,4415,4416,1619,4417,4418,
4419,4420,4421,4422,4423,4424,4425,1112,4426,4427,4428,4429,4430,1620,4431,4432,
4433,4434,4435,4436,4437,4438,4439,4440,4441,4442,1260,1261,4443,4444,4445,4446,
4447,4448,4449,4450,4451,4452,4453,4454,4455,1359,4456,4457,4458,4459,4460,4461,
4462,4463,4464,4465,1621,4466,4467,4468,4469,4470,4471,4472,4473,4474,4475,4476,
4477,4478,4479,4480,4481,4482,4483,4484,4485,4486,4487,4488,4489,1055,4490,4491,
4492,4493,4494,4495,4496,4497,4498,4499,4500,4501,4502,4503,4504,4505,4506,4507,
4508,4509,4510,4511,4512,4513,4514,4515,4516,4517,4518,1622,4519,4520,4521,1623,
4522,4523,4524,4525,4526,4527,4528,4529,4530,4531,4532,4533,4534,4535,1360,4536,
4537,4538,4539,4540,4541,4542,4543, 975,4544,4545,4546,4547,4548,4549,4550,4551,
4552,4553,4554,4555,4556,4557,4558,4559,4560,4561,4562,4563,4564,4565,4566,4567,
4568,4569,4570,4571,1624,4572,4573,4574,4575,4576,1625,4577,4578,4579,4580,4581,
4582,4583,4584,1626,4585,4586,4587,4588,4589,4590,4591,4592,4593,4594,4595,1627,
4596,4597,4598,4599,4600,4601,4602,4603,4604,4605,4606,4607,4608,4609,4610,4611,
4612,4613,4614,4615,1628,4616,4617,4618,4619,4620,4621,4622,4623,4624,4625,4626,
4627,4628,4629,4630,4631,4632,4633,4634,4635,4636,4637,4638,4639,4640,4641,4642,
4643,4644,4645,4646,4647,4648,4649,1361,4650,4651,4652,4653,4654,4655,4656,4657,
4658,4659,4660,4661,1362,4662,4663,4664,4665,4666,4667,4668,4669,4670,4671,4672,
4673,4674,4675,4676,4677,4678,4679,4680,4681,4682,1629,4683,4684,4685,4686,4687,
1630,4688,4689,4690,4691,1153,4692,4693,4694,1113,4695,4696,4697,4698,4699,4700,
4701,4702,4703,4704,4705,4706,4707,4708,4709,4710,4711,1197,4712,4713,4714,4715,
4716,4717,4718,4719,4720,4721,4722,4723,4724,4725,4726,4727,4728,4729,4730,4731,
4732,4733,4734,4735,1631,4736,1632,4737,4738,4739,4740,4741,4742,4743,4744,1633,
4745,4746,4747,4748,4749,1262,4750,4751,4752,4753,4754,1363,4755,4756,4757,4758,
4759,4760,4761,4762,4763,4764,4765,4766,4767,4768,1634,4769,4770,4771,4772,4773,
4774,4775,4776,4777,4778,1635,4779,4780,4781,4782,4783,4784,4785,4786,4787,4788,
4789,1636,4790,4791,4792,4793,4794,4795,4796,4797,4798,4799,4800,4801,4802,4803,
4804,4805,4806,1637,4807,4808,4809,1638,4810,4811,4812,4813,4814,4815,4816,4817,
4818,1639,4819,4820,4821,4822,4823,4824,4825,4826,4827,4828,4829,4830,4831,4832,
4833,1077,4834,4835,4836,4837,4838,4839,4840,4841,4842,4843,4844,4845,4846,4847,
4848,4849,4850,4851,4852,4853,4854,4855,4856,4857,4858,4859,4860,4861,4862,4863,
4864,4865,4866,4867,4868,4869,4870,4871,4872,4873,4874,4875,4876,4877,4878,4879,
4880,4881,4882,4883,1640,4884,4885,1641,4886,4887,4888,4889,4890,4891,4892,4893,
4894,4895,4896,4897,4898,4899,4900,4901,4902,4903,4904,4905,4906,4907,4908,4909,
4910,4911,1642,4912,4913,4914,1364,4915,4916,4917,4918,4919,4920,4921,4922,4923,
4924,4925,4926,4927,4928,4929,4930,4931,1643,4932,4933,4934,4935,4936,4937,4938,
4939,4940,4941,4942,4943,4944,4945,4946,4947,4948,4949,4950,4951,4952,4953,4954,
4955,4956,4957,4958,4959,4960,4961,4962,4963,4964,4965,4966,4967,4968,4969,4970,
4971,4972,4973,4974,4975,4976,4977,4978,4979,4980,1644,4981,4982,4983,4984,1645,
4985,4986,1646,4987,4988,4989,4990,4991,4992,4993,4994,4995,4996,4997,4998,4999,
5000,5001,5002,5003,5004,5005,1647,5006,1648,5007,5008,5009,5010,5011,5012,1078,
5013,5014,5015,5016,5017,5018,5019,5020,5021,5022,5023,5024,5025,5026,5027,5028,
1365,5029,5030,5031,5032,5033,5034,5035,5036,5037,5038,5039,1649,5040,5041,5042,
5043,5044,5045,1366,5046,5047,5048,5049,5050,5051,5052,5053,5054,5055,1650,5056,
5057,5058,5059,5060,5061,5062,5063,5064,5065,5066,5067,5068,5069,5070,5071,5072,
5073,5074,5075,5076,5077,1651,5078,5079,5080,5081,5082,5083,5084,5085,5086,5087,
5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102,5103,
5104,5105,5106,5107,5108,5109,5110,1652,5111,5112,5113,5114,5115,5116,5117,5118,
1367,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,1653,5130,5131,5132,
5133,5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,
5149,1368,5150,1654,5151,1369,5152,5153,5154,5155,5156,5157,5158,5159,5160,5161,
5162,5163,5164,5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,5176,5177,
5178,1370,5179,5180,5181,5182,5183,5184,5185,5186,5187,5188,5189,5190,5191,5192,
5193,5194,5195,5196,5197,5198,1655,5199,5200,5201,5202,1656,5203,5204,5205,5206,
1371,5207,1372,5208,5209,5210,5211,1373,5212,5213,1374,5214,5215,5216,5217,5218,
5219,5220,5221,5222,5223,5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,
5235,5236,5237,5238,5239,5240,5241,5242,5243,5244,5245,5246,5247,1657,5248,5249,
5250,5251,1658,1263,5252,5253,5254,5255,5256,1375,5257,5258,5259,5260,5261,5262,
5263,5264,5265,5266,5267,5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,
5279,5280,5281,5282,5283,1659,5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,
5294,5295,5296,5297,5298,5299,5300,1660,5301,5302,5303,5304,5305,5306,5307,5308,
5309,5310,5311,5312,5313,5314,5315,5316,5317,5318,5319,5320,5321,1376,5322,5323,
5324,5325,5326,5327,5328,5329,5330,5331,5332,5333,1198,5334,5335,5336,5337,5338,
5339,5340,5341,5342,5343,1661,5344,5345,5346,5347,5348,5349,5350,5351,5352,5353,
5354,5355,5356,5357,5358,5359,5360,5361,5362,5363,5364,5365,5366,5367,5368,5369,
5370,5371,5372,5373,5374,5375,5376,5377,5378,5379,5380,5381,5382,5383,5384,5385,
5386,5387,5388,5389,5390,5391,5392,5393,5394,5395,5396,5397,5398,1264,5399,5400,
5401,5402,5403,5404,5405,5406,5407,5408,5409,5410,5411,5412,1662,5413,5414,5415,
5416,1663,5417,5418,5419,5420,5421,5422,5423,5424,5425,5426,5427,5428,5429,5430,
5431,5432,5433,5434,5435,5436,5437,5438,1664,5439,5440,5441,5442,5443,5444,5445,
5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456,5457,5458,5459,5460,5461,
5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472,5473,5474,5475,5476,5477,
5478,1154,5479,5480,5481,5482,5483,5484,5485,1665,5486,5487,5488,5489,5490,5491,
5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504,5505,5506,5507,
5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520,5521,5522,5523,
5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536,5537,5538,5539,
5540,5541,5542,5543,5544,5545,5546,5547,5548,1377,5549,5550,5551,5552,5553,5554,
5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568,5569,5570,
1114,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584,5585,
5586,5587,5588,5589,5590,5591,5592,1378,5593,5594,5595,5596,5597,5598,5599,5600,
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,1379,5615,
5616,5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,
5632,5633,5634,1380,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,
5647,5648,5649,1381,1056,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,
1666,5661,5662,5663,5664,5665,5666,5667,5668,1667,5669,1668,5670,5671,5672,5673,
5674,5675,5676,5677,5678,1155,5679,5680,5681,5682,5683,5684,5685,5686,5687,5688,
5689,5690,5691,5692,5693,5694,5695,5696,5697,5698,1669,5699,5700,5701,5702,5703,
5704,5705,1670,5706,5707,5708,5709,5710,1671,5711,5712,5713,5714,1382,5715,5716,
5717,5718,5719,5720,5721,5722,5723,5724,5725,1672,5726,5727,1673,1674,5728,5729,
5730,5731,5732,5733,5734,5735,5736,1675,5737,5738,5739,5740,5741,5742,5743,5744,
1676,5745,5746,5747,5748,5749,5750,5751,1383,5752,5753,5754,5755,5756,5757,5758,
5759,5760,5761,5762,5763,5764,5765,5766,5767,5768,1677,5769,5770,5771,5772,5773,
1678,5774,5775,5776, 998,5777,5778,5779,5780,5781,5782,5783,5784,5785,1384,5786,
5787,5788,5789,5790,5791,5792,5793,5794,5795,5796,5797,5798,5799,5800,1679,5801,
5802,5803,1115,1116,5804,5805,5806,5807,5808,5809,5810,5811,5812,5813,5814,5815,
5816,5817,5818,5819,5820,5821,5822,5823,5824,5825,5826,5827,5828,5829,5830,5831,
5832,5833,5834,5835,5836,5837,5838,5839,5840,5841,5842,5843,5844,5845,5846,5847,
5848,5849,5850,5851,5852,5853,5854,5855,1680,5856,5857,5858,5859,5860,5861,5862,
5863,5864,1681,5865,5866,5867,1682,5868,5869,5870,5871,5872,5873,5874,5875,5876,
5877,5878,5879,1683,5880,1684,5881,5882,5883,5884,1685,5885,5886,5887,5888,5889,
5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904,5905,
5906,5907,1686,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920,
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,1687,
5936,5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,
5952,1688,1689,5953,1199,5954,5955,5956,5957,5958,5959,5960,5961,1690,5962,5963,
5964,5965,5966,5967,5968,5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,
5980,5981,1385,5982,1386,5983,5984,5985,5986,5987,5988,5989,5990,5991,5992,5993,
5994,5995,5996,5997,5998,5999,6000,6001,6002,6003,6004,6005,6006,6007,6008,6009,
6010,6011,6012,6013,6014,6015,6016,6017,6018,6019,6020,6021,6022,6023,6024,6025,
6026,6027,1265,6028,6029,1691,6030,6031,6032,6033,6034,6035,6036,6037,6038,6039,
6040,6041,6042,6043,6044,6045,6046,6047,6048,6049,6050,6051,6052,6053,6054,6055,
6056,6057,6058,6059,6060,6061,6062,6063,6064,6065,6066,6067,6068,6069,6070,6071,
6072,6073,6074,6075,6076,6077,6078,6079,6080,6081,6082,6083,6084,1692,6085,6086,
6087,6088,6089,6090,6091,6092,6093,6094,6095,6096,6097,6098,6099,6100,6101,6102,
6103,6104,6105,6106,6107,6108,6109,6110,6111,6112,6113,6114,6115,6116,6117,6118,
6119,6120,6121,6122,6123,6124,6125,6126,6127,6128,6129,6130,6131,1693,6132,6133,
6134,6135,6136,1694,6137,6138,6139,6140,6141,1695,6142,6143,6144,6145,6146,6147,
6148,6149,6150,6151,6152,6153,6154,6155,6156,6157,6158,6159,6160,6161,6162,6163,
6164,6165,6166,6167,6168,6169,6170,6171,6172,6173,6174,6175,6176,6177,6178,6179,
6180,6181,6182,6183,6184,6185,1696,6186,6187,6188,6189,6190,6191,6192,6193,6194,
6195,6196,6197,6198,6199,6200,6201,6202,6203,6204,6205,6206,6207,6208,6209,6210,
6211,6212,6213,6214,6215,6216,6217,6218,6219,1697,6220,6221,6222,6223,6224,6225,
6226,6227,6228,6229,6230,6231,6232,6233,6234,6235,6236,6237,6238,6239,6240,6241,
6242,6243,6244,6245,6246,6247,6248,6249,6250,6251,6252,6253,1698,6254,6255,6256,
6257,6258,6259,6260,6261,6262,6263,1200,6264,6265,6266,6267,6268,6269,6270,6271, #1024
6272,6273,6274,6275,6276,6277,6278,6279,6280,6281,6282,6283,6284,6285,6286,6287,
6288,6289,6290,6291,6292,6293,6294,6295,6296,6297,6298,6299,6300,6301,6302,1699,
6303,6304,1700,6305,6306,6307,6308,6309,6310,6311,6312,6313,6314,6315,6316,6317,
6318,6319,6320,6321,6322,6323,6324,6325,6326,6327,6328,6329,6330,6331,6332,6333,
6334,6335,6336,6337,6338,6339,1701,6340,6341,6342,6343,6344,1387,6345,6346,6347,
6348,6349,6350,6351,6352,6353,6354,6355,6356,6357,6358,6359,6360,6361,6362,6363,
6364,6365,6366,6367,6368,6369,6370,6371,6372,6373,6374,6375,6376,6377,6378,6379,
6380,6381,6382,6383,6384,6385,6386,6387,6388,6389,6390,6391,6392,6393,6394,6395,
6396,6397,6398,6399,6400,6401,6402,6403,6404,6405,6406,6407,6408,6409,6410,6411,
6412,6413,1702,6414,6415,6416,6417,6418,6419,6420,6421,6422,1703,6423,6424,6425,
6426,6427,6428,6429,6430,6431,6432,6433,6434,6435,6436,6437,6438,1704,6439,6440,
6441,6442,6443,6444,6445,6446,6447,6448,6449,6450,6451,6452,6453,6454,6455,6456,
6457,6458,6459,6460,6461,6462,6463,6464,6465,6466,6467,6468,6469,6470,6471,6472,
6473,6474,6475,6476,6477,6478,6479,6480,6481,6482,6483,6484,6485,6486,6487,6488,
6489,6490,6491,6492,6493,6494,6495,6496,6497,6498,6499,6500,6501,6502,6503,1266,
6504,6505,6506,6507,6508,6509,6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,
6520,6521,6522,6523,6524,6525,6526,6527,6528,6529,6530,6531,6532,6533,6534,6535,
6536,6537,6538,6539,6540,6541,6542,6543,6544,6545,6546,6547,6548,6549,6550,6551,
1705,1706,6552,6553,6554,6555,6556,6557,6558,6559,6560,6561,6562,6563,6564,6565,
6566,6567,6568,6569,6570,6571,6572,6573,6574,6575,6576,6577,6578,6579,6580,6581,
6582,6583,6584,6585,6586,6587,6588,6589,6590,6591,6592,6593,6594,6595,6596,6597,
6598,6599,6600,6601,6602,6603,6604,6605,6606,6607,6608,6609,6610,6611,6612,6613,
6614,6615,6616,6617,6618,6619,6620,6621,6622,6623,6624,6625,6626,6627,6628,6629,
6630,6631,6632,6633,6634,6635,6636,6637,1388,6638,6639,6640,6641,6642,6643,6644,
1707,6645,6646,6647,6648,6649,6650,6651,6652,6653,6654,6655,6656,6657,6658,6659,
6660,6661,6662,6663,1708,6664,6665,6666,6667,6668,6669,6670,6671,6672,6673,6674,
1201,6675,6676,6677,6678,6679,6680,6681,6682,6683,6684,6685,6686,6687,6688,6689,
6690,6691,6692,6693,6694,6695,6696,6697,6698,6699,6700,6701,6702,6703,6704,6705,
6706,6707,6708,6709,6710,6711,6712,6713,6714,6715,6716,6717,6718,6719,6720,6721,
6722,6723,6724,6725,1389,6726,6727,6728,6729,6730,6731,6732,6733,6734,6735,6736,
1390,1709,6737,6738,6739,6740,6741,6742,1710,6743,6744,6745,6746,1391,6747,6748,
6749,6750,6751,6752,6753,6754,6755,6756,6757,1392,6758,6759,6760,6761,6762,6763,
6764,6765,6766,6767,6768,6769,6770,6771,6772,6773,6774,6775,6776,6777,6778,6779,
6780,1202,6781,6782,6783,6784,6785,6786,6787,6788,6789,6790,6791,6792,6793,6794,
6795,6796,6797,6798,6799,6800,6801,6802,6803,6804,6805,6806,6807,6808,6809,1711,
6810,6811,6812,6813,6814,6815,6816,6817,6818,6819,6820,6821,6822,6823,6824,6825,
6826,6827,6828,6829,6830,6831,6832,6833,6834,6835,6836,1393,6837,6838,6839,6840,
6841,6842,6843,6844,6845,6846,6847,6848,6849,6850,6851,6852,6853,6854,6855,6856,
6857,6858,6859,6860,6861,6862,6863,6864,6865,6866,6867,6868,6869,6870,6871,6872,
6873,6874,6875,6876,6877,6878,6879,6880,6881,6882,6883,6884,6885,6886,6887,6888,
6889,6890,6891,6892,6893,6894,6895,6896,6897,6898,6899,6900,6901,6902,1712,6903,
6904,6905,6906,6907,6908,6909,6910,1713,6911,6912,6913,6914,6915,6916,6917,6918,
6919,6920,6921,6922,6923,6924,6925,6926,6927,6928,6929,6930,6931,6932,6933,6934,
6935,6936,6937,6938,6939,6940,6941,6942,6943,6944,6945,6946,6947,6948,6949,6950,
6951,6952,6953,6954,6955,6956,6957,6958,6959,6960,6961,6962,6963,6964,6965,6966,
6967,6968,6969,6970,6971,6972,6973,6974,1714,6975,6976,6977,6978,6979,6980,6981,
6982,6983,6984,6985,6986,6987,6988,1394,6989,6990,6991,6992,6993,6994,6995,6996,
6997,6998,6999,7000,1715,7001,7002,7003,7004,7005,7006,7007,7008,7009,7010,7011,
7012,7013,7014,7015,7016,7017,7018,7019,7020,7021,7022,7023,7024,7025,7026,7027,
7028,1716,7029,7030,7031,7032,7033,7034,7035,7036,7037,7038,7039,7040,7041,7042,
7043,7044,7045,7046,7047,7048,7049,7050,7051,7052,7053,7054,7055,7056,7057,7058,
7059,7060,7061,7062,7063,7064,7065,7066,7067,7068,7069,7070,7071,7072,7073,7074,
7075,7076,7077,7078,7079,7080,7081,7082,7083,7084,7085,7086,7087,7088,7089,7090,
7091,7092,7093,7094,7095,7096,7097,7098,7099,7100,7101,7102,7103,7104,7105,7106,
7107,7108,7109,7110,7111,7112,7113,7114,7115,7116,7117,7118,7119,7120,7121,7122,
7123,7124,7125,7126,7127,7128,7129,7130,7131,7132,7133,7134,7135,7136,7137,7138,
7139,7140,7141,7142,7143,7144,7145,7146,7147,7148,7149,7150,7151,7152,7153,7154,
7155,7156,7157,7158,7159,7160,7161,7162,7163,7164,7165,7166,7167,7168,7169,7170,
7171,7172,7173,7174,7175,7176,7177,7178,7179,7180,7181,7182,7183,7184,7185,7186,
7187,7188,7189,7190,7191,7192,7193,7194,7195,7196,7197,7198,7199,7200,7201,7202,
7203,7204,7205,7206,7207,1395,7208,7209,7210,7211,7212,7213,1717,7214,7215,7216,
7217,7218,7219,7220,7221,7222,7223,7224,7225,7226,7227,7228,7229,7230,7231,7232,
7233,7234,7235,7236,7237,7238,7239,7240,7241,7242,7243,7244,7245,7246,7247,7248,
7249,7250,7251,7252,7253,7254,7255,7256,7257,7258,7259,7260,7261,7262,7263,7264,
7265,7266,7267,7268,7269,7270,7271,7272,7273,7274,7275,7276,7277,7278,7279,7280,
7281,7282,7283,7284,7285,7286,7287,7288,7289,7290,7291,7292,7293,7294,7295,7296,
7297,7298,7299,7300,7301,7302,7303,7304,7305,7306,7307,7308,7309,7310,7311,7312,
7313,1718,7314,7315,7316,7317,7318,7319,7320,7321,7322,7323,7324,7325,7326,7327,
7328,7329,7330,7331,7332,7333,7334,7335,7336,7337,7338,7339,7340,7341,7342,7343,
7344,7345,7346,7347,7348,7349,7350,7351,7352,7353,7354,7355,7356,7357,7358,7359,
7360,7361,7362,7363,7364,7365,7366,7367,7368,7369,7370,7371,7372,7373,7374,7375,
7376,7377,7378,7379,7380,7381,7382,7383,7384,7385,7386,7387,7388,7389,7390,7391,
7392,7393,7394,7395,7396,7397,7398,7399,7400,7401,7402,7403,7404,7405,7406,7407,
7408,7409,7410,7411,7412,7413,7414,7415,7416,7417,7418,7419,7420,7421,7422,7423,
7424,7425,7426,7427,7428,7429,7430,7431,7432,7433,7434,7435,7436,7437,7438,7439,
7440,7441,7442,7443,7444,7445,7446,7447,7448,7449,7450,7451,7452,7453,7454,7455,
7456,7457,7458,7459,7460,7461,7462,7463,7464,7465,7466,7467,7468,7469,7470,7471,
7472,7473,7474,7475,7476,7477,7478,7479,7480,7481,7482,7483,7484,7485,7486,7487,
7488,7489,7490,7491,7492,7493,7494,7495,7496,7497,7498,7499,7500,7501,7502,7503,
7504,7505,7506,7507,7508,7509,7510,7511,7512,7513,7514,7515,7516,7517,7518,7519,
7520,7521,7522,7523,7524,7525,7526,7527,7528,7529,7530,7531,7532,7533,7534,7535,
7536,7537,7538,7539,7540,7541,7542,7543,7544,7545,7546,7547,7548,7549,7550,7551,
7552,7553,7554,7555,7556,7557,7558,7559,7560,7561,7562,7563,7564,7565,7566,7567,
7568,7569,7570,7571,7572,7573,7574,7575,7576,7577,7578,7579,7580,7581,7582,7583,
7584,7585,7586,7587,7588,7589,7590,7591,7592,7593,7594,7595,7596,7597,7598,7599,
7600,7601,7602,7603,7604,7605,7606,7607,7608,7609,7610,7611,7612,7613,7614,7615,
7616,7617,7618,7619,7620,7621,7622,7623,7624,7625,7626,7627,7628,7629,7630,7631,
7632,7633,7634,7635,7636,7637,7638,7639,7640,7641,7642,7643,7644,7645,7646,7647,
7648,7649,7650,7651,7652,7653,7654,7655,7656,7657,7658,7659,7660,7661,7662,7663,
7664,7665,7666,7667,7668,7669,7670,7671,7672,7673,7674,7675,7676,7677,7678,7679,
7680,7681,7682,7683,7684,7685,7686,7687,7688,7689,7690,7691,7692,7693,7694,7695,
7696,7697,7698,7699,7700,7701,7702,7703,7704,7705,7706,7707,7708,7709,7710,7711,
7712,7713,7714,7715,7716,7717,7718,7719,7720,7721,7722,7723,7724,7725,7726,7727,
7728,7729,7730,7731,7732,7733,7734,7735,7736,7737,7738,7739,7740,7741,7742,7743,
7744,7745,7746,7747,7748,7749,7750,7751,7752,7753,7754,7755,7756,7757,7758,7759,
7760,7761,7762,7763,7764,7765,7766,7767,7768,7769,7770,7771,7772,7773,7774,7775,
7776,7777,7778,7779,7780,7781,7782,7783,7784,7785,7786,7787,7788,7789,7790,7791,
7792,7793,7794,7795,7796,7797,7798,7799,7800,7801,7802,7803,7804,7805,7806,7807,
7808,7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,
7824,7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,
7840,7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,
7856,7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,
7872,7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,
7888,7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,
7904,7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,
7920,7921,7922,7923,7924,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935,
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951,
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967,
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983,
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999,
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015,
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031,
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047,
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063,
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079,
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095,
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111,
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127,
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,
8272,8273,8274,8275,8276,8277,8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,
8288,8289,8290,8291,8292,8293,8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,
8304,8305,8306,8307,8308,8309,8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,
8320,8321,8322,8323,8324,8325,8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,
8336,8337,8338,8339,8340,8341,8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,
8352,8353,8354,8355,8356,8357,8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,
8368,8369,8370,8371,8372,8373,8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,
8384,8385,8386,8387,8388,8389,8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,
8400,8401,8402,8403,8404,8405,8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,
8416,8417,8418,8419,8420,8421,8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,
8432,8433,8434,8435,8436,8437,8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,
8448,8449,8450,8451,8452,8453,8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,
8464,8465,8466,8467,8468,8469,8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,
8480,8481,8482,8483,8484,8485,8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,
8496,8497,8498,8499,8500,8501,8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,
8512,8513,8514,8515,8516,8517,8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,
8528,8529,8530,8531,8532,8533,8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,
8544,8545,8546,8547,8548,8549,8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,
8560,8561,8562,8563,8564,8565,8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,
8576,8577,8578,8579,8580,8581,8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,
8592,8593,8594,8595,8596,8597,8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,
8608,8609,8610,8611,8612,8613,8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,
8624,8625,8626,8627,8628,8629,8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,
8640,8641,8642,8643,8644,8645,8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,
8656,8657,8658,8659,8660,8661,8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,
8672,8673,8674,8675,8676,8677,8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,
8688,8689,8690,8691,8692,8693,8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,
8704,8705,8706,8707,8708,8709,8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,
8720,8721,8722,8723,8724,8725,8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,
8736,8737,8738,8739,8740,8741)
# flake8: noqa
| apache-2.0 |
xwolf12/django | tests/httpwrappers/tests.py | 63 | 27550 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import SuspiciousOperation
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError, HttpResponse, HttpResponseNotAllowed,
HttpResponseNotModified, HttpResponsePermanentRedirect,
HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
StreamingHttpResponse, parse_cookie,
)
from django.test import SimpleTestCase
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import force_text, smart_str
from django.utils.functional import lazy
# Lazily-evaluated text factory: builds a lazy str whose force_text() runs
# only on access. Used by tests exercising lazy-string handling in
# HttpResponse content and redirect URLs.
lazystr = lazy(force_text, six.text_type)
class QueryDictTests(unittest.TestCase):
    """Unit tests for django.http.QueryDict.

    Covers immutability enforcement, mutable-copy behavior, multi-valued
    keys, urlencoding, pickling, and non-default character encodings.
    Several assertions are guarded by ``six.PY2`` because ``has_key`` and
    bytestring input only exist on Python 2.
    """

    def test_create_with_no_args(self):
        # A QueryDict built with no arguments equals one built from ''.
        self.assertEqual(QueryDict(), QueryDict(str('')))

    def test_missing_key(self):
        # Subscripting a missing key raises KeyError, like a plain dict.
        q = QueryDict()
        self.assertRaises(KeyError, q.__getitem__, 'foo')

    def test_immutability(self):
        # Every mutating operation on a default (immutable) QueryDict
        # raises AttributeError.
        q = QueryDict()
        self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
        self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
        self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
        self.assertRaises(AttributeError, q.pop, 'foo')
        self.assertRaises(AttributeError, q.popitem)
        self.assertRaises(AttributeError, q.clear)

    def test_immutable_get_with_default(self):
        # get() with a default is a read and therefore allowed.
        q = QueryDict()
        self.assertEqual(q.get('foo', 'default'), 'default')

    def test_immutable_basic_operations(self):
        # All read-only operations work on an empty immutable QueryDict.
        q = QueryDict()
        self.assertEqual(q.getlist('foo'), [])
        if six.PY2:
            self.assertEqual(q.has_key('foo'), False)
        self.assertEqual('foo' in q, False)
        self.assertEqual(list(six.iteritems(q)), [])
        self.assertEqual(list(six.iterlists(q)), [])
        self.assertEqual(list(six.iterkeys(q)), [])
        self.assertEqual(list(six.itervalues(q)), [])
        self.assertEqual(len(q), 0)
        self.assertEqual(q.urlencode(), '')

    def test_single_key_value(self):
        """Test QueryDict with one key/value pair"""
        q = QueryDict(str('foo=bar'))
        self.assertEqual(q['foo'], 'bar')
        self.assertRaises(KeyError, q.__getitem__, 'bar')
        self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
        self.assertEqual(q.get('foo', 'default'), 'bar')
        self.assertEqual(q.get('bar', 'default'), 'default')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertEqual(q.getlist('bar'), [])
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
        self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
        if six.PY2:
            self.assertTrue(q.has_key('foo'))
        self.assertIn('foo', q)
        if six.PY2:
            self.assertFalse(q.has_key('bar'))
        self.assertNotIn('bar', q)
        self.assertEqual(list(six.iteritems(q)), [('foo', 'bar')])
        self.assertEqual(list(six.iterlists(q)), [('foo', ['bar'])])
        self.assertEqual(list(six.iterkeys(q)), ['foo'])
        self.assertEqual(list(six.itervalues(q)), ['bar'])
        self.assertEqual(len(q), 1)
        self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
        self.assertRaises(AttributeError, q.pop, 'foo')
        self.assertRaises(AttributeError, q.popitem)
        self.assertRaises(AttributeError, q.clear)
        self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
        self.assertEqual(q.urlencode(), 'foo=bar')

    def test_urlencode(self):
        # urlencode() percent-encodes everything by default; the `safe`
        # argument exempts the given characters ('/' here). Second case
        # checks a non-ASCII value ('\xeb' = e-umlaut) encodes as UTF-8.
        q = QueryDict(mutable=True)
        q['next'] = '/a&b/'
        self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
        q = QueryDict(mutable=True)
        q['next'] = '/t\xebst&key/'
        self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
        self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')

    def test_mutable_copy(self):
        """A copy of a QueryDict is mutable."""
        q = QueryDict().copy()
        self.assertRaises(KeyError, q.__getitem__, "foo")
        q['name'] = 'john'
        self.assertEqual(q['name'], 'john')

    def test_mutable_delete(self):
        # del removes the key from a mutable QueryDict.
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        del q['name']
        self.assertNotIn('name', q)

    def test_basic_mutable_operations(self):
        # Exercises the full mutable API in sequence; note __getitem__ and
        # get() return the *last* value of a multi-valued key, and pop()
        # returns the whole list.
        q = QueryDict(mutable=True)
        q['name'] = 'john'
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.get('name', 'default'), 'john')
        self.assertEqual(q.getlist('name'), ['john'])
        self.assertEqual(q.getlist('foo'), [])
        q.setlist('foo', ['bar', 'baz'])
        self.assertEqual(q.get('foo', 'default'), 'baz')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
        q.appendlist('foo', 'another')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
        self.assertEqual(q['foo'], 'another')
        if six.PY2:
            self.assertTrue(q.has_key('foo'))
        self.assertIn('foo', q)
        self.assertListEqual(sorted(six.iteritems(q)),
                             [('foo', 'another'), ('name', 'john')])
        self.assertListEqual(sorted(six.iterlists(q)),
                             [('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
        self.assertListEqual(sorted(six.iterkeys(q)),
                             ['foo', 'name'])
        self.assertListEqual(sorted(six.itervalues(q)),
                             ['another', 'john'])
        q.update({'foo': 'hello'})
        self.assertEqual(q['foo'], 'hello')
        self.assertEqual(q.get('foo', 'not available'), 'hello')
        self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
        self.assertEqual(q.pop('foo', 'not there'), 'not there')
        self.assertEqual(q.get('foo', 'not there'), 'not there')
        self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
        self.assertEqual(q['foo'], 'bar')
        self.assertEqual(q.getlist('foo'), ['bar'])
        self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
        q.clear()
        self.assertEqual(len(q), 0)

    def test_multiple_keys(self):
        """Test QueryDict with two key/value pairs with same keys."""
        q = QueryDict(str('vote=yes&vote=no'))
        self.assertEqual(q['vote'], 'no')
        self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
        self.assertEqual(q.get('vote', 'default'), 'no')
        self.assertEqual(q.get('foo', 'default'), 'default')
        self.assertEqual(q.getlist('vote'), ['yes', 'no'])
        self.assertEqual(q.getlist('foo'), [])
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
        self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
        self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
        if six.PY2:
            self.assertEqual(q.has_key('vote'), True)
        self.assertEqual('vote' in q, True)
        if six.PY2:
            self.assertEqual(q.has_key('foo'), False)
        self.assertEqual('foo' in q, False)
        self.assertEqual(list(six.iteritems(q)), [('vote', 'no')])
        self.assertEqual(list(six.iterlists(q)), [('vote', ['yes', 'no'])])
        self.assertEqual(list(six.iterkeys(q)), ['vote'])
        self.assertEqual(list(six.itervalues(q)), ['no'])
        self.assertEqual(len(q), 1)
        self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
        self.assertRaises(AttributeError, q.pop, 'foo')
        self.assertRaises(AttributeError, q.popitem)
        self.assertRaises(AttributeError, q.clear)
        self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
        self.assertRaises(AttributeError, q.__delitem__, 'vote')

    # Defined conditionally: has_key/bytestring input only exist on Py2.
    if six.PY2:
        def test_invalid_input_encoding(self):
            """
            QueryDicts must be able to handle invalid input encoding (in this
            case, bad UTF-8 encoding), falling back to ISO-8859-1 decoding.
            This test doesn't apply under Python 3 because the URL is a string
            and not a bytestring.
            """
            q = QueryDict(str(b'foo=bar&foo=\xff'))
            self.assertEqual(q['foo'], '\xff')
            self.assertEqual(q.getlist('foo'), ['bar', '\xff'])

    def test_pickle(self):
        # A pickle (protocol 2) round trip preserves equality for empty,
        # single-valued, and multi-valued QueryDicts.
        q = QueryDict()
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q == q1, True)
        q = QueryDict(str('a=b&c=d'))
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q == q1, True)
        q = QueryDict(str('a=b&c=d&a=1'))
        q1 = pickle.loads(pickle.dumps(q, 2))
        self.assertEqual(q == q1, True)

    def test_update_from_querydict(self):
        """Regression test for #8278: QueryDict.update(QueryDict)"""
        # update() must append the other dict's values, not replace them.
        x = QueryDict(str("a=1&a=2"), mutable=True)
        y = QueryDict(str("a=3&a=4"))
        x.update(y)
        self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])

    def test_non_default_encoding(self):
        """#13572 - QueryDict with a non-default encoding"""
        # %A4 is the euro sign in iso-8859-15; the encoding must survive
        # copy(), copy.copy(), and copy.deepcopy().
        q = QueryDict(str('cur=%A4'), encoding='iso-8859-15')
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        q = q.copy()
        self.assertEqual(q.encoding, 'iso-8859-15')
        self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
        self.assertEqual(q.urlencode(), 'cur=%A4')
        self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
        self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
class HttpResponseTests(unittest.TestCase):
    """Unit tests for django.http.HttpResponse.

    Covers header-value type coercion and validation, content coercion
    (non-strings, iterators, lazy strings), the file-like write interface,
    and rejection of unsafe redirect schemes.
    """

    def test_headers_type(self):
        # Header values must come out as native str regardless of whether
        # they went in as unicode or bytes.
        r = HttpResponse()
        # The following tests explicitly test types in addition to values
        # because in Python 2 u'foo' == b'foo'.
        # ASCII unicode or bytes values are converted to native strings.
        r['key'] = 'test'
        self.assertEqual(r['key'], str('test'))
        self.assertIsInstance(r['key'], str)
        r['key'] = 'test'.encode('ascii')
        self.assertEqual(r['key'], str('test'))
        self.assertIsInstance(r['key'], str)
        self.assertIn(b'test', r.serialize_headers())
        # Latin-1 unicode or bytes values are also converted to native strings.
        r['key'] = 'café'
        self.assertEqual(r['key'], smart_str('café', 'latin-1'))
        self.assertIsInstance(r['key'], str)
        r['key'] = 'café'.encode('latin-1')
        self.assertEqual(r['key'], smart_str('café', 'latin-1'))
        self.assertIsInstance(r['key'], str)
        self.assertIn('café'.encode('latin-1'), r.serialize_headers())
        # Other unicode values are MIME-encoded (there's no way to pass them as bytes).
        r['key'] = '†'
        self.assertEqual(r['key'], str('=?utf-8?b?4oCg?='))
        self.assertIsInstance(r['key'], str)
        self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())
        # The response also converts unicode or bytes keys to strings, but requires
        # them to contain ASCII
        r = HttpResponse()
        del r['Content-Type']
        r['foo'] = 'bar'
        l = list(r.items())
        self.assertEqual(len(l), 1)
        self.assertEqual(l[0], ('foo', 'bar'))
        self.assertIsInstance(l[0][0], str)
        r = HttpResponse()
        del r['Content-Type']
        r[b'foo'] = 'bar'
        l = list(r.items())
        self.assertEqual(len(l), 1)
        self.assertEqual(l[0], ('foo', 'bar'))
        self.assertIsInstance(l[0][0], str)
        r = HttpResponse()
        # Non-ASCII header *names* are rejected outright.
        self.assertRaises(UnicodeError, r.__setitem__, 'føø', 'bar')
        self.assertRaises(UnicodeError, r.__setitem__, 'føø'.encode('utf-8'), 'bar')

    def test_long_line(self):
        # Bug #20889: long lines trigger newlines to be added to headers
        # (which is not allowed due to bug #10188)
        h = HttpResponse()
        f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
        f = f.decode('utf-8')
        h['Content-Disposition'] = 'attachment; filename="%s"' % f
        # This one is triggering http://bugs.python.org/issue20747, that is Python
        # will itself insert a newline in the header
        # NOTE(review): the 'attachement' misspelling below is harmless —
        # only the header-encoding behavior is under test, not the value.
        h['Content-Disposition'] = 'attachement; filename="EdelRot_Blu\u0308te (3)-0.JPG"'

    def test_newlines_in_headers(self):
        # Bug #10188: Do not allow newlines in headers (CR or LF)
        r = HttpResponse()
        self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test')
        self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test')

    def test_dict_behavior(self):
        """
        Test for bug #14020: Make HttpResponse.get work like dict.get
        """
        r = HttpResponse()
        self.assertEqual(r.get('test'), None)

    def test_non_string_content(self):
        # Bug 16494: HttpResponse should behave consistently with non-strings
        r = HttpResponse(12345)
        self.assertEqual(r.content, b'12345')
        # test content via property
        r = HttpResponse()
        r.content = 12345
        self.assertEqual(r.content, b'12345')

    def test_iter_content(self):
        # Iterable content is joined and coerced to bytes; .content stays
        # stable across repeated accesses and after iterating the response.
        r = HttpResponse(['abc', 'def', 'ghi'])
        self.assertEqual(r.content, b'abcdefghi')
        # test iter content via property
        r = HttpResponse()
        r.content = ['idan', 'alex', 'jacob']
        self.assertEqual(r.content, b'idanalexjacob')
        r = HttpResponse()
        r.content = [1, 2, 3]
        self.assertEqual(r.content, b'123')
        # test odd inputs
        r = HttpResponse()
        r.content = ['1', '2', 3, '\u079e']
        # '\xde\x9e' == unichr(1950).encode('utf-8')
        self.assertEqual(r.content, b'123\xde\x9e')
        # .content can safely be accessed multiple times.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, r.content)
        self.assertEqual(r.content, b'helloworld')
        # __iter__ can safely be called multiple times (#20187).
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(b''.join(r), b'helloworld')
        # Accessing .content still works.
        self.assertEqual(r.content, b'helloworld')
        # Accessing .content also works if the response was iterated first.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(b''.join(r), b'helloworld')
        self.assertEqual(r.content, b'helloworld')
        # Additional content can be written to the response.
        r = HttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.content, b'helloworld')
        r.write('!')
        self.assertEqual(r.content, b'helloworld!')

    def test_iterator_isnt_rewound(self):
        # Regression test for #13222
        r = HttpResponse('abc')
        i = iter(r)
        self.assertEqual(list(i), [b'abc'])
        self.assertEqual(list(i), [])

    def test_lazy_content(self):
        # Lazy strings are resolved when content is accessed.
        r = HttpResponse(lazystr('helloworld'))
        self.assertEqual(r.content, b'helloworld')

    def test_file_interface(self):
        # HttpResponse exposes a file-like API: write() appends (encoding
        # unicode as UTF-8) and tell() reports the byte position.
        r = HttpResponse()
        r.write(b"hello")
        self.assertEqual(r.tell(), 5)
        r.write("привет")
        self.assertEqual(r.tell(), 17)
        r = HttpResponse(['abc'])
        r.write('def')
        self.assertEqual(r.tell(), 6)
        self.assertEqual(r.content, b'abcdef')
        # with Content-Encoding header
        r = HttpResponse()
        r['Content-Encoding'] = 'winning'
        r.write(b'abc')
        r.write(b'def')
        self.assertEqual(r.content, b'abcdef')

    def test_stream_interface(self):
        # Stream-style API: getvalue(), writable(), writelines().
        r = HttpResponse('asdf')
        self.assertEqual(r.getvalue(), b'asdf')
        r = HttpResponse()
        self.assertEqual(r.writable(), True)
        r.writelines(['foo\n', 'bar\n', 'baz\n'])
        self.assertEqual(r.content, b'foo\nbar\nbaz\n')

    def test_unsafe_redirect(self):
        # Redirects to disallowed schemes raise SuspiciousOperation for
        # both temporary and permanent redirect classes.
        bad_urls = [
            'data:text/html,<script>window.alert("xss")</script>',
            'mailto:test@example.com',
            'file:///etc/passwd',
        ]
        for url in bad_urls:
            self.assertRaises(SuspiciousOperation,
                              HttpResponseRedirect, url)
            self.assertRaises(SuspiciousOperation,
                              HttpResponsePermanentRedirect, url)
class HttpResponseSubclassesTests(SimpleTestCase):
    """Behavior of the specialized HttpResponse subclasses.

    Checks status codes, repr() output, the redirect ``url`` attribute,
    and that the standard HttpResponse constructor arguments still work.
    """

    def test_redirect(self):
        # Default status code for a temporary redirect.
        resp = HttpResponseRedirect('/redirected/')
        self.assertEqual(resp.status_code, 302)
        # The generic HttpResponse init args remain usable.
        resp = HttpResponseRedirect(
            '/redirected/',
            content='The resource has temporarily moved',
            content_type='text/html',
        )
        self.assertContains(resp, 'The resource has temporarily moved', status_code=302)
        # The url attribute mirrors the Location header.
        self.assertEqual(resp.url, resp['Location'])

    def test_redirect_lazy(self):
        """Make sure HttpResponseRedirect works with lazy strings."""
        resp = HttpResponseRedirect(lazystr('/redirected/'))
        self.assertEqual(resp.url, '/redirected/')

    def test_redirect_repr(self):
        # repr() shows status, content type, and target URL.
        expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="/redirected/">'
        self.assertEqual(repr(HttpResponseRedirect('/redirected/')), expected)

    def test_not_modified(self):
        resp = HttpResponseNotModified()
        self.assertEqual(resp.status_code, 304)
        # A 304 carries no body, so assigning content must fail and no
        # content-type header may be present.
        with self.assertRaises(AttributeError):
            resp.content = "Hello dear"
        self.assertNotIn('content-type', resp)

    def test_not_allowed(self):
        resp = HttpResponseNotAllowed(['GET'])
        self.assertEqual(resp.status_code, 405)
        # The generic HttpResponse init args remain usable.
        resp = HttpResponseNotAllowed(
            ['GET'],
            content='Only the GET method is allowed',
            content_type='text/html',
        )
        self.assertContains(resp, 'Only the GET method is allowed', status_code=405)

    def test_not_allowed_repr(self):
        # repr() lists the permitted methods alongside status and type.
        resp = HttpResponseNotAllowed(['GET', 'OPTIONS'], content_type='text/plain')
        expected = '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
        self.assertEqual(repr(resp), expected)
class JsonResponseTests(SimpleTestCase):
    """Tests for django.http.JsonResponse serialization behavior."""

    @staticmethod
    def _payload(resp):
        # Decode a response body back into a Python object for comparison.
        return json.loads(resp.content.decode())

    def test_json_response_non_ascii(self):
        # Non-ASCII data round-trips through the response body.
        payload = {'key': 'łóżko'}
        self.assertEqual(self._payload(JsonResponse(payload)), payload)

    def test_json_response_raises_type_error_with_default_setting(self):
        # Non-dict objects are rejected unless safe=False is passed.
        msg = (
            'In order to allow non-dict objects to be serialized set the '
            'safe parameter to False'
        )
        with self.assertRaisesMessage(TypeError, msg):
            JsonResponse([1, 2, 3])

    def test_json_response_text(self):
        # A bare string serializes when safe=False.
        self.assertEqual(self._payload(JsonResponse('foobar', safe=False)), 'foobar')

    def test_json_response_list(self):
        # A list serializes when safe=False.
        self.assertEqual(
            self._payload(JsonResponse(['foo', 'bar'], safe=False)),
            ['foo', 'bar'],
        )

    def test_json_response_uuid(self):
        # UUIDs are serialized to their string form by DjangoJSONEncoder.
        value = uuid.uuid4()
        self.assertEqual(self._payload(JsonResponse(value, safe=False)), str(value))

    def test_json_response_custom_encoder(self):
        # A caller-supplied encoder class fully controls the output.
        class FixedOutputEncoder(DjangoJSONEncoder):
            def encode(self, o):
                return json.dumps({'foo': 'bar'})

        resp = JsonResponse({}, encoder=FixedOutputEncoder)
        self.assertEqual(self._payload(resp), {'foo': 'bar'})
class StreamingHttpResponseTests(SimpleTestCase):
    """Behavioral checks for StreamingHttpResponse iteration and API."""

    def test_streaming_response(self):
        r = StreamingHttpResponse(iter(['hello', 'world']))

        # iterating over the response itself yields bytestring chunks.
        chunks = list(r)
        self.assertEqual(chunks, [b'hello', b'world'])
        for chunk in chunks:
            self.assertIsInstance(chunk, six.binary_type)

        # and the response can only be iterated once.
        self.assertEqual(list(r), [])

        # even when a sequence that can be iterated many times, like a list,
        # is given as content.
        r = StreamingHttpResponse(['abc', 'def'])
        self.assertEqual(list(r), [b'abc', b'def'])
        self.assertEqual(list(r), [])

        # iterating over Unicode strings still yields bytestring chunks.
        r.streaming_content = iter(['hello', 'café'])
        chunks = list(r)
        # '\xc3\xa9' == unichr(233).encode('utf-8')
        self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
        for chunk in chunks:
            self.assertIsInstance(chunk, six.binary_type)

        # streaming responses don't have a `content` attribute.
        self.assertFalse(hasattr(r, 'content'))

        # and you can't accidentally assign to a `content` attribute.
        with self.assertRaises(AttributeError):
            r.content = 'xyz'

        # but they do have a `streaming_content` attribute.
        self.assertTrue(hasattr(r, 'streaming_content'))

        # that exists so we can check if a response is streaming, and wrap or
        # replace the content iterator.
        r.streaming_content = iter(['abc', 'def'])
        r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
        self.assertEqual(list(r), [b'ABC', b'DEF'])

        # coercing a streaming response to bytes doesn't return a complete HTTP
        # message like a regular response does. it only gives us the headers.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(
            six.binary_type(r), b'Content-Type: text/html; charset=utf-8')

        # and this won't consume its content.
        self.assertEqual(list(r), [b'hello', b'world'])

        # additional content cannot be written to the response.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        with self.assertRaises(Exception):
            r.write('!')

        # and we can't tell the current position.
        with self.assertRaises(Exception):
            r.tell()

        # getvalue() drains the stream and returns the concatenated bytes.
        r = StreamingHttpResponse(iter(['hello', 'world']))
        self.assertEqual(r.getvalue(), b'helloworld')
class FileCloseTests(SimpleTestCase):
    """Verify that file objects used as response content are closed with the
    response, not implicitly during iteration."""

    def setUp(self):
        # Disable the request_finished signal during this test
        # to avoid interfering with the database connection.
        request_finished.disconnect(close_old_connections)

    def tearDown(self):
        # Restore the handler disconnected in setUp().
        request_finished.connect(close_old_connections)

    def test_response(self):
        filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')

        # file isn't closed until we close the response.
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)

        # don't automatically close file when we finish iterating the response.
        file1 = open(filename)
        r = HttpResponse(file1)
        self.assertFalse(file1.closed)
        list(r)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)

        # when multiple file are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = HttpResponse(file1)
        r.content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)

    def test_streaming_response(self):
        filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')

        # file isn't closed until we close the response.
        file1 = open(filename)
        r = StreamingHttpResponse(file1)
        self.assertFalse(file1.closed)
        r.close()
        self.assertTrue(file1.closed)

        # when multiple file are assigned as content, make sure they are all
        # closed with the response.
        file1 = open(filename)
        file2 = open(filename)
        r = StreamingHttpResponse(file1)
        r.streaming_content = file2
        self.assertFalse(file1.closed)
        self.assertFalse(file2.closed)
        r.close()
        self.assertTrue(file1.closed)
        self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
    """Round-trip, escaping, and edge-case tests for SimpleCookie and
    parse_cookie."""

    def test_encode(self):
        """
        Test that we don't output tricky characters in encoded value
        """
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        self.assertNotIn(";", c.output().rstrip(';'))  # IE compat
        self.assertNotIn(",", c.output().rstrip(';'))  # Safari compat

    def test_decode(self):
        """
        Test that we can still preserve semi-colons and commas
        """
        c = SimpleCookie()
        c['test'] = "An,awkward;value"
        c2 = SimpleCookie()
        # [12:] strips the leading 'Set-Cookie: ' prefix from output().
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)

    def test_decode_2(self):
        """
        Test that we haven't broken normal encoding
        """
        c = SimpleCookie()
        c['test'] = b"\xf0"
        c2 = SimpleCookie()
        # [12:] strips the leading 'Set-Cookie: ' prefix from output().
        c2.load(c.output()[12:])
        self.assertEqual(c['test'].value, c2['test'].value)

    def test_nonstandard_keys(self):
        """
        Test that a single non-standard cookie name doesn't affect all cookies. Ticket #13007.
        """
        self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes').keys())

    def test_repeated_nonstandard_keys(self):
        """
        Test that a repeated non-standard name doesn't affect all cookies. Ticket #15852
        """
        self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes').keys())

    def test_httponly_after_load(self):
        """
        Test that we can use httponly attribute on cookies that we load
        """
        c = SimpleCookie()
        c.load("name=val")
        c['name']['httponly'] = True
        self.assertTrue(c['name']['httponly'])

    def test_load_dict(self):
        # load() also accepts a plain dict, not just a header string.
        c = SimpleCookie()
        c.load({'name': 'val'})
        self.assertEqual(c['name'].value, 'val')

    @unittest.skipUnless(six.PY2, "PY3 throws an exception on invalid cookie keys.")
    def test_bad_cookie(self):
        """
        Regression test for #18403
        """
        r = HttpResponse()
        r.set_cookie("a:.b/", 1)
        self.assertEqual(len(r.cookies.bad_cookies), 1)

    def test_pickle(self):
        # A loaded SimpleCookie must survive pickling at every protocol level.
        rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
        expected_output = 'Set-Cookie: %s' % rawdata

        C = SimpleCookie()
        C.load(rawdata)
        self.assertEqual(C.output(), expected_output)

        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            C1 = pickle.loads(pickle.dumps(C, protocol=proto))
            self.assertEqual(C1.output(), expected_output)
| bsd-3-clause |
emonty/ansible | test/units/module_utils/facts/system/test_cmdline.py | 88 | 1996 | # unit tests for ansible system cmdline fact collectors
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.facts.system.cmdline import CmdLineFactCollector
test_data = [
(
"crashkernel=auto rd.lvm.lv=fedora_test-elementary-os/root rd.lvm.lv=fedora_test-elementary-os/swap rhgb quiet",
{
'crashkernel': 'auto',
'quiet': True,
'rd.lvm.lv': [
'fedora_test-elementary-os/root',
'fedora_test-elementary-os/swap',
],
'rhgb': True
}
),
(
"root=/dev/mapper/vg_ssd-root ro rd.lvm.lv=fedora_xenon/root rd.lvm.lv=fedora_xenon/swap rhgb quiet "
"resume=/dev/mapper/fedora_xenon-swap crashkernel=128M zswap.enabled=1",
{
'crashkernel': '128M',
'quiet': True,
'rd.lvm.lv': [
'fedora_xenon/root',
'fedora_xenon/swap'
],
'resume': '/dev/mapper/fedora_xenon-swap',
'rhgb': True,
'ro': True,
'root': '/dev/mapper/vg_ssd-root',
'zswap.enabled': '1'
}
),
(
"rhgb",
{
"rhgb": True
}
),
(
"root=/dev/mapper/vg_ssd-root",
{
'root': '/dev/mapper/vg_ssd-root',
}
),
(
"",
{},
)
]
test_ids = ['lvm_1', 'lvm_2', 'single_without_equal_sign', 'single_with_equal_sign', 'blank_cmdline']
@pytest.mark.parametrize("cmdline, cmdline_dict", test_data, ids=test_ids)
def test_cmd_line_factor(cmdline, cmdline_dict):
    """Each kernel cmdline string parses into the expected fact dict."""
    collector = CmdLineFactCollector()
    assert collector._parse_proc_cmdline_facts(data=cmdline) == cmdline_dict
| gpl-3.0 |
named-data-ndnSIM/NFD | docs/redmine_issue.py | 54 | 2561 | # -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# Based on http://doughellmann.com/2010/05/09/defining-custom-roles-in-sphinx.html
"""Integration of Sphinx with Redmine.
"""
from docutils import nodes, utils
from docutils.parsers.rst.roles import set_classes
def redmine_role(name, rawtext, text, lineno, inliner, options={}, content=[]):
    """Docutils role handler that links ``:issue:`` references to Redmine.

    Returns the standard role 2-tuple: a list of nodes to insert into the
    document and a list of system messages.  Either list may be empty.

    :param name: The role name used in the document.
    :param rawtext: The entire markup snippet, with role.
    :param text: The text marked with the role (the issue number).
    :param lineno: The line number where rawtext appears in the input.
    :param inliner: The inliner instance that called us.
    :param options: Directive options for customization.
    :param content: The directive content for customization.
    """
    try:
        issue_num = int(text)
    except ValueError:
        issue_num = -1  # force the error branch below
    if issue_num <= 0:
        msg = inliner.reporter.error(
            'Redmine issue number must be a number greater than or equal to 1; '
            '"%s" is invalid.' % text, line=lineno)
        return [inliner.problematic(rawtext, rawtext, msg)], [msg]
    app = inliner.document.settings.env.app
    link = make_link_node(rawtext, app, 'issues', str(issue_num), options)
    return [link], []
def make_link_node(rawtext, app, type, slug, options):
    """Create a link to a Redmine resource.

    :param rawtext: Text being replaced with link node.
    :param app: Sphinx application context
    :param type: Link type (issue, changeset, etc.)
    :param slug: ID of the thing to link to
    :param options: Options dictionary passed to role func.
    :raises ValueError: if the ``redmine_project_url`` config value is unset.
    """
    try:
        base = app.config.redmine_project_url
        if not base:
            raise AttributeError
    # BUGFIX: the old 'except AttributeError, err:' form is Python-2-only
    # syntax (a SyntaxError on Python 3); 'as' works on Python 2.6+ and 3.
    except AttributeError as err:
        raise ValueError('redmine_project_url configuration value is not set (%s)' % str(err))
    # Build e.g. <base>/issues/<slug>/, inserting a slash only when the
    # configured base URL doesn't already end with one.
    slash = '/' if base[-1] != '/' else ''
    ref = base + slash + type + '/' + slug + '/'
    set_classes(options)
    node = nodes.reference(rawtext, 'Issue #' + utils.unescape(slug), refuri=ref,
                           **options)
    return node
def setup(app):
    """Install the plugin.

    Registers the ``:issue:`` role and the ``redmine_project_url``
    configuration value with the given Sphinx application.

    :param app: Sphinx application context.
    """
    app.add_role('issue', redmine_role)
    app.add_config_value('redmine_project_url', None, 'env')
    return
| gpl-3.0 |
ArneBab/pypyjs | website/demo/home/rfk/repos/pypy/lib-python/2.7/user.py | 313 | 1627 | """Hook to allow user-specified customization code to run.
As a policy, Python doesn't run user-specified code on startup of
Python programs (interactive sessions execute the script specified in
the PYTHONSTARTUP environment variable if it exists).
However, some programs or sites may find it convenient to allow users
to have a standard customization file, which gets run when a program
requests it. This module implements such a mechanism. A program
that wishes to use the mechanism must execute the statement
import user
The user module looks for a file .pythonrc.py in the user's home
directory and if it can be opened, execfile()s it in its own global
namespace. Errors during this phase are not caught; that's up to the
program that imports the user module, if it wishes.
The user's .pythonrc.py could conceivably test for sys.version if it
wishes to do different things depending on the Python version.
"""
from warnings import warnpy3k
warnpy3k("the user module has been removed in Python 3.0", stacklevel=2)
del warnpy3k

import os

# Resolve the user's home directory in a platform-dependent way; fall back
# to the current directory when nothing better is available.
home = os.curdir                        # Default
if 'HOME' in os.environ:
    home = os.environ['HOME']
elif os.name == 'posix':
    home = os.path.expanduser("~/")
elif os.name == 'nt':                   # Contributed by Jeff Bauer
    if 'HOMEPATH' in os.environ:
        if 'HOMEDRIVE' in os.environ:
            home = os.environ['HOMEDRIVE'] + os.environ['HOMEPATH']
        else:
            home = os.environ['HOMEPATH']

pythonrc = os.path.join(home, ".pythonrc.py")
# Only execute the rc file if it can be opened; a missing or unreadable
# file is deliberately not an error.  The open/close probe keeps errors
# raised *inside* the rc file distinct from "file not found".
try:
    f = open(pythonrc)
except IOError:
    pass
else:
    f.close()
    execfile(pythonrc)  # Python 2 only; removed in Python 3
| mit |
collects/VTK | Examples/Modelling/Python/constrainedDelaunay.py | 27 | 4511 | #!/usr/bin/env python
# This example demonstrates how to use a constraint polygon in
# Delaunay triangulation.
import vtk
from vtk.util.colors import peacock
# Generate the input points and constrained edges/polygons.
points = vtk.vtkPoints()
# Point ids 0-11 form the outer boundary; ids 12-39 form the inner
# constraint loop.  All points lie in the z=0 plane.
_point_xy = [
    (1, 4), (3, 4), (7, 4), (11, 4), (13, 4), (13, 8), (13, 12), (10, 12),
    (7, 12), (4, 12), (1, 12), (1, 8), (3.5, 5), (4.5, 5), (5.5, 8),
    (6.5, 8), (6.5, 5), (7.5, 5), (7.5, 8), (9, 8), (9, 5), (10, 5),
    (10, 7), (11, 5), (12, 5), (10.5, 8), (12, 11), (11, 11), (10, 9),
    (10, 11), (9, 11), (9, 9), (7.5, 9), (7.5, 11), (6.5, 11), (6.5, 9),
    (5, 9), (4, 6), (3, 9), (2, 9),
]
for _pid, (_x, _y) in enumerate(_point_xy):
    points.InsertPoint(_pid, _x, _y, 0)
polys = vtk.vtkCellArray()
# First cell: the outer polygon, point ids 0..11 in ascending order.
polys.InsertNextCell(12)
for _pid in range(12):
    polys.InsertCellPoint(_pid)
# Second cell: the inner constraint polygon, ids 39 down to 12.  The
# reversed ordering matters — see the note near the Delaunay filter about
# polygon direction indicating inside vs. outside.
polys.InsertNextCell(28)
for _pid in range(39, 11, -1):
    polys.InsertCellPoint(_pid)
polyData = vtk.vtkPolyData()
polyData.SetPoints(points)
polyData.SetPolys(polys)

# Notice this trick. The SetInput() method accepts a vtkPolyData that
# is also the input to the Delaunay filter. The points of the
# vtkPolyData are used to generate the triangulation; the polygons are
# used to create a constraint region. The polygons are very carefully
# created and ordered in the right direction to indicate inside and
# outside of the polygon.
delny = vtk.vtkDelaunay2D()
delny.SetInputData(polyData)
delny.SetSourceData(polyData)

# Map the triangulated mesh into a renderable actor.
mapMesh = vtk.vtkPolyDataMapper()
mapMesh.SetInputConnection(delny.GetOutputPort())
meshActor = vtk.vtkActor()
meshActor.SetMapper(mapMesh)

# Now we just pretty the mesh up with tubed edges and balls at the
# vertices.
extract = vtk.vtkExtractEdges()
extract.SetInputConnection(delny.GetOutputPort())
tubes = vtk.vtkTubeFilter()
tubes.SetInputConnection(extract.GetOutputPort())
# Radius/side count control the thickness and roundness of the wireframe.
tubes.SetRadius(0.1)
tubes.SetNumberOfSides(6)
mapEdges = vtk.vtkPolyDataMapper()
mapEdges.SetInputConnection(tubes.GetOutputPort())
edgeActor = vtk.vtkActor()
edgeActor.SetMapper(mapEdges)
edgeActor.GetProperty().SetColor(peacock)
edgeActor.GetProperty().SetSpecularColor(1, 1, 1)
edgeActor.GetProperty().SetSpecular(0.3)
edgeActor.GetProperty().SetSpecularPower(20)
edgeActor.GetProperty().SetAmbient(0.2)
edgeActor.GetProperty().SetDiffuse(0.8)

# Create the rendering window, renderer, and interactive renderer
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

# Add the actors to the renderer, set the background and size
ren.AddActor(meshActor)
ren.AddActor(edgeActor)
ren.SetBackground(0, 0, 0)
renWin.SetSize(450, 300)
ren.ResetCamera()
ren.GetActiveCamera().Zoom(2)

# Start the interaction loop; this blocks until the window is closed.
iren.Initialize()
renWin.Render()
iren.Start()
| bsd-3-clause |
mastizada/pontoon | pontoon/base/migrations/0101_userprofile_custom_homepage.py | 1 | 1375 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.7 on 2017-08-18 22:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Adds UserProfile.custom_homepage and backfills it with each user's
    most-contributed-to locale."""

    dependencies = [
        ('base', '0100_bug_1390805_create_missing_tm_entries'),
    ]

    operations = [
        migrations.AddField(
            model_name='userprofile',
            name='custom_homepage',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
        migrations.RunSQL(
            # SQL migration that will group contributions and will assign locale with highest numbers of contributions
            # of a user as its custom_homepage.
            """
            WITH cte AS (
                SELECT au.id, au.email, bt.locale_id, count(bt.locale_id) as contributions_count, row_number()
                OVER(PARTITION BY au.id ORDER BY count(bt.locale_id) DESC) AS rn
                FROM auth_user au LEFT JOIN base_translation bt ON(bt.user_id=au.id)
                GROUP BY au.id, bt.locale_id
            )
            UPDATE base_userprofile bu SET custom_homepage = (
                SELECT code FROM base_locale WHERE id=cte.locale_id
            )
            FROM cte
            WHERE cte.rn=1 and cte.contributions_count > 0 and bu.user_id=cte.id;
            """,
            # The data backfill is intentionally not reversed on migrate-back.
            migrations.RunSQL.noop,
        )
    ]
| bsd-3-clause |
laufercenter/meld | docs/conf.py | 1 | 8490 | # -*- coding: utf-8 -*-
#
# MELD documentation build configuration file, created by
# sphinx-quickstart on Mon Aug 11 15:01:20 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import mock
# Modules unavailable at documentation-build time; stub them out so that
# autodoc can import the package without its compiled/third-party deps.
MOCK_MODULES = ['meldplugin', 'simtk', 'simtk.openmm', 'simtk.openmm.app',
                'simtk.unit', 'netCDF4', 'numpy', 'scipy']
sys.modules.update((name, mock.Mock()) for name in MOCK_MODULES)
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.coverage',
'sphinx.ext.mathjax',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'MELD'
copyright = u'2014, Justin MacCallum'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0'
# The full version, including alpha/beta/rc tags.
release = '1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'MELDdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'MELD.tex', u'MELD Documentation',
u'Justin MacCallum', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'meld', u'MELD Documentation',
[u'Justin MacCallum'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'MELD', u'MELD Documentation',
u'Justin MacCallum', 'MELD', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit |
jiangzhuo/kbengine | kbe/res/scripts/common/Lib/site-packages/pip/_vendor/requests/packages/urllib3/util/ssl_.py | 305 | 4235 | from binascii import hexlify, unhexlify
from hashlib import md5, sha1
from ..exceptions import SSLError
try: # Test for SSL features
SSLContext = None
HAS_SNI = False
import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import SSLContext # Modern SSL?
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
def assert_fingerprint(cert, fingerprint):
    """
    Checks if given fingerprint matches the supplied certificate.

    :param cert:
        Certificate as bytes object.
    :param fingerprint:
        Fingerprint as string of hexdigits, can be interspersed by colons.
    :raises SSLError:
        If the fingerprint has an unsupported length or does not match the
        certificate's digest.
    """
    import hmac  # local import keeps this security fix self-contained

    # Maps the length of a digest to a possible hash function producing
    # this digest.
    hashfunc_map = {
        16: md5,
        20: sha1
    }

    fingerprint = fingerprint.replace(':', '').lower()
    digest_length, rest = divmod(len(fingerprint), 2)

    if rest or digest_length not in hashfunc_map:
        raise SSLError('Fingerprint is of invalid length.')

    # We need encode() here for py32; works on py2 and p33.
    fingerprint_bytes = unhexlify(fingerprint.encode())

    hashfunc = hashfunc_map[digest_length]

    cert_digest = hashfunc(cert).digest()

    # SECURITY: use a constant-time comparison (hmac.compare_digest,
    # available on Python 2.7.7+/3.3+) instead of '==' so the comparison
    # does not leak how much of the fingerprint matched via timing.
    if not hmac.compare_digest(cert_digest, fingerprint_bytes):
        raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
                       .format(hexlify(fingerprint_bytes),
                               hexlify(cert_digest)))
def resolve_cert_reqs(candidate):
    """
    Resolve *candidate* to a numeric ``CERT_*`` constant usable by
    ``ssl.wrap_socket``.

    ``None`` defaults to :data:`ssl.CERT_NONE`.  A string is looked up in
    the :mod:`ssl` module, first verbatim and then with a ``CERT_`` prefix,
    so both ``'CERT_REQUIRED'`` and the abbreviation ``'REQUIRED'`` work.
    Any other value is assumed to already be a numeric constant and is
    returned unchanged.
    """
    if candidate is None:
        return CERT_NONE
    if not isinstance(candidate, str):
        return candidate
    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        resolved = getattr(ssl, 'CERT_' + candidate)
    return resolved
def resolve_ssl_version(candidate):
    """
    Resolve *candidate* to an ``ssl.PROTOCOL_*`` constant.

    Mirrors :func:`resolve_cert_reqs`: ``None`` defaults to
    :data:`ssl.PROTOCOL_SSLv23`; a string is looked up in the :mod:`ssl`
    module, first verbatim and then with a ``PROTOCOL_`` prefix; any other
    value is returned unchanged.
    """
    if candidate is None:
        return PROTOCOL_SSLv23
    if not isinstance(candidate, str):
        return candidate
    resolved = getattr(ssl, candidate, None)
    if resolved is None:
        resolved = getattr(ssl, 'PROTOCOL_' + candidate)
    return resolved
if SSLContext is not None:  # Python 3.2+
    def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                        ca_certs=None, server_hostname=None,
                        ssl_version=None):
        """
        All arguments except `server_hostname` have the same meaning as for
        :func:`ssl.wrap_socket`

        :param server_hostname:
            Hostname of the expected certificate
        """
        context = SSLContext(ssl_version)
        context.verify_mode = cert_reqs

        # Disable TLS compression to mitigate CRIME attack (issue #309).
        # Value is hard-coded — presumably because ssl.OP_NO_COMPRESSION is
        # missing on some supported Python builds; TODO confirm.
        OP_NO_COMPRESSION = 0x20000
        context.options |= OP_NO_COMPRESSION

        if ca_certs:
            try:
                context.load_verify_locations(ca_certs)
            # Py32 raises IOError
            # Py33 raises FileNotFoundError
            except Exception as e:  # Reraise as SSLError
                raise SSLError(e)
        if certfile:
            # FIXME: This block needs a test.
            context.load_cert_chain(certfile, keyfile)
        if HAS_SNI:  # Platform-specific: OpenSSL with enabled SNI
            return context.wrap_socket(sock, server_hostname=server_hostname)
        return context.wrap_socket(sock)

else:  # Python 3.1 and earlier
    def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                        ca_certs=None, server_hostname=None,
                        ssl_version=None):
        # No SSLContext here, so SNI is unsupported; server_hostname is
        # accepted but ignored to keep one call signature for both paths.
        return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
                           ca_certs=ca_certs, cert_reqs=cert_reqs,
                           ssl_version=ssl_version)
| lgpl-3.0 |
AlphaSmartDog/DeepLearningNotes | Note-2 RNN处理非线性回归/sonnet/python/modules/attention.py | 10 | 7926 | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Modules for attending over memory."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
import numpy as np
from sonnet.python.modules import base
from sonnet.python.modules import basic
import tensorflow as tf
# Result of AttentiveRead._build(). See docstring therein for details.
AttentionOutput = collections.namedtuple(
"AttentionOutput", ["read", "weights", "weight_logits"])
class AttentiveRead(base.AbstractModule):
  """A module for reading with attention.

  This module reads a weighted sum of embeddings from memory, where each
  memory slot's weight is based on the logit returned by an attention embedding
  module. A mask may be given to ignore some memory slots (e.g. when attending
  over variable-length sequences).
  """

  def __init__(self, attention_logit_mod, name="attention"):
    """Initialize AttentiveRead module.

    Args:
      attention_logit_mod: Module that produces logit corresponding to a memory
        slot's compatibility. Must map a [batch_size * memory_size,
        memory_word_size + query_word_size]-shaped Tensor to a
        [batch_size * memory_size, 1] shape Tensor.
      name: string. Name for module.
    """
    super(AttentiveRead, self).__init__(name=name)
    self._attention_logit_mod = attention_logit_mod

  def _build(self, memory, query, memory_mask=None):
    """Perform a differentiable read.

    Args:
      memory: [batch_size, memory_size, memory_word_size]-shaped Tensor of
        dtype float32. This represents, for each example and memory slot, a
        single embedding to attend over.
      query: [batch_size, query_word_size]-shaped Tensor of dtype float32.
        Represents, for each example, a single embedding representing a query.
      memory_mask: None or [batch_size, memory_size]-shaped Tensor of dtype
        bool. An entry of False indicates that a memory slot should not enter
        the resulting weighted sum. If None, all memory is used.

    Returns:
      An AttentionOutput instance containing:
        read: [batch_size, memory_word_size]-shaped Tensor of dtype float32.
          This represents, for each example, a weighted sum of the contents of
          the memory.
        weights: [batch_size, memory_size]-shaped Tensor of dtype float32. This
          represents, for each example and memory slot, the attention weights
          used to compute the read.
        weight_logits: [batch_size, memory_size]-shaped Tensor of dtype float32.
          This represents, for each example and memory slot, the logits of the
          attention weights, that is, `weights` is calculated by taking the
          softmax of the weight logits.

    Raises:
      UnderspecifiedError: if memory_word_size or query_word_size can not be
        inferred.
      IncompatibleShapeError: if memory, query, memory_mask, or output of
        attention_logit_mod do not match expected shapes.
    """
    # Rank checks: fail early with a descriptive error rather than deep inside
    # the graph construction below.
    if len(memory.get_shape()) != 3:
      raise base.IncompatibleShapeError(
          "memory must have shape [batch_size, memory_size, memory_word_size].")
    if len(query.get_shape()) != 2:
      raise base.IncompatibleShapeError(
          "query must have shape [batch_size, query_word_size].")
    if memory_mask is not None and len(memory_mask.get_shape()) != 2:
      raise base.IncompatibleShapeError(
          "memory_mask must have shape [batch_size, memory_size].")

    # Ensure final dimensions are defined, else the attention logit module will
    # be unable to infer input size when constructing variables.
    inferred_memory_word_size = memory.get_shape()[2].value
    inferred_query_word_size = query.get_shape()[1].value
    if inferred_memory_word_size is None or inferred_query_word_size is None:
      raise base.UnderspecifiedError(
          "memory_word_size and query_word_size must be known at graph "
          "construction time.")

    # Dynamic (runtime) dimensions; batch_size and memory_size may be unknown
    # statically.
    memory_shape = tf.shape(memory)
    batch_size = memory_shape[0]
    memory_size = memory_shape[1]

    query_shape = tf.shape(query)
    query_batch_size = query_shape[0]

    # Transform query to have same number of words as memory.
    #
    # expanded_query: [batch_size, memory_size, query_word_size].
    # NOTE(review): `dim=` is the pre-1.0 TensorFlow spelling of `axis=` for
    # tf.expand_dims and was later removed -- confirm the targeted TF version.
    expanded_query = tf.tile(tf.expand_dims(query, dim=1), [1, memory_size, 1])

    # Compute attention weights for each memory slot.
    #
    # attention_weight_logits: [batch_size, memory_size]
    # The control dependency enforces at runtime that memory and query agree
    # on batch_size.
    with tf.control_dependencies(
        [tf.assert_equal(batch_size, query_batch_size)]):
      concatenated_embeddings = tf.concat(
          values=[memory, expanded_query], axis=2)

    batch_apply_attention_logit = basic.BatchApply(
        self._attention_logit_mod, n_dims=2, name="batch_apply_attention_logit")
    attention_weight_logits = batch_apply_attention_logit(
        concatenated_embeddings)

    # Note: basic.BatchApply() will automatically reshape the [batch_size *
    # memory_size, 1]-shaped result of self._attention_logit_mod(...) into a
    # [batch_size, memory_size, 1]-shaped Tensor. If
    # self._attention_logit_mod(...) returns something with more dimensions,
    # then attention_weight_logits will have extra dimensions, too.
    if len(attention_weight_logits.get_shape()) != 3:
      raise base.IncompatibleShapeError(
          "attention_weight_logits must be a rank-3 Tensor. Are you sure that "
          "attention_logit_mod() returned [batch_size * memory_size, 1]-shaped"
          " Tensor?")

    # Remove final length-1 dimension.
    attention_weight_logits = tf.squeeze(attention_weight_logits, [2])

    # Mask out ignored memory slots by assigning them very small logits. Ensures
    # that every example has at least one valid memory slot, else we'd end up
    # averaging all memory slots equally.
    if memory_mask is not None:
      num_remaining_memory_slots = tf.reduce_sum(
          tf.cast(memory_mask, dtype=tf.int32), axis=[1])
      with tf.control_dependencies(
          [tf.assert_positive(num_remaining_memory_slots)]):
        finfo = np.finfo(np.float32)
        kept_indices = tf.cast(memory_mask, dtype=tf.float32)
        ignored_indices = tf.cast(tf.logical_not(memory_mask), dtype=tf.float32)
        # float32 max for kept slots, float32 min for ignored slots, so that
        # tf.minimum leaves kept logits untouched and floors ignored ones.
        lower_bound = finfo.max * kept_indices + finfo.min * ignored_indices
        attention_weight_logits = tf.minimum(attention_weight_logits,
                                             lower_bound)

    # attended_memory: [batch_size, memory_word_size].
    attention_weight = tf.reshape(
        tf.nn.softmax(attention_weight_logits),
        shape=[batch_size, memory_size, 1])
    # The multiplication is elementwise and relies on broadcasting the weights
    # across memory_word_size. Then we sum across the memory slots.
    attended_memory = tf.reduce_sum(memory * attention_weight, axis=[1])

    # Infer shape of result as much as possible.
    inferred_batch_size, _, inferred_memory_word_size = (
        memory.get_shape().as_list())
    attended_memory.set_shape([inferred_batch_size, inferred_memory_word_size])

    return AttentionOutput(
        read=attended_memory,
        weights=tf.squeeze(attention_weight, [2]),
        weight_logits=attention_weight_logits)
| mit |
BradleyConn/brickhack | examples/bratko_kopec/bratko_kopec.py | 2 | 4994 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Run an EPD test suite with an UCI engine."""
from __future__ import print_function
import chess
import chess.uci
import chess.variant
import time
import argparse
import itertools
import logging
import sys
def test_epd(engine, epd, VariantBoard, movetime):
    """Run one EPD line in simple mode.

    Sets up the position, asks the engine for a move with a fixed movetime,
    prints the verdict and returns 1.0 for a pass, 0.0 for a fail.
    """
    board = VariantBoard()
    epd_info = board.set_epd(epd)

    # Human-readable description: the EPD id (or FEN) plus expectations.
    description = "%s" % epd_info.get("id", board.fen())
    if "am" in epd_info:
        description = "%s (avoid %s)" % (description, " and ".join(board.san(am) for am in epd_info["am"]))
    if "bm" in epd_info:
        description = "%s (expect %s)" % (description, " or ".join(board.san(bm) for bm in epd_info["bm"]))

    engine.ucinewgame()
    engine.setoption({"UCI_Variant": VariantBoard.uci_variant})
    engine.position(board)
    enginemove, pondermove = engine.go(movetime=movetime)

    # Fail when the engine played an avoid-move or missed every best-move.
    failed = (("am" in epd_info and enginemove in epd_info["am"]) or
              ("bm" in epd_info and enginemove not in epd_info["bm"]))
    score = 0.0 if failed else 1.0
    print("%s: %s | +%d" % (description, board.san(enginemove), score))
    return score
def test_epd_with_fractional_scores(engine, epd, VariantBoard, movetime):
    """Run one EPD line, granting partial credit for early correct PVs.

    The engine searches in the background; the principal variation is sampled
    at 1/4, 2/4 and 3/4 of movetime, earning 1/4, 1/3 and 1/2 respectively,
    and the final best move earns the full 1.0. Returns the score achieved.
    """
    info_handler = chess.uci.InfoHandler()
    engine.info_handlers.append(info_handler)

    position = VariantBoard()
    epd_info = position.set_epd(epd)
    epd_string = "%s" % epd_info.get("id", position.fen())
    if "am" in epd_info:
        epd_string = "%s (avoid %s)" % (epd_string, " and ".join(position.san(am) for am in epd_info["am"]))
    if "bm" in epd_info:
        epd_string = "%s (expect %s)" % (epd_string, " or ".join(position.san(bm) for bm in epd_info["bm"]))

    engine.ucinewgame()
    engine.setoption({"UCI_Variant": VariantBoard.uci_variant})
    engine.position(position)

    # Search in background
    search = engine.go(infinite=True, async_callback=True)

    score = 0.0

    print("%s:" % epd_string, end=" ")
    sys.stdout.flush()

    # Three intermediate checkpoints, each a quarter of movetime apart
    # (movetime is in milliseconds, time.sleep takes seconds).
    for step in range(0, 3):
        time.sleep(movetime / 4000.0)

        # Assess the current principal variation.
        with info_handler as info:
            if 1 in info["pv"] and len(info["pv"][1]) >= 1:
                move = info["pv"][1][0]
                print("(%s)" % position.san(move), end=" ")
                sys.stdout.flush()
                if "am" in epd_info and move in epd_info["am"]:
                    continue  # fail
                elif "bm" in epd_info and move not in epd_info["bm"]:
                    continue  # fail
                else:
                    # Partial credit grows with how early the PV is correct:
                    # step 0 -> 1/4, step 1 -> 1/3, step 2 -> 1/2.
                    score = 1.0 / (4 - step)
            else:
                print("(no pv)", end=" ")
                sys.stdout.flush()

    # Assess the final best move by the engine.
    time.sleep(movetime / 4000.0)
    engine.stop()
    enginemove, pondermove = search.result()
    if "am" in epd_info and enginemove in epd_info["am"]:
        pass  # fail
    elif "bm" in epd_info and enginemove not in epd_info["bm"]:
        pass  # fail
    else:
        score = 1.0

    print("%s | +%g" % (position.san(enginemove), score))

    engine.info_handlers.remove(info_handler)
    return score
if __name__ == "__main__":
    # Parse command line arguments.
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument("-e", "--engine", required=True,
                        help="The UCI engine under test.")
    parser.add_argument("epd", nargs="+", type=argparse.FileType("r"),
                        help="EPD test suite(s).")
    parser.add_argument("-v", "--variant", default="standard",
                        help="Use a non-standard chess variant.")
    parser.add_argument("-t", "--movetime", default=1000, type=int,
                        help="Time to move in milliseconds.")
    # Default scoring mode grants fractional credit; -s switches to the
    # all-or-nothing test_epd.
    parser.add_argument("-s", "--simple", dest="test_epd", action="store_const",
                        default=test_epd_with_fractional_scores,
                        const=test_epd,
                        help="Run in simple mode without fractional scores.")
    parser.add_argument("-d", "--debug", action="store_true",
                        help="Show debug logs.")
    args = parser.parse_args()

    # Configure logger.
    logging.basicConfig(level=logging.DEBUG if args.debug else logging.WARNING)

    # Find variant.
    VariantBoard = chess.variant.find_variant(args.variant)

    # Open engine.
    engine = chess.uci.popen_engine(args.engine)
    engine.uci()

    # Run each test line.
    score = 0.0
    count = 0
    for epd in itertools.chain(*args.epd):
        # Skip comments and empty lines.
        epd = epd.strip()
        if not epd or epd.startswith("#") or epd.startswith("%"):
            print(epd.rstrip())
            continue
        # Run the actual test.
        score += args.test_epd(engine, epd, VariantBoard, args.movetime)
        count += 1

    engine.quit()

    print("-------------------------------")
    print("%g / %d" % (score, count))
| gpl-3.0 |
tanzquotient/tq_website | courses/api/serializers.py | 2 | 2420 | from rest_framework import serializers
from django.contrib import auth
from courses.models import *
class OfferingSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes an Offering with hyperlinks to its courses' payment views."""

    # Courses of the offering, linked to the course payment-detail endpoint.
    course_set = serializers.HyperlinkedRelatedField(many=True, read_only=True, view_name='courses:api:course-payment-detail')
    # Rendered via the Period model's __str__.
    period = serializers.StringRelatedField()

    class Meta:
        model = Offering
        fields = ('id', 'name', 'period', 'course_set')
class UserSerializer(serializers.ModelSerializer):
    """Serializes the auth user plus the related profile's student status."""

    # Pulled from the related profile; rendered via __str__.
    student_status = serializers.StringRelatedField(source='profile.student_status')

    class Meta:
        model = auth.get_user_model()
        fields = ('id', 'username', 'first_name', 'last_name', 'email', 'student_status')
class SubscribePaymentSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a Subscribe for payment views (who owes what, and status)."""

    user = UserSerializer()
    partner = serializers.StringRelatedField()
    # Computed by the model method get_price_to_pay.
    price_to_pay = serializers.FloatField(source='get_price_to_pay')
    detail = serializers.HyperlinkedIdentityField(view_name='courses:api:subscription-payment')

    class Meta:
        model = Subscribe
        # NOTE(review): this exposes 'payed' while SubscribePaymentUpdateSerializer
        # uses 'paid' -- confirm the Subscribe model's actual field name.
        fields = ('id', 'user', 'partner', 'price_to_pay', 'payed', 'detail')
class CoursePaymentSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a Course with its participants' payment information."""

    offering = serializers.HyperlinkedRelatedField(view_name='courses:api:offering-detail', read_only=True)
    # Both a readable name and a hyperlink for the same course type.
    type_name = serializers.StringRelatedField(source='type')
    type = serializers.HyperlinkedRelatedField(view_name='courses:api:coursetype-detail', read_only=True)
    # this calls the method participatory() and serializes the returned queryset
    participatory = SubscribePaymentSerializer(many=True)

    class Meta:
        model = Course
        fields = ('id', 'name', 'type_name', 'type', 'offering', 'participatory')
class SubscribePaymentUpdateSerializer(serializers.Serializer):
    """Write-only serializer toggling a subscription's paid flag."""

    # NOTE(review): spelled 'paid' here but 'payed' in SubscribePaymentSerializer
    # -- confirm which attribute actually exists on the Subscribe model.
    paid = serializers.BooleanField()

    def update(self, instance, validated_data):
        """Persist the paid flag on *instance* and return it."""
        instance.paid = validated_data.get('paid', instance.paid)
        instance.save()
        return instance
class CourseTypeSerializer(serializers.HyperlinkedModelSerializer):
    """Serializes a CourseType with hyperlinks to its dance styles."""

    styles = serializers.HyperlinkedRelatedField(many=True, view_name='courses:api:style-detail', read_only=True)

    class Meta:
        model = CourseType
        fields = ('name', 'styles', 'level', 'couple_course')
class StyleSerializer(serializers.ModelSerializer):
    """Serializes a Style with all of its model fields."""

    class Meta:
        model = Style
        # Fix: DRF >= 3.3 requires an explicit `fields` (or `exclude`) on
        # ModelSerializer; '__all__' preserves the old implicit behaviour of
        # serializing every model field.
        fields = '__all__'
| gpl-2.0 |
synicalsyntax/zulip | zerver/management/commands/soft_deactivate_users.py | 4 | 3849 | import sys
from argparse import ArgumentParser
from typing import Any, Dict, List
from django.conf import settings
from django.core.management.base import CommandError
from zerver.lib.management import ZulipBaseCommand
from zerver.lib.soft_deactivation import (
do_auto_soft_deactivate_users,
do_soft_activate_users,
do_soft_deactivate_users,
logger,
)
from zerver.models import Realm, UserProfile
def get_users_from_emails(emails: List[str],
                          filter_kwargs: Dict[str, Realm]) -> List[UserProfile]:
    """Look up UserProfiles by delivery email, raising if any are missing.

    Bug: Ideally, this would be case-insensitive like our other email queries.
    """
    matched = UserProfile.objects.filter(
        delivery_email__in=emails,
        **filter_kwargs)
    if len(matched) == len(emails):
        return matched

    # At least one requested email had no matching profile; report them all.
    found_emails = {profile.delivery_email for profile in matched}
    missing_emails = '\n'.join(set(emails) - found_emails)
    raise CommandError(
        'Users with the following emails were not found:\n\n'
        f'{missing_emails}\n\n'
        'Check if they are correct.',
    )
class Command(ZulipBaseCommand):
    help = """Soft activate/deactivate users. Users are recognised by their emails here."""

    def add_arguments(self, parser: ArgumentParser) -> None:
        """Register realm selection plus the activate/deactivate options."""
        self.add_realm_args(parser)
        parser.add_argument('-d', '--deactivate',
                            dest='deactivate',
                            action='store_true',
                            default=False,
                            help='Used to deactivate user/users.')
        parser.add_argument('-a', '--activate',
                            dest='activate',
                            action='store_true',
                            default=False,
                            help='Used to activate user/users.')
        parser.add_argument('--inactive-for',
                            type=int,
                            default=28,
                            help='Number of days of inactivity before soft-deactivation')
        parser.add_argument('users', metavar='<users>', type=str, nargs='*', default=[],
                            help="A list of user emails to soft activate/deactivate.")

    def handle(self, *args: Any, **options: Any) -> None:
        """Dispatch to soft (de)activation based on the parsed options.

        Activation requires explicit emails; deactivation without emails falls
        back to auto-deactivating users inactive for --inactive-for days.
        Raises CommandError on missing/invalid usage.
        """
        if settings.STAGING:
            print('This is a Staging server. Suppressing management command.')
            sys.exit(0)

        realm = self.get_realm(options)
        user_emails = options['users']
        activate = options['activate']
        deactivate = options['deactivate']

        # Restrict lookups to the chosen realm, if one was given.
        filter_kwargs: Dict[str, Realm] = {}
        if realm is not None:
            filter_kwargs = dict(realm=realm)

        if activate:
            if not user_emails:
                print('You need to specify at least one user to use the activate option.')
                self.print_help("./manage.py", "soft_deactivate_users")
                raise CommandError
            users_to_activate = get_users_from_emails(user_emails, filter_kwargs)
            users_activated = do_soft_activate_users(users_to_activate)
            logger.info('Soft Reactivated %d user(s)', len(users_activated))
        elif deactivate:
            if user_emails:
                users_to_deactivate = get_users_from_emails(user_emails, filter_kwargs)
                print('Soft deactivating forcefully...')
                users_deactivated = do_soft_deactivate_users(users_to_deactivate)
            else:
                users_deactivated = do_auto_soft_deactivate_users(int(options['inactive_for']),
                                                                  realm)
            logger.info('Soft Deactivated %d user(s)', len(users_deactivated))
        else:
            # Neither -a nor -d was given.
            self.print_help("./manage.py", "soft_deactivate_users")
            raise CommandError
| apache-2.0 |
recq-cse/RecQ | algorithm/ranking/DMF.py | 2 | 6418 | #coding:utf8
from baseclass.DeepRecommender import DeepRecommender
import numpy as np
from random import choice,random,randint,shuffle
from tool import config
import tensorflow as tf
#According to the paper, we only
class DMF(DeepRecommender):
def __init__(self,conf,trainingSet=None,testSet=None,fold='[1]'):
super(DMF, self).__init__(conf,trainingSet,testSet,fold)
def next_batch(self,i):
rows = np.zeros(((self.negative_sp+1)*self.batch_size,self.num_items))
cols = np.zeros(((self.negative_sp+1)*self.batch_size,self.num_users))
batch_idx = range(self.batch_size*i,self.batch_size*(i+1))
users = [self.data.trainingData[idx][0] for idx in batch_idx]
items = [self.data.trainingData[idx][1] for idx in batch_idx]
u_idx = [self.data.user[u] for u in users]
v_idx = [self.data.item[i] for i in items]
ratings = [float(self.data.trainingData[idx][2]) for idx in batch_idx]
for i,user in enumerate(users):
rows[i] = self.data.row(user)
for i,item in enumerate(items):
cols[i] = self.data.col(item)
userList = self.data.user.keys()
itemList = self.data.item.keys()
#negative sample
for i in range(self.negative_sp*self.batch_size):
u = choice(userList)
v = choice(itemList)
while self.data.contains(u,v):
u = choice(userList)
v = choice(itemList)
rows[self.batch_size-1+i]=self.data.row(u)
cols[self.batch_size-1+i]=self.data.col(i)
u_idx.append(self.data.user[u])
v_idx.append(self.data.item[v])
ratings.append(0)
return rows,cols,np.array(ratings),np.array(u_idx),np.array(v_idx)
def initModel(self):
super(DMF, self).initModel()
n_input_u = len(self.data.item)
n_input_i = len(self.data.user)
self.negative_sp = 5
self.n_hidden_u=[256,512]
self.n_hidden_i=[256,512]
self.input_u = tf.placeholder(tf.float, [None, n_input_u])
self.input_i = tf.placeholder(tf.float, [None, n_input_i])
def buildModel(self):
super(DMF, self).buildModel_tf()
initializer = tf.contrib.layers.xavier_initializer()
#user net
user_W1 = tf.Variable(initializer([self.num_items, self.n_hidden_u[0]],stddev=0.01))
self.user_out = tf.nn.relu(tf.matmul(self.input_u, user_W1))
self.regLoss = tf.nn.l2_loss(user_W1)
for i in range(1, len(self.n_hidden_u)):
W = tf.Variable(initializer([self.n_hidden_u[i-1], self.n_hidden_u[i]],stddev=0.01))
b = tf.Variable(initializer([self.n_hidden_u[i]],stddev=0.01))
self.regLoss = tf.add(self.regLoss,tf.nn.l2_loss(W))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(b))
self.user_out = tf.nn.relu(tf.add(tf.matmul(self.user_out, W), b))
#item net
item_W1 = tf.Variable(initializer([self.num_users, self.n_hidden_i[0]],stddev=0.01))
self.item_out = tf.nn.relu(tf.matmul(self.input_i, item_W1))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(item_W1))
for i in range(1, len(self.n_hidden_i)):
W = tf.Variable(initializer([self.n_hidden_i[i-1], self.n_hidden_i[i]],stddev=0.01))
b = tf.Variable(initializer([self.n_hidden_i[i]],stddev=0.01))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(W))
self.regLoss = tf.add(self.regLoss, tf.nn.l2_loss(b))
self.item_out = tf.nn.relu(tf.add(tf.matmul(self.item_out, W), b))
norm_user_output = tf.sqrt(tf.reduce_sum(tf.square(self.user_out), axis=1))
norm_item_output = tf.sqrt(tf.reduce_sum(tf.square(self.item_out), axis=1))
self.y_ = tf.reduce_sum(tf.multiply(self.user_out, self.item_out), axis=1) / (
norm_item_output * norm_user_output)
self.y_ = tf.maximum(1e-6, self.y_)
self.loss = self.r*tf.log(self.y_) + (1 - self.r) * tf.log(1 - self.y_)#tf.nn.sigmoid_cross_entropy_with_logits(logits=self.y_,labels=self.r)
#self.loss = tf.nn.l2_loss(tf.subtract(self.y_,self.r))
self.loss = -tf.reduce_sum(self.loss)
reg_lambda = tf.constant(self.regU, dtype=tf.float32)
self.regLoss = tf.multiply(reg_lambda,self.regLoss)
self.loss = tf.add(self.loss,self.regLoss)
optimizer = tf.train.AdamOptimizer(self.lRate).minimize(self.loss)
self.U = np.zeros((self.num_users, self.n_hidden_u[-1]))
self.V = np.zeros((self.num_items, self.n_hidden_u[-1]))
init = tf.global_variables_initializer()
self.sess.run(init)
total_batch = int(len(self.data.trainingData)/ self.batch_size)
for epoch in range(self.maxIter):
shuffle(self.data.trainingData)
for i in range(total_batch):
users,items,ratings,u_idx,v_idx = self.next_batch(i)
shuffle_idx=np.random.permutation(range(len(users)))
users = users[shuffle_idx]
items = items[shuffle_idx]
ratings = ratings[shuffle_idx]
u_idx = u_idx[shuffle_idx]
v_idx = v_idx[shuffle_idx]
_,loss= self.sess.run([optimizer, self.loss], feed_dict={self.input_u: users,self.input_i:items,self.r:ratings})
print self.foldInfo, "Epoch:", '%04d' % (epoch + 1), "Batch:", '%03d' % (i + 1), "loss=", "{:.9f}".format(loss)
#save the output layer
U_embedding, V_embedding = self.sess.run([self.user_out, self.item_out], feed_dict={self.input_u: users,self.input_i:items})
for ue,u in zip(U_embedding,u_idx):
self.U[u]=ue
for ve,v in zip(V_embedding,v_idx):
self.V[v]=ve
self.normalized_V = np.sqrt(np.sum(self.V * self.V, axis=1))
self.normalized_U = np.sqrt(np.sum(self.U * self.U, axis=1))
self.ranking_performance()
print("Optimization Finished!")
def predictForRanking(self, u):
'invoked to rank all the items for the user'
if self.data.containsUser(u):
uid = self.data.user[u]
return np.divide(self.V.dot(self.U[uid]),self.normalized_U[uid]*self.normalized_V)
else:
return [self.data.globalMean] * self.num_items
| gpl-3.0 |
refeed/coala | tests/results/result_actions/PrintMoreInfoActionTest.py | 33 | 1354 | import unittest
from coala_utils.ContextManagers import retrieve_stdout
from coalib.results.Result import Result
from coalib.results.result_actions.PrintMoreInfoAction import (
PrintMoreInfoAction)
from coalib.settings.Section import Section
class PrintMoreInfoActionTest(unittest.TestCase):
    """Tests for PrintMoreInfoAction's applicability check and apply()."""

    def setUp(self):
        self.uut = PrintMoreInfoAction()
        # A result that carries additional_info, so the action is applicable.
        self.test_result = Result(
            'origin', 'message',
            additional_info='A lot of additional information can be found here')

    def test_is_applicable(self):
        """Non-Result input raises; no-info results yield a message string."""
        with self.assertRaises(TypeError):
            self.uut.is_applicable(1, None, None)

        self.assertEqual(
            self.uut.is_applicable(Result('o', 'm'), None, None),
            'There is no additional info.'
        )

        self.assertTrue(self.uut.is_applicable(self.test_result, None, None))

    def test_apply(self):
        """apply prints the additional_info and leaves the file dict unchanged."""
        with retrieve_stdout() as stdout:
            self.assertEqual(self.uut.apply_from_section(self.test_result,
                                                         {},
                                                         {},
                                                         Section('name')),
                             {})
            self.assertEqual(stdout.getvalue(),
                             self.test_result.additional_info + '\n')
| agpl-3.0 |
132nd-etcher/EMFT | emft/plugins/reorder/gui/tab_reorder.py | 1 | 4859 | # coding=utf-8
from emft.config import Config
from emft.core.logging import make_logger
from emft.gui.base import Checkbox, GroupBox, HLayout, HSpacer, Label, PushButton, Radio, VLayout
from emft.gui.main_ui_tab_widget import MainUiTabChild
from emft.plugins.reorder.adapter.tab_reorder_adapter import TAB_NAME, TabReorderAdapter
from emft.plugins.reorder.service.reorder_miz import ReorderMiz
from .widget_auto_reorder import WidgetAutoReorder
from .widget_manual_reorder import WidgetManualReorder
LOGGER = make_logger(__name__)
class TabChildReorder(MainUiTabChild, TabReorderAdapter):
    """Main-UI tab that lets the user reorder MIZ files (manual or auto mode)."""

    def tab_reorder_update_view_after_artifact_scan(self, *args, **kwargs):
        # Adapter hook; this tab has nothing to refresh after an artifact scan.
        pass

    def tab_reorder_update_view_after_branches_scan(self, *args, **kwargs):
        # Adapter hook; this tab has nothing to refresh after a branches scan.
        pass

    def tab_reorder_change_active_profile(self, new_profile_name):
        # Adapter hook; profile changes do not affect this tab's widgets.
        pass

    def tab_clicked(self):
        pass

    @property
    def tab_title(self):
        """Title shown on the tab header."""
        return TAB_NAME

    def __init__(self, parent=None):
        """Build the tab's widgets and layout, then restore saved settings."""
        MainUiTabChild.__init__(self, parent=parent)
        self.check_skip_options = Checkbox(
            'Skip "options" file',
            self._on_click_option_checkbox
        )
        self.radio_manual = Radio('Manual mode', self._on_click_radio_manual_or_auto)
        self.radio_auto = Radio('Auto mode', self._on_click_radio_manual_or_auto)
        self.widget_manual = WidgetManualReorder(self)
        self.widget_auto = WidgetAutoReorder(self)
        self.btn_reorder = PushButton(
            text='Reorder MIZ file',
            func=self._on_click_reorder_btn,
            parent=self,
            min_height=40,
        )
        self.setLayout(
            VLayout(
                [
                    Label(
                        'By design, LUA tables are unordered, which makes tracking changes extremely difficult.\n\n'
                        'This lets you reorder them alphabetically before you push them in a SCM.'
                    ),
                    GroupBox(
                        'Options',
                        VLayout(
                            [
                                self.check_skip_options,
                                Label(
                                    'The "options" file at the root of the MIZ is player-specific, and is of very'
                                    ' relative import for the MIZ file itself. To avoid having irrelevant changes in'
                                    ' the SCM, it can be safely skipped during reordering.'
                                ),
                            ],
                        ),
                    ),
                    GroupBox(
                        title='Select re-ordering method',
                        layout=HLayout(
                            [
                                HSpacer(),
                                self.radio_manual,
                                HSpacer(),
                                self.radio_auto,
                                HSpacer(),
                            ],
                        ),
                    ),
                    GroupBox(
                        title='Reordering setup',
                        layout=VLayout(
                            [
                                self.widget_manual,
                                self.widget_auto,
                            ],
                        ),
                    ),
                    self.btn_reorder,
                ],
                # set_stretch=[(3, 1)]
                # add_stretch=True,
            ),
        )
        self._load_from_config()

    def _load_from_config(self):
        """Restore mode and skip-options state from the persisted Config.

        NOTE(review): initial visibility of widget_manual/widget_auto relies on
        setChecked() triggering the radio callback -- confirm.
        """
        self.radio_auto.setChecked(Config().auto_mode)
        self.radio_manual.setChecked(not Config().auto_mode)
        self.check_skip_options.setChecked(Config().skip_options_file)

    def _write_selected_mode_to_config(self):
        # Persist: auto_mode is True iff the auto radio is selected.
        Config().auto_mode = self._auto_mode_is_selected

    @property
    def _manual_mode_is_selected(self):
        return self.radio_manual.isChecked()

    @property
    def _auto_mode_is_selected(self):
        return self.radio_auto.isChecked()

    def _on_click_option_checkbox(self):
        # Persist the skip-options choice immediately on toggle.
        Config().skip_options_file = self.check_skip_options.isChecked()

    def _on_click_radio_manual_or_auto(self):
        # Show only the widget matching the selected mode, then persist it.
        self.widget_manual.setVisible(self.radio_manual.isChecked())
        self.widget_auto.setVisible(self.radio_auto.isChecked())
        self._write_selected_mode_to_config()

    def _reorder_manual(self):
        ReorderMiz.manual_reorder(self.widget_manual.path_to_miz)

    @staticmethod
    def _reorder_auto():
        ReorderMiz.auto_reorder()

    def _on_click_reorder_btn(self):
        """Run the reorder matching the currently selected mode."""
        if self._manual_mode_is_selected:
            self._reorder_manual()
        elif self._auto_mode_is_selected:
            self._reorder_auto()
| gpl-3.0 |
krzkaczor/antlr4 | runtime/Python3/src/antlr4/atn/ATNConfigSet.py | 11 | 9413 | #
# [The "BSD license"]
# Copyright (c) 2012 Terence Parr
# Copyright (c) 2012 Sam Harwell
# Copyright (c) 2014 Eric Vergnaud
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. The name of the author may not be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track
# info about the set, with support for combining similar configurations using a
# graph-structured stack.
#/
from io import StringIO
from antlr4.PredictionContext import PredictionContext, merge
from antlr4.Utils import str_list
from antlr4.atn.ATN import ATN
from antlr4.atn.ATNConfig import ATNConfig
from antlr4.atn.SemanticContext import SemanticContext
from antlr4.error.Errors import UnsupportedOperationException, IllegalStateException
ATNSimulator = None
class ATNConfigSet(object):
#
# The reason that we need this is because we don't want the hash map to use
# the standard hash code and equals. We need all configurations with the same
# {@code (s,i,_,semctx)} to be equal. Unfortunately, this key effectively doubles
# the number of objects associated with ATNConfigs. The other solution is to
# use a hash table that lets us specify the equals/hashcode operation.
def __init__(self, fullCtx:bool=True):
# All configs but hashed by (s, i, _, pi) not including context. Wiped out
# when we go readonly as this set becomes a DFA state.
self.configLookup = dict()
# Indicates that this configuration set is part of a full context
# LL prediction. It will be used to determine how to merge $. With SLL
# it's a wildcard whereas it is not for LL context merge.
self.fullCtx = fullCtx
# Indicates that the set of configurations is read-only. Do not
# allow any code to manipulate the set; DFA states will point at
# the sets and they must not change. This does not protect the other
# fields; in particular, conflictingAlts is set after
# we've made this readonly.
self.readonly = False
# Track the elements as they are added to the set; supports get(i)#/
self.configs = []
# TODO: these fields make me pretty uncomfortable but nice to pack up info together, saves recomputation
# TODO: can we track conflicts as they are added to save scanning configs later?
self.uniqueAlt = 0
self.conflictingAlts = None
# Used in parser and lexer. In lexer, it indicates we hit a pred
# while computing a closure operation. Don't make a DFA state from this.
self.hasSemanticContext = False
self.dipsIntoOuterContext = False
self.cachedHashCode = -1
def __iter__(self):
return self.configs.__iter__()
# Adding a new config means merging contexts with existing configs for
# {@code (s, i, pi, _)}, where {@code s} is the
# {@link ATNConfig#state}, {@code i} is the {@link ATNConfig#alt}, and
# {@code pi} is the {@link ATNConfig#semanticContext}. We use
# {@code (s,i,pi)} as key.
#
# <p>This method updates {@link #dipsIntoOuterContext} and
# {@link #hasSemanticContext} when necessary.</p>
#/
def add(self, config:ATNConfig, mergeCache=None):
if self.readonly:
raise Exception("This set is readonly")
if config.semanticContext is not SemanticContext.NONE:
self.hasSemanticContext = True
if config.reachesIntoOuterContext > 0:
self.dipsIntoOuterContext = True
existing = self.getOrAdd(config)
if existing is config:
self.cachedHashCode = -1
self.configs.append(config) # track order here
return True
# a previous (s,i,pi,_), merge with it and save result
rootIsWildcard = not self.fullCtx
merged = merge(existing.context, config.context, rootIsWildcard, mergeCache)
# no need to check for existing.context, config.context in cache
# since only way to create new graphs is "call rule" and here. We
# cache at both places.
existing.reachesIntoOuterContext = max(existing.reachesIntoOuterContext, config.reachesIntoOuterContext)
# make sure to preserve the precedence filter suppression during the merge
if config.precedenceFilterSuppressed:
existing.precedenceFilterSuppressed = True
existing.context = merged # replace context; no need to alt mapping
return True
def getOrAdd(self, config:ATNConfig):
h = hash(config)
l = self.configLookup.get(h, None)
if l is not None:
for c in l:
if c==config:
return c
if l is None:
l = [config]
self.configLookup[h] = l
else:
l.append(config)
return config
def getStates(self):
states = set()
for c in self.configs:
states.add(c.state)
return states
def getPredicates(self):
preds = list()
for c in self.configs:
if c.semanticContext!=SemanticContext.NONE:
preds.append(c.semanticContext)
return preds
def get(self, i:int):
return self.configs[i]
def optimizeConfigs(self, interpreter:ATNSimulator):
if self.readonly:
raise IllegalStateException("This set is readonly")
if len(self.configs)==0:
return
for config in self.configs:
config.context = interpreter.getCachedContext(config.context)
def addAll(self, coll:list):
for c in coll:
self.add(c)
return False
def __eq__(self, other):
if self is other:
return True
elif not isinstance(other, ATNConfigSet):
return False
same = self.configs is not None and \
self.configs==other.configs and \
self.fullCtx == other.fullCtx and \
self.uniqueAlt == other.uniqueAlt and \
self.conflictingAlts == other.conflictingAlts and \
self.hasSemanticContext == other.hasSemanticContext and \
self.dipsIntoOuterContext == other.dipsIntoOuterContext
return same
def __hash__(self):
if self.readonly:
if self.cachedHashCode == -1:
self.cachedHashCode = self.hashConfigs()
return self.cachedHashCode
return self.hashConfigs()
def hashConfigs(self):
h = 0
for cfg in self.configs:
h = hash((h, cfg))
return h
def __len__(self):
return len(self.configs)
def isEmpty(self):
return len(self.configs)==0
def __contains__(self, config):
if self.configLookup is None:
raise UnsupportedOperationException("This method is not implemented for readonly sets.")
h = hash(config)
l = self.configLookup.get(h, None)
return l is not None and config in l
def clear(self):
if self.readonly:
raise IllegalStateException("This set is readonly")
self.configs.clear()
self.cachedHashCode = -1
self.configLookup.clear()
def setReadonly(self, readonly:bool):
self.readonly = readonly
self.configLookup = None # can't mod, no need for lookup cache
def __str__(self):
    # Render as "[configs]" followed only by the flags that differ from
    # their defaults, e.g. ",hasSemanticContext=True,uniqueAlt=1".
    with StringIO() as buf:
        buf.write(str_list(self.configs))
        if self.hasSemanticContext:
            buf.write(",hasSemanticContext=")
            buf.write(str(self.hasSemanticContext))
        if self.uniqueAlt!=ATN.INVALID_ALT_NUMBER:
            buf.write(",uniqueAlt=")
            buf.write(str(self.uniqueAlt))
        if self.conflictingAlts is not None:
            buf.write(",conflictingAlts=")
            buf.write(str(self.conflictingAlts))
        if self.dipsIntoOuterContext:
            buf.write(",dipsIntoOuterContext")
        return buf.getvalue()
class OrderedATNConfigSet(ATNConfigSet):
    """ATNConfigSet subclass placeholder.

    Currently identical to the base class's default construction; it
    exists so callers can distinguish the variant by type.
    """
    def __init__(self):
        super().__init__()
| bsd-3-clause |
denis-pitul/django | django/contrib/gis/forms/widgets.py | 422 | 3659 | from __future__ import unicode_literals
import logging
from django.conf import settings
from django.contrib.gis import gdal
from django.contrib.gis.geos import GEOSException, GEOSGeometry
from django.forms.widgets import Widget
from django.template import loader
from django.utils import six, translation
logger = logging.getLogger('django.contrib.gis')
class BaseGeometryWidget(Widget):
    """
    The base class for rich geometry widgets.
    Renders a map using the WKT of the geometry.
    """
    geom_type = 'GEOMETRY'
    map_srid = 4326
    map_width = 600
    map_height = 400
    display_raw = False
    supports_3d = False
    template_name = ''  # set on subclasses

    def __init__(self, attrs=None):
        # Seed widget attrs from the class-level defaults, then let the
        # caller-supplied attrs override them.
        self.attrs = {}
        for key in ('geom_type', 'map_srid', 'map_width', 'map_height', 'display_raw'):
            self.attrs[key] = getattr(self, key)
        if attrs:
            self.attrs.update(attrs)

    def serialize(self, value):
        """Serialize a geometry to WKT for the hidden form field."""
        return value.wkt if value else ''

    def deserialize(self, value):
        """Build a GEOSGeometry from raw input; log and return None on failure."""
        try:
            return GEOSGeometry(value, self.map_srid)
        except (GEOSException, ValueError) as err:
            # Lazy %-style logging args: the message is only formatted when
            # the record is actually emitted.
            logger.error("Error creating geometry from value '%s' (%s)",
                         value, err)
        return None

    def render(self, name, value, attrs=None):
        """Render the map template with the (possibly transformed) geometry."""
        # If a string reaches here (via a validation error on another
        # field) then just reconstruct the Geometry.
        if isinstance(value, six.string_types):
            value = self.deserialize(value)
        if value:
            # Check that srid of value and map match; reproject via OGR
            # when they differ so the geometry draws in the right place.
            if value.srid != self.map_srid:
                try:
                    ogr = value.ogr
                    ogr.transform(self.map_srid)
                    value = ogr
                except gdal.GDALException as err:
                    logger.error(
                        "Error transforming geometry from srid '%s' to srid '%s' (%s)",
                        value.srid, self.map_srid, err)
        context = self.build_attrs(
            attrs,
            name=name,
            module='geodjango_%s' % name.replace('-', '_'),  # JS-safe
            serialized=self.serialize(value),
            geom_type=gdal.OGRGeomType(self.attrs['geom_type']),
            STATIC_URL=settings.STATIC_URL,
            LANGUAGE_BIDI=translation.get_language_bidi(),
        )
        return loader.render_to_string(self.template_name, context)
class OpenLayersWidget(BaseGeometryWidget):
    # Plain OpenLayers map widget; adds only the template and JS assets.
    template_name = 'gis/openlayers.html'

    class Media:
        js = (
            'http://openlayers.org/api/2.13/OpenLayers.js',
            'gis/js/OLMapWidget.js',
        )
class OSMWidget(BaseGeometryWidget):
    """
    An OpenLayers/OpenStreetMap-based widget.
    """
    template_name = 'gis/openlayers-osm.html'
    default_lon = 5
    default_lat = 47

    class Media:
        js = (
            'http://openlayers.org/api/2.13/OpenLayers.js',
            'http://www.openstreetmap.org/openlayers/OpenStreetMap.js',
            'gis/js/OLMapWidget.js',
        )

    def __init__(self, attrs=None):
        super(OSMWidget, self).__init__()
        # Expose the default map center through the widget attrs, letting
        # caller-supplied attrs override it.
        self.attrs.update(
            (key, getattr(self, key)) for key in ('default_lon', 'default_lat'))
        if attrs:
            self.attrs.update(attrs)

    @property
    def map_srid(self):
        """Spherical-mercator SRID: official 3857 with GDAL, legacy 900913 otherwise."""
        return 3857 if gdal.HAS_GDAL else 900913
| bsd-3-clause |
DOTOCA/plugin.video.netflixbmc | resources/lib/chrome_cookies.py | 7 | 1409 | __author__ = 'corona'
import sqlite3
try:
import cPickle as pickle
except ImportError:
import pickle
def inject_cookies_into_chrome(session, chrome_cookie_file):
    """Copy the cookies of a requests-style session into a Chrome cookie DB.

    Each cookie in ``session.cookies`` is upserted into the ``cookies``
    table of the SQLite database at ``chrome_cookie_file``.

    Fixes over the previous version:
    - uses a parameterized query instead of %-interpolated SQL, so cookie
      values containing quotes no longer break the statement (and cannot
      inject SQL);
    - the expiry timestamp goes into ``expires_utc`` with ``has_expires``
      used as the 0/1 flag, instead of the timestamp landing in
      ``has_expires``;
    - the connection is always closed, even on error.

    NOTE(review): Chrome stores timestamps as microseconds since
    1601-01-01; this writes the cookie's raw Unix timestamp -- confirm
    whether callers depend on that.
    """
    sql = (
        'insert or replace into cookies '
        '(host_key, name, value, path, secure, httponly, has_expires, '
        'expires_utc, last_access_utc, encrypted_value) '
        'values (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)'
    )
    conn = sqlite3.connect(chrome_cookie_file)
    try:
        for cookie in session.cookies:
            expires = cookie.expires
            conn.execute(sql, (
                cookie.domain or '',
                cookie.name or '',
                cookie.value or '',
                cookie.path or '',
                0,                                           # secure
                0,                                           # httponly
                1 if expires else 0,                         # has_expires
                int(expires) if expires is not None else 0,  # expires_utc
                0,                                           # last_access_utc
                '',                                          # encrypted_value
            ))
        conn.commit()
    finally:
        conn.close()
| gpl-2.0 |
r-icarus/openstack_microserver | openstack_dashboard/openstack/common/notifier/log_notifier.py | 19 | 1297 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from openstack_dashboard.openstack.common import jsonutils
from openstack_dashboard.openstack.common import log as logging
CONF = cfg.CONF
def notify(_context, message):
    """Notifies the recipient of the desired event given the model.

    Log notifications using openstack's default logging system. The log
    level comes from the message's 'priority' field, falling back to the
    configured default notification level.
    """
    level = message.get('priority', CONF.default_notification_level).lower()
    event_logger = logging.getLogger(
        'openstack_dashboard.openstack.common.notification.%s'
        % message['event_type'])
    getattr(event_logger, level)(jsonutils.dumps(message))
| apache-2.0 |
Mapita/mapita_ci | mapita/mapita_ci/settings_planproposal.py | 1 | 8611 | import os
# Django settings for geonition project.
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# List of callables that know how to import templates from various sources.
# NOTE(review): TEMPLATE_LOADERS is re-assigned near the bottom of this file
# with a different loader order; this first definition is dead code.
TEMPLATE_LOADERS = (
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.filesystem.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.gzip.GZipMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    # 'geonition_utils.middleware.PreventCacheMiddleware', #should be only for REST data api
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
    'geonition_utils.middleware.IEEdgeMiddleware', #should be only for ui html/css apps
)
ROOT_URLCONF = 'mapita_ci.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'mapita_ci.wsgi.application'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
#REQUIRED AND MODIFIED
SESSION_EXPIRE_AT_BROWSER_CLOSE = True
#CHANGE TEST RUNNER TO OUR OWN TO DISABLE MODELTRANSLATION TESTS
TEST_RUNNER = 'mapita_ci.tests.GeonitionTestSuiteRunner'
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    'django.contrib.gis',
    #geonition apps
    'base_page',
    # 'dashboard',
    # 'maps',
    # 'auth_page',
    'plan_proposals',
    # 'geonition_client',
    # 'gntauth',
    # 'gntimages',
    # 'geojson_rest',
    # 'geonition_utils',
    # 'geoforms',
    # third party apps
    'modeltranslation',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    "django.contrib.auth.context_processors.auth",
    "django.core.context_processors.debug",
    "django.core.context_processors.i18n",
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.core.context_processors.tz",
    "django.contrib.messages.context_processors.messages",
    "django.core.context_processors.request",
    "base_page.context_processors.organization"
)
#TEMPLATE_DIRS = (os.path.dirname(os.path.realpath(__file__)) + '/../statics/templates')
JAVASCRIPT_CLIENT_TEMPLATES = [
    'geonition_auth.jquery.js',
    'data_processing.jquery.js',
    'opensocial_people.jquery.js',
    'geonition_geojson.jquery.js',
    'questionnaire.api.js'
]
# NOTE(review): mid-file import; conventionally this belongs at the top of
# the module with the other imports.
from django.core.urlresolvers import reverse_lazy
LOGIN_REDIRECT_URL = reverse_lazy('dashboard')
LOGIN_URL = reverse_lazy('login')
LOGOUT_URL = reverse_lazy('logout')
# Make this unique, and don't share it with anybody.
# NOTE(review): hard-coded SECRET_KEY committed to source control; rotate it
# and load it from the environment before any production use.
SECRET_KEY = 'a[9lifg_(udnsh5w$=4@+kjyt93ys%c9wa8ck(=22_1d*w2gws'
ADMINS = (
    ('Mikko Johansson', 'mikko.johansson@mapita.fi'),
)
MANAGERS = ADMINS
# NOTE(review): plaintext database credentials in settings; prefer
# environment variables or a local_settings override.
DATABASES = {
    'default': {
        'ENGINE': 'django.contrib.gis.db.backends.postgis', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': 'testdb', # Or path to database file if using sqlite3.
        'USER': 'test_user', # Not used with sqlite3.
        'PASSWORD': 'test_pw', # Not used with sqlite3.
        'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '5432', # Set to empty string for default. Not used with sqlite3.
    }
}
MEDIA_ROOT = '/home/msjohans/geonition_test/media'
#SPATIAL_REFERENCE_SYSTEM_ID = 3067
SPATIAL_REFERENCE_SYSTEM_ID = 3857
LANGUAGES = (('en', 'English'),
             ('fi', 'Suomi'),)
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en'
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Europe/Helsinki'
SITE_ID = 1
# NOTE(review): DEBUG is True here; production deployments must override it.
DEBUG = True
# NOTE(review): second definition of TEMPLATE_LOADERS (filesystem loader
# first this time); it silently overrides the earlier one -- consolidate.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
TEMPLATE_DEBUG = DEBUG
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = '/media/'
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = '/home/msjohans/geonition_test/static'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
POSTGIS_VERSION = (1, 5, 3)
POSTGIS_TEMPLATE = 'template_postgis'
EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend'
ORGANIZATION_ADMIN_DEFAULT_MAP_SETTINGS = {'default_lon': 0,
                                           'default_lat': 0,
                                           'default_zoom': 4}
#MODEL TRANSLATION
#MODELTRANSLATION_TRANSLATION_REGISTRY = 'mapita_ci.translation_planproposals'
#MODELTRANSLATION_TRANSLATION_FILES = ('mapita_ci.translation_planproposals',)
# for django-jenkins
INSTALLED_APPS += ('django_jenkins',)
PROJECT_APPS = ('plan_proposals',)
#INSTALLED_APPS += ('django_extensions',)
#PROJECT_APPS = [appname for appname in INSTALLED_APPS if not (appname.startswith('django') or appname.startswith('modeltranslation'))]
JENKINS_TEST_RUNNER = 'mapita_ci.tests.GeonitionJenkinsTestSuiteRunner'
JENKINS_TASKS = (
    'django_jenkins.tasks.with_coverage',
    'django_jenkins.tasks.run_pylint',
    'django_jenkins.tasks.django_tests', # select one django or
    #'django_jenkins.tasks.dir_tests' # directory tests discovery
    'django_jenkins.tasks.run_pep8',
    # 'django_jenkins.tasks.run_pyflakes',
    'django_jenkins.tasks.run_jshint',
    'django_jenkins.tasks.run_csslint',
    # 'django_jenkins.tasks.run_sloccount',
    # 'django_jenkins.tasks.run_graphmodels',
    # 'django_jenkins.tasks.lettuce_tests',
)
| mit |
CaledoniaProject/Empire | lib/modules/persistence/misc/disable_machine_acct_change.py | 22 | 2079 | from lib.common import helpers
class Module:
def __init__(self, mainMenu, params=[]):
self.info = {
'Name': 'Invoke-DisableMachineAcctChange',
'Author': ['@harmj0y'],
'Description': ('Disables the machine account for the target system '
'from changing its password automatically.'),
'Background' : False,
'OutputExtension' : None,
'NeedsAdmin' : True,
'OpsecSafe' : True,
'MinPSVersion' : '2',
'Comments': []
}
# any options needed by the module, settable during runtime
self.options = {
# format:
# value_name : {description, required, default_value}
'Agent' : {
'Description' : 'Agent to run module on.',
'Required' : True,
'Value' : ''
},
'CleanUp' : {
'Description' : 'Switch. Re-enable machine password changes.',
'Required' : False,
'Value' : ''
}
}
# save off a copy of the mainMenu object to access external functionality
# like listeners/agent handlers/etc.
self.mainMenu = mainMenu
for param in params:
# parameter format is [Name, Value]
option, value = param
if option in self.options:
self.options[option]['Value'] = value
def generate(self):
cleanup = self.options['CleanUp']['Value']
if cleanup.lower() == 'true':
return "$null=Set-ItemProperty -Force -Path HKLM:\SYSTEM\CurrentControlSet\Services\Netlogon\Parameters -Name DisablePasswordChange -Value 0; 'Machine account password change re-enabled.'"
return "$null=Set-ItemProperty -Force -Path HKLM:\SYSTEM\CurrentControlSet\Services\Netlogon\Parameters -Name DisablePasswordChange -Value 1; 'Machine account password change disabled.'"
| bsd-3-clause |
tayfun/django | django/contrib/gis/db/backends/postgis/models.py | 396 | 2158 | """
The GeometryColumns and SpatialRefSys models for the PostGIS backend.
"""
from django.contrib.gis.db.backends.base.models import SpatialRefSysMixin
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class PostGISGeometryColumns(models.Model):
    """
    The 'geometry_columns' table from the PostGIS. See the PostGIS
    documentation at Ch. 4.3.2.

    On PostGIS 2, this is a view.
    """
    f_table_catalog = models.CharField(max_length=256)
    f_table_schema = models.CharField(max_length=256)
    f_table_name = models.CharField(max_length=256)
    f_geometry_column = models.CharField(max_length=256)
    coord_dimension = models.IntegerField()
    srid = models.IntegerField(primary_key=True)
    type = models.CharField(max_length=30)

    class Meta:
        # Unmanaged: the table/view belongs to PostGIS, not to Django
        # migrations.
        app_label = 'gis'
        db_table = 'geometry_columns'
        managed = False

    @classmethod
    def table_name_col(cls):
        """
        Returns the name of the metadata column used to store the feature table
        name.
        """
        return 'f_table_name'

    @classmethod
    def geom_col_name(cls):
        """
        Returns the name of the metadata column used to store the feature
        geometry column.
        """
        return 'f_geometry_column'

    def __str__(self):
        return "%s.%s - %dD %s field (SRID: %d)" % \
               (self.f_table_name, self.f_geometry_column,
                self.coord_dimension, self.type, self.srid)
class PostGISSpatialRefSys(models.Model, SpatialRefSysMixin):
    """
    The 'spatial_ref_sys' table from PostGIS. See the PostGIS
    documentation at Ch. 4.2.1.
    """
    srid = models.IntegerField(primary_key=True)
    auth_name = models.CharField(max_length=256)
    auth_srid = models.IntegerField()
    srtext = models.CharField(max_length=2048)
    proj4text = models.CharField(max_length=2048)

    class Meta:
        # Unmanaged: the table belongs to PostGIS, not to Django migrations.
        app_label = 'gis'
        db_table = 'spatial_ref_sys'
        managed = False

    @property
    def wkt(self):
        """The WKT representation of this spatial reference system."""
        return self.srtext

    @classmethod
    def wkt_col(cls):
        """Name of the column holding the WKT of the reference system."""
        return 'srtext'
| bsd-3-clause |
bottompawn/kbengine | kbe/res/scripts/common/Lib/json/decoder.py | 89 | 12763 | """Implementation of JSONDecoder
"""
import re
from json import scanner
try:
from _json import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
NaN = float('nan')
PosInf = float('inf')
NegInf = float('-inf')
def linecol(doc, pos):
    """Return the 1-based (line, column) of index *pos* inside *doc*.

    Works on both str and bytes documents.
    """
    nl = b'\n' if isinstance(doc, bytes) else '\n'
    lineno = doc.count(nl, 0, pos) + 1
    colno = pos + 1 if lineno == 1 else pos - doc.rindex(nl, 0, pos)
    return lineno, colno
def errmsg(msg, doc, pos, end=None):
    """Format *msg* with line/column location info for *pos* (and *end*).

    Note that this function is called from _json. Removed the dead
    commented-out %-formatting variants that duplicated each branch.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        fmt = '{0}: line {1} column {2} (char {3})'
        return fmt.format(msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
    return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
_CONSTANTS = {
'-Infinity': NegInf,
'Infinity': PosInf,
'NaN': NaN,
}
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
BACKSLASH = {
'"': '"', '\\': '\\', '/': '/',
'b': '\b', 'f': '\f', 'n': '\n', 'r': '\r', 't': '\t',
}
def _decode_uXXXX(s, pos):
    """Parse the four hex digits of a \\uXXXX escape starting after *pos*.

    Raises ValueError (with location info) on a malformed escape;
    '0x'/'0X' prefixes are rejected explicitly since int(..., 16) would
    accept them.
    """
    hexdigits = s[pos + 1:pos + 5]
    if len(hexdigits) == 4 and hexdigits[1] not in 'xX':
        try:
            return int(hexdigits, 16)
        except ValueError:
            pass
    raise ValueError(errmsg("Invalid \\uXXXX escape", s, pos))
def py_scanstring(s, end, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content is contains zero or more unescaped string characters
        if content:
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                #msg = "Invalid control character %r at" % (terminator,)
                msg = "Invalid control character {0!r} at".format(terminator)
                raise ValueError(errmsg(msg, s, end))
            else:
                _append(terminator)
            continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: {0!r}".format(esc)
                raise ValueError(errmsg(msg, s, end))
            end += 1
        else:
            uni = _decode_uXXXX(s, end)
            end += 5
            # Surrogate pair: combine a high surrogate (\uD800-\uDBFF) with
            # an immediately following low surrogate escape into a single
            # non-BMP code point.
            if 0xd800 <= uni <= 0xdbff and s[end:end + 2] == '\\u':
                uni2 = _decode_uXXXX(s, end + 1)
                if 0xdc00 <= uni2 <= 0xdfff:
                    uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                    end += 6
            char = chr(uni)
        _append(char)
    return ''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject(s_and_end, strict, scan_once, object_hook, object_pairs_hook,
               memo=None, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON object starting just after its '{'. Returns the parsed
    # object (dict, or whatever object_pairs_hook/object_hook produce) and
    # the index just past the closing '}'.
    s, end = s_and_end
    pairs = []
    pairs_append = pairs.append
    # Backwards compatibility
    if memo is None:
        memo = {}
    # memo interns repeated key strings so identical keys share one object.
    memo_get = memo.setdefault
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg(
                "Expecting property name enclosed in double quotes", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, strict)
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting ':' delimiter", s, end))
        end += 1
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            raise ValueError(errmsg("Expecting value", s, err.value)) from None
        pairs_append((key, value))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1))
        end = _w(s, end).end()
        nextchar = s[end:end + 1]
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg(
                "Expecting property name enclosed in double quotes", s, end - 1))
    # object_pairs_hook takes priority over object_hook when both are set.
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray(s_and_end, scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON array starting just after its '['. Returns the list of
    # parsed values and the index just past the closing ']'.
    s, end = s_and_end
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration as err:
            raise ValueError(errmsg("Expecting value", s, err.value)) from None
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting ',' delimiter", s, end - 1))
        # Skip whitespace after the comma before the next value.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | str               |
    +---------------+-------------------+
    | number (int)  | int               |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    def __init__(self, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``object_pairs_hook``, if specified will be called with the result of
        every JSON object decoded with an ordered list of pairs.  The return
        value of ``object_pairs_hook`` will be used instead of the ``dict``.
        This feature can be used to implement custom decoders that rely on the
        order that the key and value pairs are decoded (for example,
        collections.OrderedDict will remember the order of insertion). If
        ``object_hook`` is also defined, the ``object_pairs_hook`` takes
        priority.

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        If ``strict`` is false (true is the default), then control
        characters will be allowed inside strings.  Control characters in
        this context are those with character codes in the 0-31 range,
        including ``'\\t'`` (tab), ``'\\n'``, ``'\\r'`` and ``'\\0'``.
        """
        self.object_hook = object_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.object_pairs_hook = object_pairs_hook
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # Shared key-interning memo, consumed by JSONObject.
        self.memo = {}
        # The scanner closes over the parse_* attributes set above, so it
        # must be created last.
        self.scan_once = scanner.make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` instance
        containing a JSON document).

        Raises ValueError if the document has trailing non-whitespace data.
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` beginning with
        a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration as err:
            raise ValueError(errmsg("Expecting value", s, err.value)) from None
        return obj, end
| lgpl-3.0 |
doismellburning/edx-platform | common/lib/xmodule/xmodule/annotator_token.py | 211 | 1542 | """
This file contains a function used to retrieve the token for the annotation backend
without having to create a view, but just returning a string instead.
It can be called from other files by using the following:
from xmodule.annotator_token import retrieve_token
"""
import datetime
from firebase_token_generator import create_token
def retrieve_token(userid, secret):
    '''
    Return a token for the backend of annotations.

    The payload records the issue time (local time with its UTC offset),
    the consumer key (the course's secret) and the user id so the backend
    can identify the user in a federated setup; the token expires after
    24 hours (ttl).
    '''
    # Build an ISO-8601 timestamp that carries the local UTC offset, which
    # datetime.isoformat() alone does not include for naive datetimes.
    # For more information:
    # http://stackoverflow.com/questions/3401428/how-to-get-an-isoformat-datetime-string-including-the-default-timezone
    dtnow = datetime.datetime.now()
    delta = dtnow - datetime.datetime.utcnow()
    # +30 seconds rounds the offset to the nearest whole minute.
    offset_minutes = (delta.days * 24 * 60 * 60 + delta.seconds + 30) // 60
    offset_hours, offset_mins = divmod(offset_minutes, 60)
    issued_at = "%s%+02d:%02d" % (dtnow.isoformat(), offset_hours, offset_mins)
    claims = {"issuedAt": issued_at, "consumerKey": secret,
              "userId": userid, "ttl": 86400}
    return create_token(secret, claims)
| agpl-3.0 |
asdacap/iiumschedule | server/lib/jinja2/testsuite/inheritance.py | 414 | 8248 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.inheritance
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests the template inheritance feature.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment, DictLoader, TemplateError
# Template fixtures exercising block inheritance across four levels of
# {% extends %}.
LAYOUTTEMPLATE = '''\
|{% block block1 %}block 1 from layout{% endblock %}
|{% block block2 %}block 2 from layout{% endblock %}
|{% block block3 %}
{% block block4 %}nested block 4 from layout{% endblock %}
{% endblock %}|'''

LEVEL1TEMPLATE = '''\
{% extends "layout" %}
{% block block1 %}block 1 from level1{% endblock %}'''

LEVEL2TEMPLATE = '''\
{% extends "level1" %}
{% block block2 %}{% block block5 %}nested block 5 from level2{%
endblock %}{% endblock %}'''

LEVEL3TEMPLATE = '''\
{% extends "level2" %}
{% block block5 %}block 5 from level3{% endblock %}
{% block block4 %}block 4 from level3{% endblock %}
'''

LEVEL4TEMPLATE = '''\
{% extends "level3" %}
{% block block3 %}block 3 from level4{% endblock %}
'''

# Regression fixture: a block defined inside a statically-false branch
# must still compile.
WORKINGTEMPLATE = '''\
{% extends "layout" %}
{% block block1 %}
{% if false %}
{% block block2 %}
this should workd
{% endblock %}
{% endif %}
{% endblock %}
'''

# Regression fixture: two {% extends %} tags in a single template.
DOUBLEEXTENDS = '''\
{% extends "layout" %}
{% extends "layout" %}
{% block block1 %}
{% if false %}
{% block block2 %}
this should workd
{% endblock %}
{% endif %}
{% endblock %}
'''

# Shared environment used by most tests below.
env = Environment(loader=DictLoader({
    'layout': LAYOUTTEMPLATE,
    'level1': LEVEL1TEMPLATE,
    'level2': LEVEL2TEMPLATE,
    'level3': LEVEL3TEMPLATE,
    'level4': LEVEL4TEMPLATE,
    'working': WORKINGTEMPLATE,
    'doublee': DOUBLEEXTENDS,
}), trim_blocks=True)
class InheritanceTestCase(JinjaTestCase):
def test_layout(self):
    """The root layout renders all of its own block defaults."""
    template = env.get_template('layout')
    expected = ('|block 1 from layout|block 2 from '
                'layout|nested block 4 from layout|')
    assert template.render() == expected
def test_level1(self):
    """Level 1 overrides block1 only; the rest comes from the layout."""
    template = env.get_template('level1')
    expected = ('|block 1 from level1|block 2 from '
                'layout|nested block 4 from layout|')
    assert template.render() == expected
def test_level2(self):
    """Level 2 replaces block2 with a nested block5."""
    template = env.get_template('level2')
    expected = ('|block 1 from level1|nested block 5 from '
                'level2|nested block 4 from layout|')
    assert template.render() == expected
def test_level3(self):
    """Level 3 overrides the nested block5 and layout's block4."""
    template = env.get_template('level3')
    expected = ('|block 1 from level1|block 5 from level3|'
                'block 4 from level3|')
    assert template.render() == expected
def test_level4(self):
    """Level 4 overrides block3, dropping layout's nested block4.

    Fix: the instance parameter was misspelled ``sel``; renamed to the
    conventional ``self`` (unittest passes the instance positionally, so
    behavior is unchanged).
    """
    tmpl = env.get_template('level4')
    assert tmpl.render() == ('|block 1 from level1|block 5 from '
                             'level3|block 3 from level4|')
def test_super(self):
env = Environment(loader=DictLoader({
'a': '{% block intro %}INTRO{% endblock %}|'
'BEFORE|{% block data %}INNER{% endblock %}|AFTER',
'b': '{% extends "a" %}{% block data %}({{ '
'super() }}){% endblock %}',
'c': '{% extends "b" %}{% block intro %}--{{ '
'super() }}--{% endblock %}\n{% block data '
'%}[{{ super() }}]{% endblock %}'
}))
tmpl = env.get_template('c')
assert tmpl.render() == '--INTRO--|BEFORE|[(INNER)]|AFTER'
def test_working(self):
tmpl = env.get_template('working')
def test_reuse_blocks(self):
tmpl = env.from_string('{{ self.foo() }}|{% block foo %}42'
'{% endblock %}|{{ self.foo() }}')
assert tmpl.render() == '42|42|42'
def test_preserve_blocks(self):
env = Environment(loader=DictLoader({
'a': '{% if false %}{% block x %}A{% endblock %}{% endif %}{{ self.x() }}',
'b': '{% extends "a" %}{% block x %}B{{ super() }}{% endblock %}'
}))
tmpl = env.get_template('b')
assert tmpl.render() == 'BA'
def test_dynamic_inheritance(self):
env = Environment(loader=DictLoader({
'master1': 'MASTER1{% block x %}{% endblock %}',
'master2': 'MASTER2{% block x %}{% endblock %}',
'child': '{% extends master %}{% block x %}CHILD{% endblock %}'
}))
tmpl = env.get_template('child')
for m in range(1, 3):
assert tmpl.render(master='master%d' % m) == 'MASTER%dCHILD' % m
def test_multi_inheritance(self):
env = Environment(loader=DictLoader({
'master1': 'MASTER1{% block x %}{% endblock %}',
'master2': 'MASTER2{% block x %}{% endblock %}',
'child': '''{% if master %}{% extends master %}{% else %}{% extends
'master1' %}{% endif %}{% block x %}CHILD{% endblock %}'''
}))
tmpl = env.get_template('child')
assert tmpl.render(master='master2') == 'MASTER2CHILD'
assert tmpl.render(master='master1') == 'MASTER1CHILD'
assert tmpl.render() == 'MASTER1CHILD'
def test_scoped_block(self):
env = Environment(loader=DictLoader({
'master.html': '{% for item in seq %}[{% block item scoped %}'
'{% endblock %}]{% endfor %}'
}))
t = env.from_string('{% extends "master.html" %}{% block item %}'
'{{ item }}{% endblock %}')
assert t.render(seq=list(range(5))) == '[0][1][2][3][4]'
def test_super_in_scoped_block(self):
env = Environment(loader=DictLoader({
'master.html': '{% for item in seq %}[{% block item scoped %}'
'{{ item }}{% endblock %}]{% endfor %}'
}))
t = env.from_string('{% extends "master.html" %}{% block item %}'
'{{ super() }}|{{ item * 2 }}{% endblock %}')
assert t.render(seq=list(range(5))) == '[0|0][1|2][2|4][3|6][4|8]'
def test_scoped_block_after_inheritance(self):
env = Environment(loader=DictLoader({
'layout.html': '''
{% block useless %}{% endblock %}
''',
'index.html': '''
{%- extends 'layout.html' %}
{% from 'helpers.html' import foo with context %}
{% block useless %}
{% for x in [1, 2, 3] %}
{% block testing scoped %}
{{ foo(x) }}
{% endblock %}
{% endfor %}
{% endblock %}
''',
'helpers.html': '''
{% macro foo(x) %}{{ the_foo + x }}{% endmacro %}
'''
}))
rv = env.get_template('index.html').render(the_foo=42).split()
assert rv == ['43', '44', '45']
class BugFixTestCase(JinjaTestCase):
    """Regression tests for inheritance-related bugs."""

    def test_fixed_macro_scoping_bug(self):
        # result is whitespace-insensitive: compared after .split()
        assert Environment(loader=DictLoader({
            'test.html': '''\
{% extends 'details.html' %}
{% macro my_macro() %}
my_macro
{% endmacro %}
{% block inner_box %}
{{ my_macro() }}
{% endblock %}
''',
            'details.html': '''\
{% extends 'standard.html' %}
{% macro my_macro() %}
my_macro
{% endmacro %}
{% block content %}
{% block outer_box %}
outer_box
{% block inner_box %}
inner_box
{% endblock %}
{% endblock %}
{% endblock %}
''',
            'standard.html': '''
{% block content %} {% endblock %}
'''
        })).get_template("test.html").render().split() == [u'outer_box', u'my_macro']

    def test_double_extends(self):
        """Ensures that a template with more than 1 {% extends ... %} usage
        raises a ``TemplateError``.
        """
        # BUG FIX: the original only called get_template() and had no
        # ``else`` branch, so the test passed vacuously -- Jinja raises
        # TemplateRuntimeError("extended multiple times") at *render*
        # time, not at load time.
        try:
            env.get_template('doublee').render()
        except Exception as e:
            assert isinstance(e, TemplateError)
        else:
            assert False, 'expected a TemplateError for the second extends'
def suite():
    """Collect all inheritance test cases into a unittest suite."""
    tests = unittest.TestSuite()
    for case in (InheritanceTestCase, BugFixTestCase):
        tests.addTest(unittest.makeSuite(case))
    return tests
| gpl-3.0 |
pavel-odintsov/MoonGen | deps/dpdk/app/test/autotest.py | 17 | 2882 | #!/usr/bin/python
# BSD LICENSE
#
# Copyright(c) 2010-2014 Intel Corporation. All rights reserved.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Intel Corporation nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Script that uses either test app or qemu controlled by python-pexpect
# NOTE: this is Python 2 code (statement-form print); keep it that way
# unless the whole DPDK test harness is ported.
import sys, autotest_data, autotest_runner


def usage():
    # Print a short usage summary (both prints end up on one line because
    # of the trailing comma).
    print"Usage: autotest.py [test app|test iso image]",
    print "[target] [whitelist|-blacklist]"

# argv[1] = binary or iso image, argv[2] = build target
if len(sys.argv) < 3:
    usage()
    sys.exit(1)

target = sys.argv[2]

test_whitelist=None
test_blacklist=None

# get blacklist/whitelist
# argv[3] is a comma separated test list; a leading '-' on the first
# entry turns the whole list into a blacklist.
if len(sys.argv) > 3:
    testlist = sys.argv[3].split(',')
    testlist = [test.lower() for test in testlist]
    if testlist[0].startswith('-'):
        testlist[0] = testlist[0].lstrip('-')
        test_blacklist = testlist
    else:
        test_whitelist = testlist

# adjust test command line
# "baremetal" targets boot the iso under qemu; otherwise run the app directly.
if "baremetal" in target:
    cmdline = "qemu-system-x86_64 -cdrom %s.iso -boot d " % (sys.argv[1])
    cmdline += "-m 2000 -smp 4 -nographic -net nic,model=e1000"
    platform = "QEMU x86_64"
else:
    cmdline = "%s -c f -n 4"%(sys.argv[1])

print cmdline

runner = autotest_runner.AutotestRunner(cmdline, target, test_blacklist, test_whitelist)

# Register every test group defined in autotest_data, then run them all.
for test_group in autotest_data.parallel_test_group_list:
    runner.add_parallel_test_group(test_group)

for test_group in autotest_data.non_parallel_test_group_list:
    runner.add_non_parallel_test_group(test_group)

runner.run_all_tests()
| mit |
2015fallproject/2015fallcase2 | static/Brython3.2.0-20150701-214155/Lib/test/unittests/test_zipfile64.py | 32 | 4273 | # Tests of the full ZIP64 functionality of zipfile
# The support.requires call is the only reason for keeping this separate
# from test_zipfile
from test import support

# XXX(nnorwitz): disable this test by looking for extra largfile resource
# which doesn't exist. This test takes over 30 minutes to run in general
# and requires more disk space than most of the buildbots.
support.requires(
    'extralargefile',
    'test requires loads of disk-space bytes and a long time to run'
)

import zipfile, os, unittest
import time
import sys

# NOTE(review): StringIO appears unused in this module -- confirm before
# removing the import.
from io import StringIO
from tempfile import TemporaryFile

from test.support import TESTFN, run_unittest, requires_zlib

# Second scratch filename, cleaned up alongside TESTFN in tearDown.
TESTFN2 = TESTFN + "2"

# How much time in seconds can pass before we print a 'Still working' message.
_PRINT_WORKING_MSG_INTERVAL = 5 * 60
class TestsWithSourceFile(unittest.TestCase):
    """Exercise ZIP64 support by storing ~6GB of raw data per archive."""

    def setUp(self):
        # Create test data: ~24MB of ASCII lines reused for every member.
        line_gen = ("Test of zipfile line %d." % i for i in range(1000000))
        self.data = '\n'.join(line_gen).encode('ascii')

        # BUG FIX: write via a context manager so the handle is closed
        # even if the write raises (the original open/write/close leaked
        # the descriptor on error).
        with open(TESTFN, "wb") as fp:
            fp.write(self.data)

    def zipTest(self, f, compression):
        """Write then re-read an archive on *f* large enough to need ZIP64."""
        # Create the ZIP archive.
        zipfp = zipfile.ZipFile(f, "w", compression, allowZip64=True)

        # It will contain enough copies of self.data to reach about 6GB of
        # raw data to store.
        filecount = 6*1024**3 // len(self.data)

        next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
        for num in range(filecount):
            zipfp.writestr("testfn%d" % num, self.data)
            # Print still working message since this test can be really slow
            if next_time <= time.time():
                next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
                print((
                    ' zipTest still writing %d of %d, be patient...' %
                    (num, filecount)), file=sys.__stdout__)
                sys.__stdout__.flush()
        zipfp.close()

        # Read the ZIP archive back and verify every member.
        zipfp = zipfile.ZipFile(f, "r", compression)
        for num in range(filecount):
            self.assertEqual(zipfp.read("testfn%d" % num), self.data)
            # Print still working message since this test can be really slow
            if next_time <= time.time():
                next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
                print((
                    ' zipTest still reading %d of %d, be patient...' %
                    (num, filecount)), file=sys.__stdout__)
                sys.__stdout__.flush()
        zipfp.close()

    def testStored(self):
        # Try the temp file first. If we do TESTFN2 first, then it hogs
        # gigabytes of disk space for the duration of the test.
        for f in TemporaryFile(), TESTFN2:
            self.zipTest(f, zipfile.ZIP_STORED)

    @requires_zlib
    def testDeflated(self):
        # Try the temp file first. If we do TESTFN2 first, then it hogs
        # gigabytes of disk space for the duration of the test.
        for f in TemporaryFile(), TESTFN2:
            self.zipTest(f, zipfile.ZIP_DEFLATED)

    def tearDown(self):
        # Remove both scratch files regardless of which tests ran.
        for fname in TESTFN, TESTFN2:
            if os.path.exists(fname):
                os.remove(fname)
class OtherTests(unittest.TestCase):
    """ZIP64 edge cases that do not need multi-gigabyte fixtures."""

    def testMoreThan64kFiles(self):
        # This test checks that more than 64k files can be added to an archive,
        # and that the resulting archive can be read properly by ZipFile
        zipf = zipfile.ZipFile(TESTFN, mode="w")
        zipf.debug = 100
        numfiles = (1 << 16) * 3//2
        for i in range(numfiles):
            zipf.writestr("foo%08d" % i, "%d" % (i**3 % 57))
        self.assertEqual(len(zipf.namelist()), numfiles)
        zipf.close()

        zipf2 = zipfile.ZipFile(TESTFN, mode="r")
        self.assertEqual(len(zipf2.namelist()), numfiles)
        for i in range(numfiles):
            content = zipf2.read("foo%08d" % i).decode('ascii')
            self.assertEqual(content, "%d" % (i**3 % 57))
        # BUG FIX: the original called zipf.close() a second time here,
        # leaving the read handle zipf2 open (leaked descriptor).
        zipf2.close()

    def tearDown(self):
        support.unlink(TESTFN)
        support.unlink(TESTFN2)
def test_main():
    """Run every test case class in this module via the shared runner."""
    cases = (TestsWithSourceFile, OtherTests)
    run_unittest(*cases)

if __name__ == "__main__":
    test_main()
| agpl-3.0 |
NickDaly/GemRB-MultipleConfigs | gemrb/GUIScripts/pst/GUIOPT.py | 4 | 26872 | # -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# GUIOPT.py - scripts to control options windows mostly from GUIOPT winpack
# GUIOPT:
# 0 - Main options window (peacock tail)
# 1 - Video options window
# 2 - msg win with 1 button
# 3 - msg win with 2 buttons
# 4 - msg win with 3 buttons
# 5 - Audio options window
# 6 - Gameplay options window
# 8 - Feedback options window
# 9 - Autopause options window
###################################################
import GemRB
import GUICommon
import GUICommonWindows
import GUISAVE
from GUIDefines import *
###################################################
# Module-level window handles.  Each Open*Window function below is a
# toggle: it checks its handle here to decide whether to create the
# window or tear it down, so all start out as None (closed).
OptionsWindow = None
VideoOptionsWindow = None
AudioOptionsWindow = None
GameplayOptionsWindow = None
FeedbackOptionsWindow = None
AutopauseOptionsWindow = None
LoadMsgWindow = None
QuitMsgWindow = None
MoviesWindow = None
KeysWindow = None
###################################################
def OpenOptionsWindow ():
    """Open main options window (peacock tail).

    Acts as a toggle: when the window is already open, closes any child
    option window first and then unloads itself."""
    global OptionsWindow

    if GUICommon.CloseOtherWindow (OpenOptionsWindow):
        # Close whichever child window is still up (each call is a toggle).
        if VideoOptionsWindow: OpenVideoOptionsWindow ()
        if AudioOptionsWindow: OpenAudioOptionsWindow ()
        if GameplayOptionsWindow: OpenGameplayOptionsWindow ()
        if FeedbackOptionsWindow: OpenFeedbackOptionsWindow ()
        if AutopauseOptionsWindow: OpenAutopauseOptionsWindow ()
        if LoadMsgWindow: OpenLoadMsgWindow ()
        if QuitMsgWindow: OpenQuitMsgWindow ()
        # BUG FIX: this used to call OpenKeysWindow (), which is not
        # defined in this module; the keys window toggle defined here is
        # OpenKeyboardMappingsWindow.  TODO confirm no later definition.
        if KeysWindow: OpenKeyboardMappingsWindow ()
        if MoviesWindow: OpenMoviesWindow ()

        GemRB.HideGUI ()
        if OptionsWindow:
            OptionsWindow.Unload ()
        GemRB.SetVar ("OtherWindow", -1)
        GUICommonWindows.EnableAnimatedWindows ()
        OptionsWindow = None
        GemRB.UnhideGUI ()
        return

    GemRB.HideGUI ()
    GemRB.LoadWindowPack ("GUIOPT")
    OptionsWindow = Window = GemRB.LoadWindow (0)
    GemRB.SetVar ("OtherWindow", OptionsWindow.ID)
    GUICommonWindows.DisableAnimatedWindows ()

    # Return to Game
    Button = Window.GetControl (0)
    Button.SetText (28638)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenOptionsWindow)

    # Quit Game
    Button = Window.GetControl (1)
    Button.SetText (2595)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenQuitMsgWindow)

    # Load Game
    Button = Window.GetControl (2)
    Button.SetText (2592)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenLoadMsgWindow)

    # Save Game
    Button = Window.GetControl (3)
    Button.SetText (20639)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, GUISAVE.OpenSaveWindow)

    # Video Options
    Button = Window.GetControl (4)
    Button.SetText (28781)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenVideoOptionsWindow)

    # Audio Options
    Button = Window.GetControl (5)
    Button.SetText (29720)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenAudioOptionsWindow)

    # Gameplay Options
    Button = Window.GetControl (6)
    Button.SetText (29722)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenGameplayOptionsWindow)

    # Keyboard Mappings
    Button = Window.GetControl (7)
    Button.SetText (29723)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenKeyboardMappingsWindow)

    # Movies
    Button = Window.GetControl (9)
    Button.SetText (38156)  # or 2594
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenMoviesWindow)

    # game version, e.g. v1.1.0000
    Label = Window.GetControl (0x10000007)
    Label.SetText (GEMRB_VERSION)

    #Window.SetVisible (WINDOW_VISIBLE)
    GemRB.UnhideGUI ()
###################################################
def OpenVideoOptionsWindow ():
    """Open video options window (toggle: closes it if already open)."""
    global VideoOptionsWindow, VideoHelpText

    GemRB.HideGUI ()

    if VideoOptionsWindow:
        if VideoOptionsWindow:
            VideoOptionsWindow.Unload ()
        VideoOptionsWindow = None
        GemRB.SetVar ("FloatWindow", -1)

        GemRB.UnhideGUI ()
        return

    VideoOptionsWindow = Window = GemRB.LoadWindow (1)
    GemRB.SetVar ("FloatWindow", VideoOptionsWindow.ID)

    # Help text area plus standard Done/Cancel buttons (helpers defined
    # at the bottom of this file).
    VideoHelpText = OptHelpText ('VideoOptions', Window, 9, 31052)

    OptDone ('VideoOptions', Window, 7)
    OptCancel ('VideoOptions', Window, 8)
    OptSlider ('VideoOptions', 'Brightness', Window, 1, 10, 31234, "Brightness Correction", GammaFeedback, 1)
    OptSlider ('VideoOptions', 'Contrast', Window, 2, 11, 31429, "Gamma Correction", GammaFeedback, 1)
    OptCheckbox ('VideoOptions', 'SoftwareBlitting', Window, 6, 15, 30898, None) #TODO: SoftBlt
    OptCheckbox ('VideoOptions', 'SoftwareMirroring', Window, 4, 13, 30896, None) #TODO: SoftMirrorBlt
    OptCheckbox ('VideoOptions', 'SoftwareTransparency', Window, 5, 14, 30897, None) #TODO: SoftSrcKeyBlt

    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)
    return

def GammaFeedback ():
    # Apply the slider values immediately so the user sees the change.
    GemRB.SetGamma (GemRB.GetVar("Brightness Correction")/5,GemRB.GetVar("Gamma Correction")/5)
    return

# Mouse-over help callbacks: each shows a strref in the video help area.
def DisplayHelpVideoOptions ():
    VideoHelpText.SetText (31052)

def DisplayHelpBrightness ():
    VideoHelpText.SetText (31431)

def DisplayHelpContrast ():
    VideoHelpText.SetText (31459)

def DisplayHelpSoftwareBlitting ():
    VideoHelpText.SetText (31221)

def DisplayHelpSoftwareMirroring ():
    VideoHelpText.SetText (31216)

def DisplayHelpSoftwareTransparency ():
    VideoHelpText.SetText (31220)
###################################################
# Snapshot of audio variables taken when the window opens; restored on Cancel.
saved_audio_options = {}

def OpenAudioOptionsWindow ():
    """Open audio options window (toggle: closes it if already open)."""
    global AudioOptionsWindow, AudioHelpText

    GemRB.HideGUI ()

    if AudioOptionsWindow:
        if AudioOptionsWindow:
            AudioOptionsWindow.Unload ()
        AudioOptionsWindow = None
        GemRB.SetVar ("FloatWindow", -1)

        # Restore values in case of cancel
        if GemRB.GetVar ("Cancel") == 1:
            for k, v in saved_audio_options.items ():
                GemRB.SetVar (k, v)
            UpdateVolume ()

        GemRB.UnhideGUI ()
        return

    AudioOptionsWindow = Window = GemRB.LoadWindow (5)
    GemRB.SetVar ("FloatWindow", AudioOptionsWindow.ID)

    # save values, so we can restore them on cancel
    for v in "Volume Ambients", "Volume SFX", "Volume Voices", "Volume Music", "Volume Movie", "Sound Processing", "Music Processing":
        saved_audio_options[v] = GemRB.GetVar (v)

    AudioHelpText = OptHelpText ('AudioOptions', Window, 9, 31210)

    OptDone ('AudioOptions', Window, 7)
    OptCancel ('AudioOptions', Window, 8)

    OptSlider ('AudioOptions', 'AmbientVolume', Window, 1, 10, 31460, "Volume Ambients", UpdateVolume)
    OptSlider ('AudioOptions', 'SoundFXVolume', Window, 2, 11, 31466, "Volume SFX", UpdateVolume)
    OptSlider ('AudioOptions', 'VoiceVolume', Window, 3, 12, 31467, "Volume Voices", UpdateVolume)
    OptSlider ('AudioOptions', 'MusicVolume', Window, 4, 13, 31468, "Volume Music", UpdateVolume)
    OptSlider ('AudioOptions', 'MovieVolume', Window, 5, 14, 31469, "Volume Movie", UpdateVolume)

    OptCheckbox ('AudioOptions', 'CreativeEAX', Window, 6, 15, 30900, "Environmental Audio")
    OptCheckbox ('AudioOptions', 'SoundProcessing', Window, 16, 17, 63242, "Sound Processing")
    OptCheckbox ('AudioOptions', 'MusicProcessing', Window, 18, 19, 63243, "Music Processing")

    #AudioOptionsWindow.SetVisible (WINDOW_VISIBLE)
    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)

def UpdateVolume ():
    # Push the current volume variables into the audio core.
    GemRB.UpdateAmbientsVolume ()
    GemRB.UpdateMusicVolume ()

# Mouse-over help callbacks for the audio window controls.
def DisplayHelpAudioOptions ():
    AudioHelpText.SetText (31210)

def DisplayHelpAmbientVolume ():
    AudioHelpText.SetText (31227)

def DisplayHelpSoundFXVolume ():
    AudioHelpText.SetText (31228)

def DisplayHelpVoiceVolume ():
    AudioHelpText.SetText (31226)

def DisplayHelpMusicVolume ():
    AudioHelpText.SetText (31225)

def DisplayHelpMovieVolume ():
    AudioHelpText.SetText (31229)

def DisplayHelpCreativeEAX ():
    AudioHelpText.SetText (31224)

def DisplayHelpSoundProcessing ():
    AudioHelpText.SetText (63244)

def DisplayHelpMusicProcessing ():
    AudioHelpText.SetText (63247)
###################################################
def OpenGameplayOptionsWindow ():
    """Open gameplay options window (toggle: closes it if already open)."""
    global GameplayOptionsWindow, GameplayHelpText

    GemRB.HideGUI ()

    if GameplayOptionsWindow:
        # Close the child windows first (each call is a toggle).
        if FeedbackOptionsWindow: OpenFeedbackOptionsWindow()
        if AutopauseOptionsWindow: OpenAutopauseOptionsWindow()
        if GameplayOptionsWindow:
            GameplayOptionsWindow.Unload ()
        GameplayOptionsWindow = None
        GemRB.SetVar ("FloatWindow", -1)

        GemRB.UnhideGUI ()
        return

    GameplayOptionsWindow = Window = GemRB.LoadWindow (6)
    GemRB.SetVar ("FloatWindow", GameplayOptionsWindow.ID)

    GameplayHelpText = OptHelpText ('GameplayOptions', Window, 12, 31212)

    OptDone ('GameplayOptions', Window, 10)
    OptCancel ('GameplayOptions', Window, 11)

    OptSlider ('GameplayOptions', 'TooltipDelay', Window, 1, 13, 31481, "Tooltips", UpdateTooltips, TOOLTIP_DELAY_FACTOR)
    OptSlider ('GameplayOptions', 'MouseScrollingSpeed', Window, 2, 14, 31482, "Mouse Scroll Speed", UpdateMouseSpeed)
    OptSlider ('GameplayOptions', 'KeyboardScrollingSpeed', Window, 3, 15, 31480, "Keyboard Scroll Speed", UpdateKeyboardSpeed)
    OptSlider ('GameplayOptions', 'Difficulty', Window, 4, 16, 31479, "Difficulty Level")

    OptCheckbox ('GameplayOptions', 'DitherAlways', Window, 5, 17, 31217, "Always Dither")
    OptCheckbox ('GameplayOptions', 'Gore', Window, 6, 18, 31218, "Gore???")
    OptCheckbox ('GameplayOptions', 'AlwaysRun', Window, 22, 23, 62418, "Always Run")

    # Sub-windows with more detailed settings.
    OptButton ('GameplayOptions', 'FeedbackOptions', Window, 8, 20, 31478)
    OptButton ('GameplayOptions', 'AutopauseOptions', Window, 9, 21, 31470)

    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)
    return

# Mouse-over help callbacks and slider feedback for the gameplay window.
def DisplayHelpGameplayOptions ():
    GameplayHelpText.SetText (31212)

def UpdateTooltips ():
    GemRB.SetTooltipDelay (GemRB.GetVar ("Tooltips") )

def DisplayHelpTooltipDelay ():
    GameplayHelpText.SetText (31232)

def UpdateMouseSpeed ():
    GemRB.SetMouseScrollSpeed (GemRB.GetVar ("Mouse Scroll Speed") )

def DisplayHelpMouseScrollingSpeed ():
    GameplayHelpText.SetText (31230)

def UpdateKeyboardSpeed ():
    # Not implemented in the core yet.
    #GemRB.SetKeyboardScrollSpeed (GemRB.GetVar ("Keyboard Scroll Speed") )
    return

def DisplayHelpKeyboardScrollingSpeed ():
    GameplayHelpText.SetText (31231)

def DisplayHelpDifficulty ():
    GameplayHelpText.SetText (31233)

def DisplayHelpDitherAlways ():
    GameplayHelpText.SetText (31222)

def DisplayHelpGore ():
    GameplayHelpText.SetText (31223)

def DisplayHelpAlwaysRun ():
    GameplayHelpText.SetText (62419)

def DisplayHelpFeedbackOptions ():
    GameplayHelpText.SetText (31213)

def DisplayHelpAutopauseOptions ():
    GameplayHelpText.SetText (31214)
###################################################
def OpenFeedbackOptionsWindow ():
    """Open feedback options window (child of the gameplay window)."""
    global FeedbackOptionsWindow, FeedbackHelpText

    GemRB.HideGUI ()

    if FeedbackOptionsWindow:
        if FeedbackOptionsWindow:
            FeedbackOptionsWindow.Unload ()
        FeedbackOptionsWindow = None
        # Hand the float-window slot back to the parent gameplay window.
        GemRB.SetVar ("FloatWindow", GameplayOptionsWindow.ID)

        GemRB.UnhideGUI ()
        GameplayOptionsWindow.ShowModal (MODAL_SHADOW_GRAY)
        return

    FeedbackOptionsWindow = Window = GemRB.LoadWindow (8)
    GemRB.SetVar ("FloatWindow", FeedbackOptionsWindow.ID)

    FeedbackHelpText = OptHelpText ('FeedbackOptions', Window, 9, 37410)

    OptDone ('FeedbackOptions', Window, 7)
    OptCancel ('FeedbackOptions', Window, 8)

    OptSlider ('FeedbackOptions', 'MarkerFeedback', Window, 1, 10, 37463, "GUI Feedback Level")
    OptSlider ('FeedbackOptions', 'LocatorFeedback', Window, 2, 11, 37586, "Locator Feedback Level")
    OptSlider ('FeedbackOptions', 'SelectionFeedbackLevel', Window, 20, 21, 54879, "Selection Sounds Frequency")
    OptSlider ('FeedbackOptions', 'CommandFeedbackLevel', Window, 22, 23, 55012, "Command Sounds Frequency")

    OptCheckbox ('FeedbackOptions', 'CharacterStates', Window, 6, 15, 37594, "")
    OptCheckbox ('FeedbackOptions', 'MiscellaneousMessages', Window, 17, 19, 37596, "")
    OptCheckbox ('FeedbackOptions', 'ToHitRolls', Window, 3, 12, 37588, "")
    OptCheckbox ('FeedbackOptions', 'CombatInfo', Window, 4, 13, 37590, "")
    OptCheckbox ('FeedbackOptions', 'SpellCasting', Window, 5, 14, 37592, "")

    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)

# Mouse-over help callbacks for the feedback window controls.
def DisplayHelpMarkerFeedback ():
    FeedbackHelpText.SetText (37411)

def DisplayHelpLocatorFeedback ():
    FeedbackHelpText.SetText (37447)

def DisplayHelpSelectionFeedbackLevel ():
    FeedbackHelpText.SetText (54878)

def DisplayHelpCommandFeedbackLevel ():
    FeedbackHelpText.SetText (54880)

def DisplayHelpCharacterStates ():
    FeedbackHelpText.SetText (37460)

def DisplayHelpMiscellaneousMessages ():
    FeedbackHelpText.SetText (37462)

def DisplayHelpToHitRolls ():
    FeedbackHelpText.SetText (37453)

def DisplayHelpCombatInfo ():
    FeedbackHelpText.SetText (37457)

def DisplayHelpSpellCasting ():
    FeedbackHelpText.SetText (37458)
###################################################
def OpenAutopauseOptionsWindow ():
    """Open autopause options window (child of the gameplay window)."""
    global AutopauseOptionsWindow, AutopauseHelpText

    GemRB.HideGUI ()

    if AutopauseOptionsWindow:
        if AutopauseOptionsWindow:
            AutopauseOptionsWindow.Unload ()
        AutopauseOptionsWindow = None
        # Hand the float-window slot back to the parent gameplay window.
        GemRB.SetVar ("FloatWindow", GameplayOptionsWindow.ID)

        GemRB.UnhideGUI ()
        GameplayOptionsWindow.ShowModal (MODAL_SHADOW_GRAY)
        return

    AutopauseOptionsWindow = Window = GemRB.LoadWindow (9)
    GemRB.SetVar ("FloatWindow", AutopauseOptionsWindow.ID)

    AutopauseHelpText = OptHelpText ('AutopauseOptions', Window, 1, 31214)

    OptDone ('AutopauseOptions', Window, 16)
    OptCancel ('AutopauseOptions', Window, 17)

    # Set variable for each checkbox according to a particular bit of
    # AutoPauseState (OnAutoPauseClicked packs them back into the mask).
    state = GemRB.GetVar ("Auto Pause State")
    GemRB.SetVar("AutoPauseState_Unusable", (state & 0x01) != 0 )
    GemRB.SetVar("AutoPauseState_Attacked", (state & 0x02) != 0 )
    GemRB.SetVar("AutoPauseState_Hit", (state & 0x04) != 0 )
    GemRB.SetVar("AutoPauseState_Wounded", (state & 0x08) != 0 )
    GemRB.SetVar("AutoPauseState_Dead", (state & 0x10) != 0 )
    GemRB.SetVar("AutoPauseState_NoTarget", (state & 0x20) != 0 )
    GemRB.SetVar("AutoPauseState_EndRound", (state & 0x40) != 0 )

    OptCheckbox ('AutopauseOptions', 'CharacterHit', Window, 2, 9, 37598, "AutoPauseState_Hit", OnAutoPauseClicked)
    OptCheckbox ('AutopauseOptions', 'CharacterInjured', Window, 3, 10, 37681, "AutoPauseState_Wounded", OnAutoPauseClicked)
    OptCheckbox ('AutopauseOptions', 'CharacterDead', Window, 4, 11, 37682, "AutoPauseState_Dead", OnAutoPauseClicked)
    OptCheckbox ('AutopauseOptions', 'CharacterAttacked', Window, 5, 12, 37683, "AutoPauseState_Attacked", OnAutoPauseClicked)
    OptCheckbox ('AutopauseOptions', 'WeaponUnusable', Window, 6, 13, 37684, "AutoPauseState_Unusable", OnAutoPauseClicked)
    OptCheckbox ('AutopauseOptions', 'TargetGone', Window, 7, 14, 37685, "AutoPauseState_NoTarget", OnAutoPauseClicked)
    OptCheckbox ('AutopauseOptions', 'EndOfRound', Window, 8, 15, 37686, "AutoPauseState_EndRound", OnAutoPauseClicked)

    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)
def OnAutoPauseClicked ():
    """Pack the individual autopause checkbox variables back into the
    "Auto Pause State" bitmask whenever any checkbox is toggled."""
    flag_bits = (
        ("AutoPauseState_Unusable", 0x01),
        ("AutoPauseState_Attacked", 0x02),
        ("AutoPauseState_Hit", 0x04),
        ("AutoPauseState_Wounded", 0x08),
        ("AutoPauseState_Dead", 0x10),
        ("AutoPauseState_NoTarget", 0x20),
        ("AutoPauseState_EndRound", 0x40),
    )
    state = 0
    for var, bit in flag_bits:
        state += bit * GemRB.GetVar (var)
    GemRB.SetVar ("Auto Pause State", state)
# Mouse-over help callbacks for the autopause window checkboxes.
def DisplayHelpCharacterHit ():
    AutopauseHelpText.SetText (37688)

def DisplayHelpCharacterInjured ():
    AutopauseHelpText.SetText (37689)

def DisplayHelpCharacterDead ():
    AutopauseHelpText.SetText (37690)

def DisplayHelpCharacterAttacked ():
    AutopauseHelpText.SetText (37691)

def DisplayHelpWeaponUnusable ():
    AutopauseHelpText.SetText (37692)

def DisplayHelpTargetGone ():
    AutopauseHelpText.SetText (37693)

def DisplayHelpEndOfRound ():
    AutopauseHelpText.SetText (37694)
###################################################
###################################################
def OpenLoadMsgWindow ():
    """Confirmation dialog shown before abandoning the game to load."""
    global LoadMsgWindow

    GemRB.HideGUI()

    if LoadMsgWindow:
        if LoadMsgWindow:
            LoadMsgWindow.Unload ()
        LoadMsgWindow = None
        GemRB.SetVar ("FloatWindow", -1)

        GemRB.UnhideGUI ()
        return

    LoadMsgWindow = Window = GemRB.LoadWindow (3)
    GemRB.SetVar ("FloatWindow", LoadMsgWindow.ID)

    # Load
    Button = Window.GetControl (0)
    Button.SetText (28648)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, LoadGame)

    # Cancel
    Button = Window.GetControl (1)
    Button.SetText (4196)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenLoadMsgWindow)

    # Loading a game will destroy ...
    Text = Window.GetControl (3)
    Text.SetText (39432)

    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)

def LoadGame ():
    # Tear down the options window, quit the running game and switch to
    # the load-game screen.
    OpenOptionsWindow ()
    GemRB.QuitGame ()
    GemRB.SetNextScript ('GUILOAD')
###################################################
def OpenQuitMsgWindow ():
    """Save / Quit / Cancel confirmation dialog for leaving the game."""
    global QuitMsgWindow

    #GemRB.HideGUI()

    if QuitMsgWindow:
        if QuitMsgWindow:
            QuitMsgWindow.Unload ()
        QuitMsgWindow = None
        GemRB.SetVar ("FloatWindow", -1)

        #GemRB.UnhideGUI ()
        return

    QuitMsgWindow = Window = GemRB.LoadWindow (4)
    GemRB.SetVar ("FloatWindow", QuitMsgWindow.ID)

    # Save
    Button = Window.GetControl (0)
    Button.SetText (28645)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, SaveGame)

    # Quit Game
    Button = Window.GetControl (1)
    Button.SetText (2595)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, QuitGame)

    # Cancel
    Button = Window.GetControl (2)
    Button.SetText (4196)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenQuitMsgWindow)
    Button.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)

    # The game has not been saved ....
    Text = Window.GetControl (3)
    Text.SetText (39430)  # or 39431 - cannot be saved atm

    #GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_GRAY)
    return

def QuitGame ():
    # Abandon the session and return to the start screen.
    OpenOptionsWindow ()
    GemRB.QuitGame ()
    GemRB.SetNextScript ('Start')

def SaveGame ():
    # Leave the session via the save screen.
    OpenOptionsWindow ()
    GemRB.QuitGame ()
    GemRB.SetNextScript ('GUISAVE')
###################################################
# (label, key) pairs shown by the keyboard mapping window.  A None key
# marks a section header row; ('', None) terminates a section.
key_list = [
    ('GemRB', None),
    ('Grab pointer', '^G'),
    ('Toggle fullscreen', '^F'),
    ('Enable cheats', '^T'),
    ('', None),

    ('IE', None),
    ('Open Inventory', 'I'),
    ('Open Priest Spells', 'P'),
    ('Open Mage Spells', 'S'),
    ('Pause Game', 'SPC'),
    ('Select Weapon', ''),
    ('', None),
]

# Rows shown per page of the keys window.
KEYS_PAGE_SIZE = 60
# BUG FIX: use floor division so the page count stays an int under
# Python 3 as well (plain "/" yields a float there, breaking the
# page arithmetic; under Python 2 int "/" already floors, so behavior
# is unchanged).
KEYS_PAGE_COUNT = ((len (key_list) - 1) // KEYS_PAGE_SIZE) + 1
def OpenKeyboardMappingsWindow ():
    """Open the keyboard mapping window (toggle: closes it if open)."""
    global KeysWindow
    global last_key_action

    # Forget any highlighted row from a previous visit.
    last_key_action = None

    GemRB.HideGUI()

    if KeysWindow:
        if KeysWindow:
            KeysWindow.Unload ()
        KeysWindow = None
        # Give focus back to the main options window and restore its pack.
        GemRB.SetVar ("OtherWindow", OptionsWindow.ID)
        GemRB.LoadWindowPack ("GUIOPT")
        GemRB.UnhideGUI ()
        return

    GemRB.LoadWindowPack ("GUIKEYS")
    KeysWindow = Window = GemRB.LoadWindow (0)
    GemRB.SetVar ("OtherWindow", KeysWindow.ID)

    # Default
    Button = Window.GetControl (3)
    Button.SetText (49051)
    #Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, None)

    # Done
    Button = Window.GetControl (4)
    Button.SetText (1403)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenKeyboardMappingsWindow)
    Button.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)

    # Cancel
    Button = Window.GetControl (5)
    Button.SetText (4196)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenKeyboardMappingsWindow)
    Button.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)

    # Fill in the first page of bindings.
    keys_setup_page (0)

    #KeysWindow.SetVisible (WINDOW_VISIBLE)
    GemRB.UnhideGUI ()
def keys_setup_page (pageno):
    """Fill the keyboard mapping window with page *pageno* of key_list.

    Rows with a None key render as cyan section headers; real bindings
    get clickable key/label pairs wired to OnActionLabelPress."""
    # BUG FIX: without this declaration the trailing assignment below
    # created a dead local, so the module-level highlight tracker was
    # never reset when a page was (re)drawn.
    global last_key_action

    Window = KeysWindow

    # Page n of n
    Label = Window.GetControl (0x10000001)
    #txt = GemRB.ReplaceVarsInText (49053, {'PAGE': str (pageno + 1), 'NUMPAGES': str (KEYS_PAGE_COUNT)})
    GemRB.SetToken ('PAGE', str (pageno + 1))
    GemRB.SetToken ('NUMPAGES', str (KEYS_PAGE_COUNT))
    Label.SetText (49053)

    for i in range (KEYS_PAGE_SIZE):
        try:
            label, key = key_list[pageno * KEYS_PAGE_SIZE + i]
        except IndexError:
            # Past the end of key_list: render an empty filler row.
            # (Narrowed from a bare "except:", which also hid real errors.)
            label = ''
            key = None

        if key == None:
            # Section header
            Label = Window.GetControl (0x10000005 + i)
            Label.SetText ('')

            Label = Window.GetControl (0x10000041 + i)
            Label.SetText (label)
            Label.SetTextColor (0, 255, 255)
        else:
            Label = Window.GetControl (0x10000005 + i)
            Label.SetText (key)
            Label.SetEvent (IE_GUI_LABEL_ON_PRESS, OnActionLabelPress)
            Label.SetVarAssoc ("KeyAction", i)

            Label = Window.GetControl (0x10000041 + i)
            Label.SetText (label)
            Label.SetEvent (IE_GUI_LABEL_ON_PRESS, OnActionLabelPress)
            Label.SetVarAssoc ("KeyAction", i)

    last_key_action = None
def OnActionLabelPress ():
    """Highlight the clicked binding row in yellow, un-highlighting the
    previously selected one."""
    global last_key_action

    Window = KeysWindow
    i = GemRB.GetVar ("KeyAction")

    # Restore the previously selected row to white first.
    if last_key_action != None:
        Label = Window.GetControl (0x10000005 + last_key_action)
        Label.SetTextColor (255, 255, 255)
        Label = Window.GetControl (0x10000041 + last_key_action)
        Label.SetTextColor (255, 255, 255)
    Label = Window.GetControl (0x10000005 + i)
    Label.SetTextColor (255, 255, 0)
    Label = Window.GetControl (0x10000041 + i)
    Label.SetTextColor (255, 255, 0)
    last_key_action = i

    # 49155
###################################################
def OpenMoviesWindow ():
    """Toggle the movies (cutscene replay) float window.

    If the window is already open, unloads it and restores the options GUI;
    otherwise builds the window, fills the movie list from the MOVIDESC
    table and shows it as a modal float window.
    """
    global MoviesWindow

    GemRB.HideGUI ()

    if MoviesWindow:
        # Window already open: tear it down and return to the options GUI.
        # (The original code tested `if MoviesWindow:` twice in a row; the
        # inner, redundant check has been removed.)
        MoviesWindow.Unload ()
        MoviesWindow = None
        GemRB.SetVar ("FloatWindow", -1)
        GemRB.LoadWindowPack ("GUIOPT")
        GemRB.UnhideGUI ()
        return

    GemRB.LoadWindowPack ("GUIMOVIE")

    # FIXME: clean the window to black
    MoviesWindow = Window = GemRB.LoadWindow (0)
    GemRB.SetVar ("FloatWindow", MoviesWindow.ID)

    # Play Movie
    Button = Window.GetControl (2)
    Button.SetText (33034)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OnPlayMoviePress)

    # Credits
    Button = Window.GetControl (3)
    Button.SetText (33078)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OnCreditsPress)

    # Done — reinvoking OpenMoviesWindow closes the window (toggle above).
    Button = Window.GetControl (4)
    Button.SetText (1403)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenMoviesWindow)

    # Movie list, one selectable row per MOVIDESC entry.
    List = Window.GetControl (0)
    List.SetFlags (IE_GUI_TEXTAREA_SELECTABLE)
    List.SetVarAssoc ('SelectedMovie', -1)

    MovieTable = GemRB.LoadTable ("MOVIDESC")
    for i in range (MovieTable.GetRowCount ()):
        desc = MovieTable.GetValue (i, 0)
        List.Append (desc, i)

    GemRB.UnhideGUI ()
    Window.ShowModal (MODAL_SHADOW_BLACK)
###################################################
def OnPlayMoviePress ():
    """Play the movie currently selected in the movie list."""
    row = GemRB.GetVar ('SelectedMovie')
    # FIXME: a -1 selection should become impossible once the Play Movie
    # button is properly enabled/disabled, but it can still happen now.
    if row == -1:
        return
    table = GemRB.LoadTable ("MOVIDESC")
    GemRB.PlayMovie (table.GetRowName (row), 1)
###################################################
def OnCreditsPress ():
    """Play the credits movie."""
    GemRB.PlayMovie ("CREDITS")
###################################################
###################################################
# These functions help to setup controls found
# in Video, Audio, Gameplay, Feedback and Autopause
# options windows
# These controls are usually made from an active
# control (button, slider ...) and a label
def OptSlider (winname, ctlname, window, slider_id, label_id, label_strref, assoc_var, fn = None, scale = 1):
    """Create a standard option-window slider with its companion label.

    Binds the slider to the game variable ``assoc_var`` (scaled by
    ``scale``), optionally attaches ``fn`` as the on-change handler, and
    wires the label so hovering it shows context help.  Returns the slider.
    """
    ctl = window.GetControl (slider_id)
    if fn:
        ctl.SetEvent (IE_GUI_SLIDER_ON_CHANGE, fn)
    ctl.SetVarAssoc (assoc_var, scale)

    lbl = window.GetControl (label_id)
    lbl.SetText (label_strref)
    lbl.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
    lbl.SetState (IE_GUI_BUTTON_LOCKED)
    # Context-help handlers are resolved by name (e.g. DisplayHelp<ctlname>).
    lbl.SetEvent (IE_GUI_MOUSE_ENTER_BUTTON, eval("DisplayHelp" + ctlname))
    lbl.SetEvent (IE_GUI_MOUSE_LEAVE_BUTTON, eval("DisplayHelp" + winname))
    return ctl
def OptCheckbox (winname, ctlname, window, button_id, label_id, label_strref, assoc_var = None, handler = None):
    """Create a standard option-window checkbox with its companion label.

    When ``assoc_var`` is given the checkbox mirrors that game variable
    (pressed when non-zero); ``handler``, when given, runs on press.  Both
    checkbox and label show context help on mouse enter/leave.  Returns the
    checkbox button.
    """
    box = window.GetControl (button_id)
    box.SetFlags (IE_GUI_BUTTON_CHECKBOX, OP_OR)
    box.SetEvent (IE_GUI_MOUSE_ENTER_BUTTON, eval("DisplayHelp" + ctlname))
    box.SetEvent (IE_GUI_MOUSE_LEAVE_BUTTON, eval("DisplayHelp" + winname))

    if not assoc_var:
        box.SetState (IE_GUI_BUTTON_UNPRESSED)
    else:
        box.SetVarAssoc (assoc_var, 1)
        state = IE_GUI_BUTTON_PRESSED if GemRB.GetVar (assoc_var) else IE_GUI_BUTTON_UNPRESSED
        box.SetState (state)

    if handler:
        box.SetEvent (IE_GUI_BUTTON_ON_PRESS, handler)

    lbl = window.GetControl (label_id)
    lbl.SetText (label_strref)
    lbl.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
    lbl.SetState (IE_GUI_BUTTON_LOCKED)
    lbl.SetEvent (IE_GUI_MOUSE_ENTER_BUTTON, eval("DisplayHelp" + ctlname))
    lbl.SetEvent (IE_GUI_MOUSE_LEAVE_BUTTON, eval("DisplayHelp" + winname))
    return box
def OptButton (winname, ctlname, window, button_id, label_id, label_strref):
    """Create a standard subwindow-opening button with its companion label.

    Pressing the button invokes Open<ctlname>Window; both controls show
    context help on mouse enter/leave.
    """
    # Resolve the name-based handlers once up front.
    open_fn = eval ("Open%sWindow" % ctlname)
    help_enter = eval ("DisplayHelp" + ctlname)
    help_leave = eval ("DisplayHelp" + winname)

    btn = window.GetControl (button_id)
    btn.SetEvent (IE_GUI_BUTTON_ON_PRESS, open_fn)
    btn.SetEvent (IE_GUI_MOUSE_ENTER_BUTTON, help_enter)
    btn.SetEvent (IE_GUI_MOUSE_LEAVE_BUTTON, help_leave)

    lbl = window.GetControl (label_id)
    lbl.SetText (label_strref)
    lbl.SetFlags (IE_GUI_BUTTON_NO_IMAGE, OP_SET)
    lbl.SetState (IE_GUI_BUTTON_LOCKED)
    lbl.SetEvent (IE_GUI_MOUSE_ENTER_BUTTON, help_enter)
    lbl.SetEvent (IE_GUI_MOUSE_LEAVE_BUTTON, help_leave)
def OptDone (winname, window, button_id):
    """Create the standard `Done' button for an option window.

    Pressing it sets the Cancel variable to 0 and invokes
    Open<winname>Window again.
    """
    btn = window.GetControl (button_id)
    btn.SetText (1403)  # strref for "Done"
    btn.SetEvent (IE_GUI_BUTTON_ON_PRESS, eval ("Open%sWindow" % winname))
    btn.SetVarAssoc ("Cancel", 0)
def OptCancel (winname, window, button_id):
    """Create the standard `Cancel' button for an option window.

    Pressing it sets the Cancel variable to 1 and invokes
    Open<winname>Window again.
    """
    btn = window.GetControl (button_id)
    btn.SetText (4196)  # strref for "Cancel"
    btn.SetEvent (IE_GUI_BUTTON_ON_PRESS, eval ("Open%sWindow" % winname))
    btn.SetVarAssoc ("Cancel", 1)
def OptHelpText (winname, window, text_id, text_strref):
"""Standard textarea with context help for option windows"""
text = window.GetControl (text_id)
text.SetText (text_strref)
return text
###################################################
# End of file GUIOPT.py
| gpl-2.0 |
wijnandhoitinga/nutils | tests/test_normals.py | 4 | 2493 | from nutils import *
from nutils.testing import *
@parametrize
class check(TestCase):
    """Verify geometric identities (normals, curvature) on test meshes.

    Parametrized over ``ndims`` and ``curved``; see the ``check(...)``
    instantiations at module level.
    """
    # NOTE(review): zero/volume/interfaces/curvature lack the test_ prefix,
    # so a stock unittest runner would not collect them — confirm that the
    # nutils.testing/parametrize machinery collects them as intended.

    def setUp(self):
        """Build either a rectilinear box or a curved 2D patch geometry."""
        super().setUp()
        if not self.curved:
            self.domain, self.geom = mesh.rectilinear([[1,1.5,2],[-1,0],[0,2,4]][:self.ndims])
            self.curv = 0  # boundary of an axis-aligned box: zero curvature
        else:
            assert self.ndims == 2
            nodes = numpy.linspace(-.25*numpy.pi, .25*numpy.pi, 3)
            self.domain, (xi,eta) = mesh.rectilinear([nodes, nodes])
            # curved mapping of the square; expected boundary curvature is 1
            self.geom = numpy.sqrt(2) * function.stack([function.sin(xi) * function.cos(eta), function.cos(xi) * function.sin(eta)])
            self.curv = 1

    def zero(self):
        # The boundary integral of the outward normal over a closed surface
        # must vanish (divergence theorem applied to a constant).
        zero = self.domain.boundary.integrate(self.geom.normal()*function.J(self.geom), ischeme='gauss9')
        numpy.testing.assert_almost_equal(zero, 0)

    def volume(self):
        # Volume computed directly must match the boundary integral of x*n
        # (componentwise divergence-theorem identity).
        volume = self.domain.integrate(function.J(self.geom), ischeme='gauss9')
        volumes = self.domain.boundary.integrate(self.geom * self.geom.normal() * function.J(self.geom), ischeme='gauss9')
        numpy.testing.assert_almost_equal(volume, volumes)

    def interfaces(self):
        # Partial-integration identity for a discontinuous basis:
        # int f.grad(g) == bnd int (f g).n - interface jumps - int div(f) g
        funcsp = self.domain.discontfunc(degree=2)
        f = (funcsp[:,_] * numpy.arange(funcsp.shape[0]*self.ndims).reshape(-1,self.ndims)).sum(0)
        g = funcsp.dot(numpy.arange(funcsp.shape[0]))
        fg1 = self.domain.integrate((f * g.grad(self.geom)).sum(-1)*function.J(self.geom), ischeme='gauss9')
        fg2 = self.domain.boundary.integrate((f*g).dotnorm(self.geom)*function.J(self.geom), ischeme='gauss9') \
            - self.domain.interfaces.integrate(function.jump(f*g).dotnorm(self.geom)*function.J(self.geom), ischeme='gauss9') \
            - self.domain.integrate(f.div(self.geom) * g * function.J(self.geom), ischeme='gauss9')
        numpy.testing.assert_almost_equal(fg1, fg2)

    def curvature(self):
        # Sampled boundary curvature must equal the value set up in setUp.
        c = self.domain.boundary.sample('uniform', 1).eval(self.geom.curvature())
        numpy.testing.assert_almost_equal(c, self.curv)

    @parametrize.enable_if(lambda curved, **params: not curved)
    def test_boundaries(self):
        # On the box geometry each named boundary has a constant unit normal
        # equal to (+/-) the corresponding coordinate axis.
        normal = self.geom.normal()
        boundary = self.domain.boundary
        for name, n in zip(['right','top','back'][:self.ndims], numpy.eye(self.ndims)):
            numpy.testing.assert_almost_equal(boundary[name].sample('gauss', 9).eval(normal)-n, 0)
        for name, n in zip(['left','bottom','front'][:self.ndims], -numpy.eye(self.ndims)):
            numpy.testing.assert_almost_equal(boundary[name].sample('gauss', 9).eval(normal)-n, 0)
# Instantiate the parametrized test case for each supported geometry.
check('2d', ndims=2, curved=False)
check('2dcurved', ndims=2, curved=True)
check('3d', ndims=3, curved=False)
| mit |
fieldOfView/Cura | cura/CuraApplication.py | 1 | 63344 | # Copyright (c) 2017 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
from PyQt5.QtNetwork import QLocalServer
from PyQt5.QtNetwork import QLocalSocket
from UM.Qt.QtApplication import QtApplication
from UM.Scene.SceneNode import SceneNode
from UM.Scene.Camera import Camera
from UM.Math.Vector import Vector
from UM.Math.Quaternion import Quaternion
from UM.Math.AxisAlignedBox import AxisAlignedBox
from UM.Math.Matrix import Matrix
from UM.Resources import Resources
from UM.Scene.ToolHandle import ToolHandle
from UM.Scene.Iterator.DepthFirstIterator import DepthFirstIterator
from UM.Mesh.ReadMeshJob import ReadMeshJob
from UM.Logger import Logger
from UM.Preferences import Preferences
from UM.SaveFile import SaveFile
from UM.Scene.Selection import Selection
from UM.Scene.GroupDecorator import GroupDecorator
from UM.Settings.ContainerStack import ContainerStack
from UM.Settings.InstanceContainer import InstanceContainer
from UM.Settings.Validator import Validator
from UM.Message import Message
from UM.i18n import i18nCatalog
from UM.Workspace.WorkspaceReader import WorkspaceReader
from UM.Decorators import deprecated
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.RemoveSceneNodeOperation import RemoveSceneNodeOperation
from UM.Operations.GroupedOperation import GroupedOperation
from UM.Operations.SetTransformOperation import SetTransformOperation
from cura.Arrange import Arrange
from cura.ShapeArray import ShapeArray
from cura.ConvexHullDecorator import ConvexHullDecorator
from cura.SetParentOperation import SetParentOperation
from cura.SliceableObjectDecorator import SliceableObjectDecorator
from cura.BlockSlicingDecorator import BlockSlicingDecorator
from cura.ArrangeObjectsJob import ArrangeObjectsJob
from cura.MultiplyObjectsJob import MultiplyObjectsJob
from UM.Settings.SettingDefinition import SettingDefinition, DefinitionPropertyType
from UM.Settings.ContainerRegistry import ContainerRegistry
from UM.Settings.SettingFunction import SettingFunction
from cura.Settings.MachineNameValidator import MachineNameValidator
from cura.Settings.ProfilesModel import ProfilesModel
from cura.Settings.MaterialsModel import MaterialsModel
from cura.Settings.QualityAndUserProfilesModel import QualityAndUserProfilesModel
from cura.Settings.SettingInheritanceManager import SettingInheritanceManager
from cura.Settings.UserProfilesModel import UserProfilesModel
from . import PlatformPhysics
from . import BuildVolume
from . import CameraAnimation
from . import PrintInformation
from . import CuraActions
from . import ZOffsetDecorator
from . import CuraSplashScreen
from . import CameraImageProvider
from . import MachineActionManager
from cura.Settings.MachineManager import MachineManager
from cura.Settings.MaterialManager import MaterialManager
from cura.Settings.ExtruderManager import ExtruderManager
from cura.Settings.UserChangesModel import UserChangesModel
from cura.Settings.ExtrudersModel import ExtrudersModel
from cura.Settings.ContainerSettingsModel import ContainerSettingsModel
from cura.Settings.MaterialSettingsVisibilityHandler import MaterialSettingsVisibilityHandler
from cura.Settings.QualitySettingsModel import QualitySettingsModel
from cura.Settings.ContainerManager import ContainerManager
from cura.Settings.GlobalStack import GlobalStack
from cura.Settings.ExtruderStack import ExtruderStack
from PyQt5.QtCore import QUrl, pyqtSignal, pyqtProperty, QEvent, Q_ENUMS
from UM.FlameProfiler import pyqtSlot
from PyQt5.QtGui import QColor, QIcon
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtQml import qmlRegisterUncreatableType, qmlRegisterSingletonType, qmlRegisterType
import sys
import os.path
import numpy
import copy
import urllib.parse
import os
import argparse
import json
# Silence all numpy floating-point warnings process-wide.
numpy.seterr(all="ignore")

# MYPY is a static-analysis guard: at runtime, try the generated version
# module and fall back to development defaults when it is absent.
MYPY = False
if not MYPY:
    try:
        from cura.CuraVersion import CuraVersion, CuraBuildType
    except ImportError:
        CuraVersion = "master"  # [CodeStyle: Reflecting imported value]
        CuraBuildType = ""
class CuraApplication(QtApplication):
    # SettingVersion represents the set of settings available in the
    # machine/extruder definitions.  It must be increased whenever a
    # non-backwards-compatible settings change is made.
    SettingVersion = 2

    class ResourceTypes:
        # Cura-specific resource storage types, offset past Uranium's
        # Resources.UserType base so they do not collide with core types.
        QmlFiles = Resources.UserType + 1
        Firmware = Resources.UserType + 2
        QualityInstanceContainer = Resources.UserType + 3
        MaterialInstanceContainer = Resources.UserType + 4
        VariantInstanceContainer = Resources.UserType + 5
        UserInstanceContainer = Resources.UserType + 6
        MachineStack = Resources.UserType + 7
        ExtruderStack = Resources.UserType + 8
        DefinitionChangesContainer = Resources.UserType + 9

    # Expose the ResourceTypes values to QML as enums.
    Q_ENUMS(ResourceTypes)
    def __init__(self):
        """Set up resources, setting metadata, containers and preferences.

        Note that super().__init__ (which creates the actual Qt
        application) is deliberately called only after the setting
        properties and storage types have been registered.
        """
        # this list of dir names will be used by UM to detect an old cura directory
        for dir_name in ["extruders", "machine_instances", "materials", "plugins", "quality", "user", "variants"]:
            Resources.addExpectedDirNameInData(dir_name)

        Resources.addSearchPath(os.path.join(QtApplication.getInstallPrefix(), "share", "cura", "resources"))
        if not hasattr(sys, "frozen"):
            # Running from source (not a frozen build): also search the repo tree.
            Resources.addSearchPath(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "resources"))

        self._open_file_queue = []  # Files to open when plug-ins are loaded.

        # Need to do this before ContainerRegistry tries to load the machines
        SettingDefinition.addSupportedProperty("settable_per_mesh", DefinitionPropertyType.Any, default = True, read_only = True)
        SettingDefinition.addSupportedProperty("settable_per_extruder", DefinitionPropertyType.Any, default = True, read_only = True)
        # this setting can be changed for each group in one-at-a-time mode
        SettingDefinition.addSupportedProperty("settable_per_meshgroup", DefinitionPropertyType.Any, default = True, read_only = True)
        SettingDefinition.addSupportedProperty("settable_globally", DefinitionPropertyType.Any, default = True, read_only = True)

        # From which stack the setting would inherit if not defined per object (handled in the engine)
        # AND for settings which are not settable_per_mesh:
        # which extruder is the only extruder this setting is obtained from
        SettingDefinition.addSupportedProperty("limit_to_extruder", DefinitionPropertyType.Function, default = "-1", depends_on = "value")

        # For settings which are not settable_per_mesh and not settable_per_extruder:
        # A function which determines the global/meshgroup value by looking at the values of the setting in all (used) extruders
        SettingDefinition.addSupportedProperty("resolve", DefinitionPropertyType.Function, default = None, depends_on = "value")

        SettingDefinition.addSettingType("extruder", None, str, Validator)
        SettingDefinition.addSettingType("optional_extruder", None, str, None)
        SettingDefinition.addSettingType("[int]", None, str, None)

        # Setting functions that can be used inside setting value expressions.
        SettingFunction.registerOperator("extruderValues", ExtruderManager.getExtruderValues)
        SettingFunction.registerOperator("extruderValue", ExtruderManager.getExtruderValue)
        SettingFunction.registerOperator("resolveOrValue", ExtruderManager.getResolveOrValue)

        ## Add the 4 types of profiles to storage.
        Resources.addStorageType(self.ResourceTypes.QualityInstanceContainer, "quality")
        Resources.addStorageType(self.ResourceTypes.VariantInstanceContainer, "variants")
        Resources.addStorageType(self.ResourceTypes.MaterialInstanceContainer, "materials")
        Resources.addStorageType(self.ResourceTypes.UserInstanceContainer, "user")
        Resources.addStorageType(self.ResourceTypes.ExtruderStack, "extruders")
        Resources.addStorageType(self.ResourceTypes.MachineStack, "machine_instances")
        Resources.addStorageType(self.ResourceTypes.DefinitionChangesContainer, "definition_changes")

        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.QualityInstanceContainer)
        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.VariantInstanceContainer)
        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.MaterialInstanceContainer)
        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.UserInstanceContainer)
        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.ExtruderStack)
        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.MachineStack)
        ContainerRegistry.getInstance().addResourceType(self.ResourceTypes.DefinitionChangesContainer)

        ## Initialise the version upgrade manager with Cura's storage paths.
        import UM.VersionUpgradeManager #Needs to be here to prevent circular dependencies.
        UM.VersionUpgradeManager.VersionUpgradeManager.getInstance().setCurrentVersions(
            {
                ("quality_changes", InstanceContainer.Version * 1000000 + self.SettingVersion): (self.ResourceTypes.QualityInstanceContainer, "application/x-uranium-instancecontainer"),
                ("machine_stack", ContainerStack.Version * 1000000 + self.SettingVersion): (self.ResourceTypes.MachineStack, "application/x-cura-globalstack"),
                ("extruder_train", ContainerStack.Version * 1000000 + self.SettingVersion): (self.ResourceTypes.ExtruderStack, "application/x-cura-extruderstack"),
                ("preferences", Preferences.Version * 1000000 + self.SettingVersion): (Resources.Preferences, "application/x-uranium-preferences"),
                ("user", InstanceContainer.Version * 1000000 + self.SettingVersion): (self.ResourceTypes.UserInstanceContainer, "application/x-uranium-instancecontainer"),
                ("definition_changes", InstanceContainer.Version * 1000000 + self.SettingVersion): (self.ResourceTypes.DefinitionChangesContainer, "application/x-uranium-instancecontainer"),
            }
        )

        self._currently_loading_files = []
        self._non_sliceable_extensions = []

        self._machine_action_manager = MachineActionManager.MachineActionManager()
        self._machine_manager = None    # This is initialized on demand.
        self._material_manager = None
        self._setting_inheritance_manager = None

        self._additional_components = {} # Components to add to certain areas in the interface

        # Creates the actual Qt application.
        super().__init__(name = "cura", version = CuraVersion, buildtype = CuraBuildType)

        self.setWindowIcon(QIcon(Resources.getPath(Resources.Images, "cura-icon.png")))

        self.setRequiredPlugins([
            "CuraEngineBackend",
            "MeshView",
            "LayerView",
            "STLReader",
            "SelectionTool",
            "CameraTool",
            "GCodeWriter",
            "LocalFileOutputDevice",
            "TranslateTool",
            "FileLogger",
            "XmlMaterialProfile"
        ])

        # Instance state, populated later (mostly in run()).
        self._physics = None
        self._volume = None
        self._output_devices = {}
        self._print_information = None
        self._previous_active_tool = None
        self._platform_activity = False
        self._scene_bounding_box = AxisAlignedBox.Null

        self._job_name = None
        self._center_after_select = False
        self._camera_animation = None
        self._cura_actions = None
        self._started = False

        self._message_box_callback = None
        self._message_box_callback_arguments = []
        self._preferred_mimetype = ""
        self._i18n_catalog = i18nCatalog("cura")

        self.getController().getScene().sceneChanged.connect(self.updatePlatformActivity)
        self.getController().toolOperationStopped.connect(self._onToolOperationStopped)
        self.getController().contextMenuRequested.connect(self._onContextMenuRequested)

        Resources.addType(self.ResourceTypes.QmlFiles, "qml")
        Resources.addType(self.ResourceTypes.Firmware, "firmware")

        self.showSplashMessage(self._i18n_catalog.i18nc("@info:progress", "Loading machines..."))

        # Add empty variant, material and quality containers.
        # Since they are empty, they should never be serialized and instead just programmatically created.
        # We need them to simplify the switching between materials.
        empty_container = ContainerRegistry.getInstance().getEmptyInstanceContainer()

        empty_variant_container = copy.deepcopy(empty_container)
        empty_variant_container._id = "empty_variant"
        empty_variant_container.addMetaDataEntry("type", "variant")
        ContainerRegistry.getInstance().addContainer(empty_variant_container)

        empty_material_container = copy.deepcopy(empty_container)
        empty_material_container._id = "empty_material"
        empty_material_container.addMetaDataEntry("type", "material")
        ContainerRegistry.getInstance().addContainer(empty_material_container)

        empty_quality_container = copy.deepcopy(empty_container)
        empty_quality_container._id = "empty_quality"
        empty_quality_container.setName("Not Supported")
        empty_quality_container.addMetaDataEntry("quality_type", "normal")
        empty_quality_container.addMetaDataEntry("type", "quality")
        ContainerRegistry.getInstance().addContainer(empty_quality_container)

        empty_quality_changes_container = copy.deepcopy(empty_container)
        empty_quality_changes_container._id = "empty_quality_changes"
        empty_quality_changes_container.addMetaDataEntry("type", "quality_changes")
        ContainerRegistry.getInstance().addContainer(empty_quality_changes_container)

        with ContainerRegistry.getInstance().lockFile():
            ContainerRegistry.getInstance().load()

        # set the setting version for Preferences
        preferences = Preferences.getInstance()
        preferences.addPreference("metadata/setting_version", 0)
        preferences.setValue("metadata/setting_version", self.SettingVersion) #Don't make it equal to the default so that the setting version always gets written to the file.

        preferences.addPreference("cura/active_mode", "simple")
        preferences.addPreference("cura/categories_expanded", "")
        preferences.addPreference("cura/jobname_prefix", True)
        preferences.addPreference("view/center_on_select", True)
        preferences.addPreference("mesh/scale_to_fit", False)
        preferences.addPreference("mesh/scale_tiny_meshes", True)
        preferences.addPreference("cura/dialog_on_project_save", True)
        preferences.addPreference("cura/asked_dialog_on_project_save", False)
        preferences.addPreference("cura/choice_on_profile_override", "always_ask")
        preferences.addPreference("cura/choice_on_open_project", "always_ask")

        preferences.addPreference("cura/currency", "€")
        preferences.addPreference("cura/material_settings", "{}")

        preferences.addPreference("view/invert_zoom", False)

        for key in [
            "dialog_load_path",  # dialog_save_path is in LocalFileOutputDevicePlugin
            "dialog_profile_path",
            "dialog_material_path"]:
            preferences.addPreference("local_file/%s" % key, os.path.expanduser("~/"))

        preferences.setDefault("local_file/last_used_type", "text/x-gcode")

        # Default visible settings; whitespace is stripped and newlines become
        # the ";" separator, so only the setting keys and their order matter.
        preferences.setDefault("general/visible_settings", """
            machine_settings
            resolution
                layer_height
            shell
                wall_thickness
                top_bottom_thickness
                z_seam_x
                z_seam_y
            infill
                infill_sparse_density
                gradual_infill_steps
            material
                material_print_temperature
                material_bed_temperature
                material_diameter
                material_flow
                retraction_enable
            speed
                speed_print
                speed_travel
                acceleration_print
                acceleration_travel
                jerk_print
                jerk_travel
            travel
            cooling
                cool_fan_enabled
            support
                support_enable
                support_extruder_nr
                support_type
            platform_adhesion
                adhesion_type
                adhesion_extruder_nr
                brim_width
                raft_airgap
                layer_0_z_overlap
                raft_surface_layers
            dual
                prime_tower_enable
                prime_tower_size
                prime_tower_position_x
                prime_tower_position_y
            meshfix
            blackmagic
                print_sequence
                infill_mesh
                cutting_mesh
            experimental
        """.replace("\n", ";").replace(" ", ""))

        self.applicationShuttingDown.connect(self.saveSettings)
        self.engineCreatedSignal.connect(self._onEngineCreated)
        self.globalContainerStackChanged.connect(self._onGlobalContainerChanged)
        self._onGlobalContainerChanged()

        self._plugin_registry.addSupportedPluginExtension("curaplugin", "Cura Plugin")
    def _onEngineCreated(self):
        # Register the camera image provider once the QML engine exists.
        self._engine.addImageProvider("camera", CameraImageProvider.CameraImageProvider())
## A reusable dialogbox
#
showMessageBox = pyqtSignal(str, str, str, str, int, int, arguments = ["title", "text", "informativeText", "detailedText", "buttons", "icon"])
def messageBox(self, title, text, informativeText = "", detailedText = "", buttons = QMessageBox.Ok, icon = QMessageBox.NoIcon, callback = None, callback_arguments = []):
self._message_box_callback = callback
self._message_box_callback_arguments = callback_arguments
self.showMessageBox.emit(title, text, informativeText, detailedText, buttons, icon)
showDiscardOrKeepProfileChanges = pyqtSignal()
def discardOrKeepProfileChanges(self):
choice = Preferences.getInstance().getValue("cura/choice_on_profile_override")
if choice == "always_discard":
# don't show dialog and DISCARD the profile
self.discardOrKeepProfileChangesClosed("discard")
elif choice == "always_keep":
# don't show dialog and KEEP the profile
self.discardOrKeepProfileChangesClosed("keep")
else:
# ALWAYS ask whether to keep or discard the profile
self.showDiscardOrKeepProfileChanges.emit()
    @pyqtSlot(str)
    def discardOrKeepProfileChangesClosed(self, option):
        # "discard" wipes the user (top) containers of the global stack and
        # of every extruder; any other option leaves the changes untouched.
        if option == "discard":
            global_stack = self.getGlobalContainerStack()
            for extruder in ExtruderManager.getInstance().getMachineExtruders(global_stack.getId()):
                extruder.getTop().clear()
            global_stack.getTop().clear()
    @pyqtSlot(int)
    def messageBoxClosed(self, button):
        # Invoke the callback registered by messageBox() (if any) with the
        # clicked button, then drop the registration so it only fires once.
        if self._message_box_callback:
            self._message_box_callback(button, *self._message_box_callback_arguments)
            self._message_box_callback = None
            self._message_box_callback_arguments = []
showPrintMonitor = pyqtSignal(bool, arguments = ["show"])
    ## Cura has multiple locations where instance containers need to be saved, so we need to handle this differently.
    #
    #  Note that the AutoSave plugin also calls this method.
    def saveSettings(self):
        if not self._started: # Do not do saving during application start
            return

        # Lock file for "more" atomically loading and saving to/from config dir.
        with ContainerRegistry.getInstance().lockFile():
            for instance in ContainerRegistry.getInstance().findInstanceContainers():
                if not instance.isDirty():
                    # Only containers with unsaved changes are written out.
                    continue

                try:
                    data = instance.serialize()
                except NotImplementedError:
                    # Containers that cannot serialize themselves are skipped.
                    continue
                except Exception:
                    Logger.logException("e", "An exception occurred when serializing container %s", instance.getId())
                    continue

                mime_type = ContainerRegistry.getMimeTypeForContainer(type(instance))
                file_name = urllib.parse.quote_plus(instance.getId()) + "." + mime_type.preferredSuffix
                instance_type = instance.getMetaDataEntry("type")
                path = None
                # Route each container type to its dedicated storage directory;
                # unknown types (path stays None) are not saved here.
                if instance_type == "material":
                    path = Resources.getStoragePath(self.ResourceTypes.MaterialInstanceContainer, file_name)
                elif instance_type == "quality" or instance_type == "quality_changes":
                    path = Resources.getStoragePath(self.ResourceTypes.QualityInstanceContainer, file_name)
                elif instance_type == "user":
                    path = Resources.getStoragePath(self.ResourceTypes.UserInstanceContainer, file_name)
                elif instance_type == "variant":
                    path = Resources.getStoragePath(self.ResourceTypes.VariantInstanceContainer, file_name)
                elif instance_type == "definition_changes":
                    path = Resources.getStoragePath(self.ResourceTypes.DefinitionChangesContainer, file_name)

                if path:
                    instance.setPath(path)
                    with SaveFile(path, "wt") as f:
                        f.write(data)

            for stack in ContainerRegistry.getInstance().findContainerStacks():
                self.saveStack(stack)
    def saveStack(self, stack):
        """Serialize one container stack to its storage location on disk."""
        if not stack.isDirty():
            # Nothing changed since the last save.
            return
        try:
            data = stack.serialize()
        except NotImplementedError:
            return
        except Exception:
            Logger.logException("e", "An exception occurred when serializing container %s", stack.getId())
            return

        mime_type = ContainerRegistry.getMimeTypeForContainer(type(stack))
        file_name = urllib.parse.quote_plus(stack.getId()) + "." + mime_type.preferredSuffix
        path = None
        # Global and extruder stacks each have their own storage directory;
        # anything else falls back to the generic container-stack location.
        if isinstance(stack, GlobalStack):
            path = Resources.getStoragePath(self.ResourceTypes.MachineStack, file_name)
        elif isinstance(stack, ExtruderStack):
            path = Resources.getStoragePath(self.ResourceTypes.ExtruderStack, file_name)
        else:
            path = Resources.getStoragePath(Resources.ContainerStacks, file_name)

        stack.setPath(path)
        with SaveFile(path, "wt") as f:
            f.write(data)
@pyqtSlot(str, result = QUrl)
def getDefaultPath(self, key):
default_path = Preferences.getInstance().getValue("local_file/%s" % key)
return QUrl.fromLocalFile(default_path)
@pyqtSlot(str, str)
def setDefaultPath(self, key, default_path):
Preferences.getInstance().setValue("local_file/%s" % key, QUrl(default_path).toLocalFile())
    @classmethod
    def getStaticVersion(cls):
        # Version string baked in at build time ("master" in development).
        return CuraVersion
    ## Handle loading of all plugin types (and the backend explicitly)
    #  \sa PluginRegistry
    def _loadPlugins(self):
        self._plugin_registry.addType("profile_reader", self._addProfileReader)
        self._plugin_registry.addType("profile_writer", self._addProfileWriter)
        self._plugin_registry.addPluginLocation(os.path.join(QtApplication.getInstallPrefix(), "lib", "cura"))
        if not hasattr(sys, "frozen"):
            # Running from source: also load plugins from the repository tree.
            self._plugin_registry.addPluginLocation(os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "plugins"))

        # Load the logger and the backend explicitly before the bulk load.
        self._plugin_registry.loadPlugin("ConsoleLogger")
        self._plugin_registry.loadPlugin("CuraEngineBackend")

        self._plugin_registry.loadPlugins()

        if self.getBackend() is None:
            raise RuntimeError("Could not load the backend plugin!")

        self._plugins_loaded = True
@classmethod
def addCommandLineOptions(self, parser):
super().addCommandLineOptions(parser)
parser.add_argument("file", nargs="*", help="Files to load after starting the application.")
parser.add_argument("--single-instance", action="store_true", default=False)
    # Set up a local socket server that coordinates single-instance Curas
    # and accepts commands from later invocations.
    def _setUpSingleInstanceServer(self):
        if self.getCommandLineOption("single_instance", False):
            self.__single_instance_server = QLocalServer()
            self.__single_instance_server.newConnection.connect(self._singleInstanceServerNewConnection)
            # Well-known name that preStartUp() of a second instance connects to.
            self.__single_instance_server.listen("ultimaker-cura")
def _singleInstanceServerNewConnection(self):
Logger.log("i", "New connection recevied on our single-instance server")
remote_cura_connection = self.__single_instance_server.nextPendingConnection()
if remote_cura_connection is not None:
def readCommands():
line = remote_cura_connection.readLine()
while len(line) != 0: # There is also a .canReadLine()
try:
payload = json.loads(str(line, encoding="ASCII").strip())
command = payload["command"]
# Command: Remove all models from the build plate.
if command == "clear-all":
self.deleteAll()
# Command: Load a model file
elif command == "open":
self._openFile(payload["filePath"])
# WARNING ^ this method is async and we really should wait until
# the file load is complete before processing more commands.
# Command: Activate the window and bring it to the top.
elif command == "focus":
# Operating systems these days prevent windows from moving around by themselves.
# 'alert' or flashing the icon in the taskbar is the best thing we do now.
self.getMainWindow().alert(0)
# Command: Close the socket connection. We're done.
elif command == "close-connection":
remote_cura_connection.close()
else:
Logger.log("w", "Received an unrecognized command " + str(command))
except json.decoder.JSONDecodeError as ex:
Logger.log("w", "Unable to parse JSON command in _singleInstanceServerNewConnection(): " + repr(ex))
line = remote_cura_connection.readLine()
remote_cura_connection.readyRead.connect(readCommands)
## Perform any checks before creating the main application.
#
# This should be called directly before creating an instance of CuraApplication.
# \returns \type{bool} True if the whole Cura app should continue running.
@classmethod
def preStartUp(cls):
# Peek the arguments and look for the 'single-instance' flag.
parser = argparse.ArgumentParser(prog="cura") # pylint: disable=bad-whitespace
CuraApplication.addCommandLineOptions(parser)
parsed_command_line = vars(parser.parse_args())
if "single_instance" in parsed_command_line and parsed_command_line["single_instance"]:
Logger.log("i", "Checking for the presence of an ready running Cura instance.")
single_instance_socket = QLocalSocket()
Logger.log("d", "preStartUp(): full server name: " + single_instance_socket.fullServerName())
single_instance_socket.connectToServer("ultimaker-cura")
single_instance_socket.waitForConnected()
if single_instance_socket.state() == QLocalSocket.ConnectedState:
Logger.log("i", "Connection has been made to the single-instance Cura socket.")
# Protocol is one line of JSON terminated with a carriage return.
# "command" field is required and holds the name of the command to execute.
# Other fields depend on the command.
payload = {"command": "clear-all"}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ASCII"))
payload = {"command": "focus"}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ASCII"))
if len(parsed_command_line["file"]) != 0:
for filename in parsed_command_line["file"]:
payload = {"command": "open", "filePath": filename}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ASCII"))
payload = {"command": "close-connection"}
single_instance_socket.write(bytes(json.dumps(payload) + "\n", encoding="ASCII"))
single_instance_socket.flush()
single_instance_socket.waitForDisconnected()
return False
return True
def run(self):
self.showSplashMessage(self._i18n_catalog.i18nc("@info:progress", "Setting up scene..."))
self._setUpSingleInstanceServer()
controller = self.getController()
controller.setActiveView("SolidView")
controller.setCameraTool("CameraTool")
controller.setSelectionTool("SelectionTool")
t = controller.getTool("TranslateTool")
if t:
t.setEnabledAxis([ToolHandle.XAxis, ToolHandle.YAxis,ToolHandle.ZAxis])
Selection.selectionChanged.connect(self.onSelectionChanged)
root = controller.getScene().getRoot()
# The platform is a child of BuildVolume
self._volume = BuildVolume.BuildVolume(root)
# Set the build volume of the arranger to the used build volume
Arrange.build_volume = self._volume
self.getRenderer().setBackgroundColor(QColor(245, 245, 245))
self._physics = PlatformPhysics.PlatformPhysics(controller, self._volume)
camera = Camera("3d", root)
camera.setPosition(Vector(-80, 250, 700))
camera.setPerspective(True)
camera.lookAt(Vector(0, 0, 0))
controller.getScene().setActiveCamera("3d")
camera_tool = self.getController().getTool("CameraTool")
camera_tool.setOrigin(Vector(0, 100, 0))
camera_tool.setZoomRange(0.1, 200000)
self._camera_animation = CameraAnimation.CameraAnimation()
self._camera_animation.setCameraTool(self.getController().getTool("CameraTool"))
self.showSplashMessage(self._i18n_catalog.i18nc("@info:progress", "Loading interface..."))
# Initialise extruder so as to listen to global container stack changes before the first global container stack is set.
ExtruderManager.getInstance()
qmlRegisterSingletonType(MachineManager, "Cura", 1, 0, "MachineManager", self.getMachineManager)
qmlRegisterSingletonType(MaterialManager, "Cura", 1, 0, "MaterialManager", self.getMaterialManager)
qmlRegisterSingletonType(SettingInheritanceManager, "Cura", 1, 0, "SettingInheritanceManager",
self.getSettingInheritanceManager)
qmlRegisterSingletonType(MachineActionManager.MachineActionManager, "Cura", 1, 0, "MachineActionManager", self.getMachineActionManager)
self.setMainQml(Resources.getPath(self.ResourceTypes.QmlFiles, "Cura.qml"))
self._qml_import_paths.append(Resources.getPath(self.ResourceTypes.QmlFiles))
self.initializeEngine()
if self._engine.rootObjects:
self.closeSplash()
for file in self.getCommandLineOption("file", []):
self._openFile(file)
for file_name in self._open_file_queue: #Open all the files that were queued up while plug-ins were loading.
self._openFile(file_name)
self._started = True
self.exec_()
def getMachineManager(self, *args):
if self._machine_manager is None:
self._machine_manager = MachineManager.createMachineManager()
return self._machine_manager
def getMaterialManager(self, *args):
if self._material_manager is None:
self._material_manager = MaterialManager.createMaterialManager()
return self._material_manager
def getSettingInheritanceManager(self, *args):
if self._setting_inheritance_manager is None:
self._setting_inheritance_manager = SettingInheritanceManager.createSettingInheritanceManager()
return self._setting_inheritance_manager
    ## Get the machine action manager
    # We ignore any *args given to this, as we also register the machine manager as qml singleton.
    # It wants to give this function an engine and script engine, but we don't care about that.
    def getMachineActionManager(self, *args):
        """Return the already-constructed machine action manager instance."""
        return self._machine_action_manager
## Handle Qt events
def event(self, event):
if event.type() == QEvent.FileOpen:
if self._plugins_loaded:
self._openFile(event.file())
else:
self._open_file_queue.append(event.file())
return super().event(event)
    ## Get print information (duration / material used)
    def getPrintInformation(self):
        """Return the PrintInformation object (set during registerObjects; None before that)."""
        return self._print_information
    ## Registers objects for the QML engine to use.
    #
    #   \param engine The QML engine.
    def registerObjects(self, engine):
        super().registerObjects(engine)
        # Context properties available directly to QML.
        engine.rootContext().setContextProperty("Printer", self)
        engine.rootContext().setContextProperty("CuraApplication", self)
        self._print_information = PrintInformation.PrintInformation()
        engine.rootContext().setContextProperty("PrintInformation", self._print_information)
        self._cura_actions = CuraActions.CuraActions(self)
        engine.rootContext().setContextProperty("CuraActions", self._cura_actions)

        # Types instantiable (or not) from QML under the "Cura" 1.x namespace.
        qmlRegisterUncreatableType(CuraApplication, "Cura", 1, 0, "ResourceTypes", "Just an Enum type")

        qmlRegisterType(ExtrudersModel, "Cura", 1, 0, "ExtrudersModel")

        qmlRegisterType(ContainerSettingsModel, "Cura", 1, 0, "ContainerSettingsModel")
        qmlRegisterSingletonType(ProfilesModel, "Cura", 1, 0, "ProfilesModel", ProfilesModel.createProfilesModel)
        qmlRegisterType(MaterialsModel, "Cura", 1, 0, "MaterialsModel")
        qmlRegisterType(QualityAndUserProfilesModel, "Cura", 1, 0, "QualityAndUserProfilesModel")
        qmlRegisterType(UserProfilesModel, "Cura", 1, 0, "UserProfilesModel")
        qmlRegisterType(MaterialSettingsVisibilityHandler, "Cura", 1, 0, "MaterialSettingsVisibilityHandler")
        qmlRegisterType(QualitySettingsModel, "Cura", 1, 0, "QualitySettingsModel")
        qmlRegisterType(MachineNameValidator, "Cura", 1, 0, "MachineNameValidator")
        qmlRegisterType(UserChangesModel, "Cura", 1, 1, "UserChangesModel")

        qmlRegisterSingletonType(ContainerManager, "Cura", 1, 0, "ContainerManager", ContainerManager.createContainerManager)

        # As of Qt5.7, it is necessary to get rid of any ".." in the path for the singleton to work.
        actions_url = QUrl.fromLocalFile(os.path.abspath(Resources.getPath(CuraApplication.ResourceTypes.QmlFiles, "Actions.qml")))
        qmlRegisterSingletonType(actions_url, "Cura", 1, 0, "Actions")

        engine.rootContext().setContextProperty("ExtruderManager", ExtruderManager.getInstance())

        # Auto-register every remaining QML file in the resources directory as a type named after the file.
        for path in Resources.getAllResourcesOfType(CuraApplication.ResourceTypes.QmlFiles):
            type_name = os.path.splitext(os.path.basename(path))[0]
            # Cura.qml is the main window and Actions.qml was registered as a singleton above.
            if type_name in ("Cura", "Actions"):
                continue

            # Ignore anything that is not a QML file.
            if not path.endswith(".qml"):
                continue

            qmlRegisterType(QUrl.fromLocalFile(path), "Cura", 1, 0, type_name)
    def onSelectionChanged(self):
        """Keep the active tool consistent with the selection.

        On selection: restore the previously active tool if it was parked, falling back to
        TranslateTool when the tool is disabled for the new selection. On deselection: park
        the current tool in self._previous_active_tool and deactivate it.
        """
        if Selection.hasSelection():
            if self.getController().getActiveTool():
                # If the tool has been disabled by the new selection
                if not self.getController().getActiveTool().getEnabled():
                    # Default
                    self.getController().setActiveTool("TranslateTool")
            else:
                if self._previous_active_tool:
                    # Restore the tool that was active before the selection was cleared.
                    self.getController().setActiveTool(self._previous_active_tool)
                    if not self.getController().getActiveTool().getEnabled():
                        self.getController().setActiveTool("TranslateTool")
                    self._previous_active_tool = None
                else:
                    # Default
                    self.getController().setActiveTool("TranslateTool")
            if Preferences.getInstance().getValue("view/center_on_select"):
                # Actual centering happens in _onToolOperationStopped.
                self._center_after_select = True
        else:
            if self.getController().getActiveTool():
                # Remember which tool was active so it can be restored on the next selection.
                self._previous_active_tool = self.getController().getActiveTool().getPluginId()
                self.getController().setActiveTool(None)
def _onToolOperationStopped(self, event):
if self._center_after_select and Selection.getSelectedObject(0) is not None:
self._center_after_select = False
self._camera_animation.setStart(self.getController().getTool("CameraTool").getOrigin())
self._camera_animation.setTarget(Selection.getSelectedObject(0).getWorldPosition())
self._camera_animation.start()
def _onGlobalContainerChanged(self):
if self._global_container_stack is not None:
machine_file_formats = [file_type.strip() for file_type in self._global_container_stack.getMetaDataEntry("file_formats").split(";")]
new_preferred_mimetype = ""
if machine_file_formats:
new_preferred_mimetype = machine_file_formats[0]
if new_preferred_mimetype != self._preferred_mimetype:
self._preferred_mimetype = new_preferred_mimetype
self.preferredOutputMimetypeChanged.emit()
requestAddPrinter = pyqtSignal()
activityChanged = pyqtSignal()
sceneBoundingBoxChanged = pyqtSignal()
preferredOutputMimetypeChanged = pyqtSignal()
    @pyqtProperty(bool, notify = activityChanged)
    def platformActivity(self):
        """QML property: True when the scene contains printable content (set by updatePlatformActivity)."""
        return self._platform_activity
    @pyqtProperty(str, notify=preferredOutputMimetypeChanged)
    def preferredOutputMimetype(self):
        """QML property: preferred output mimetype of the active machine (see _onGlobalContainerChanged)."""
        return self._preferred_mimetype
    @pyqtProperty(str, notify = sceneBoundingBoxChanged)
    def getSceneBoundingBoxString(self):
        """QML property: human-readable "W x D x H mm" string for the scene bounding box."""
        return self._i18n_catalog.i18nc("@info", "%(width).1f x %(depth).1f x %(height).1f mm") % {'width' : self._scene_bounding_box.width.item(), 'depth': self._scene_bounding_box.depth.item(), 'height' : self._scene_bounding_box.height.item()}
def updatePlatformActivity(self, node = None):
count = 0
scene_bounding_box = None
is_block_slicing_node = False
for node in DepthFirstIterator(self.getController().getScene().getRoot()):
if type(node) is not SceneNode or (not node.getMeshData() and not node.callDecoration("getLayerData")):
continue
if node.callDecoration("isBlockSlicing"):
is_block_slicing_node = True
count += 1
if not scene_bounding_box:
scene_bounding_box = node.getBoundingBox()
else:
other_bb = node.getBoundingBox()
if other_bb is not None:
scene_bounding_box = scene_bounding_box + node.getBoundingBox()
print_information = self.getPrintInformation()
if print_information:
print_information.setPreSliced(is_block_slicing_node)
if not scene_bounding_box:
scene_bounding_box = AxisAlignedBox.Null
if repr(self._scene_bounding_box) != repr(scene_bounding_box) and scene_bounding_box.isValid():
self._scene_bounding_box = scene_bounding_box
self.sceneBoundingBoxChanged.emit()
self._platform_activity = True if count > 0 else False
self.activityChanged.emit()
    # Remove all selected objects from the scene.
    @pyqtSlot()
    @deprecated("Moved to CuraActions", "2.6")
    def deleteSelection(self):
        """Delete every selected node in one grouped (single-undo) operation, dissolving groups left with one child."""
        if not self.getController().getToolsEnabled():
            return

        removed_group_nodes = []
        op = GroupedOperation()
        nodes = Selection.getAllSelectedObjects()
        for node in nodes:
            op.addOperation(RemoveSceneNodeOperation(node))

            group_node = node.getParent()
            if group_node and group_node.callDecoration("isGroup") and group_node not in removed_group_nodes:
                # If only one child remains after deletion, the group is pointless:
                # reparent the survivor and remove the group node itself.
                remaining_nodes_in_group = list(set(group_node.getChildren()) - set(nodes))
                if len(remaining_nodes_in_group) == 1:
                    removed_group_nodes.append(group_node)
                    op.addOperation(SetParentOperation(remaining_nodes_in_group[0], group_node.getParent()))
                    op.addOperation(RemoveSceneNodeOperation(group_node))
        op.push()
    ## Remove an object from the scene.
    #  Note that this only removes an object if it is selected.
    @pyqtSlot("quint64")
    @deprecated("Use deleteSelection instead", "2.6")
    def deleteObject(self, object_id):
        """Delete the node with the given id (or the current selection when the id misses), dissolving its group if emptied."""
        if not self.getController().getToolsEnabled():
            return

        node = self.getController().getScene().findObject(object_id)

        if not node and object_id != 0:  # Workaround for tool handles overlapping the selected object
            node = Selection.getSelectedObject(0)

        if node:
            op = GroupedOperation()
            op.addOperation(RemoveSceneNodeOperation(node))

            group_node = node.getParent()
            if group_node:
                # Note that at this point the node has not yet been deleted,
                # so <= 2 children means at most one will remain afterwards.
                if len(group_node.getChildren()) <= 2 and group_node.callDecoration("isGroup"):
                    op.addOperation(SetParentOperation(group_node.getChildren()[0], group_node.getParent()))
                    op.addOperation(RemoveSceneNodeOperation(group_node))

            op.push()
## Create a number of copies of existing object.
# \param object_id
# \param count number of copies
# \param min_offset minimum offset to other objects.
@pyqtSlot("quint64", int)
@deprecated("Use CuraActions::multiplySelection", "2.6")
def multiplyObject(self, object_id, count, min_offset = 8):
node = self.getController().getScene().findObject(object_id)
if not node:
node = Selection.getSelectedObject(0)
while node.getParent() and node.getParent().callDecoration("isGroup"):
node = node.getParent()
job = MultiplyObjectsJob([node], count, min_offset)
job.start()
return
## Center object on platform.
@pyqtSlot("quint64")
@deprecated("Use CuraActions::centerSelection", "2.6")
def centerObject(self, object_id):
node = self.getController().getScene().findObject(object_id)
if not node and object_id != 0: # Workaround for tool handles overlapping the selected object
node = Selection.getSelectedObject(0)
if not node:
return
if node.getParent() and node.getParent().callDecoration("isGroup"):
node = node.getParent()
if node:
op = SetTransformOperation(node, Vector())
op.push()
## Select all nodes containing mesh data in the scene.
@pyqtSlot()
def selectAll(self):
if not self.getController().getToolsEnabled():
return
Selection.clear()
for node in DepthFirstIterator(self.getController().getScene().getRoot()):
if type(node) is not SceneNode:
continue
if not node.getMeshData() and not node.callDecoration("isGroup"):
continue # Node that doesnt have a mesh and is not a group.
if node.getParent() and node.getParent().callDecoration("isGroup"):
continue # Grouped nodes don't need resetting as their parent (the group) is resetted)
if not node.isSelectable():
continue # i.e. node with layer data
Selection.add(node)
    ## Delete all nodes containing mesh data in the scene.
    @pyqtSlot()
    def deleteAll(self):
        """Remove every deletable mesh/group node from the scene in one grouped operation, then clear the selection."""
        Logger.log("i", "Clearing scene")
        if not self.getController().getToolsEnabled():
            return

        nodes = []
        for node in DepthFirstIterator(self.getController().getScene().getRoot()):
            if type(node) is not SceneNode:
                continue
            if (not node.getMeshData() and not node.callDecoration("getLayerData")) and not node.callDecoration("isGroup"):
                continue  # Node that doesn't have a mesh and is not a group.
            if node.getParent() and node.getParent().callDecoration("isGroup"):
                continue  # Grouped nodes are removed together with their parent group node.
            nodes.append(node)
        if nodes:
            op = GroupedOperation()

            for node in nodes:
                op.addOperation(RemoveSceneNodeOperation(node))

            op.push()
            Selection.clear()
    ## Reset all translation on nodes with mesh data.
    @pyqtSlot()
    def resetAllTranslation(self):
        """Move every eligible node back to the platform center, keeping rotation/scale, in one grouped operation."""
        Logger.log("i", "Resetting all scene translations")
        nodes = []
        for node in DepthFirstIterator(self.getController().getScene().getRoot()):
            if type(node) is not SceneNode:
                continue
            if not node.getMeshData() and not node.callDecoration("isGroup"):
                continue  # Node that doesn't have a mesh and is not a group.
            if node.getParent() and node.getParent().callDecoration("isGroup"):
                continue  # Grouped nodes don't need resetting as their parent (the group) is reset.
            if not node.isSelectable():
                continue  # i.e. node with layer data
            nodes.append(node)

        if nodes:
            op = GroupedOperation()
            for node in nodes:
                # Ensure that the object is above the build platform
                node.removeDecorator(ZOffsetDecorator.ZOffsetDecorator)
                if node.getBoundingBox():
                    # Lift the node so its bottom sits on the platform (y = 0).
                    center_y = node.getWorldPosition().y - node.getBoundingBox().bottom
                else:
                    center_y = 0
                op.addOperation(SetTransformOperation(node, Vector(0, center_y, 0)))
            op.push()
    ## Reset all transformations on nodes with mesh data.
    @pyqtSlot()
    def resetAll(self):
        """Reset position, rotation and scale of every eligible node in one grouped operation."""
        Logger.log("i", "Resetting all scene transformations")
        nodes = []
        for node in DepthFirstIterator(self.getController().getScene().getRoot()):
            if type(node) is not SceneNode:
                continue
            if not node.getMeshData() and not node.callDecoration("isGroup"):
                continue  # Node that doesn't have a mesh and is not a group.
            if node.getParent() and node.getParent().callDecoration("isGroup"):
                continue  # Grouped nodes don't need resetting as their parent (the group) is reset.
            if not node.isSelectable():
                continue  # i.e. node with layer data
            nodes.append(node)

        if nodes:
            op = GroupedOperation()
            for node in nodes:
                # Ensure that the object is above the build platform
                node.removeDecorator(ZOffsetDecorator.ZOffsetDecorator)
                if node.getBoundingBox():
                    # Lift the node so its bottom sits on the platform (y = 0).
                    center_y = node.getWorldPosition().y - node.getBoundingBox().bottom
                else:
                    center_y = 0
                # Identity rotation and unit scale reset the full transformation.
                op.addOperation(SetTransformOperation(node, Vector(0, center_y, 0), Quaternion(), Vector(1, 1, 1)))
            op.push()
    ## Arrange all objects.
    @pyqtSlot()
    def arrangeAll(self):
        """Collect all movable nodes and hand them to the arranger with no fixed nodes."""
        nodes = []
        for node in DepthFirstIterator(self.getController().getScene().getRoot()):
            if type(node) is not SceneNode:
                continue
            if not node.getMeshData() and not node.callDecoration("isGroup"):
                continue  # Node that doesn't have a mesh and is not a group.
            if node.getParent() and node.getParent().callDecoration("isGroup"):
                continue  # Grouped nodes are arranged via their parent group.
            if not node.isSelectable():
                continue  # i.e. node with layer data
            # Skip nodes that are too big
            # NOTE(review): with `or`, a node that fits in only one dimension is still included;
            # "skip too big" suggests `and` was intended — confirm before changing.
            if node.getBoundingBox().width < self._volume.getBoundingBox().width or node.getBoundingBox().depth < self._volume.getBoundingBox().depth:
                nodes.append(node)
        self.arrange(nodes, fixed_nodes = [])
    ## Arrange Selection
    @pyqtSlot()
    def arrangeSelection(self):
        """Arrange only the selected nodes, treating the other build-plate nodes as fixed obstacles."""
        nodes = Selection.getAllSelectedObjects()

        # What nodes are on the build plate and are not being moved
        fixed_nodes = []
        for node in DepthFirstIterator(self.getController().getScene().getRoot()):
            if type(node) is not SceneNode:
                continue
            if not node.getMeshData() and not node.callDecoration("isGroup"):
                continue  # Node that doesn't have a mesh and is not a group.
            if node.getParent() and node.getParent().callDecoration("isGroup"):
                continue  # Grouped nodes are handled through their parent group.
            if not node.isSelectable():
                continue  # i.e. node with layer data
            if node in nodes:  # exclude selected node from fixed_nodes
                continue
            fixed_nodes.append(node)
        self.arrange(nodes, fixed_nodes)
## Arrange a set of nodes given a set of fixed nodes
# \param nodes nodes that we have to place
# \param fixed_nodes nodes that are placed in the arranger before finding spots for nodes
def arrange(self, nodes, fixed_nodes):
job = ArrangeObjectsJob(nodes, fixed_nodes)
job.start()
    ## Reload all mesh data on the screen from file.
    @pyqtSlot()
    def reloadAll(self):
        """Re-read every mesh node's source file via background ReadMeshJobs; nodes without a file are skipped."""
        Logger.log("i", "Reloading all loaded mesh data.")
        nodes = []
        for node in DepthFirstIterator(self.getController().getScene().getRoot()):
            if type(node) is not SceneNode or not node.getMeshData():
                continue

            nodes.append(node)

        if not nodes:
            return

        for node in nodes:
            file_name = node.getMeshData().getFileName()
            if file_name:
                job = ReadMeshJob(file_name)
                # Remember the target node so _reloadMeshFinished can swap its mesh in.
                job._node = node
                job.finished.connect(self._reloadMeshFinished)
                job.start()
            else:
                Logger.log("w", "Unable to reload data because we don't have a filename.")
## Get logging data of the backend engine
# \returns \type{string} Logging data
@pyqtSlot(result = str)
def getEngineLog(self):
log = ""
for entry in self.getBackend().getLog():
log += entry.decode()
return log
@pyqtSlot("QStringList")
def setExpandedCategories(self, categories):
categories = list(set(categories))
categories.sort()
joined = ";".join(categories)
if joined != Preferences.getInstance().getValue("cura/categories_expanded"):
Preferences.getInstance().setValue("cura/categories_expanded", joined)
self.expandedCategoriesChanged.emit()
    # Emitted when the set of expanded setting categories changes.
    expandedCategoriesChanged = pyqtSignal()

    @pyqtProperty("QStringList", notify = expandedCategoriesChanged)
    def expandedCategories(self):
        """QML property: list of expanded setting categories, read from the preferences."""
        return Preferences.getInstance().getValue("cura/categories_expanded").split(";")
@pyqtSlot()
def mergeSelected(self):
self.groupSelected()
try:
group_node = Selection.getAllSelectedObjects()[0]
except Exception as e:
Logger.log("d", "mergeSelected: Exception:", e)
return
meshes = [node.getMeshData() for node in group_node.getAllChildren() if node.getMeshData()]
# Compute the center of the objects
object_centers = []
# Forget about the translation that the original objects have
zero_translation = Matrix(data=numpy.zeros(3))
for mesh, node in zip(meshes, group_node.getChildren()):
transformation = node.getLocalTransformation()
transformation.setTranslation(zero_translation)
transformed_mesh = mesh.getTransformed(transformation)
center = transformed_mesh.getCenterPosition()
if center is not None:
object_centers.append(center)
if object_centers and len(object_centers) > 0:
middle_x = sum([v.x for v in object_centers]) / len(object_centers)
middle_y = sum([v.y for v in object_centers]) / len(object_centers)
middle_z = sum([v.z for v in object_centers]) / len(object_centers)
offset = Vector(middle_x, middle_y, middle_z)
else:
offset = Vector(0, 0, 0)
# Move each node to the same position.
for mesh, node in zip(meshes, group_node.getChildren()):
transformation = node.getLocalTransformation()
transformation.setTranslation(zero_translation)
transformed_mesh = mesh.getTransformed(transformation)
# Align the object around its zero position
# and also apply the offset to center it inside the group.
node.setPosition(-transformed_mesh.getZeroPosition() - offset)
# Use the previously found center of the group bounding box as the new location of the group
group_node.setPosition(group_node.getBoundingBox().center)
    @pyqtSlot()
    def groupSelected(self):
        """Wrap the current selection in a new group node and select the group instead."""
        # Create a group-node
        group_node = SceneNode()
        group_decorator = GroupDecorator()
        group_node.addDecorator(group_decorator)
        group_node.setParent(self.getController().getScene().getRoot())
        group_node.setSelectable(True)
        # Place the group node at the selection center so children keep their world positions.
        center = Selection.getSelectionCenter()
        group_node.setPosition(center)
        group_node.setCenterPosition(center)

        # Move selected nodes into the group-node
        Selection.applyOperation(SetParentOperation, group_node)

        # Deselect individual nodes and select the group-node instead
        for node in group_node.getChildren():
            Selection.remove(node)
        Selection.add(group_node)
    @pyqtSlot()
    def ungroupSelected(self):
        """Dissolve every selected group: reparent its children to the group's parent and select them."""
        # Copy the selection: Selection.add below mutates it while we iterate.
        selected_objects = Selection.getAllSelectedObjects().copy()
        for node in selected_objects:
            if node.callDecoration("isGroup"):
                op = GroupedOperation()

                group_parent = node.getParent()
                children = node.getChildren().copy()
                for child in children:
                    # Set the parent of the children to the parent of the group-node
                    op.addOperation(SetParentOperation(child, group_parent))

                    # Add all individual nodes to the selection
                    Selection.add(child)

                op.push()
                # Note: The group removes itself from the scene once all its children have left it,
                # see GroupDecorator._onChildrenChanged
    def _createSplashScreen(self):
        """Factory hook: return Cura's custom splash screen."""
        return CuraSplashScreen.CuraSplashScreen()
    def _onActiveMachineChanged(self):
        """Intentionally a no-op; kept as a signal-handler hook."""
        pass
fileLoaded = pyqtSignal(str)
    def _reloadMeshFinished(self, job):
        """Swap the reloaded mesh into the node that reloadAll attached to the job."""
        # TODO; This needs to be fixed properly. We now make the assumption that we only load a single mesh!
        mesh_data = job.getResult()[0].getMeshData()
        if mesh_data:
            job._node.setMeshData(mesh_data)
        else:
            Logger.log("w", "Could not find a mesh in reloaded node.")
    def _openFile(self, filename):
        """Open a local file path by routing it through readLocalFile as a QUrl."""
        self.readLocalFile(QUrl.fromLocalFile(filename))
    def _addProfileReader(self, profile_reader):
        """Plug-in registration hook for profile readers; currently unimplemented."""
        # TODO: Add the profile reader to the list of plug-ins that can be used when importing profiles.
        pass
    def _addProfileWriter(self, profile_writer):
        """Plug-in registration hook for profile writers; currently unimplemented."""
        pass
    @pyqtSlot("QSize")
    def setMinimumWindowSize(self, size):
        """Forward the QML-requested minimum size to the main window."""
        self.getMainWindow().setMinimumSize(size)
    def getBuildVolume(self):
        """Return the BuildVolume scene node created in run()."""
        return self._volume
    # Emitted with the area id when a component is queued for reparenting (see addAdditionalComponent).
    additionalComponentsChanged = pyqtSignal(str, arguments = ["areaId"])

    @pyqtProperty("QVariantMap", notify = additionalComponentsChanged)
    def additionalComponents(self):
        """QML property: mapping of GUI area id to the list of components queued for that area."""
        return self._additional_components
## Add a component to a list of components to be reparented to another area in the GUI.
# The actual reparenting is done by the area itself.
# \param area_id \type{str} Identifying name of the area to which the component should be reparented
# \param component \type{QQuickComponent} The component that should be reparented
@pyqtSlot(str, "QVariant")
def addAdditionalComponent(self, area_id, component):
if area_id not in self._additional_components:
self._additional_components[area_id] = []
self._additional_components[area_id].append(component)
self.additionalComponentsChanged.emit(area_id)
    @pyqtSlot(str)
    def log(self, msg):
        """QML-callable debug-level logging helper."""
        Logger.log("d", msg)
    @pyqtSlot(QUrl)
    def readLocalFile(self, file):
        """Load a local file into the scene via a background ReadMeshJob.

        Non-sliceable files (e.g. G-code) are exclusive: only one may load at a time,
        and loading one clears the scene first.
        """
        if not file.isValid():
            return

        scene = self.getController().getScene()

        # A block-slicing (pre-sliced) node in the scene means the whole scene must be cleared first.
        for node in DepthFirstIterator(scene.getRoot()):
            if node.callDecoration("isBlockSlicing"):
                self.deleteAll()
                break

        f = file.toLocalFile()
        extension = os.path.splitext(f)[1]
        filename = os.path.basename(f)
        if len(self._currently_loading_files) > 0:
            # If a non-slicable file is already being loaded, we prevent loading of any further non-slicable files
            if extension.lower() in self._non_sliceable_extensions:
                message = Message(
                    self._i18n_catalog.i18nc("@info:status",
                                       "Only one G-code file can be loaded at a time. Skipped importing {0}",
                                       filename))
                message.show()
                return
            # If file being loaded is non-slicable file, then prevent loading of any other files
            # (check the extension of the file that started loading first).
            extension = os.path.splitext(self._currently_loading_files[0])[1]
            if extension.lower() in self._non_sliceable_extensions:
                message = Message(
                    self._i18n_catalog.i18nc("@info:status",
                                       "Can't open any other file if G-code is loading. Skipped importing {0}",
                                       filename))
                message.show()
                return

        self._currently_loading_files.append(f)
        if extension in self._non_sliceable_extensions:
            self.deleteAll()

        job = ReadMeshJob(f)
        job.finished.connect(self._readMeshFinished)
        job.start()
    def _readMeshFinished(self, job):
        """Finish a ReadMeshJob: decorate the loaded nodes, find a placement and add them to the scene."""
        nodes = job.getResult()
        filename = job.getFileName()
        self._currently_loading_files.remove(filename)

        root = self.getController().getScene().getRoot()
        arranger = Arrange.create(scene_root = root)
        min_offset = 8

        self.fileLoaded.emit(filename)

        for node in nodes:
            node.setSelectable(True)
            node.setName(os.path.basename(filename))

            extension = os.path.splitext(filename)[1]
            if extension.lower() in self._non_sliceable_extensions:
                # Pre-sliced (e.g. G-code) content: show it in the layer view and block slicing.
                self.getController().setActiveView("LayerView")
                view = self.getController().getActiveView()
                view.resetLayerData()
                view.setLayer(9999999)
                view.calculateMaxLayers()

                block_slicing_decorator = BlockSlicingDecorator()
                node.addDecorator(block_slicing_decorator)
            else:
                sliceable_decorator = SliceableObjectDecorator()
                node.addDecorator(sliceable_decorator)

            scene = self.getController().getScene()

            # If there is no convex hull for the node, start calculating it and continue.
            if not node.getDecorator(ConvexHullDecorator):
                node.addDecorator(ConvexHullDecorator())
            for child in node.getAllChildren():
                if not child.getDecorator(ConvexHullDecorator):
                    child.addDecorator(ConvexHullDecorator())

            if node.callDecoration("isSliceable"):
                # Only check position if it's not already blatantly obvious that it won't fit.
                # NOTE(review): with `or`, a node that fits in only one dimension is still placed — confirm whether `and` was intended.
                if node.getBoundingBox().width < self._volume.getBoundingBox().width or node.getBoundingBox().depth < self._volume.getBoundingBox().depth:
                    # Find node location
                    offset_shape_arr, hull_shape_arr = ShapeArray.fromNode(node, min_offset = min_offset)

                    # Step is for skipping tests to make it a lot faster. it also makes the outcome somewhat rougher
                    node, _ = arranger.findNodePlacement(node, offset_shape_arr, hull_shape_arr, step = 10)

            op = AddSceneNodeOperation(node, scene.getRoot())
            op.push()

            scene.sceneChanged.emit(node)
    def addNonSliceableExtension(self, extension):
        """Register a file extension (e.g. G-code) whose files block slicing when loaded."""
        self._non_sliceable_extensions.append(extension)
@pyqtSlot(str, result=bool)
def checkIsValidProjectFile(self, file_url):
"""
Checks if the given file URL is a valid project file.
"""
try:
file_path = QUrl(file_url).toLocalFile()
workspace_reader = self.getWorkspaceFileHandler().getReaderForFile(file_path)
if workspace_reader is None:
return False # non-project files won't get a reader
result = workspace_reader.preRead(file_path, show_dialog=False)
return result == WorkspaceReader.PreReadResult.accepted
except Exception as e:
Logger.log("e", "Could not check file %s: %s", file_url, e)
return False
    def _onContextMenuRequested(self, x: float, y: float) -> None:
        """On right-click without a selection, select the (group of the) object under the cursor."""
        # Ensure we select the object if we request a context menu over an object without having a selection.
        if not Selection.hasSelection():
            # Look up the node under screen position (x, y) via the selection render pass.
            node = self.getController().getScene().findObject(self.getRenderer().getRenderPass("selection").getIdAtPosition(x, y))
            if node:
                # Select the outermost group the node belongs to.
                while(node.getParent() and node.getParent().callDecoration("isGroup")):
                    node = node.getParent()

                Selection.add(node)
| agpl-3.0 |
stvreumi/electronic-blackboard | supervise/supervise.py | 4 | 1892 | from sendgmail import sendgmail
from time import sleep
import os.path
def main():
    """Watch the arrange_schedule error log and mail an alert when new errors appear.

    Polls ../static/log/impossible_error.txt every 5 minutes. If the line count
    grew, a severity-bucketed mail is sent; past `big_bug` new errors the
    'shutdown' flag in ../setting.txt is flipped and this loop stops too.
    """
    shutdown = 0
    error_dir_txt = "../static/log/impossible_error.txt"
    setting_dir_txt = "../setting.txt"
    small_bug = 1
    middle_bug = 5
    big_bug = 10

    # Make sure the error log exists so the baseline read below cannot fail.
    if not os.path.isfile(error_dir_txt):
        open(error_dir_txt, 'w').close()

    # Baseline: number of error lines already present before we start polling.
    with open(error_dir_txt, 'r') as error_file:
        last_line_count = sum(1 for _ in error_file)

    while shutdown == 0:
        sleep(300)  # poll every 5 minutes
        msg_content = ""
        try:
            with open(error_dir_txt, 'r') as error_file:
                line_count = sum(1 for _ in error_file)
        except OSError:  # narrowed from a bare except: only file-access errors are expected here
            line_count = 0
        if last_line_count < line_count:
            # Bucket the number of new errors into a severity message.
            if last_line_count + small_bug >= line_count:
                msg_content = ("arrange_schedule got "+ str(small_bug) +" error last 5 minute \r\n")
            elif last_line_count + middle_bug >= line_count:
                msg_content = ("arrange_schedule got "+ str(middle_bug) +" error last 5 minute \r\n")
            elif last_line_count + big_bug >= line_count:
                msg_content = ("arrange_schedule got "+ str(big_bug) +" error last 5 minute \r\n")
            else:
                msg_content = ("arrange_schedule got more than "+ str(big_bug) +" error last 5 minute. \r\n")
                msg_content = (msg_content + "arrange_schedule ceased. \r\n")
                shutdown = 1
            # Flip the shutdown flag in the settings file to ask arrange_schedule to stop.
            # NOTE(review): this runs for EVERY severity, not just the "ceased" branch —
            # behavior preserved from the original; confirm whether it belongs inside the else.
            with open(setting_dir_txt, 'r') as setting_file:
                content = setting_file.read()
            # Replace the target string
            content = content.replace('shutdown 0', 'shutdown 1')
            # Write the file out again
            with open(setting_dir_txt, 'w') as setting_file:
                setting_file.write(content)
            sendgmail(['FROM_BOT'],['icecat2012@gmail.com'], [''], [''], 'supervise', msg_content)
        last_line_count = line_count
        sleep(3)
    return 1
# Run the supervision loop only when executed as a script.
if __name__ == "__main__":
    main()
| apache-2.0 |
jbzdak/edx-platform | openedx/core/djangoapps/call_stack_manager/core.py | 2 | 8961 | """
Call Stack Manager deals with tracking call stacks of functions/methods/classes(Django Model Classes)
Call Stack Manager logs unique call stacks. The call stacks then can be retrieved via Splunk, or log reads.
classes:
CallStackManager - stores all stacks in global dictionary and logs
CallStackMixin - used for Model save(), and delete() method
Decorators:
@donottrack - Decorator that will halt tracking for parameterized entities,
(or halt tracking anything in case of non-parametrized decorator).
@trackit - Decorator that will start tracking decorated entity.
@track_till_now - Will log every unique call stack of parametrized entity/ entities.
TRACKING DJANGO MODEL CLASSES -
Call stacks of Model Class
in three cases-
1. QuerySet API
2. save()
3. delete()
How to use:
1. Import following in the file where class to be tracked resides
from openedx.core.djangoapps.call_stack_manager import CallStackManager, CallStackMixin
2. Override objects of default manager by writing following in any model class which you want to track-
objects = CallStackManager()
3. For tracking Save and Delete events-
Use mixin called "CallStackMixin"
For ex.
class StudentModule(models.Model, CallStackMixin):
TRACKING FUNCTIONS, and METHODS-
1. Import following-
from openedx.core.djangoapps.call_stack_manager import trackit
NOTE - @trackit is non-parameterized decorator.
FOR DISABLING TRACKING-
1. Import following at appropriate location-
from openedx.core.djangoapps.call_stack_manager import donottrack
NOTE - You need to import function/class you do not want to track.
"""
import logging
import traceback
import re
import collections
import wrapt
import types
import inspect
from django.db.models import Manager
log = logging.getLogger(__name__)
# Frames whose file path matches any of these patterns are dropped from captured
# stacks (interpreter internals, sandboxed-exec wrappers, the platform checkout,
# and this module itself).
REGULAR_EXPS = [re.compile(x) for x in ['^.*python2.7.*$', '^.*<exec_function>.*$', '^.*exec_code_object.*$',
                                        '^.*edxapp/src.*$', '^.*call_stack_manager.*$']]
# Stack of "do not track" scopes pushed/popped by the @donottrack decorator;
# a top entry of None means "track nothing".
HALT_TRACKING = []
STACK_BOOK = collections.defaultdict(list)
# Dictionary which stores call logs
# {'EntityName' : ListOf<CallStacks>}
# CallStacks is ListOf<Frame>
# Frame is a tuple ('FilePath','LineNumber','Function Name', 'Context')
# {"<class 'courseware.models.StudentModule'>" : [[(file, line number, function name, context),(---,---,---)],
# [(file, line number, function name, context),(---,---,---)]]}
def capture_call_stack(entity_name):
    """ Record and log the current (filtered) call stack for *entity_name* if it is new.

    Appends the stack to STACK_BOOK[entity_name] unless tracking is halted for
    this entity (or for everything) via HALT_TRACKING.

    Arguments:
        entity_name - the tracked entity (a class or a function)
    """
    # Current stack as 4-tuples (filename, line number, function name, text),
    # with noise frames removed via REGULAR_EXPS.
    temp_call_stack = [frame for frame in traceback.extract_stack()
                       if not any(reg.match(frame[0]) for reg in REGULAR_EXPS)]

    final_call_stack = "".join(traceback.format_list(temp_call_stack))

    def _should_get_logged(entity_name):
        """ Return True when the current call stack of *entity_name* should be logged. """
        # A top entry of None means the bare @donottrack decorator is active: track nothing.
        # Fixed: this check now runs BEFORE tuple(HALT_TRACKING[-1]) below, which used to
        # raise TypeError (tuple(None)) when halt-all was active.
        if HALT_TRACKING and HALT_TRACKING[-1] is None:
            return False
        # Nothing left to log after filtering.
        if not temp_call_stack:
            return False
        if HALT_TRACKING:
            halted_entities = tuple(HALT_TRACKING[-1])
            if inspect.isclass(entity_name):
                if issubclass(entity_name, halted_entities):
                    return False
            elif any(entity_name.__name__ == x.__name__ and entity_name.__module__ == x.__module__
                     for x in halted_entities):
                return False
        # Only log call stacks not seen before for this entity.
        return temp_call_stack not in STACK_BOOK[entity_name]

    if _should_get_logged(entity_name):
        STACK_BOOK[entity_name].append(temp_call_stack)
        if inspect.isclass(entity_name):
            log.info("Logging new call stack number %s for %s:\n %s", len(STACK_BOOK[entity_name]),
                     entity_name, final_call_stack)
        else:
            log.info("Logging new call stack number %s for %s.%s:\n %s", len(STACK_BOOK[entity_name]),
                     entity_name.__module__, entity_name.__name__, final_call_stack)
class CallStackMixin(object):
    """ Model mixin that records a call stack whenever an instance is
    saved or deleted. """

    def save(self, *args, **kwargs):
        """ Capture the current call stack, then delegate to the model's save(). """
        entity = type(self)
        capture_call_stack(entity)
        return super(CallStackMixin, self).save(*args, **kwargs)

    def delete(self, *args, **kwargs):
        """ Capture the current call stack, then delegate to the model's delete(). """
        entity = type(self)
        capture_call_stack(entity)
        return super(CallStackMixin, self).delete(*args, **kwargs)
class CallStackManager(Manager):
    """ Manager that records a call stack every time a queryset is built
    for the managed model. """

    def get_queryset(self):
        """ Capture the call stack for the model, then build the default queryset. """
        tracked_model = self.model
        capture_call_stack(tracked_model)
        return super(CallStackManager, self).get_queryset()
def donottrack(*entities_not_to_be_tracked):
    """ Decorator which halts tracking of the given entities while the
    decorated function runs.

    Arguments:
        entities_not_to_be_tracked: entities which are not to be tracked;
            when empty, tracking is halted for *all* entities
    Returns:
        wrapped function
    """
    if not entities_not_to_be_tracked:
        entities_not_to_be_tracked = None

    @wrapt.decorator
    def real_donottrack(wrapped, instance, args, kwargs):  # pylint: disable=unused-argument
        """ Takes function to be decorated and returns wrapped function
        Arguments:
            wrapped - The wrapped function which in turn needs to be called by wrapper function
            instance - The object to which the wrapped function was bound when it was called.
            args - The list of positional arguments supplied when the decorated function was called.
            kwargs - The dictionary of keyword arguments supplied when the decorated function was called.
        Returns:
            return of wrapped function
        """
        global HALT_TRACKING  # pylint: disable=global-variable-not-assigned
        if entities_not_to_be_tracked is None:
            # Bare @donottrack(): halt everything.
            HALT_TRACKING.append(None)
        elif HALT_TRACKING and HALT_TRACKING[-1] is None:
            # An outer @donottrack() already halts everything; the old code
            # did `pass` here, leaving the unconditional pop() below
            # unbalanced and popping the outer marker early.  Push None
            # again so every invocation pushes exactly one entry.
            HALT_TRACKING.append(None)
        elif HALT_TRACKING:
            # Inner decorator extends the set the outer one established.
            HALT_TRACKING.append(set(HALT_TRACKING[-1].union(set(entities_not_to_be_tracked))))
        else:
            HALT_TRACKING.append(set(entities_not_to_be_tracked))
        return_value = wrapped(*args, **kwargs)
        # check if the returning value is a generator; if so the halt entry
        # must stay on the stack until the generator is exhausted/closed
        if isinstance(return_value, types.GeneratorType):
            def generator_wrapper(wrapped_generator):
                """ Function handling wrapped yielded values.
                Argument:
                    wrapped_generator - generator returned by the wrapped function
                Returns:
                    Generator Wrapper
                """
                try:
                    while True:
                        yield next(wrapped_generator)
                finally:
                    # Pop on exhaustion, close(), or an exception in the consumer.
                    HALT_TRACKING.pop()
            return generator_wrapper(return_value)
        else:
            HALT_TRACKING.pop()
            return return_value
    return real_donottrack
@wrapt.decorator
def trackit(wrapped, instance, args, kwargs):  # pylint: disable=unused-argument
    """ Non-parameterized decorator which logs the call stack each time the
    decorated callable is invoked, then calls through to it.

    Arguments:
        wrapped - The wrapped function which in turn needs to be called by wrapper function.
        instance - The object to which the wrapped function was bound when it was called.
        args - The list of positional arguments supplied when the decorated function was called.
        kwargs - The dictionary of keyword arguments supplied when the decorated function was called.
    Returns:
        whatever the wrapped callable returns (not the callable itself)
    """
    capture_call_stack(wrapped)
    return wrapped(*args, **kwargs)
| agpl-3.0 |
msrb/freeipa | ipalib/plugins/config.py | 2 | 14161 | # Authors:
# Rob Crittenden <rcritten@redhat.com>
# Pavel Zuna <pzuna@redhat.com>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from ipalib import api
from ipalib import Bool, Int, Str, IA5Str, StrEnum, DNParam
from ipalib.plugable import Registry
from ipalib.plugins.baseldap import *
from ipalib.plugins.selinuxusermap import validate_selinuxuser
from ipalib import _
from ipalib.errors import ValidationError
# 389-ds attributes that should be skipped in attribute checks.
# These are operational/membership attributes maintained by the directory
# server itself, so they never appear in an objectclass's allowed-attribute
# list and must not fail the objectclass validation in config_mod.
OPERATIONAL_ATTRIBUTES = ('nsaccountlock', 'member', 'memberof',
                          'memberindirect', 'memberofindirect',)
__doc__ = _("""
Server configuration
Manage the default values that IPA uses and some of its tuning parameters.
NOTES:
The password notification value (--pwdexpnotify) is stored here so it will
be replicated. It is not currently used to notify users in advance of an
expiring password.
Some attributes are read-only, provided only for information purposes. These
include:
Certificate Subject base: the configured certificate subject base,
e.g. O=EXAMPLE.COM. This is configurable only at install time.
Password plug-in features: currently defines additional hashes that the
password will generate (there may be other conditions).
When setting the order list for mapping SELinux users you may need to
quote the value so it isn't interpreted by the shell.
EXAMPLES:
Show basic server configuration:
ipa config-show
Show all configuration options:
ipa config-show --all
Change maximum username length to 99 characters:
ipa config-mod --maxusername=99
Increase default time and size limits for maximum IPA server search:
ipa config-mod --searchtimelimit=10 --searchrecordslimit=2000
Set default user e-mail domain:
ipa config-mod --emaildomain=example.com
Enable migration mode to make "ipa migrate-ds" command operational:
ipa config-mod --enable-migration=TRUE
Define SELinux user map order:
ipa config-mod --ipaselinuxusermaporder='guest_u:s0$xguest_u:s0$user_u:s0-s0:c0.c1023$staff_u:s0-s0:c0.c1023$unconfined_u:s0-s0:c0.c1023'
""")
register = Registry()
def validate_searchtimelimit(ugettext, limit):
    """Reject a search time limit of exactly 0.

    Combined with the parameter's minvalue=-1, the accepted values are -1
    (unlimited) or any positive integer — so 0 is the only value this
    validator has to reject.

    Returns None for valid values (ipalib validator convention).
    """
    if limit == 0:
        # The previous message said '-1 or > 1.', wrongly implying that 1 is
        # rejected; 1 is a valid limit, so the bound is '> 0'.
        raise ValidationError(name='ipasearchtimelimit', error=_('searchtimelimit must be -1 or > 0.'))
    return None
@register()
class config(LDAPObject):
    """
    IPA configuration object.

    Singleton entry (cn=ipaconfig,cn=etc,$SUFFIX) holding realm-wide
    defaults and tuning parameters; get_dn() below always resolves to that
    fixed DN regardless of keys.
    """
    object_name = _('configuration options')
    default_attributes = [
        'ipamaxusernamelength', 'ipahomesrootdir', 'ipadefaultloginshell',
        'ipadefaultprimarygroup', 'ipadefaultemaildomain', 'ipasearchtimelimit',
        'ipasearchrecordslimit', 'ipausersearchfields', 'ipagroupsearchfields',
        'ipamigrationenabled', 'ipacertificatesubjectbase',
        'ipapwdexpadvnotify', 'ipaselinuxusermaporder',
        'ipaselinuxusermapdefault', 'ipaconfigstring', 'ipakrbauthzdata',
        'ipauserauthtype'
    ]
    container_dn = DN(('cn', 'ipaconfig'), ('cn', 'etc'))
    permission_filter_objectclasses = ['ipaguiconfig']
    managed_permissions = {
        'System: Read Global Configuration': {
            'replaces_global_anonymous_aci': True,
            'ipapermbindruletype': 'all',
            'ipapermright': {'read', 'search', 'compare'},
            'ipapermdefaultattr': {
                'cn', 'objectclass',
                'ipacertificatesubjectbase', 'ipaconfigstring',
                'ipadefaultemaildomain', 'ipadefaultloginshell',
                'ipadefaultprimarygroup', 'ipagroupobjectclasses',
                'ipagroupsearchfields', 'ipahomesrootdir',
                'ipakrbauthzdata', 'ipamaxusernamelength',
                'ipamigrationenabled', 'ipapwdexpadvnotify',
                'ipaselinuxusermapdefault', 'ipaselinuxusermaporder',
                'ipasearchrecordslimit', 'ipasearchtimelimit',
                'ipauserauthtype', 'ipauserobjectclasses',
                'ipausersearchfields', 'ipacustomfields',
            },
        },
    }
    label = _('Configuration')
    label_singular = _('Configuration')
    takes_params = (
        Int('ipamaxusernamelength',
            cli_name='maxusername',
            label=_('Maximum username length'),
            minvalue=1,
        ),
        IA5Str('ipahomesrootdir',
            cli_name='homedirectory',
            label=_('Home directory base'),
            doc=_('Default location of home directories'),
        ),
        Str('ipadefaultloginshell',
            cli_name='defaultshell',
            label=_('Default shell'),
            doc=_('Default shell for new users'),
        ),
        Str('ipadefaultprimarygroup',
            cli_name='defaultgroup',
            label=_('Default users group'),
            doc=_('Default group for new users'),
        ),
        Str('ipadefaultemaildomain?',
            cli_name='emaildomain',
            label=_('Default e-mail domain'),
            doc=_('Default e-mail domain'),
        ),
        Int('ipasearchtimelimit', validate_searchtimelimit,
            cli_name='searchtimelimit',
            label=_('Search time limit'),
            doc=_('Maximum amount of time (seconds) for a search (> 0, or -1 for unlimited)'),
            minvalue=-1,
        ),
        Int('ipasearchrecordslimit',
            cli_name='searchrecordslimit',
            label=_('Search size limit'),
            doc=_('Maximum number of records to search (-1 is unlimited)'),
            minvalue=-1,
        ),
        IA5Str('ipausersearchfields',
            cli_name='usersearch',
            label=_('User search fields'),
            doc=_('A comma-separated list of fields to search in when searching for users'),
        ),
        IA5Str('ipagroupsearchfields',
            cli_name='groupsearch',
            # Fix: this was the only label in the class not marked for
            # translation with _(), making it untranslatable.
            label=_('Group search fields'),
            doc=_('A comma-separated list of fields to search in when searching for groups'),
        ),
        Bool('ipamigrationenabled',
            cli_name='enable_migration',
            label=_('Enable migration mode'),
            doc=_('Enable migration mode'),
        ),
        DNParam('ipacertificatesubjectbase',
            cli_name='subject',
            label=_('Certificate Subject base'),
            doc=_('Base for certificate subjects (OU=Test,O=Example)'),
            # configurable only at install time, hence read-only here
            flags=['no_update'],
        ),
        Str('ipagroupobjectclasses+',
            cli_name='groupobjectclasses',
            label=_('Default group objectclasses'),
            doc=_('Default group objectclasses (comma-separated list)'),
            csv=True,
        ),
        Str('ipauserobjectclasses+',
            cli_name='userobjectclasses',
            label=_('Default user objectclasses'),
            doc=_('Default user objectclasses (comma-separated list)'),
            csv=True,
        ),
        Int('ipapwdexpadvnotify',
            cli_name='pwdexpnotify',
            label=_('Password Expiration Notification (days)'),
            # NOTE(review): "days's" typo below is user-visible doc text;
            # left unchanged to keep translated message catalogs in sync.
            doc=_('Number of days\'s notice of impending password expiration'),
            minvalue=0,
        ),
        StrEnum('ipaconfigstring*',
            cli_name='ipaconfigstring',
            label=_('Password plugin features'),
            doc=_('Extra hashes to generate in password plug-in'),
            values=(u'AllowNThash',
                    u'KDC:Disable Last Success', u'KDC:Disable Lockout'),
            csv=True,
        ),
        Str('ipaselinuxusermaporder',
            label=_('SELinux user map order'),
            doc=_('Order in increasing priority of SELinux users, delimited by $'),
        ),
        Str('ipaselinuxusermapdefault?',
            label=_('Default SELinux user'),
            doc=_('Default SELinux user when no match is found in SELinux map rule'),
        ),
        StrEnum('ipakrbauthzdata*',
            cli_name='pac_type',
            label=_('Default PAC types'),
            doc=_('Default types of PAC supported for services'),
            values=(u'MS-PAC', u'PAD', u'nfs:NONE'),
            csv=True,
        ),
        StrEnum('ipauserauthtype*',
            cli_name='user_auth_type',
            label=_('Default user authentication types'),
            doc=_('Default types of supported user authentication'),
            values=(u'password', u'radius', u'otp', u'disabled'),
            csv=True,
        ),
    )

    def get_dn(self, *keys, **kwargs):
        """The configuration entry lives at a fixed DN; any keys are ignored."""
        return DN(('cn', 'ipaconfig'), ('cn', 'etc'), api.env.basedn)
@register()
class config_mod(LDAPUpdate):
    __doc__ = _('Modify configuration options.')

    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys, **options):
        """Validate interdependent configuration values before the LDAP update.

        Checks, in order:
          1. a new default primary group must exist;
          2. user/group search fields may only name attributes allowed by the
             configured user/group objectclasses;
          3. changed user/group objectclass lists must still allow every
             default (and uuid) attribute of the user/group objects;
          4. the SELinux default user must be valid and contained in the
             (possibly updated) SELinux user map order.

        Raises errors.NotFound or errors.ValidationError on failure;
        returns the unchanged entry DN otherwise.
        """
        assert isinstance(dn, DN)
        if 'ipadefaultprimarygroup' in entry_attrs:
            group=entry_attrs['ipadefaultprimarygroup']
            try:
                api.Object['group'].get_dn_if_exists(group)
            except errors.NotFound:
                raise errors.NotFound(message=_("The group doesn't exist"))
        # Map each changed search-field attribute to the objectclass list
        # that determines which attributes are legal in it.
        kw = {}
        if 'ipausersearchfields' in entry_attrs:
            kw['ipausersearchfields'] = 'ipauserobjectclasses'
        if 'ipagroupsearchfields' in entry_attrs:
            kw['ipagroupsearchfields'] = 'ipagroupobjectclasses'
        if kw:
            config = ldap.get_ipa_config(list(kw.values()))
            for (k, v) in kw.items():
                allowed_attrs = ldap.get_allowed_attributes(config[v])
                # search fields are stored as one comma-separated string
                fields = entry_attrs[k].split(',')
                for a in fields:
                    a = a.strip()
                    if a not in allowed_attrs:
                        raise errors.ValidationError(
                            name=k, error=_('attribute "%s" not allowed') % a
                        )
        # Changing the objectclass lists must not make any default attribute
        # of users/groups illegal in the directory schema.
        for (attr, obj) in (('ipauserobjectclasses', 'user'),
                            ('ipagroupobjectclasses', 'group')):
            if attr in entry_attrs:
                if not entry_attrs[attr]:
                    raise errors.ValidationError(name=attr,
                        error=_('May not be empty'))
                objectclasses = list(set(entry_attrs[attr]).union(
                        self.api.Object[obj].possible_objectclasses))
                new_allowed_attrs = ldap.get_allowed_attributes(objectclasses,
                                        raise_on_unknown=True)
                checked_attrs = self.api.Object[obj].default_attributes
                if self.api.Object[obj].uuid_attribute:
                    checked_attrs = checked_attrs + [self.api.Object[obj].uuid_attribute]
                for obj_attr in checked_attrs:
                    if obj_attr in OPERATIONAL_ATTRIBUTES:
                        # server-maintained attributes never appear in a schema
                        continue
                    if obj_attr in self.api.Object[obj].params and \
                      'virtual_attribute' in \
                      self.api.Object[obj].params[obj_attr].flags:
                        # skip virtual attributes
                        continue
                    if obj_attr not in new_allowed_attrs:
                        raise errors.ValidationError(name=attr,
                            error=_('%(obj)s default attribute %(attr)s would not be allowed!') \
                                % dict(obj=obj, attr=obj_attr))
        if ('ipaselinuxusermapdefault' in entry_attrs or
          'ipaselinuxusermaporder' in entry_attrs):
            config = None
            failedattr = 'ipaselinuxusermaporder'
            if 'ipaselinuxusermapdefault' in entry_attrs:
                defaultuser = entry_attrs['ipaselinuxusermapdefault']
                failedattr = 'ipaselinuxusermapdefault'
                # validate the new default user first
                if defaultuser is not None:
                    error_message = validate_selinuxuser(_, defaultuser)
                    if error_message:
                        raise errors.ValidationError(name='ipaselinuxusermapdefault',
                                                     error=error_message)
            else:
                # default user unchanged: fetch the stored one so the
                # membership check against the order list below still runs
                config = ldap.get_ipa_config()
                defaultuser = config.get('ipaselinuxusermapdefault', [None])[0]
            if 'ipaselinuxusermaporder' in entry_attrs:
                order = entry_attrs['ipaselinuxusermaporder']
                userlist = order.split('$')
                # validate the new user order first
                for user in userlist:
                    if not user:
                        raise errors.ValidationError(name='ipaselinuxusermaporder',
                                                     error=_('A list of SELinux users delimited by $ expected'))
                    error_message = validate_selinuxuser(_, user)
                    if error_message:
                        error_message = _("SELinux user '%(user)s' is not "
                                          "valid: %(error)s") % dict(user=user,
                                                                     error=error_message)
                        raise errors.ValidationError(name='ipaselinuxusermaporder',
                                                     error=error_message)
            else:
                # order unchanged: read it back from the stored configuration
                if not config:
                    config = ldap.get_ipa_config()
                order = config['ipaselinuxusermaporder']
                userlist = order[0].split('$')
            if defaultuser and defaultuser not in userlist:
                raise errors.ValidationError(name=failedattr,
                    error=_('SELinux user map default user not in order list'))
        return dn
@register()
class config_show(LDAPRetrieve):
    # Plain retrieval of the single configuration entry; no extra callbacks.
    __doc__ = _('Show the current configuration.')
| gpl-3.0 |
resmo/ansible | lib/ansible/modules/network/fortios/fortios_web_proxy_url_match.py | 14 | 10857 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
# Ansible metadata: module is community-supported and in 'preview' status.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_web_proxy_url_match
short_description: Exempt URLs from web proxy forwarding and caching in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
user to set and modify web_proxy feature and url_match category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
required: true
choices:
- present
- absent
web_proxy_url_match:
description:
- Exempt URLs from web proxy forwarding and caching.
default: null
type: dict
suboptions:
cache_exemption:
description:
- Enable/disable exempting this URL pattern from caching.
type: str
choices:
- enable
- disable
comment:
description:
- Comment.
type: str
forward_server:
description:
- Forward server name. Source web-proxy.forward-server.name web-proxy.forward-server-group.name.
type: str
name:
description:
- Configure a name for the URL to be exempted.
required: true
type: str
status:
description:
- Enable/disable exempting the URLs matching the URL pattern from web proxy forwarding and caching.
type: str
choices:
- enable
- disable
url_pattern:
description:
- URL pattern to be exempted from web proxy forwarding and caching.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Exempt URLs from web proxy forwarding and caching.
fortios_web_proxy_url_match:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
web_proxy_url_match:
cache_exemption: "enable"
comment: "Comment."
forward_server: "<your_own_value> (source web-proxy.forward-server.name web-proxy.forward-server-group.name)"
name: "default_name_6"
status: "enable"
url_pattern: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
    """Open a legacy (fortiosapi) session to the FortiGate device.

    data: module parameters providing 'host', 'username', 'password',
          'ssl_verify' and optionally 'https'.
    fos:  FortiOSAPI handle on which the session is established.
    """
    host = data['host']
    username = data['username']
    password = data['password']
    verify_cert = data['ssl_verify']
    fos.debug('on')
    # HTTPS stays on unless the task explicitly disabled it.
    fos.https('off' if ('https' in data and not data['https']) else 'on')
    fos.login(host, username, password, verify=verify_cert)
def filter_web_proxy_url_match_data(json):
    """Keep only the url-match options the FortiOS API understands,
    dropping parameters the task left unset (None)."""
    option_list = ['cache_exemption', 'comment', 'forward_server',
                   'name', 'status', 'url_pattern']
    return dict((field, json[field]) for field in option_list
                if field in json and json[field] is not None)
def underscore_to_hyphen(data):
    """Recursively convert underscore dict keys to the hyphenated form the
    FortiOS API expects.

    Returns a converted copy for lists and dicts; scalars are returned
    unchanged.
    """
    if isinstance(data, list):
        # Rebuild the list from converted elements.  The previous code
        # rebound the loop variable (``elem = underscore_to_hyphen(elem)``)
        # without storing it back, so dicts nested inside lists were
        # silently left unconverted.
        data = [underscore_to_hyphen(elem) for elem in data]
    elif isinstance(data, dict):
        data = dict((k.replace('_', '-'), underscore_to_hyphen(v))
                    for k, v in data.items())
    return data
def web_proxy_url_match(data, fos):
    """Create/update or delete the web-proxy url-match object so the device
    matches the requested state; returns the raw FortiOS response."""
    vdom = data['vdom']
    state = data['state']
    # Strip unset options and convert key style before sending.
    payload = underscore_to_hyphen(
        filter_web_proxy_url_match_data(data['web_proxy_url_match']))
    if state == "present":
        return fos.set('web-proxy',
                       'url-match',
                       data=payload,
                       vdom=vdom)
    elif state == "absent":
        return fos.delete('web-proxy',
                          'url-match',
                          mkey=payload['name'],
                          vdom=vdom)
def is_successful_status(status):
    """A call succeeded when FortiOS reports 'success', or when a DELETE
    came back 404 (the object was already absent)."""
    if status['status'] == "success":
        return True
    return status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_web_proxy(data, fos):
    """Dispatch the web-proxy configuration call and normalise the result.

    Returns an (is_error, changed, response) triple for AnsibleModule.
    """
    if not data['web_proxy_url_match']:
        # The previous code fell through to an UnboundLocalError on ``resp``
        # when no task data was supplied; fail with a clear message instead.
        raise ValueError('missing task data: web_proxy_url_match')
    resp = web_proxy_url_match(data, fos)

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Module entry point: declare the argument spec, connect to the
    FortiGate (HTTPAPI or legacy fortiosapi transport) and apply the
    web-proxy url-match configuration."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": True, "type": "str",
                  "choices": ["present", "absent"]},
        "web_proxy_url_match": {
            "required": False, "type": "dict", "default": None,
            "options": {
                "cache_exemption": {"required": False, "type": "str",
                                    "choices": ["enable", "disable"]},
                "comment": {"required": False, "type": "str"},
                "forward_server": {"required": False, "type": "str"},
                "name": {"required": True, "type": "str"},
                "status": {"required": False, "type": "str",
                           "choices": ["enable", "disable"]},
                "url_pattern": {"required": False, "type": "str"}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI: it is
    # selected when host, username and password are all supplied directly.
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        # HTTPAPI transport: reuse the persistent connection from the play.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_web_proxy(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy transport: talk to the device directly through fortiosapi.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_web_proxy(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)
# Script entry point used when Ansible executes the module file directly.
if __name__ == '__main__':
    main()
| gpl-3.0 |
resmo/ansible | lib/ansible/plugins/doc_fragments/k8s_name_options.py | 41 | 1974 | # -*- coding: utf-8 -*-
# Copyright: (c) 2018, Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Options for selecting or identifying a specific K8s object
class ModuleDocFragment(object):
    # Shared doc fragment: options for selecting or identifying a specific
    # K8s object.  Fix: "specfic" typo in the namespace description.
    DOCUMENTATION = r'''
options:
  api_version:
    description:
    - Use to specify the API version. Use to create, delete, or discover an object without providing a full
      resource definition. Use in conjunction with I(kind), I(name), and I(namespace) to identify a
      specific object. If I(resource definition) is provided, the I(apiVersion) from the I(resource_definition)
      will override this option.
    type: str
    default: v1
    aliases:
    - api
    - version
  kind:
    description:
    - Use to specify an object model. Use to create, delete, or discover an object without providing a full
      resource definition. Use in conjunction with I(api_version), I(name), and I(namespace) to identify a
      specific object. If I(resource definition) is provided, the I(kind) from the I(resource_definition)
      will override this option.
    type: str
  name:
    description:
    - Use to specify an object name. Use to create, delete, or discover an object without providing a full
      resource definition. Use in conjunction with I(api_version), I(kind) and I(namespace) to identify a
      specific object. If I(resource definition) is provided, the I(metadata.name) value from the
      I(resource_definition) will override this option.
    type: str
  namespace:
    description:
    - Use to specify an object namespace. Useful when creating, deleting, or discovering an object without
      providing a full resource definition. Use in conjunction with I(api_version), I(kind), and I(name)
      to identify a specific object. If I(resource definition) is provided, the I(metadata.namespace) value
      from the I(resource_definition) will override this option.
    type: str
'''
| gpl-3.0 |
nikkisquared/servo | python/mach/mach/test/common.py | 120 | 1272 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import unicode_literals
from StringIO import StringIO
import os
import unittest
from mach.main import Mach
from mach.base import CommandContext
here = os.path.abspath(os.path.dirname(__file__))
class TestBase(unittest.TestCase):
    """Shared harness for mach unit tests: builds a Mach instance, loads
    command providers, runs it, and captures the output streams."""

    provider_dir = os.path.join(here, 'providers')

    def _run_mach(self, args, provider_file=None, entry_point=None, context_handler=None):
        """Run mach with *args*; returns (result, stdout_text, stderr_text).

        A SystemExit raised by the run is swallowed and reported as a
        None result.
        """
        mach = Mach(os.getcwd())
        mach.define_category('testing', 'Mach unittest', 'Testing for mach core', 10)
        mach.populate_context_handler = context_handler

        if provider_file:
            mach.load_commands_from_file(os.path.join(self.provider_dir, provider_file))
        if entry_point:
            mach.load_commands_from_entry_point(entry_point)

        out = StringIO()
        err = StringIO()
        # mach expects real stream objects carrying an encoding attribute.
        out.encoding = 'UTF-8'
        err.encoding = 'UTF-8'
        try:
            result = mach.run(args, stdout=out, stderr=err)
        except SystemExit:
            result = None

        return (result, out.getvalue(), err.getvalue())
| mpl-2.0 |
jcai19/smm_gem5 | src/arch/x86/isa/insts/simd64/integer/data_transfer/move_non_temporal.py | 88 | 2310 | # Copyright (c) 2007 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
# MOVNTQ
def macroop MASKMOVQ_MMX_MMX {
ldfp ufp1, ds, [1, t0, rdi], dataSize=8
maskmov ufp1, mmx, mmxm, size=1
stfp ufp1, ds, [1, t0, rdi], dataSize=8
};
'''
| bsd-3-clause |
StephenWeber/ansible | lib/ansible/modules/cloud/google/gce_net.py | 15 | 19330 | #!/usr/bin/python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Release metadata consumed by Ansible's documentation/build tooling
# (module maturity, support channel, metadata schema version).
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
DOCUMENTATION = '''
---
module: gce_net
version_added: "1.5"
short_description: create/destroy GCE networks and firewall rules
description:
- This module can create and destroy Google Compute Engine networks and
firewall rules U(https://cloud.google.com/compute/docs/networking).
The I(name) parameter is reserved for referencing a network while the
I(fwname) parameter is used to reference firewall rules.
IPv4 Address ranges must be specified using the CIDR
U(http://en.wikipedia.org/wiki/Classless_Inter-Domain_Routing) format.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
allowed:
description:
- the protocol:ports to allow ('tcp:80' or 'tcp:80,443' or 'tcp:80-800;udp:1-25')
this parameter is mandatory when creating or updating a firewall rule
required: false
default: null
aliases: []
ipv4_range:
description:
- the IPv4 address range in CIDR notation for the network
this parameter is not mandatory when you specified existing network in name parameter, but when you create new network, this parameter is mandatory
required: false
aliases: ['cidr']
fwname:
description:
- name of the firewall rule
required: false
default: null
aliases: ['fwrule']
name:
description:
- name of the network
required: false
default: null
aliases: []
src_range:
description:
- the source IPv4 address range in CIDR notation
required: false
default: null
aliases: ['src_cidr']
src_tags:
description:
- the source instance tags for creating a firewall rule
required: false
default: null
aliases: []
target_tags:
version_added: "1.9"
description:
- the target instance tags for creating a firewall rule
required: false
default: null
aliases: []
state:
description:
- desired state of the network or firewall
required: false
default: "present"
choices: ["active", "present", "absent", "deleted"]
aliases: []
service_account_email:
version_added: "1.6"
description:
- service account email
required: false
default: null
aliases: []
pem_file:
version_added: "1.6"
description:
- path to the pem file associated with the service account email
This option is deprecated. Use 'credentials_file'.
required: false
default: null
aliases: []
credentials_file:
version_added: "2.1.0"
description:
- path to the JSON file associated with the service account email
required: false
default: null
aliases: []
project_id:
version_added: "1.6"
description:
- your GCE project ID
required: false
default: null
aliases: []
mode:
version_added: "2.2"
description:
- network mode for Google Cloud
"legacy" indicates a network with an IP address range
"auto" automatically generates subnetworks in different regions
"custom" uses networks to group subnets of user specified IP address ranges
https://cloud.google.com/compute/docs/networking#network_types
required: false
default: "legacy"
choices: ["legacy", "auto", "custom"]
aliases: []
subnet_name:
version_added: "2.2"
description:
- name of subnet to create
required: false
default: null
aliases: []
subnet_region:
version_added: "2.2"
description:
- region of subnet to create
required: false
default: null
aliases: []
subnet_desc:
version_added: "2.2"
description:
- description of subnet to create
required: false
default: null
aliases: []
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials"
author: "Eric Johnson (@erjohnso) <erjohnso@google.com>, Tom Melendez (@supertom) <supertom@google.com>"
'''
EXAMPLES = '''
# Create a 'legacy' Network
- name: Create Legacy Network
gce_net:
name: legacynet
ipv4_range: '10.24.17.0/24'
mode: legacy
state: present
# Create an 'auto' Network
- name: Create Auto Network
gce_net:
name: autonet
mode: auto
state: present
# Create a 'custom' Network
- name: Create Custom Network
gce_net:
name: customnet
mode: custom
subnet_name: "customsubnet"
subnet_region: us-east1
ipv4_range: '10.240.16.0/24'
state: "present"
# Create Firewall Rule with Source Tags
- name: Create Firewall Rule w/Source Tags
gce_net:
name: default
fwname: "my-firewall-rule"
allowed: tcp:80
state: "present"
src_tags: "foo,bar"
# Create Firewall Rule with Source Range
- name: Create Firewall Rule w/Source Range
gce_net:
name: default
fwname: "my-firewall-rule"
allowed: tcp:80
state: "present"
src_range: ['10.1.1.1/32']
# Create Custom Subnetwork
- name: Create Custom Subnetwork
gce_net:
name: privatenet
mode: custom
subnet_name: subnet_example
subnet_region: us-central1
ipv4_range: '10.0.0.0/16'
'''
RETURN = '''
allowed:
description: Rules (ports and protocols) specified by this firewall rule.
returned: When specified
type: string
sample: "tcp:80;icmp"
fwname:
description: Name of the firewall rule.
returned: When specified
type: string
sample: "my-fwname"
ipv4_range:
description: IPv4 range of the specified network or subnetwork.
returned: when specified or when a subnetwork is created
type: string
sample: "10.0.0.0/16"
name:
description: Name of the network.
returned: always
type: string
sample: "my-network"
src_range:
description: IP address blocks a firewall rule applies to.
returned: when specified
type: list
sample: [ '10.1.1.12/8' ]
src_tags:
description: Instance Tags firewall rule applies to.
returned: when specified while creating a firewall rule
type: list
sample: [ 'foo', 'bar' ]
state:
description: State of the item operated on.
returned: always
type: string
sample: "present"
subnet_name:
description: Name of the subnetwork.
returned: when specified or when a subnetwork is created
type: string
sample: "my-subnetwork"
subnet_region:
description: Region of the specified subnet.
returned: when specified or when a subnetwork is created
type: string
sample: "us-east1"
target_tags:
description: Instance Tags with these tags receive traffic allowed by firewall rule.
returned: when specified while creating a firewall rule
type: list
sample: [ 'foo', 'bar' ]
'''
# Probe for apache-libcloud with GCE support.  Import failure must not be
# fatal at import time: HAS_LIBCLOUD gates the module in main(), which
# fails fast with a helpful message instead.
try:
    from libcloud.compute.types import Provider
    from libcloud.compute.providers import get_driver
    from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
        ResourceExistsError, ResourceNotFoundError
    # Touch Provider.GCE so an old libcloud without GCE support also
    # lands in the except branch.
    _ = Provider.GCE
    HAS_LIBCLOUD = True
except ImportError:
    HAS_LIBCLOUD = False
def format_allowed_section(allowed):
    """Translate one 'protocol[:ports]' section into a GCE 'allowed' dict.

    Returns a dict like {"IPProtocol": "tcp", "ports": ["80", "443"]};
    the "ports" key is omitted when no ports were given.  A malformed
    section (more than one ':') yields an empty list as a falsy marker.
    """
    separator_count = allowed.count(":")
    if separator_count > 1:
        # Malformed: signal failure with an empty list (falsy), as callers expect.
        return []
    if separator_count == 1:
        protocol, port_field = allowed.split(":")
        # Comma-separated ports become a list; a single non-empty port is
        # wrapped in a one-element list; an empty field stays empty.
        if "," in port_field:
            port_list = port_field.split(",")
        elif port_field:
            port_list = [port_field]
        else:
            port_list = []
    else:
        protocol, port_list = allowed, []
    rule = {"IPProtocol": protocol}
    if port_list:
        rule["ports"] = port_list
    return rule
def format_allowed(allowed):
    """Convert a full 'allowed' spec into the GCE-compatible list form.

    The spec is a ';'-separated sequence of 'protocol[:ports]' sections
    (e.g. 'tcp:80-800;udp:1-25'); each section is formatted independently
    by format_allowed_section.
    """
    return [format_allowed_section(section) for section in allowed.split(";")]
def sorted_allowed_list(allowed_list):
    """Return allowed_list (output of format_allowed) in a canonical order.

    Each rule's 'ports' list is sorted in place, then the rules themselves
    are ordered by protocol and then by port list, so that two lists
    describing the same set of rules always compare equal.
    """
    # Normalize port order inside every rule first.  The previous
    # implementation tried to do this inside the sort key with
    # ``y.get('ports', []).sort()``, but ``list.sort()`` returns None, so
    # every key was None and the by-port ordering never took effect.
    for rule in allowed_list:
        if 'ports' in rule:
            rule['ports'].sort()
    # Deterministic rule order: by protocol, then by the (sorted) port list.
    return sorted(allowed_list, key=lambda rule: (rule['IPProtocol'], rule.get('ports', [])))
def main():
    """Create/update/delete GCE networks, subnetworks and firewall rules.

    Reads the Ansible module parameters, connects to GCE via libcloud,
    then either ensures the requested objects exist (state in
    ['active', 'present']) or removes them (state in ['absent',
    'deleted']).  Exits through module.exit_json()/fail_json().
    """
    module = AnsibleModule(
        argument_spec = dict(
            allowed = dict(),
            ipv4_range = dict(),
            fwname = dict(),
            name = dict(),
            src_range = dict(type='list'),
            src_tags = dict(type='list'),
            target_tags = dict(type='list'),
            state = dict(default='present'),
            service_account_email = dict(),
            pem_file = dict(),
            credentials_file = dict(),
            project_id = dict(),
            mode = dict(default='legacy', choices=['legacy', 'auto', 'custom']),
            subnet_name = dict(),
            subnet_region = dict(),
            subnet_desc = dict(),
        )
    )
    if not HAS_LIBCLOUD:
        module.fail_json(msg='libcloud with GCE support (0.17.0+) required for this module')
    gce = gce_connect(module)
    allowed = module.params.get('allowed')
    ipv4_range = module.params.get('ipv4_range')
    fwname = module.params.get('fwname')
    name = module.params.get('name')
    src_range = module.params.get('src_range')
    src_tags = module.params.get('src_tags')
    target_tags = module.params.get('target_tags')
    state = module.params.get('state')
    mode = module.params.get('mode')
    subnet_name = module.params.get('subnet_name')
    subnet_region = module.params.get('subnet_region')
    subnet_desc = module.params.get('subnet_desc')
    changed = False
    json_output = {'state': state}
    # --- creation / update path: look up existing objects first ---
    if state in ['active', 'present']:
        network = None
        subnet = None
        try:
            network = gce.ex_get_network(name)
            json_output['name'] = name
            if mode == 'legacy':
                json_output['ipv4_range'] = network.cidr
            if network and mode == 'custom' and subnet_name:
                if not hasattr(gce, 'ex_get_subnetwork'):
                    module.fail_json(msg="Update libcloud to a more recent version (>1.0) that supports network 'mode' parameter", changed=False)
                subnet = gce.ex_get_subnetwork(subnet_name, region=subnet_region)
                json_output['subnet_name'] = subnet_name
                json_output['ipv4_range'] = subnet.cidr
        except ResourceNotFoundError:
            pass
        except Exception as e:
            module.fail_json(msg=unexpected_error_msg(e), changed=False)
        # user wants to create a new network that doesn't yet exist
        if name and not network:
            if not ipv4_range and mode != 'auto':
                module.fail_json(msg="Network '" + name + "' is not found. To create network in legacy or custom mode, 'ipv4_range' parameter is required",
                                 changed=False)
            # legacy networks take the CIDR as a positional argument;
            # auto/custom networks pass None and use the 'mode' kwarg.
            args = [ipv4_range if mode =='legacy' else None]
            kwargs = {}
            if mode != 'legacy':
                kwargs['mode'] = mode
            try:
                network = gce.ex_create_network(name, *args, **kwargs)
                json_output['name'] = name
                json_output['ipv4_range'] = ipv4_range
                changed = True
            except TypeError:
                # older libcloud doesn't accept the 'mode' kwarg
                module.fail_json(msg="Update libcloud to a more recent version (>1.0) that supports network 'mode' parameter", changed=False)
            except Exception as e:
                module.fail_json(msg=unexpected_error_msg(e), changed=False)
        if (subnet_name or ipv4_range) and not subnet and mode == 'custom':
            if not hasattr(gce, 'ex_create_subnetwork'):
                module.fail_json(msg='Update libcloud to a more recent version (>1.0) that supports subnetwork creation', changed=changed)
            if not subnet_name or not ipv4_range or not subnet_region:
                module.fail_json(msg="subnet_name, ipv4_range, and subnet_region required for custom mode", changed=changed)
            try:
                subnet = gce.ex_create_subnetwork(subnet_name, cidr=ipv4_range, network=name, region=subnet_region, description=subnet_desc)
                json_output['subnet_name'] = subnet_name
                json_output['ipv4_range'] = ipv4_range
                changed = True
            except Exception as e:
                module.fail_json(msg=unexpected_error_msg(e), changed=changed)
        if fwname:
            # user creating a firewall rule
            if not allowed and not src_range and not src_tags:
                if changed and network:
                    module.fail_json(
                        msg="Network created, but missing required " + \
                            "firewall rule parameter(s)", changed=True)
                module.fail_json(
                    msg="Missing required firewall rule parameter(s)",
                    changed=False)
            allowed_list = format_allowed(allowed)
            # Fetch existing rule and if it exists, compare attributes
            # update if attributes changed. Create if doesn't exist.
            try:
                fw_changed = False
                fw = gce.ex_get_firewall(fwname)
                # If old and new attributes are different, we update the firewall rule.
                # This implicitly lets us clear out attributes as well.
                # allowed_list is required and must not be None for firewall rules.
                if allowed_list and (sorted_allowed_list(allowed_list) != sorted_allowed_list(fw.allowed)):
                    fw.allowed = allowed_list
                    fw_changed = True
                # source_ranges might not be set in the project; cast it to an empty list
                fw.source_ranges = fw.source_ranges or []
                # If these attributes are lists, we sort them first, then compare.
                # Otherwise, we update if they differ.
                if fw.source_ranges != src_range:
                    if isinstance(src_range, list):
                        if sorted(fw.source_ranges) != sorted(src_range):
                            fw.source_ranges = src_range
                            fw_changed = True
                    else:
                        fw.source_ranges = src_range
                        fw_changed = True
                # source_tags might not be set in the project; cast it to an empty list
                fw.source_tags = fw.source_tags or []
                if fw.source_tags != src_tags:
                    if isinstance(src_tags, list):
                        if sorted(fw.source_tags) != sorted(src_tags):
                            fw.source_tags = src_tags
                            fw_changed = True
                    else:
                        fw.source_tags = src_tags
                        fw_changed = True
                # target_tags might not be set in the project; cast it to an empty list
                fw.target_tags = fw.target_tags or []
                if fw.target_tags != target_tags:
                    if isinstance(target_tags, list):
                        if sorted(fw.target_tags) != sorted(target_tags):
                            fw.target_tags = target_tags
                            fw_changed = True
                    else:
                        fw.target_tags = target_tags
                        fw_changed = True
                if fw_changed is True:
                    try:
                        gce.ex_update_firewall(fw)
                        changed = True
                    except Exception as e:
                        module.fail_json(msg=unexpected_error_msg(e), changed=False)
            # Firewall rule not found so we try to create it.
            except ResourceNotFoundError:
                try:
                    gce.ex_create_firewall(fwname, allowed_list, network=name,
                        source_ranges=src_range, source_tags=src_tags, target_tags=target_tags)
                    changed = True
                except Exception as e:
                    module.fail_json(msg=unexpected_error_msg(e), changed=False)
            except Exception as e:
                module.fail_json(msg=unexpected_error_msg(e), changed=False)
            json_output['fwname'] = fwname
            json_output['allowed'] = allowed
            json_output['src_range'] = src_range
            json_output['src_tags'] = src_tags
            json_output['target_tags'] = target_tags
    # --- deletion path: exactly one of firewall rule, subnetwork, or
    # network is removed per run, chosen in that priority order ---
    if state in ['absent', 'deleted']:
        if fwname:
            json_output['fwname'] = fwname
            fw = None
            try:
                fw = gce.ex_get_firewall(fwname)
            except ResourceNotFoundError:
                pass
            except Exception as e:
                module.fail_json(msg=unexpected_error_msg(e), changed=False)
            if fw:
                gce.ex_destroy_firewall(fw)
                changed = True
        elif subnet_name:
            if not hasattr(gce, 'ex_get_subnetwork') or not hasattr(gce, 'ex_destroy_subnetwork'):
                module.fail_json(msg='Update libcloud to a more recent version (>1.0) that supports subnetwork creation', changed=changed)
            json_output['name'] = subnet_name
            subnet = None
            try:
                subnet = gce.ex_get_subnetwork(subnet_name, region=subnet_region)
            except ResourceNotFoundError:
                pass
            except Exception as e:
                module.fail_json(msg=unexpected_error_msg(e), changed=False)
            if subnet:
                gce.ex_destroy_subnetwork(subnet)
                changed = True
        elif name:
            json_output['name'] = name
            network = None
            try:
                network = gce.ex_get_network(name)
            except ResourceNotFoundError:
                pass
            except Exception as e:
                module.fail_json(msg=unexpected_error_msg(e), changed=False)
            if network:
                try:
                    gce.ex_destroy_network(network)
                except Exception as e:
                    module.fail_json(msg=unexpected_error_msg(e), changed=False)
                changed = True
    json_output['changed'] = changed
    module.exit_json(**json_output)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.gce import *
if __name__ == '__main__':
main()
| gpl-3.0 |
cecep-edu/edx-platform | lms/djangoapps/course_api/blocks/transformers/tests/test_block_counts.py | 23 | 2039 | """
Tests for BlockCountsTransformer.
"""
# pylint: disable=protected-access
from openedx.core.lib.block_structure.factory import BlockStructureFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import SampleCourseFactory
from ..block_counts import BlockCountsTransformer
class TestBlockCountsTransformer(ModuleStoreTestCase):
    """
    Test behavior of BlockCountsTransformer
    """
    def setUp(self):
        super(TestBlockCountsTransformer, self).setUp()
        self.course_key = SampleCourseFactory.create().id
        self.course_usage_key = self.store.make_course_usage_key(self.course_key)
        self.block_structure = BlockStructureFactory.create_from_modulestore(self.course_usage_key, self.store)

    def test_transform(self):
        # Run the collect phase and gather the requested xblock fields.
        BlockCountsTransformer.collect(self.block_structure)
        self.block_structure._collect_requested_xblock_fields()
        # Run the transform phase for the two block types under test.
        BlockCountsTransformer(['problem', 'chapter']).transform(usage_info=None, block_structure=self.block_structure)
        # Pull the per-block counts for chapter_x and for the course root.
        chapter_x_key = self.course_key.make_usage_key('chapter', 'chapter_x')
        chapter_counts = self.block_structure.get_transformer_block_data(
            chapter_x_key, BlockCountsTransformer,
        )
        course_counts = self.block_structure.get_transformer_block_data(
            self.course_usage_key, BlockCountsTransformer,
        )
        # The sample course contains 2 chapters and 6 problems overall,
        # 3 of which live under chapter_x.
        self.assertEquals(course_counts.chapter, 2)
        self.assertEquals(course_counts.problem, 6)
        self.assertEquals(chapter_counts.problem, 3)
        # Block types that were not requested must not be counted at all.
        for untracked_type in ('course', 'html', 'video'):
            self.assertFalse(hasattr(course_counts, untracked_type))
            self.assertFalse(hasattr(chapter_counts, untracked_type))
| agpl-3.0 |
cecep-edu/edx-platform | common/djangoapps/external_auth/views.py | 12 | 36626 | import functools
import json
import logging
import random
import re
import string
import fnmatch
import unicodedata
import urllib
from textwrap import dedent
from external_auth.models import ExternalAuthMap
from external_auth.djangostore import DjangoOpenIDStore
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME, authenticate, login
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.core.validators import validate_email
from django.core.exceptions import ValidationError
if settings.FEATURES.get('AUTH_USE_CAS'):
from django_cas.views import login as django_cas_login
from student.helpers import get_next_url_for_login_page
from student.models import UserProfile
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden
from django.utils.http import urlquote, is_safe_url
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
from edxmako.shortcuts import render_to_response, render_to_string
try:
from django.views.decorators.csrf import csrf_exempt
except ImportError:
from django.contrib.csrf.middleware import csrf_exempt
from django.views.decorators.csrf import ensure_csrf_cookie
import django_openid_auth.views as openid_views
from django_openid_auth import auth as openid_auth
from openid.consumer.consumer import SUCCESS
from openid.server.server import Server, ProtocolError, UntrustedReturnURL
from openid.server.trustroot import TrustRoot
from openid.extensions import ax, sreg
from ratelimitbackend.exceptions import RateLimitException
import student.views
from xmodule.modulestore.django import modulestore
from opaque_keys.edx.locations import SlashSeparatedCourseKey
log = logging.getLogger("edx.external_auth")
AUDIT_LOG = logging.getLogger("audit")
SHIBBOLETH_DOMAIN_PREFIX = settings.SHIBBOLETH_DOMAIN_PREFIX
OPENID_DOMAIN_PREFIX = settings.OPENID_DOMAIN_PREFIX
# -----------------------------------------------------------------------------
# OpenID Common
# -----------------------------------------------------------------------------
@csrf_exempt
def default_render_failure(request,
                           message,
                           status=403,
                           template_name='extauth_failure.html',
                           exception=None):
    """Render the external-auth failure page for *message* and return it
    as an HTTP response (403 by default)."""
    log.debug("In openid_failure " + message)
    context = dict(message=message, exception=exception)
    body = render_to_string(template_name, context)
    return HttpResponse(body, status=status)
# -----------------------------------------------------------------------------
# OpenID Authentication
# -----------------------------------------------------------------------------
def generate_password(length=12, chars=string.ascii_letters + string.digits):
    """Generate an internal password for an externally authenticated user.

    Arguments:
        length: number of characters to generate (default 12).
        chars: pool of candidate characters.

    Uses random.SystemRandom (os.urandom-backed), so the result is suitable
    as an unguessable placeholder credential.
    """
    # string.ascii_letters replaces the old string.letters, which was
    # locale-dependent and was removed in Python 3; ascii_letters exists on
    # both Python 2 and 3 and gives a deterministic character pool.
    choice = random.SystemRandom().choice
    return ''.join([choice(chars) for _i in range(length)])
@csrf_exempt
def openid_login_complete(request,
                          redirect_field_name=REDIRECT_FIELD_NAME,
                          render_failure=None):
    """Complete the openid login process.

    Endpoint the OpenID provider redirects back to.  On a successful
    assertion, extracts the user's details and hands off to
    _external_login_or_signup; otherwise renders the failure page.
    """
    render_failure = (render_failure or default_render_failure)
    openid_response = openid_views.parse_openid_response(request)
    if not openid_response:
        return render_failure(request,
                              'This is an OpenID relying party endpoint.')
    if openid_response.status == SUCCESS:
        external_id = openid_response.identity_url
        oid_backend = openid_auth.OpenIDBackend()
        # NOTE(review): relies on a private helper of django_openid_auth's
        # backend -- may break on library upgrades.
        details = oid_backend._extract_user_details(openid_response)
        log.debug('openid success, details=%s', details)
        url = getattr(settings, 'OPENID_SSO_SERVER_URL', None)
        external_domain = "{0}{1}".format(OPENID_DOMAIN_PREFIX, url)
        fullname = '%s %s' % (details.get('first_name', ''),
                              details.get('last_name', ''))
        return _external_login_or_signup(
            request,
            external_id,
            external_domain,
            details,
            details.get('email', ''),
            fullname,
            retfun=functools.partial(redirect, get_next_url_for_login_page(request)),
        )
    return render_failure(request, 'Openid failure')
def _external_login_or_signup(request,
                              external_id,
                              external_domain,
                              credentials,
                              email,
                              fullname,
                              retfun=None):
    """Generic external auth login or signup.

    Looks up (or lazily creates) the ExternalAuthMap row for
    (external_id, external_domain), then either logs the mapped edX user
    in or routes through the signup flow when no internal user is linked
    yet.

    Args:
        request: the Django request being serviced.
        external_id: identity issued by the external provider.
        external_domain: provider domain, prefixed to mark its type
            (shib/openid/ssl).
        credentials: raw provider credentials; stored as JSON on the map.
        email: email address reported by the provider.
        fullname: full name reported by the provider.
        retfun: zero-argument callable producing the success response;
            defaults to a redirect to '/'.
    """
    # see if we have a map from this external_id to an edX username
    try:
        eamap = ExternalAuthMap.objects.get(external_id=external_id,
                                            external_domain=external_domain)
        log.debug(u'Found eamap=%s', eamap)
    except ExternalAuthMap.DoesNotExist:
        # go render form for creating edX user
        eamap = ExternalAuthMap(external_id=external_id,
                                external_domain=external_domain,
                                external_credentials=json.dumps(credentials))
        eamap.external_email = email
        eamap.external_name = fullname
        eamap.internal_password = generate_password()
        log.debug(u'Created eamap=%s', eamap)
        eamap.save()
    log.info(u"External_Auth login_or_signup for %s : %s : %s : %s", external_domain, external_id, email, fullname)
    uses_shibboleth = settings.FEATURES.get('AUTH_USE_SHIB') and external_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX)
    uses_certs = settings.FEATURES.get('AUTH_USE_CERTIFICATES')
    internal_user = eamap.user
    # No internal user linked yet: try account linking (shib) or signup.
    if internal_user is None:
        if uses_shibboleth:
            # If we are using shib, try to link accounts
            # For Stanford shib, the email the idp returns is actually under the control of the user.
            # Since the id the idps return is not user-editable, and is of the from "username@stanford.edu",
            # use the id to link accounts instead.
            try:
                link_user = User.objects.get(email=eamap.external_id)
                if not ExternalAuthMap.objects.filter(user=link_user).exists():
                    # if there's no pre-existing linked eamap, we link the user
                    eamap.user = link_user
                    eamap.save()
                    internal_user = link_user
                    log.info(u'SHIB: Linking existing account for %s', eamap.external_id)
                    # now pass through to log in
                else:
                    # otherwise, there must have been an error, b/c we've already linked a user with these external
                    # creds
                    failure_msg = _(
                        "You have already created an account using "
                        "an external login like WebAuth or Shibboleth. "
                        "Please contact {tech_support_email} for support."
                    ).format(
                        tech_support_email=settings.TECH_SUPPORT_EMAIL,
                    )
                    return default_render_failure(request, failure_msg)
            except User.DoesNotExist:
                log.info(u'SHIB: No user for %s yet, doing signup', eamap.external_email)
                return _signup(request, eamap, retfun)
        else:
            log.info(u'No user for %s yet. doing signup', eamap.external_email)
            return _signup(request, eamap, retfun)
    # We trust shib's authentication, so no need to authenticate using the password again
    uname = internal_user.username
    if uses_shibboleth:
        user = internal_user
        # Assuming this 'AUTHENTICATION_BACKENDS' is set in settings, which I think is safe
        if settings.AUTHENTICATION_BACKENDS:
            auth_backend = settings.AUTHENTICATION_BACKENDS[0]
        else:
            auth_backend = 'ratelimitbackend.backends.RateLimitModelBackend'
        user.backend = auth_backend
        if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
            AUDIT_LOG.info(u'Linked user.id: {0} logged in via Shibboleth'.format(user.id))
        else:
            AUDIT_LOG.info(u'Linked user "{0}" logged in via Shibboleth'.format(user.email))
    elif uses_certs:
        # Certificates are trusted, so just link the user and log the action
        user = internal_user
        user.backend = 'ratelimitbackend.backends.RateLimitModelBackend'
        if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
            AUDIT_LOG.info(u'Linked user_id {0} logged in via SSL certificate'.format(user.id))
        else:
            AUDIT_LOG.info(u'Linked user "{0}" logged in via SSL certificate'.format(user.email))
    else:
        # Non-trusted path: check the stored internal password.
        user = authenticate(username=uname, password=eamap.internal_password, request=request)
    if user is None:
        # we want to log the failure, but don't want to log the password attempted:
        if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
            AUDIT_LOG.warning(u'External Auth Login failed')
        else:
            AUDIT_LOG.warning(u'External Auth Login failed for "{0}"'.format(uname))
        return _signup(request, eamap, retfun)
    if not user.is_active:
        if settings.FEATURES.get('BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH'):
            # if BYPASS_ACTIVATION_EMAIL_FOR_EXTAUTH, we trust external auth and activate any users
            # that aren't already active
            user.is_active = True
            user.save()
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.info(u'Activating user {0} due to external auth'.format(user.id))
            else:
                AUDIT_LOG.info(u'Activating user "{0}" due to external auth'.format(uname))
        else:
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.warning(u'User {0} is not active after external login'.format(user.id))
            else:
                AUDIT_LOG.warning(u'User "{0}" is not active after external login'.format(uname))
            # TODO: improve error page
            msg = 'Account not yet activated: please look for link in your email'
            return default_render_failure(request, msg)
    login(request, user)
    # Session expires at browser close.
    request.session.set_expiry(0)
    if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
        AUDIT_LOG.info(u"Login success - user.id: {0}".format(user.id))
    else:
        AUDIT_LOG.info(u"Login success - {0} ({1})".format(user.username, user.email))
    if retfun is None:
        return redirect('/')
    return retfun()
def _flatten_to_ascii(txt):
    """Strip *txt* down to ASCII (Django usernames must be ASCII).

    Accented characters are decomposed via NFKD normalization and the
    non-ASCII marks dropped.  The result comes back in the same type
    (str or unicode) that was passed in.  Python 2 only: relies on
    str.decode and the unicode() builtin.
    """
    was_bytestring = isinstance(txt, str)
    source = txt.decode('utf-8') if was_bytestring else txt
    flattened = unicodedata.normalize('NFKD', source).encode('ASCII', 'ignore')
    return flattened if was_bytestring else unicode(flattened)
@ensure_csrf_cookie
def _signup(request, eamap, retfun=None):
    """
    Present form to complete for signup via external authentication.
    Even though the user has external credentials, he/she still needs
    to create an account on the edX system, and fill in the user
    registration form.
    eamap is an ExternalAuthMap object, specifying the external user
    for which to complete the signup.
    retfun is a function to execute for the return value, if immediate
    signup is used. That allows @ssl_login_shortcut() to work.
    """
    # save this for use by student.views.create_account
    request.session['ExternalAuthMap'] = eamap
    if settings.FEATURES.get('AUTH_USE_CERTIFICATES_IMMEDIATE_SIGNUP', ''):
        # do signin immediately, by calling create_account, instead of asking
        # student to fill in form. MIT students already have information filed.
        # Username is the local part of the email, with dots replaced.
        username = eamap.external_email.split('@', 1)[0]
        username = username.replace('.', '_')
        post_vars = dict(username=username,
                         honor_code=u'true',
                         terms_of_service=u'true')
        log.info(u'doing immediate signup for %s, params=%s', username, post_vars)
        student.views.create_account(request, post_vars)
        # should check return content for successful completion before
        if retfun is not None:
            return retfun()
        else:
            return redirect('/')
    # default conjoin name, no spaces, flattened to ascii b/c django can't handle unicode usernames, sadly
    # but this only affects username, not fullname
    username = re.sub(r'\s', '', _flatten_to_ascii(eamap.external_name), flags=re.UNICODE)
    context = {'has_extauth_info': True,
               'show_signup_immediately': True,
               'extauth_domain': eamap.external_domain,
               'extauth_id': eamap.external_id,
               'extauth_email': eamap.external_email,
               'extauth_username': username,
               'extauth_name': eamap.external_name,
               'ask_for_tos': True,
               }
    # Some openEdX instances can't have terms of service for shib users, like
    # according to Stanford's Office of General Counsel
    uses_shibboleth = (settings.FEATURES.get('AUTH_USE_SHIB') and
                       eamap.external_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX))
    if uses_shibboleth and settings.FEATURES.get('SHIB_DISABLE_TOS'):
        context['ask_for_tos'] = False
    # detect if full name is blank and ask for it from user
    context['ask_for_fullname'] = eamap.external_name.strip() == ''
    # validate provided mail and if it's not valid ask the user
    try:
        validate_email(eamap.external_email)
        context['ask_for_email'] = False
    except ValidationError:
        context['ask_for_email'] = True
    log.info(u'EXTAUTH: Doing signup for %s', eamap.external_id)
    return student.views.register_user(request, extra_context=context)
# -----------------------------------------------------------------------------
# MIT SSL
# -----------------------------------------------------------------------------
def _ssl_dn_extract_info(dn_string):
"""
Extract username, email address (may be anyuser@anydomain.com) and
full name from the SSL DN string. Return (user,email,fullname) if
successful, and None otherwise.
"""
ss = re.search('/emailAddress=(.*)@([^/]+)', dn_string)
if ss:
user = ss.group(1)
email = "%s@%s" % (user, ss.group(2))
else:
return None
ss = re.search('/CN=([^/]+)/', dn_string)
if ss:
fullname = ss.group(1)
else:
return None
return (user, email, fullname)
def ssl_get_cert_from_request(request):
    """Return the client SSL certificate DN string for *request*.

    Probes the usual locations in order: the WSGI environ key, the
    proxied HTTP header variant, then the raw apache request object.
    Returns '' when no certificate information is available.
    """
    meta_key = "SSL_CLIENT_S_DN"  # the request.META field to use
    cert = request.META.get(meta_key, '') or request.META.get('HTTP_' + meta_key, '')
    if cert:
        return cert
    try:
        # Fall back to the direct apache2 SSL key.
        return request._req.subprocess_env.get(meta_key, '')
    except Exception:
        return ''
def ssl_login_shortcut(fn):
    """
    Python function decorator for login procedures, to allow direct login
    based on existing ExternalAuth record and MIT ssl certificate.

    Expects the wrapped view's first positional argument to be the
    Django request.  Falls through to the wrapped view when certificate
    auth is disabled, the user is already authenticated, or no client
    certificate is present.
    """
    def wrapped(*args, **kwargs):
        """
        This manages the function wrapping, by determining whether to inject
        the _external signup or just continuing to the internal function
        call.
        """
        if not settings.FEATURES['AUTH_USE_CERTIFICATES']:
            return fn(*args, **kwargs)
        request = args[0]
        if request.user and request.user.is_authenticated():  # don't re-authenticate
            return fn(*args, **kwargs)
        cert = ssl_get_cert_from_request(request)
        if not cert:  # no certificate information - show normal login window
            return fn(*args, **kwargs)
        def retfun():
            """Wrap function again for call by _external_login_or_signup"""
            return fn(*args, **kwargs)
        # NOTE(review): _ssl_dn_extract_info returns None for a malformed
        # DN, which would raise TypeError on this unpacking -- confirm the
        # web server guarantees a well-formed client DN here.
        (_user, email, fullname) = _ssl_dn_extract_info(cert)
        return _external_login_or_signup(
            request,
            external_id=email,
            external_domain="ssl:MIT",
            credentials=cert,
            email=email,
            fullname=fullname,
            retfun=retfun
        )
    return wrapped
@csrf_exempt
def ssl_login(request):
    """
    This is called by branding.views.index when
    FEATURES['AUTH_USE_CERTIFICATES'] = True
    Used for MIT user authentication. This presumes the web server
    (nginx) has been configured to require specific client
    certificates.
    If the incoming protocol is HTTPS (SSL) then authenticate via
    client certificate. The certificate provides user email and
    fullname; this populates the ExternalAuthMap. The user is
    nevertheless still asked to complete the edX signup.
    Else continues on with student.views.index, and no authentication.
    """
    # Just to make sure we're calling this only at MIT:
    if not settings.FEATURES['AUTH_USE_CERTIFICATES']:
        return HttpResponseForbidden()
    cert = ssl_get_cert_from_request(request)
    if not cert:
        # no certificate information - go onward to main index
        return student.views.index(request)
    # NOTE(review): _ssl_dn_extract_info returns None for a malformed DN,
    # which would raise TypeError on this unpacking -- confirm the server
    # guarantees a well-formed client DN here.
    (_user, email, fullname) = _ssl_dn_extract_info(cert)
    redirect_to = get_next_url_for_login_page(request)
    retfun = functools.partial(redirect, redirect_to)
    return _external_login_or_signup(
        request,
        external_id=email,
        external_domain="ssl:MIT",
        credentials=cert,
        email=email,
        fullname=fullname,
        retfun=retfun
    )
# -----------------------------------------------------------------------------
# CAS (Central Authentication Service)
# -----------------------------------------------------------------------------
def cas_login(request, next_page=None, required=False):
    """
    Authenticate via django_cas.

    CAS is a common authentication method pioneered by Yale.
    See http://en.wikipedia.org/wiki/Central_Authentication_Service

    Performs the normal CAS login, then ensures a UserProfile exists for
    the authenticated user. User details are assumed to be maintained by
    the central service, so an empty profile (name defaulting to the
    username) is appropriate.
    """
    response = django_cas_login(request, next_page, required)
    if request.user.is_authenticated():
        UserProfile.objects.get_or_create(
            user=request.user,
            defaults={'name': request.user.username},
        )
    return response
# -----------------------------------------------------------------------------
# Shibboleth (Stanford and others. Uses *Apache* environment variables)
# -----------------------------------------------------------------------------
def shib_login(request):
    """
    Uses Apache's REMOTE_USER environment variable as the external id.
    This in turn typically uses EduPersonPrincipalName
    http://www.incommonfederation.org/attributesummary.html#eduPersonPrincipal
    but the configuration is in the shibboleth software.

    On success, delegates to _external_login_or_signup with the attributes
    gathered from request.META; on missing attributes, renders a failure
    page telling the user to retry.
    """
    shib_error_msg = _(dedent(
        """
        Your university identity server did not return your ID information to us.
        Please try logging in again. (You may need to restart your browser.)
        """))
    if not request.META.get('REMOTE_USER'):
        log.error(u"SHIB: no REMOTE_USER found in request.META")
        return default_render_failure(request, shib_error_msg)
    elif not request.META.get('Shib-Identity-Provider'):
        log.error(u"SHIB: no Shib-Identity-Provider in request.META")
        return default_render_failure(request, shib_error_msg)
    else:
        # If we get here, the user has authenticated properly
        # NOTE(review): .decode('utf-8') assumes META values are byte strings
        # (Python 2 / mod_shib behaviour) -- confirm before porting.
        shib = {attr: request.META.get(attr, '').decode('utf-8')
                for attr in ['REMOTE_USER', 'givenName', 'sn', 'mail', 'Shib-Identity-Provider', 'displayName']}
        # Clean up first name, last name, and email address
        # TODO: Make this less hardcoded re: format, but split will work
        # even if ";" is not present, since we are accessing 1st element
        shib['sn'] = shib['sn'].split(";")[0].strip().capitalize()
        shib['givenName'] = shib['givenName'].split(";")[0].strip().capitalize()
    # TODO: should we be logging creds here, at info level?
    log.info(u"SHIB creds returned: %r", shib)
    # Prefer the IdP-supplied display name; otherwise compose one from the
    # cleaned given name and surname.
    fullname = shib['displayName'] if shib['displayName'] else u'%s %s' % (shib['givenName'], shib['sn'])
    redirect_to = get_next_url_for_login_page(request)
    retfun = functools.partial(_safe_postlogin_redirect, redirect_to, request.get_host())
    return _external_login_or_signup(
        request,
        external_id=shib['REMOTE_USER'],
        external_domain=SHIBBOLETH_DOMAIN_PREFIX + shib['Shib-Identity-Provider'],
        credentials=shib,
        email=shib['mail'],
        fullname=fullname,
        retfun=retfun
    )
def _safe_postlogin_redirect(redirect_to, safehost, default_redirect='/'):
    """
    Redirect to ``redirect_to`` only when it is safe for ``safehost``.

    A user-supplied URL pointing off-host is replaced by
    ``default_redirect``. Basically copied from
    django.contrib.auth.views.login.

    @param redirect_to: user-supplied redirect url
    @param safehost: which host is safe to redirect to
    @return: an HttpResponseRedirect
    """
    target = redirect_to if is_safe_url(url=redirect_to, host=safehost) else default_redirect
    return redirect(target)
def course_specific_login(request, course_id):
    """
    Dispatch to the login method required by the given course.

    Unknown courses and courses without special requirements fall through
    to the vanilla signin page; only Shibboleth is special-cased for now.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = modulestore().get_course(course_key)
    if not course:
        # Couldn't find the course: just return the vanilla signin page.
        return redirect_with_get('signin_user', request.GET)
    if (settings.FEATURES.get('AUTH_USE_SHIB')
            and course.enrollment_domain
            and course.enrollment_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX)):
        return redirect_with_get('shib-login', request.GET)
    # Default fallthrough to the normal signin page.
    return redirect_with_get('signin_user', request.GET)
def course_specific_register(request, course_id):
    """
    Dispatch to the registration method required by the given course.

    Mirrors course_specific_login: unknown courses and courses without
    special requirements fall through to the vanilla registration page.
    """
    course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id)
    course = modulestore().get_course(course_key)
    if not course:
        # Couldn't find the course: just return the vanilla registration page.
        return redirect_with_get('register_user', request.GET)
    if (settings.FEATURES.get('AUTH_USE_SHIB')
            and course.enrollment_domain
            and course.enrollment_domain.startswith(SHIBBOLETH_DOMAIN_PREFIX)):
        # shib-login takes care of both registration and login flows.
        return redirect_with_get('shib-login', request.GET)
    # Default fallthrough to the normal registration page.
    return redirect_with_get('register_user', request.GET)
def redirect_with_get(view_name, get_querydict, do_reverse=True):
    """
    Helper function to carry over GET parameters across redirects.

    ``view_name`` is resolved through ``reverse()`` unless ``do_reverse``
    is False, in which case it is treated as a literal URL.

    Using urlencode(safe='/') because the @login_required decorator
    generates 'next' queryparams with '/' unencoded.
    """
    url = reverse(view_name) if do_reverse else view_name
    if get_querydict:
        return redirect("%s?%s" % (url, get_querydict.urlencode(safe='/')))
    # Fix: previously redirected to ``view_name`` here, discarding the
    # resolved ``url`` and forcing redirect() to re-resolve the name.
    return redirect(url)
# -----------------------------------------------------------------------------
# OpenID Provider
# -----------------------------------------------------------------------------
def get_xrds_url(resource, request):
    """
    Build the absolute XRDS discovery URL for ``resource``.

    The scheme follows the incoming request: https when the request is
    secure, plain http otherwise.
    """
    scheme = 'https' if request.is_secure() else 'http'
    return '%s://%s/openid/provider/%s/' % (scheme, request.get_host(), resource)
def add_openid_simple_registration(request, response, data):
    """
    Attach an OpenID Simple Registration (SReg) extension to ``response``.

    Copies any consumer-requested sreg fields (email, fullname, nickname)
    that are present in ``data``.
    """
    sreg_request = sreg.SRegRequest.fromOpenIDRequest(request)
    requested = sreg_request.allRequestedFields()
    sreg_data = {}
    # Only the fields we know how to answer are considered.
    for field in ('email', 'fullname', 'nickname'):
        if field in requested and field in data:
            sreg_data[field] = data[field]
    # Construct and attach the sreg response.
    sreg_response = sreg.SRegResponse.extractResponse(sreg_request, sreg_data)
    sreg_response.toMessage(response.fields)
def add_openid_attribute_exchange(request, response, data):
    """
    Attach an OpenID Attribute Exchange (AX) extension to ``response``.

    Answers requested email / fullname attributes from ``data``; does
    nothing when the consumer did not use the AX extension.
    """
    try:
        ax_request = ax.FetchRequest.fromOpenIDRequest(request)
    except ax.AXError:
        # Consumer is not using the attribute exchange extension.
        return
    ax_response = ax.FetchResponse()
    email_schema = 'http://axschema.org/contact/email'
    name_schema = 'http://axschema.org/namePerson'
    if ax_request and ax_request.requested_attributes:
        for type_uri in ax_request.requested_attributes.iterkeys():
            if type_uri == email_schema and 'email' in data:
                ax_response.addValue(email_schema, data['email'])
            elif type_uri == name_schema and 'fullname' in data:
                ax_response.addValue(name_schema, data['fullname'])
    # Construct and attach the AX response.
    ax_response.toMessage(response.fields)
def provider_respond(server, request, response, data):
    """
    Encode an OpenID response (plus sreg/AX extensions) as an HttpResponse.
    """
    # Attach extension payloads before encoding.
    add_openid_simple_registration(request, response, data)
    add_openid_attribute_exchange(request, response, data)
    # Translate the encoded OpenID web response into a Django response.
    webresponse = server.encodeResponse(response)
    http_response = HttpResponse(webresponse.body)
    http_response.status_code = webresponse.code
    # Carry the OpenID protocol headers over to the Django response.
    for header, value in webresponse.headers.iteritems():
        http_response[header] = value
    return http_response
def validate_trust_root(openid_request):
    """
    Only allow OpenID requests from valid trust roots.

    Checks, in order: that a trust root and return_to are present, that
    the trust root parses, that return_to lies within the trust root, and
    (when OPENID_PROVIDER_TRUSTED_ROOT is configured) that the trust root
    matches one of the configured fnmatch patterns.

    Returns True when the request may proceed, False otherwise.
    """
    trusted_roots = getattr(settings, 'OPENID_PROVIDER_TRUSTED_ROOT', None)
    if not trusted_roots:
        # not using trusted roots
        return True
    # don't allow empty trust roots
    if (not hasattr(openid_request, 'trust_root') or
            not openid_request.trust_root):
        log.error('no trust_root')
        return False
    # ensure trust root parses cleanly (one wildcard, of form *.foo.com, etc.)
    trust_root = TrustRoot.parse(openid_request.trust_root)
    if not trust_root:
        log.error('invalid trust_root')
        return False
    # don't allow empty return tos
    if (not hasattr(openid_request, 'return_to') or
            not openid_request.return_to):
        log.error('empty return_to')
        return False
    # ensure return to is within trust root
    if not trust_root.validateURL(openid_request.return_to):
        log.error('invalid return_to')
        return False
    # check that the root matches the ones we trust.
    # Fix: fnmatch requires a string; the parsed TrustRoot object was
    # previously passed here, which raises TypeError whenever trusted
    # roots are configured. Match the raw trust-root string instead.
    if not any(fnmatch.fnmatch(openid_request.trust_root, pattern)
               for pattern in trusted_roots):
        log.error('non-trusted root')
        return False
    return True
@csrf_exempt
def provider_login(request):
    """
    OpenID login endpoint.

    Handles three cases, distinguished by the request contents:

    1. An OpenID protocol request (has an ``openid.mode`` parameter):
       decode and validate it; ``checkid_setup`` stores the request in the
       session and falls through to render the login page, other modes
       are answered immediately.
    2. A POST from the login page rendered in case 1 (no ``openid.mode``
       but an ``openid_setup`` entry in the session): authenticate the
       user and answer the stored OpenID request, redirecting back to the
       login page (with ``openid_error`` set in the session) on failure.
    3. Anything else: render the login page.
    """
    # make and validate endpoint
    endpoint = get_xrds_url('login', request)
    if not endpoint:
        return default_render_failure(request, "Invalid OpenID request")
    # initialize store and server
    store = DjangoOpenIDStore()
    server = Server(store, endpoint)
    # first check to see if the request is an OpenID request.
    # If so, the client will have specified an 'openid.mode' as part
    # of the request.
    if request.method == 'GET':
        querydict = dict(request.GET.items())
    else:
        querydict = dict(request.POST.items())
    error = False
    if 'openid.mode' in request.GET or 'openid.mode' in request.POST:
        # decode request
        try:
            openid_request = server.decodeRequest(querydict)
        except (UntrustedReturnURL, ProtocolError):
            openid_request = None
        if not openid_request:
            return default_render_failure(request, "Invalid OpenID request")
        # don't allow invalid and non-trusted trust roots
        if not validate_trust_root(openid_request):
            return default_render_failure(request, "Invalid OpenID trust root")
        # checkid_immediate not supported, require user interaction
        if openid_request.mode == 'checkid_immediate':
            return provider_respond(server, openid_request,
                                    openid_request.answer(False), {})
        # checkid_setup, so display login page
        # (by falling through to the provider_login at the
        # bottom of this method).
        elif openid_request.mode == 'checkid_setup':
            if openid_request.idSelect():
                # remember request and original path
                request.session['openid_setup'] = {
                    'request': openid_request,
                    'url': request.get_full_path(),
                    'post_params': request.POST,
                }
            # user failed login on previous attempt
            if 'openid_error' in request.session:
                error = True
                del request.session['openid_error']
        # OpenID response
        else:
            return provider_respond(server, openid_request,
                                    server.handleRequest(openid_request), {})
    # handle login redirection: these are also sent to this view function,
    # but are distinguished by lacking the openid mode. We also know that
    # they are posts, because they come from the popup
    elif request.method == 'POST' and 'openid_setup' in request.session:
        # get OpenID request from session
        openid_setup = request.session['openid_setup']
        openid_request = openid_setup['request']
        openid_request_url = openid_setup['url']
        post_params = openid_setup['post_params']
        # We need to preserve the parameters, and the easiest way to do this is
        # through the URL
        url_post_params = {
            param: post_params[param] for param in post_params if param.startswith('openid')
        }
        encoded_params = urllib.urlencode(url_post_params)
        if '?' not in openid_request_url:
            openid_request_url = openid_request_url + '?' + encoded_params
        else:
            openid_request_url = openid_request_url + '&' + encoded_params
        del request.session['openid_setup']
        # don't allow invalid trust roots
        if not validate_trust_root(openid_request):
            return default_render_failure(request, "Invalid OpenID trust root")
        # check if user with given email exists
        # Failure is redirected to this method (by using the original URL),
        # which will bring up the login dialog.
        email = request.POST.get('email', None)
        try:
            user = User.objects.get(email=email)
        except User.DoesNotExist:
            request.session['openid_error'] = True
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.warning(u"OpenID login failed - Unknown user email")
            else:
                msg = u"OpenID login failed - Unknown user email: {0}".format(email)
                AUDIT_LOG.warning(msg)
            return HttpResponseRedirect(openid_request_url)
        # attempt to authenticate user (but not actually log them in...)
        # Failure is again redirected to the login dialog.
        username = user.username
        password = request.POST.get('password', None)
        try:
            user = authenticate(username=username, password=password, request=request)
        except RateLimitException:
            AUDIT_LOG.warning(u'OpenID - Too many failed login attempts.')
            return HttpResponseRedirect(openid_request_url)
        if user is None:
            request.session['openid_error'] = True
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.warning(u"OpenID login failed - invalid password")
            else:
                AUDIT_LOG.warning(
                    u"OpenID login failed - password for %s is invalid", email)
            return HttpResponseRedirect(openid_request_url)
        # authentication succeeded, so fetch user information
        # that was requested
        if user is not None and user.is_active:
            # remove error from session since login succeeded
            if 'openid_error' in request.session:
                del request.session['openid_error']
            if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
                AUDIT_LOG.info(u"OpenID login success - user.id: %s", user.id)
            else:
                AUDIT_LOG.info(
                    u"OpenID login success - %s (%s)", user.username, user.email)
            # redirect user to return_to location
            url = endpoint + urlquote(user.username)
            response = openid_request.answer(True, None, url)
            # Note too that this is hardcoded, and not really responding to
            # the extensions that were registered in the first place.
            results = {
                'nickname': user.username,
                'email': user.email,
                'fullname': user.profile.name,
            }
            # the request succeeded:
            return provider_respond(server, openid_request, response, results)
        # the account is not active, so redirect back to the login page:
        request.session['openid_error'] = True
        if settings.FEATURES['SQUELCH_PII_IN_LOGS']:
            AUDIT_LOG.warning(
                u"Login failed - Account not active for user.id %s", user.id)
        else:
            AUDIT_LOG.warning(
                u"Login failed - Account not active for user %s", username)
        return HttpResponseRedirect(openid_request_url)
    # determine consumer domain if applicable
    return_to = request.GET.get('openid.return_to') or request.POST.get('openid.return_to') or ''
    if return_to:
        matches = re.match(r'\w+:\/\/([\w\.-]+)', return_to)
        return_to = matches.group(1)
    # display login page
    response = render_to_response('provider_login.html', {
        'error': error,
        'return_to': return_to
    })
    # add custom XRDS header necessary for discovery process
    response['X-XRDS-Location'] = get_xrds_url('xrds', request)
    return response
def provider_identity(request):
    """
    Serve the XRDS document used for OpenID identity discovery.
    """
    context = {'url': get_xrds_url('login', request)}
    response = render_to_response('identity.xml', context,
                                  content_type='text/xml')
    # Custom XRDS header necessary for the discovery process.
    response['X-XRDS-Location'] = get_xrds_url('identity', request)
    return response
def provider_xrds(request):
    """
    Serve the XRDS document used for OpenID endpoint discovery.
    """
    context = {'url': get_xrds_url('login', request)}
    response = render_to_response('xrds.xml', context,
                                  content_type='text/xml')
    # Custom XRDS header necessary for the discovery process.
    response['X-XRDS-Location'] = get_xrds_url('xrds', request)
    return response
| agpl-3.0 |
hychrisli/PyAlgorithms | src/solutions/part1/q347_top_k_frequent_elements.py | 1 | 1641 | from src.base.solution import Solution
from src.tests.part1.q347_test_top_k_frequent_elements import TopKFreqElemTestCases
"""
https://leetcode.com/problems/top-k-frequent-elements/#/description
Given a non-empty array of integers, return the k most frequent elements.
For example,
Given [1,1,1,2,2,3] and k = 2, return [1,2].
Note:
You may assume k is always valid, 1 <= k <= number of unique elements.
Your algorithm's time complexity must be better than O(n log n), where n is the array's size.
"""
class TopKFreqElem(Solution):
    """Solution harness for LeetCode 347: Top K Frequent Elements."""

    def verify_output(self, test_output, output):
        # Order of the returned elements is irrelevant; compare as sets.
        return set(test_output) == set(output)

    def run_test(self, input):
        nums, k = input[0], input[1]
        return self.topKFrequent(nums, k)

    def gen_test_cases(self):
        return TopKFreqElemTestCases()

    def print_output(self, output):
        super(TopKFreqElem, self).print_output(output)

    def topKFrequent(self, nums, k):
        """
        Bucket-sort by frequency: O(n) time, O(n) space.

        :type nums: List[int]
        :type k: int
        :rtype: List[int]
        """
        freq = {}
        for value in nums:
            freq[value] = freq.get(value, 0) + 1
        # buckets[i] holds every value that occurs exactly i times.
        buckets = [None] * (len(nums) + 1)
        for value, count in freq.items():
            if buckets[count] is None:
                buckets[count] = [value]
            else:
                buckets[count].append(value)
        result = []
        collected = 0
        # Walk from the highest frequency down until k values are gathered.
        for bucket in reversed(buckets):
            if bucket is not None:
                result += bucket
                collected += len(bucket)
                if collected >= k:
                    break
        return result
if __name__ == '__main__':
solution = TopKFreqElem()
solution.run_tests() | apache-2.0 |
mccheung/kbengine | kbe/src/lib/python/Lib/test/test_poll.py | 91 | 6552 | # Test case for the os.poll() function
import os
import subprocess
import random
import select
try:
import threading
except ImportError:
threading = None
import time
import unittest
from test.support import TESTFN, run_unittest, reap_threads, cpython_only
try:
select.poll
except AttributeError:
raise unittest.SkipTest("select.poll not defined")
def find_ready_matching(ready, flag):
    """Return the fds from (fd, mode) pairs whose mode has ``flag`` set."""
    return [fd for fd, mode in ready if mode & flag]
class PollTests(unittest.TestCase):
    """Functional and edge-case tests for select.poll()."""

    def test_poll1(self):
        # Basic functional test of poll object:
        # create a bunch of pipes and test that poll works with them.
        p = select.poll()

        NUM_PIPES = 12
        MSG = b" This is a test."
        MSG_LEN = len(MSG)

        readers = []
        writers = []
        r2w = {}
        w2r = {}

        for i in range(NUM_PIPES):
            rd, wr = os.pipe()
            p.register(rd)
            p.modify(rd, select.POLLIN)
            p.register(wr, select.POLLOUT)
            readers.append(rd)
            writers.append(wr)
            r2w[rd] = wr
            w2r[wr] = rd

        bufs = []

        while writers:
            ready = p.poll()
            ready_writers = find_ready_matching(ready, select.POLLOUT)
            if not ready_writers:
                raise RuntimeError("no pipes ready for writing")
            wr = random.choice(ready_writers)
            os.write(wr, MSG)

            ready = p.poll()
            ready_readers = find_ready_matching(ready, select.POLLIN)
            if not ready_readers:
                raise RuntimeError("no pipes ready for reading")
            rd = random.choice(ready_readers)
            buf = os.read(rd, MSG_LEN)
            self.assertEqual(len(buf), MSG_LEN)
            bufs.append(buf)
            # Close and unregister both ends of the drained pipe.
            os.close(r2w[rd])
            os.close(rd)
            p.unregister(r2w[rd])
            p.unregister(rd)
            writers.remove(r2w[rd])

        self.assertEqual(bufs, [MSG] * NUM_PIPES)

    def test_poll_unit_tests(self):
        # returns NVAL for invalid file descriptor
        FD, w = os.pipe()
        os.close(FD)
        os.close(w)
        p = select.poll()
        p.register(FD)
        r = p.poll()
        self.assertEqual(r[0], (FD, select.POLLNVAL))

        f = open(TESTFN, 'w')
        fd = f.fileno()
        p = select.poll()
        p.register(f)
        r = p.poll()
        self.assertEqual(r[0][0], fd)
        f.close()
        # A closed fd reports POLLNVAL on the next poll.
        r = p.poll()
        self.assertEqual(r[0], (fd, select.POLLNVAL))
        os.unlink(TESTFN)

        # type error for invalid arguments
        p = select.poll()
        self.assertRaises(TypeError, p.register, p)
        self.assertRaises(TypeError, p.unregister, p)

        # can't unregister non-existent object
        p = select.poll()
        self.assertRaises(KeyError, p.unregister, 3)

        # Test error cases
        pollster = select.poll()

        class Nope:
            pass

        class Almost:
            def fileno(self):
                return 'fileno'

        self.assertRaises(TypeError, pollster.register, Nope(), 0)
        self.assertRaises(TypeError, pollster.register, Almost(), 0)

    # Another test case for poll(). This is copied from the test case for
    # select(), modified to use poll() instead.

    def test_poll2(self):
        cmd = 'for i in 0 1 2 3 4 5 6 7 8 9; do echo testing...; sleep 1; done'
        proc = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE,
                                bufsize=0)
        p = proc.stdout
        pollster = select.poll()
        pollster.register(p, select.POLLIN)
        for tout in (0, 1000, 2000, 4000, 8000, 16000) + (-1,) * 10:
            fdlist = pollster.poll(tout)
            if fdlist == []:
                continue
            fd, flags = fdlist[0]
            if flags & select.POLLHUP:
                line = p.readline()
                if line != b"":
                    self.fail('error: pipe seems to be closed, but still returns data')
                continue
            elif flags & select.POLLIN:
                line = p.readline()
                if not line:
                    break
                self.assertEqual(line, b'testing...\n')
                continue
            else:
                self.fail('Unexpected return value from select.poll: %s' % fdlist)
        p.close()

    def test_poll3(self):
        # test int overflow
        pollster = select.poll()
        pollster.register(1)

        self.assertRaises(OverflowError, pollster.poll, 1 << 64)
        # Removed dead code: an always-false ``x = 2 + 3; if x != 5`` check
        # with a misleading 'Overflow must have occurred' failure message.

        # Issues #15989, #17919
        self.assertRaises(OverflowError, pollster.register, 0, -1)
        self.assertRaises(OverflowError, pollster.register, 0, 1 << 64)
        self.assertRaises(OverflowError, pollster.modify, 1, -1)
        self.assertRaises(OverflowError, pollster.modify, 1, 1 << 64)

    @cpython_only
    def test_poll_c_limits(self):
        from _testcapi import USHRT_MAX, INT_MAX, UINT_MAX
        pollster = select.poll()
        pollster.register(1)

        # Issues #15989, #17919
        self.assertRaises(OverflowError, pollster.register, 0, USHRT_MAX + 1)
        self.assertRaises(OverflowError, pollster.modify, 1, USHRT_MAX + 1)
        self.assertRaises(OverflowError, pollster.poll, INT_MAX + 1)
        self.assertRaises(OverflowError, pollster.poll, UINT_MAX + 1)

    @unittest.skipUnless(threading, 'Threading required for this test.')
    @reap_threads
    def test_threaded_poll(self):
        r, w = os.pipe()
        self.addCleanup(os.close, r)
        self.addCleanup(os.close, w)
        rfds = []
        for i in range(10):
            fd = os.dup(r)
            self.addCleanup(os.close, fd)
            rfds.append(fd)
        pollster = select.poll()
        for fd in rfds:
            pollster.register(fd, select.POLLIN)

        t = threading.Thread(target=pollster.poll)
        t.start()
        try:
            time.sleep(0.5)
            # trigger ufds array reallocation
            for fd in rfds:
                pollster.unregister(fd)
            pollster.register(w, select.POLLOUT)
            self.assertRaises(RuntimeError, pollster.poll)
        finally:
            # and make the call to poll() from the thread return
            os.write(w, b'spam')
            t.join()
def test_main():
    """Entry point used by the regression-test driver."""
    run_unittest(PollTests)

if __name__ == '__main__':
    test_main()
| lgpl-3.0 |
highweb-project/highweb-webcl-html5spec | tools/cr/cr/commands/run.py | 16 | 1802 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A module for the run command."""
import cr
class RunCommand(cr.Command):
    """The implementation of the run command.

    This first uses Builder to bring the target up to date.
    It then uses Installer to install the target (if needed), and
    finally it uses Runner to run the target.
    You can use skip version to not perform any of these steps.
    """

    def __init__(self):
        super(RunCommand, self).__init__()
        self.help = 'Invoke a target'

    def AddArguments(self, subparsers):
        parser = super(RunCommand, self).AddArguments(subparsers)
        cr.Builder.AddArguments(self, parser)
        cr.Installer.AddArguments(self, parser)
        cr.Runner.AddArguments(self, parser)
        cr.Target.AddArguments(self, parser, allow_multiple=False)
        self.ConsumeArgs(parser, 'the binary')
        return parser

    def Run(self):
        original_targets = cr.Target.GetTargets()
        # Expand the requested targets with their run-time dependencies.
        targets = list(original_targets)
        for target in original_targets:
            targets.extend(target.GetRunDependencies())
        test_targets = [t for t in targets if t.is_test]
        run_targets = [t for t in targets if not t.is_test]
        if cr.Installer.Skipping():
            # No installer, so only the test targets need building.
            build_targets = test_targets
        else:
            build_targets = targets
        if build_targets:
            cr.Builder.Build(build_targets, [])
        # Without an installer we can restart the targets in place;
        # otherwise kill, reinstall, and invoke fresh.
        if cr.Installer.Skipping():
            cr.Runner.Restart(targets, cr.context.remains)
        else:
            cr.Runner.Kill(run_targets, [])
            cr.Installer.Reinstall(run_targets, [])
            cr.Runner.Invoke(original_targets, cr.context.remains)
| bsd-3-clause |
dkillick/iris | lib/iris/tests/unit/analysis/test_MAX.py | 1 | 3172 | # (C) British Crown Copyright 2018, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the :data:`iris.analysis.MAX` aggregator."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy as np
import numpy.ma as ma
from iris.analysis import MAX
from iris.cube import Cube
from iris.coords import DimCoord
from iris._lazy_data import as_lazy_data, is_lazy_data
class Test_basics(tests.IrisTest):
    """Sanity checks for the MAX aggregator on real and lazy data."""

    def setUp(self):
        points = np.array([1, 2, 3, 4, 5])
        coord = DimCoord([6, 7, 8, 9, 10], long_name='foo')
        self.cube = Cube(points)
        self.cube.add_dim_coord(coord, 0)
        self.lazy_cube = Cube(as_lazy_data(points))
        self.lazy_cube.add_dim_coord(coord, 0)

    def test_name(self):
        self.assertEqual(MAX.name(), 'maximum')

    def test_collapse(self):
        result = MAX.aggregate(self.cube.data, axis=0)
        self.assertArrayEqual(result, [5])

    def test_lazy(self):
        result = MAX.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0)
        self.assertTrue(is_lazy_data(result))

    def test_lazy_collapse(self):
        result = MAX.lazy_aggregate(self.lazy_cube.lazy_data(), axis=0)
        self.assertArrayEqual(result.compute(), [5])
class Test_masked(tests.IrisTest):
    """MAX over a masked (non-lazy) cube honours the mask."""

    def setUp(self):
        self.cube = Cube(ma.masked_greater([1, 2, 3, 4, 5], 3))
        self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name='foo'), 0)

    def test_ma(self):
        result = MAX.aggregate(self.cube.data, axis=0)
        self.assertArrayEqual(result, [3])
class Test_lazy_masked(tests.IrisTest):
    """MAX over a lazy masked cube stays lazy and honours the mask."""

    def setUp(self):
        masked = ma.masked_greater([1, 2, 3, 4, 5], 3)
        self.cube = Cube(as_lazy_data(masked))
        self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name='foo'), 0)

    def test_lazy_ma(self):
        result = MAX.lazy_aggregate(self.cube.lazy_data(), axis=0)
        self.assertTrue(is_lazy_data(result))
        self.assertArrayEqual(result.compute(), [3])
class Test_aggregate_shape(tests.IrisTest):
    """MAX.aggregate_shape() always collapses to a scalar shape."""

    def test(self):
        expected = ()
        self.assertTupleEqual(MAX.aggregate_shape(), expected)
        # Unknown keyword arguments do not affect the result.
        self.assertTupleEqual(MAX.aggregate_shape(wibble='wobble'), expected)
# Run the unit tests when this module is executed directly.
if __name__ == "__main__":
    tests.main()
| lgpl-3.0 |
pigeonflight/strider-plone | docker/appengine/lib/django-1.4/tests/regressiontests/utils/module_loading.py | 43 | 6133 | import os
import sys
import imp
from zipimport import zipimporter
from django.utils import unittest
from django.utils.importlib import import_module
from django.utils.module_loading import module_has_submodule
class DefaultLoader(unittest.TestCase):
    """Tests for module_has_submodule() against the default import machinery."""

    def setUp(self):
        # Put the proxy finder first so it intercepts all imports below.
        sys.meta_path.insert(0, ProxyFinder())

    def tearDown(self):
        sys.meta_path.pop(0)

    def test_loader(self):
        "Normal module existence can be tested"
        test_module = import_module('regressiontests.utils.test_module')
        test_no_submodule = import_module(
            'regressiontests.utils.test_no_submodule')

        # An importable child
        self.assertTrue(module_has_submodule(test_module, 'good_module'))
        mod = import_module('regressiontests.utils.test_module.good_module')
        self.assertEqual(mod.content, 'Good Module')

        # A child that exists, but will generate an import error if loaded
        self.assertTrue(module_has_submodule(test_module, 'bad_module'))
        self.assertRaises(ImportError, import_module, 'regressiontests.utils.test_module.bad_module')

        # A child that doesn't exist
        self.assertFalse(module_has_submodule(test_module, 'no_such_module'))
        self.assertRaises(ImportError, import_module, 'regressiontests.utils.test_module.no_such_module')

        # A child that doesn't exist, but is the name of a package on the path
        self.assertFalse(module_has_submodule(test_module, 'django'))
        self.assertRaises(ImportError, import_module, 'regressiontests.utils.test_module.django')

        # Don't be confused by caching of import misses
        import types  # causes attempted import of regressiontests.utils.types
        self.assertFalse(module_has_submodule(sys.modules['regressiontests.utils'], 'types'))

        # A module which doesn't have a __path__ (so no submodules)
        self.assertFalse(module_has_submodule(test_no_submodule, 'anything'))
        self.assertRaises(ImportError, import_module,
            'regressiontests.utils.test_no_submodule.anything')
class EggLoader(unittest.TestCase):
    """module_has_submodule() behaviour for modules packaged inside eggs."""

    # Every module the tests below may leave behind in sys.modules,
    # deepest first so children are dropped before their parents.
    _EGG_MODULES = (
        'egg_module.sub1.sub2.bad_module',
        'egg_module.sub1.sub2.good_module',
        'egg_module.sub1.sub2',
        'egg_module.sub1',
        'egg_module.bad_module',
        'egg_module.good_module',
        'egg_module',
    )

    def setUp(self):
        self.old_path = sys.path[:]
        self.egg_dir = '%s/eggs' % os.path.dirname(__file__)

    def tearDown(self):
        sys.path = self.old_path
        sys.path_importer_cache.clear()
        # Forget everything that was imported from the test eggs.
        for name in self._EGG_MODULES:
            sys.modules.pop(name, None)

    def test_shallow_loader(self):
        "Module existence can be tested inside eggs"
        sys.path.append('%s/test_egg.egg' % self.egg_dir)
        egg_module = import_module('egg_module')

        # An importable child
        self.assertTrue(module_has_submodule(egg_module, 'good_module'))
        mod = import_module('egg_module.good_module')
        self.assertEqual(mod.content, 'Good Module')

        # A child that exists, but will generate an import error if loaded
        self.assertTrue(module_has_submodule(egg_module, 'bad_module'))
        self.assertRaises(ImportError, import_module, 'egg_module.bad_module')

        # A child that doesn't exist
        self.assertFalse(module_has_submodule(egg_module, 'no_such_module'))
        self.assertRaises(ImportError, import_module, 'egg_module.no_such_module')

    def test_deep_loader(self):
        "Modules deep inside an egg can still be tested for existence"
        sys.path.append('%s/test_egg.egg' % self.egg_dir)
        egg_module = import_module('egg_module.sub1.sub2')

        # An importable child
        self.assertTrue(module_has_submodule(egg_module, 'good_module'))
        mod = import_module('egg_module.sub1.sub2.good_module')
        self.assertEqual(mod.content, 'Deep Good Module')

        # A child that exists, but will generate an import error if loaded
        self.assertTrue(module_has_submodule(egg_module, 'bad_module'))
        self.assertRaises(ImportError, import_module, 'egg_module.sub1.sub2.bad_module')

        # A child that doesn't exist
        self.assertFalse(module_has_submodule(egg_module, 'no_such_module'))
        self.assertRaises(ImportError, import_module, 'egg_module.sub1.sub2.no_such_module')
class ProxyFinder(object):
    """A meta-path finder proxying imp.find_module; doubles as the loader."""

    def __init__(self):
        self._cache = {}

    def find_module(self, fullname, path=None):
        leaf = fullname.rsplit('.', 1)[-1]
        try:
            self._cache[fullname] = imp.find_module(leaf, path)
        except ImportError:
            return None
        return self  # this object acts as the loader as well

    def load_module(self, fullname):
        if fullname in sys.modules:
            return sys.modules[fullname]
        fd, fn, info = self._cache[fullname]
        return imp.load_module(fullname, fd, fn, info)
class TestFinder(object):
    """Path-hook finder that wraps ``zipimporter`` and hands successful
    lookups off to a separate TestLoader instance."""

    def __init__(self, *args, **kwargs):
        self.importer = zipimporter(*args, **kwargs)

    def find_module(self, path):
        # NOTE(review): the parameter receives the dotted module name at
        # call time; the historical name 'path' is kept for compatibility.
        inner = self.importer.find_module(path)
        if inner is None:
            return None
        return TestLoader(inner)
class TestLoader(object):
    """Loader half of the split finder/loader pair: delegates the actual
    import to the wrapped importer and stamps itself as ``__loader__``."""

    def __init__(self, importer):
        self.importer = importer

    def load_module(self, name):
        module = self.importer.load_module(name)
        # Mark this object as the loader so tests can verify which hook
        # performed the import.
        module.__loader__ = self
        return module
class CustomLoader(EggLoader):
    """The Custom Loader test is exactly the same as the EggLoader, but
    it uses a custom defined Loader and Finder that is intentionally
    split into two classes. Although the EggLoader combines both functions
    into one class, this isn't required.
    """
    def setUp(self):
        super(CustomLoader, self).setUp()
        # Install our finder ahead of the default zipimport path hook so it
        # wins for egg paths.
        sys.path_hooks.insert(0, TestFinder)
        # Drop cached per-path finders so the new hook is consulted again.
        sys.path_importer_cache.clear()

    def tearDown(self):
        super(CustomLoader, self).tearDown()
        # Remove the hook installed in setUp() (it was inserted at index 0).
        sys.path_hooks.pop(0)
| mit |
nemesisdesign/django | tests/postgres_tests/test_array.py | 7 | 29963 | import decimal
import json
import unittest
import uuid
from django import forms
from django.core import exceptions, serializers, validators
from django.core.exceptions import FieldError
from django.core.management import call_command
from django.db import IntegrityError, connection, models
from django.test import TransactionTestCase, override_settings
from django.test.utils import isolate_apps
from django.utils import timezone
from . import PostgreSQLTestCase
from .models import (
ArrayFieldSubclass, CharArrayModel, DateTimeArrayModel, IntegerArrayModel,
NestedIntegerArrayModel, NullableIntegerArrayModel, OtherTypesArrayModel,
PostgreSQLModel, Tag,
)
try:
from django.contrib.postgres.fields import ArrayField
from django.contrib.postgres.forms import (
SimpleArrayField, SplitArrayField, SplitArrayWidget,
)
except ImportError:
pass
class TestSaveLoad(PostgreSQLTestCase):
    """Round-trip tests: values assigned to ArrayField (and its typed
    variants) must survive a save()/reload cycle unchanged."""

    def test_integer(self):
        instance = IntegerArrayModel(field=[1, 2, 3])
        instance.save()
        loaded = IntegerArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

    def test_char(self):
        instance = CharArrayModel(field=['hello', 'goodbye'])
        instance.save()
        loaded = CharArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

    def test_dates(self):
        # Arrays of datetime, date and time values stored side by side on
        # the same model.
        instance = DateTimeArrayModel(
            datetimes=[timezone.now()],
            dates=[timezone.now().date()],
            times=[timezone.now().time()],
        )
        instance.save()
        loaded = DateTimeArrayModel.objects.get()
        self.assertEqual(instance.datetimes, loaded.datetimes)
        self.assertEqual(instance.dates, loaded.dates)
        self.assertEqual(instance.times, loaded.times)

    def test_tuples(self):
        # Tuples are accepted on input; assertSequenceEqual is used because
        # the value comes back as a list.
        instance = IntegerArrayModel(field=(1,))
        instance.save()
        loaded = IntegerArrayModel.objects.get()
        self.assertSequenceEqual(instance.field, loaded.field)

    def test_integers_passed_as_strings(self):
        # This checks that get_prep_value is deferred properly
        instance = IntegerArrayModel(field=['1'])
        instance.save()
        loaded = IntegerArrayModel.objects.get()
        self.assertEqual(loaded.field, [1])

    def test_default_null(self):
        instance = NullableIntegerArrayModel()
        instance.save()
        loaded = NullableIntegerArrayModel.objects.get(pk=instance.pk)
        self.assertIsNone(loaded.field)
        self.assertEqual(instance.field, loaded.field)

    def test_null_handling(self):
        # Nullable field: None round-trips cleanly.
        instance = NullableIntegerArrayModel(field=None)
        instance.save()
        loaded = NullableIntegerArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)
        # Non-nullable field: saving None must violate the NOT NULL
        # constraint at the database level.
        instance = IntegerArrayModel(field=None)
        with self.assertRaises(IntegrityError):
            instance.save()

    def test_nested(self):
        instance = NestedIntegerArrayModel(field=[[1, 2], [3, 4]])
        instance.save()
        loaded = NestedIntegerArrayModel.objects.get()
        self.assertEqual(instance.field, loaded.field)

    def test_other_array_types(self):
        # Inet, UUID, Decimal and custom (Tag) base fields.
        instance = OtherTypesArrayModel(
            ips=['192.168.0.1', '::1'],
            uuids=[uuid.uuid4()],
            decimals=[decimal.Decimal(1.25), 1.75],
            tags=[Tag(1), Tag(2), Tag(3)],
        )
        instance.save()
        loaded = OtherTypesArrayModel.objects.get()
        self.assertEqual(instance.ips, loaded.ips)
        self.assertEqual(instance.uuids, loaded.uuids)
        self.assertEqual(instance.decimals, loaded.decimals)
        self.assertEqual(instance.tags, loaded.tags)

    def test_null_from_db_value_handling(self):
        instance = OtherTypesArrayModel.objects.create(
            ips=['192.168.0.1', '::1'],
            uuids=[uuid.uuid4()],
            decimals=[decimal.Decimal(1.25), 1.75],
            tags=None,
        )
        instance.refresh_from_db()
        self.assertIsNone(instance.tags)

    def test_model_set_on_base_field(self):
        instance = IntegerArrayModel()
        field = instance._meta.get_field('field')
        self.assertEqual(field.model, IntegerArrayModel)
        # The wrapped base_field must be bound to the same model as the
        # ArrayField that contains it.
        self.assertEqual(field.base_field.model, IntegerArrayModel)
class TestQuerying(PostgreSQLTestCase):
    """Exercise the lookups and transforms ArrayField provides: exact,
    gt/lt, in, contains, contained_by, overlap, len, plus index and slice
    access."""

    def setUp(self):
        # NOTE: the tests below slice self.objs, relying on this exact
        # creation order.
        self.objs = [
            NullableIntegerArrayModel.objects.create(field=[1]),
            NullableIntegerArrayModel.objects.create(field=[2]),
            NullableIntegerArrayModel.objects.create(field=[2, 3]),
            NullableIntegerArrayModel.objects.create(field=[20, 30, 40]),
            NullableIntegerArrayModel.objects.create(field=None),
        ]

    def test_exact(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__exact=[1]),
            self.objs[:1]
        )

    def test_exact_charfield(self):
        instance = CharArrayModel.objects.create(field=['text'])
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field=['text']),
            [instance]
        )

    def test_exact_nested(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field=[[1, 2], [3, 4]]),
            [instance]
        )

    def test_isnull(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__isnull=True),
            self.objs[-1:]
        )

    def test_gt(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__gt=[0]),
            self.objs[:4]
        )

    def test_lt(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__lt=[2]),
            self.objs[:1]
        )

    def test_in(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__in=[[1], [2]]),
            self.objs[:2]
        )

    @unittest.expectedFailure
    def test_in_including_F_object(self):
        # This test asserts that Array objects passed to filters can be
        # constructed to contain F objects. This currently doesn't work as the
        # psycopg2 mogrify method that generates the ARRAY() syntax is
        # expecting literals, not column references (#27095).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__in=[[models.F('id')]]),
            self.objs[:2]
        )

    def test_in_as_F_object(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__in=[models.F('field')]),
            self.objs[:4]
        )

    def test_contained_by(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__contained_by=[1, 2]),
            self.objs[:2]
        )

    @unittest.expectedFailure
    def test_contained_by_including_F_object(self):
        # This test asserts that Array objects passed to filters can be
        # constructed to contain F objects. This currently doesn't work as the
        # psycopg2 mogrify method that generates the ARRAY() syntax is
        # expecting literals, not column references (#27095).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__contained_by=[models.F('id'), 2]),
            self.objs[:2]
        )

    def test_contains(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__contains=[2]),
            self.objs[1:3]
        )

    def test_contains_charfield(self):
        # Regression for #22907
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__contains=['text']),
            []
        )

    def test_contained_by_charfield(self):
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__contained_by=['text']),
            []
        )

    def test_overlap_charfield(self):
        self.assertSequenceEqual(
            CharArrayModel.objects.filter(field__overlap=['text']),
            []
        )

    def test_index(self):
        # __0 addresses the first array element (the transform is 0-based).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0=2),
            self.objs[1:3]
        )

    def test_index_chained(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0__lt=3),
            self.objs[0:3]
        )

    def test_index_nested(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field__0__0=1),
            [instance]
        )

    @unittest.expectedFailure
    def test_index_used_on_nested_data(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field__0=[1, 2]),
            [instance]
        )

    def test_overlap(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__overlap=[1, 2]),
            self.objs[0:3]
        )

    def test_len(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__len__lte=2),
            self.objs[0:3]
        )

    def test_len_empty_array(self):
        obj = NullableIntegerArrayModel.objects.create(field=[])
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__len=0),
            [obj]
        )

    def test_slice(self):
        # __M_N slices the array (0-based, end-exclusive like Python).
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0_1=[2]),
            self.objs[1:3]
        )
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(field__0_2=[2, 3]),
            self.objs[2:3]
        )

    @unittest.expectedFailure
    def test_slice_nested(self):
        instance = NestedIntegerArrayModel.objects.create(field=[[1, 2], [3, 4]])
        self.assertSequenceEqual(
            NestedIntegerArrayModel.objects.filter(field__0__0_1=[1]),
            [instance]
        )

    def test_usage_in_subquery(self):
        self.assertSequenceEqual(
            NullableIntegerArrayModel.objects.filter(
                id__in=NullableIntegerArrayModel.objects.filter(field__len=3)
            ),
            [self.objs[3]]
        )

    def test_unsupported_lookup(self):
        # Lookups that merely look like index/slice transforms must raise.
        msg = "Unsupported lookup '0_bar' for ArrayField or join on the field not permitted."
        with self.assertRaisesMessage(FieldError, msg):
            list(NullableIntegerArrayModel.objects.filter(field__0_bar=[2]))

        msg = "Unsupported lookup '0bar' for ArrayField or join on the field not permitted."
        with self.assertRaisesMessage(FieldError, msg):
            list(NullableIntegerArrayModel.objects.filter(field__0bar=[2]))
class TestDateTimeExactQuerying(PostgreSQLTestCase):
    """Exact-match filtering on arrays of datetime, date and time values."""

    def setUp(self):
        # One object whose three array columns all derive from the same
        # 'now' instant.
        now = timezone.now()
        self.datetimes = [now]
        self.dates = [now.date()]
        self.times = [now.time()]
        self.objs = [
            DateTimeArrayModel.objects.create(
                datetimes=self.datetimes,
                dates=self.dates,
                times=self.times,
            )
        ]

    def test_exact_datetimes(self):
        self.assertSequenceEqual(
            DateTimeArrayModel.objects.filter(datetimes=self.datetimes),
            self.objs
        )

    def test_exact_dates(self):
        self.assertSequenceEqual(
            DateTimeArrayModel.objects.filter(dates=self.dates),
            self.objs
        )

    def test_exact_times(self):
        self.assertSequenceEqual(
            DateTimeArrayModel.objects.filter(times=self.times),
            self.objs
        )
class TestOtherTypesExactQuerying(PostgreSQLTestCase):
    """Exact-match filtering on arrays of inet, UUID, Decimal and custom
    (Tag) base fields."""

    def setUp(self):
        self.ips = ['192.168.0.1', '::1']
        self.uuids = [uuid.uuid4()]
        self.decimals = [decimal.Decimal(1.25), 1.75]
        self.tags = [Tag(1), Tag(2), Tag(3)]
        self.objs = [
            OtherTypesArrayModel.objects.create(
                ips=self.ips,
                uuids=self.uuids,
                decimals=self.decimals,
                tags=self.tags,
            )
        ]

    def test_exact_ip_addresses(self):
        self.assertSequenceEqual(
            OtherTypesArrayModel.objects.filter(ips=self.ips),
            self.objs
        )

    def test_exact_uuids(self):
        self.assertSequenceEqual(
            OtherTypesArrayModel.objects.filter(uuids=self.uuids),
            self.objs
        )

    def test_exact_decimals(self):
        self.assertSequenceEqual(
            OtherTypesArrayModel.objects.filter(decimals=self.decimals),
            self.objs
        )

    def test_exact_tags(self):
        self.assertSequenceEqual(
            OtherTypesArrayModel.objects.filter(tags=self.tags),
            self.objs
        )
@isolate_apps('postgres_tests')
class TestChecks(PostgreSQLTestCase):
    """System-check (model validation) behaviour for ArrayField."""

    def test_field_checks(self):
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.CharField())

        model = MyModel()
        errors = model.check()
        self.assertEqual(len(errors), 1)
        # The inner CharField is missing a max_length.
        self.assertEqual(errors[0].id, 'postgres.E001')
        self.assertIn('max_length', errors[0].msg)

    def test_invalid_base_fields(self):
        # Relational fields are not valid ArrayField base fields.
        class MyModel(PostgreSQLModel):
            field = ArrayField(models.ManyToManyField('postgres_tests.IntegerArrayModel'))

        model = MyModel()
        errors = model.check()
        self.assertEqual(len(errors), 1)
        self.assertEqual(errors[0].id, 'postgres.E002')

    def test_nested_field_checks(self):
        """
        Nested ArrayFields are permitted.
        """
        class MyModel(PostgreSQLModel):
            field = ArrayField(ArrayField(models.CharField()))

        model = MyModel()
        errors = model.check()
        self.assertEqual(len(errors), 1)
        # The inner CharField is missing a max_length.
        self.assertEqual(errors[0].id, 'postgres.E001')
        self.assertIn('max_length', errors[0].msg)
@unittest.skipUnless(connection.vendor == 'postgresql', "PostgreSQL specific tests")
class TestMigrations(TransactionTestCase):
    """deconstruct() round-trips and schema-migration behaviour for
    ArrayField."""

    available_apps = ['postgres_tests']

    def test_deconstruct(self):
        field = ArrayField(models.IntegerField())
        name, path, args, kwargs = field.deconstruct()
        new = ArrayField(*args, **kwargs)
        self.assertEqual(type(new.base_field), type(field.base_field))

    def test_deconstruct_with_size(self):
        field = ArrayField(models.IntegerField(), size=3)
        name, path, args, kwargs = field.deconstruct()
        new = ArrayField(*args, **kwargs)
        self.assertEqual(new.size, field.size)

    def test_deconstruct_args(self):
        field = ArrayField(models.CharField(max_length=20))
        name, path, args, kwargs = field.deconstruct()
        new = ArrayField(*args, **kwargs)
        self.assertEqual(new.base_field.max_length, field.base_field.max_length)

    def test_subclass_deconstruct(self):
        # The base class deconstructs to the public import path; a subclass
        # deconstructs to its own path.
        field = ArrayField(models.IntegerField())
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, 'django.contrib.postgres.fields.ArrayField')

        field = ArrayFieldSubclass()
        name, path, args, kwargs = field.deconstruct()
        self.assertEqual(path, 'postgres_tests.models.ArrayFieldSubclass')

    @override_settings(MIGRATION_MODULES={
        "postgres_tests": "postgres_tests.array_default_migrations",
    })
    def test_adding_field_with_default(self):
        # See #22962
        table_name = 'postgres_tests_integerarraydefaultmodel'
        with connection.cursor() as cursor:
            self.assertNotIn(table_name, connection.introspection.table_names(cursor))
        call_command('migrate', 'postgres_tests', verbosity=0)
        with connection.cursor() as cursor:
            self.assertIn(table_name, connection.introspection.table_names(cursor))
        call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
        with connection.cursor() as cursor:
            self.assertNotIn(table_name, connection.introspection.table_names(cursor))

    @override_settings(MIGRATION_MODULES={
        "postgres_tests": "postgres_tests.array_index_migrations",
    })
    def test_adding_arrayfield_with_index(self):
        """
        ArrayField shouldn't have varchar_patterns_ops or text_patterns_ops indexes.
        """
        table_name = 'postgres_tests_chartextarrayindexmodel'
        call_command('migrate', 'postgres_tests', verbosity=0)
        with connection.cursor() as cursor:
            like_constraint_columns_list = [
                v['columns']
                for k, v in list(connection.introspection.get_constraints(cursor, table_name).items())
                if k.endswith('_like')
            ]
        # Only the CharField should have a LIKE index.
        self.assertEqual(like_constraint_columns_list, [['char2']])
        # All fields should have regular indexes.
        with connection.cursor() as cursor:
            indexes = [
                c['columns'][0]
                for c in connection.introspection.get_constraints(cursor, table_name).values()
                if c['index'] and len(c['columns']) == 1
            ]
        self.assertIn('char', indexes)
        self.assertIn('char2', indexes)
        self.assertIn('text', indexes)
        call_command('migrate', 'postgres_tests', 'zero', verbosity=0)
        with connection.cursor() as cursor:
            self.assertNotIn(table_name, connection.introspection.table_names(cursor))
class TestSerialization(PostgreSQLTestCase):
    """JSON serializer round-trip for ArrayField values (the array is
    serialized as a JSON-encoded string inside the fixture)."""

    test_data = (
        '[{"fields": {"field": "[\\"1\\", \\"2\\", null]"}, "model": "postgres_tests.integerarraymodel", "pk": null}]'
    )

    def test_dumping(self):
        instance = IntegerArrayModel(field=[1, 2, None])
        data = serializers.serialize('json', [instance])
        # Compare parsed structures to be independent of key ordering.
        self.assertEqual(json.loads(data), json.loads(self.test_data))

    def test_loading(self):
        instance = list(serializers.deserialize('json', self.test_data))[0].object
        self.assertEqual(instance.field, [1, 2, None])
class TestValidation(PostgreSQLTestCase):
    """Model-field clean()/validation behaviour of ArrayField: per-item
    errors, size limits, nested-length checks and base-field validators."""

    def test_unbounded(self):
        field = ArrayField(models.IntegerField())
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([1, None], None)
        self.assertEqual(cm.exception.code, 'item_invalid')
        self.assertEqual(
            cm.exception.message % cm.exception.params,
            'Item 1 in the array did not validate: This field cannot be null.'
        )

    def test_blank_true(self):
        field = ArrayField(models.IntegerField(blank=True, null=True))
        # This should not raise a validation error
        field.clean([1, None], None)

    def test_with_size(self):
        field = ArrayField(models.IntegerField(), size=3)
        field.clean([1, 2, 3], None)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([1, 2, 3, 4], None)
        self.assertEqual(cm.exception.messages[0], 'List contains 4 items, it should contain no more than 3.')

    def test_nested_array_mismatch(self):
        # All inner arrays of a nested ArrayField must have equal length.
        field = ArrayField(ArrayField(models.IntegerField()))
        field.clean([[1, 2], [3, 4]], None)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([[1, 2], [3, 4, 5]], None)
        self.assertEqual(cm.exception.code, 'nested_array_mismatch')
        self.assertEqual(cm.exception.messages[0], 'Nested arrays must have the same length.')

    def test_with_base_field_error_params(self):
        # The per-item error carries the base field's params plus the item
        # index ('nth') and offending value.
        field = ArrayField(models.CharField(max_length=2))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean(['abc'], None)
        self.assertEqual(len(cm.exception.error_list), 1)
        exception = cm.exception.error_list[0]
        self.assertEqual(
            exception.message,
            'Item 0 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
        )
        self.assertEqual(exception.code, 'item_invalid')
        self.assertEqual(exception.params, {'nth': 0, 'value': 'abc', 'limit_value': 2, 'show_value': 3})

    def test_with_validators(self):
        field = ArrayField(models.IntegerField(validators=[validators.MinValueValidator(1)]))
        field.clean([1, 2], None)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean([0], None)
        self.assertEqual(len(cm.exception.error_list), 1)
        exception = cm.exception.error_list[0]
        self.assertEqual(
            exception.message,
            'Item 0 in the array did not validate: Ensure this value is greater than or equal to 1.'
        )
        self.assertEqual(exception.code, 'item_invalid')
        self.assertEqual(exception.params, {'nth': 0, 'value': 0, 'limit_value': 1, 'show_value': 0})
class TestSimpleFormField(PostgreSQLTestCase):
    """Behaviour of the comma-delimited SimpleArrayField form field:
    parsing, per-item validation, delimiters, length limits and the
    model-field -> form-field mapping."""

    def test_valid(self):
        field = SimpleArrayField(forms.CharField())
        value = field.clean('a,b,c')
        self.assertEqual(value, ['a', 'b', 'c'])

    def test_to_python_fail(self):
        field = SimpleArrayField(forms.IntegerField())
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,9')
        self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a whole number.')

    def test_validate_fail(self):
        # Trailing delimiter yields an empty (hence invalid) third item.
        field = SimpleArrayField(forms.CharField(required=True))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,')
        self.assertEqual(cm.exception.messages[0], 'Item 2 in the array did not validate: This field is required.')

    def test_validate_fail_base_field_error_params(self):
        field = SimpleArrayField(forms.CharField(max_length=2))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('abc,c,defg')
        errors = cm.exception.error_list
        self.assertEqual(len(errors), 2)
        first_error = errors[0]
        self.assertEqual(
            first_error.message,
            'Item 0 in the array did not validate: Ensure this value has at most 2 characters (it has 3).'
        )
        self.assertEqual(first_error.code, 'item_invalid')
        self.assertEqual(first_error.params, {'nth': 0, 'value': 'abc', 'limit_value': 2, 'show_value': 3})
        second_error = errors[1]
        self.assertEqual(
            second_error.message,
            'Item 2 in the array did not validate: Ensure this value has at most 2 characters (it has 4).'
        )
        self.assertEqual(second_error.code, 'item_invalid')
        self.assertEqual(second_error.params, {'nth': 2, 'value': 'defg', 'limit_value': 2, 'show_value': 4})

    def test_validators_fail(self):
        field = SimpleArrayField(forms.RegexField('[a-e]{2}'))
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,bc,de')
        self.assertEqual(cm.exception.messages[0], 'Item 0 in the array did not validate: Enter a valid value.')

    def test_delimiter(self):
        field = SimpleArrayField(forms.CharField(), delimiter='|')
        value = field.clean('a|b|c')
        self.assertEqual(value, ['a', 'b', 'c'])

    def test_delimiter_with_nesting(self):
        # Outer field splits on '|', inner field on the default ','.
        field = SimpleArrayField(SimpleArrayField(forms.CharField()), delimiter='|')
        value = field.clean('a,b|c,d')
        self.assertEqual(value, [['a', 'b'], ['c', 'd']])

    def test_prepare_value(self):
        field = SimpleArrayField(forms.CharField())
        value = field.prepare_value(['a', 'b', 'c'])
        self.assertEqual(value, 'a,b,c')

    def test_max_length(self):
        field = SimpleArrayField(forms.CharField(), max_length=2)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,c')
        self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no more than 2.')

    def test_min_length(self):
        field = SimpleArrayField(forms.CharField(), min_length=4)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('a,b,c')
        self.assertEqual(cm.exception.messages[0], 'List contains 3 items, it should contain no fewer than 4.')

    def test_required(self):
        field = SimpleArrayField(forms.CharField(), required=True)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean('')
        self.assertEqual(cm.exception.messages[0], 'This field is required.')

    def test_model_field_formfield(self):
        # ArrayField.formfield() produces a SimpleArrayField whose base
        # field mirrors the model base field.
        model_field = ArrayField(models.CharField(max_length=27))
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, SimpleArrayField)
        self.assertIsInstance(form_field.base_field, forms.CharField)
        self.assertEqual(form_field.base_field.max_length, 27)

    def test_model_field_formfield_size(self):
        # The model field's 'size' becomes the form field's max_length.
        model_field = ArrayField(models.CharField(max_length=27), size=4)
        form_field = model_field.formfield()
        self.assertIsInstance(form_field, SimpleArrayField)
        self.assertEqual(form_field.max_length, 4)
class TestSplitFormField(PostgreSQLTestCase):
    """Behaviour of SplitArrayField, which renders one sub-widget per array
    position (field_0, field_1, ...) instead of a delimited string."""

    def test_valid(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), size=3)

        data = {'array_0': 'a', 'array_1': 'b', 'array_2': 'c'}
        form = SplitForm(data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'array': ['a', 'b', 'c']})

    def test_required(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), required=True, size=3)

        data = {'array_0': '', 'array_1': '', 'array_2': ''}
        form = SplitForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'array': ['This field is required.']})

    def test_remove_trailing_nulls(self):
        # Empty trailing positions are dropped; interior empties are kept.
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(required=False), size=5, remove_trailing_nulls=True)

        data = {'array_0': 'a', 'array_1': '', 'array_2': 'b', 'array_3': '', 'array_4': ''}
        form = SplitForm(data)
        self.assertTrue(form.is_valid(), form.errors)
        self.assertEqual(form.cleaned_data, {'array': ['a', '', 'b']})

    def test_remove_trailing_nulls_not_required(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(
                forms.CharField(required=False),
                size=2,
                remove_trailing_nulls=True,
                required=False,
            )

        data = {'array_0': '', 'array_1': ''}
        form = SplitForm(data)
        self.assertTrue(form.is_valid())
        self.assertEqual(form.cleaned_data, {'array': []})

    def test_required_field(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), size=3)

        data = {'array_0': 'a', 'array_1': 'b', 'array_2': ''}
        form = SplitForm(data)
        self.assertFalse(form.is_valid())
        self.assertEqual(form.errors, {'array': ['Item 2 in the array did not validate: This field is required.']})

    def test_invalid_integer(self):
        msg = 'Item 1 in the array did not validate: Ensure this value is less than or equal to 100.'
        with self.assertRaisesMessage(exceptions.ValidationError, msg):
            SplitArrayField(forms.IntegerField(max_value=100), size=2).clean([0, 101])

    def test_rendering(self):
        class SplitForm(forms.Form):
            array = SplitArrayField(forms.CharField(), size=3)

        # assertHTMLEqual ignores whitespace differences.
        self.assertHTMLEqual(str(SplitForm()), '''
            <tr>
                <th><label for="id_array_0">Array:</label></th>
                <td>
                    <input id="id_array_0" name="array_0" type="text" required />
                    <input id="id_array_1" name="array_1" type="text" required />
                    <input id="id_array_2" name="array_2" type="text" required />
                </td>
            </tr>
        ''')

    def test_invalid_char_length(self):
        field = SplitArrayField(forms.CharField(max_length=2), size=3)
        with self.assertRaises(exceptions.ValidationError) as cm:
            field.clean(['abc', 'c', 'defg'])
        self.assertEqual(cm.exception.messages, [
            'Item 0 in the array did not validate: Ensure this value has at most 2 characters (it has 3).',
            'Item 2 in the array did not validate: Ensure this value has at most 2 characters (it has 4).',
        ])

    def test_splitarraywidget_value_omitted_from_data(self):
        class Form(forms.ModelForm):
            field = SplitArrayField(forms.IntegerField(), required=False, size=2)

            class Meta:
                model = IntegerArrayModel
                fields = ('field',)

        form = Form({'field_0': '1', 'field_1': '2'})
        self.assertEqual(form.errors, {})
        obj = form.save(commit=False)
        self.assertEqual(obj.field, [1, 2])
class TestSplitFormWidget(PostgreSQLTestCase):
    """SplitArrayWidget behaviour for partial form submissions."""

    def test_value_omitted_from_data(self):
        widget = SplitArrayWidget(forms.TextInput(), size=2)
        # The value counts as omitted only when none of the sub-widget
        # names are present in the submitted data.
        self.assertIs(widget.value_omitted_from_data({}, {}, 'field'), True)
        self.assertIs(widget.value_omitted_from_data({'field_0': 'value'}, {}, 'field'), False)
        self.assertIs(widget.value_omitted_from_data({'field_1': 'value'}, {}, 'field'), False)
        self.assertIs(widget.value_omitted_from_data({'field_0': 'value', 'field_1': 'value'}, {}, 'field'), False)
| bsd-3-clause |
defionscode/ansible | lib/ansible/utils/module_docs_fragments/purestorage.py | 32 | 2113 | #
# (c) 2017, Simon Dodsley <simon@purestorage.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    """Reusable documentation fragments for the Pure Storage modules.

    The fragment strings are YAML and are merged verbatim into each
    module's DOCUMENTATION block, so their content must not be altered.
    """

    # Standard Pure Storage documentation fragment
    DOCUMENTATION = '''
options:
  - See separate platform section for more details
requirements:
  - See separate platform section for more details
notes:
  - Ansible modules are available for the following Pure Storage products: FlashArray, FlashBlade
'''

    # Documentation fragment for FlashBlade
    FB = '''
options:
  fb_url:
    description:
      - FlashBlade management IP address or Hostname.
  api_token:
    description:
      - FlashBlade API token for admin privileged user.
notes:
  - This module requires the ``purity_fb`` Python library
  - You must set C(PUREFB_URL) and C(PUREFB_API) environment variables
    if I(fb_url) and I(api_token) arguments are not passed to the module directly
requirements:
  - "python >= 2.7"
  - "purity_fb >= 1.1"
'''

    # Documentation fragment for FlashArray
    FA = '''
options:
  fa_url:
    description:
      - FlashArray management IPv4 address or Hostname.
    required: true
  api_token:
    description:
      - FlashArray API token for admin privileged user.
    required: true
notes:
  - This module requires the ``purestorage`` Python library
  - You must set C(PUREFA_URL) and C(PUREFA_API) environment variables
    if I(fa_url) and I(api_token) arguments are not passed to the module directly
requirements:
  - "python >= 2.7"
  - purestorage
'''
| gpl-3.0 |
EmreAtes/spack | lib/spack/spack/platforms/darwin.py | 3 | 1897 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import platform
from spack.architecture import Platform, Target
from spack.operating_systems.mac_os import MacOs
class Darwin(Platform):
    """Spack platform definition for macOS ('darwin') hosts.

    Registers a single x86_64 target and uses the detected macOS release
    (via MacOs) as the default, front-end and back-end operating system.
    """

    # NOTE(review): relative rank used during platform auto-detection;
    # exact semantics are defined by spack.architecture.Platform.
    priority = 89
    front_end = 'x86_64'
    back_end = 'x86_64'
    default = 'x86_64'

    def __init__(self):
        super(Darwin, self).__init__('darwin')
        self.add_target(self.default, Target(self.default))

        # One operating system (the detected macOS release) serves as the
        # default, front-end and back-end OS alike.
        mac_os = MacOs()

        self.default_os = str(mac_os)
        self.front_os = str(mac_os)
        self.back_os = str(mac_os)

        self.add_operating_system(str(mac_os), mac_os)

    @classmethod
    def detect(cls):
        """Return True when the current host reports a Darwin kernel.

        Fix: the implicit first argument of a classmethod is conventionally
        named 'cls', not 'self' (PEP 8); the previous name was misleading.
        """
        return 'darwin' in platform.system().lower()
| lgpl-2.1 |
ricardogsilva/QGIS | tests/src/python/test_provider_shapefile.py | 3 | 55599 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for the OGR/Shapefile provider.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Matthias Kuhn'
__date__ = '2015-04-23'
__copyright__ = 'Copyright 2015, The QGIS Project'
import os
import re
import tempfile
import shutil
import glob
import osgeo.gdal
import osgeo.ogr
import sys
from osgeo import gdal
from qgis.core import (
QgsApplication,
QgsDataProvider,
QgsSettings,
QgsFeature,
QgsField,
QgsGeometry,
QgsVectorLayer,
QgsFeatureRequest,
QgsProviderRegistry,
QgsRectangle,
QgsVectorDataProvider,
QgsWkbTypes,
QgsVectorLayerExporter,
)
from qgis.PyQt.QtCore import QVariant
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
from providertestbase import ProviderTestCase
start_app()
TEST_DATA_DIR = unitTestDataPath()
def GDAL_COMPUTE_VERSION(maj, min, rev):
    """Pack (major, minor, revision) into one comparable integer.

    Python port of GDAL's GDAL_COMPUTE_VERSION C macro; the parameter
    names deliberately mirror the macro's.
    """
    return maj * 1000000 + min * 10000 + rev * 100
class ErrorReceiver():
    """Minimal slot object that remembers the last error message passed to
    it (connected to provider error signals in the tests)."""

    def __init__(self):
        # No message received yet.
        self.msg = None

    def receiveError(self, msg):
        # Keep only the most recent message.
        self.msg = msg
class TestPyQgsShapefileProvider(unittest.TestCase, ProviderTestCase):
@classmethod
def setUpClass(cls):
    """Run before all tests"""
    # Create test layer
    # Two scratch directories: one for the main fixtures, one reserved for
    # repack-related tests (cls.repackfile).
    cls.basetestpath = tempfile.mkdtemp()
    cls.repackfilepath = tempfile.mkdtemp()
    # Copy all sidecar files of the reference shapefiles (.shp/.shx/.dbf/...)
    # so tests can modify them without touching the originals.
    srcpath = os.path.join(TEST_DATA_DIR, 'provider')
    for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
        shutil.copy(os.path.join(srcpath, file), cls.basetestpath)
        shutil.copy(os.path.join(srcpath, file), cls.repackfilepath)
    for file in glob.glob(os.path.join(srcpath, 'shapefile_poly.*')):
        shutil.copy(os.path.join(srcpath, file), cls.basetestpath)
    cls.basetestfile = os.path.join(cls.basetestpath, 'shapefile.shp')
    cls.repackfile = os.path.join(cls.repackfilepath, 'shapefile.shp')
    cls.basetestpolyfile = os.path.join(cls.basetestpath, 'shapefile_poly.shp')
    # Open the copies through the OGR provider; these become the shared
    # fixtures used by the ProviderTestCase machinery.
    cls.vl = QgsVectorLayer('{}|layerid=0'.format(cls.basetestfile), 'test', 'ogr')
    assert(cls.vl.isValid())
    cls.source = cls.vl.dataProvider()
    cls.vl_poly = QgsVectorLayer('{}|layerid=0'.format(cls.basetestpolyfile), 'test', 'ogr')
    assert (cls.vl_poly.isValid())
    cls.poly_provider = cls.vl_poly.dataProvider()
    # Directories removed again in tearDownClass().
    cls.dirs_to_cleanup = [cls.basetestpath, cls.repackfilepath]
@classmethod
def tearDownClass(cls):
    """Run after all tests"""
    # Release the layers before deleting the files backing them.
    del cls.vl
    del cls.vl_poly
    # Best-effort removal (ignore_errors=True) of every scratch directory.
    for dirname in cls.dirs_to_cleanup:
        shutil.rmtree(dirname, True)
def treat_time_as_string(self):
return True
def treat_datetime_as_string(self):
return True
def getSource(self):
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
return vl
def getEditableLayer(self):
return self.getSource()
def enableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', True)
return True
def disableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', False)
def uncompiledFilters(self):
filters = set(['name ILIKE \'QGIS\'',
'"name" NOT LIKE \'Ap%\'',
'"name" NOT ILIKE \'QGIS\'',
'"name" NOT ILIKE \'pEAR\'',
'name <> \'Apple\'',
'"name" <> \'apple\'',
'(name = \'Apple\') is not null',
'name ILIKE \'aPple\'',
'name ILIKE \'%pp%\'',
'cnt = 1100 % 1000',
'"name" || \' \' || "name" = \'Orange Orange\'',
'"name" || \' \' || "cnt" = \'Orange 100\'',
'\'x\' || "name" IS NOT NULL',
'\'x\' || "name" IS NULL',
'cnt = 10 ^ 2',
'"name" ~ \'[OP]ra[gne]+\'',
'false and NULL',
'true and NULL',
'NULL and false',
'NULL and true',
'NULL and NULL',
'false or NULL',
'true or NULL',
'NULL or false',
'NULL or true',
'NULL or NULL',
'not name = \'Apple\'',
'not name = \'Apple\' or name = \'Apple\'',
'not name = \'Apple\' or not name = \'Apple\'',
'not name = \'Apple\' and pk = 4',
'not name = \'Apple\' and not pk = 4',
'num_char IN (2, 4, 5)',
'-cnt > 0',
'-cnt < 0',
'-cnt - 1 = -101',
'-(-cnt) = 100',
'-(cnt) = -(100)',
'sqrt(pk) >= 2',
'radians(cnt) < 2',
'degrees(pk) <= 200',
'abs(cnt) <= 200',
'cos(pk) < 0',
'sin(pk) < 0',
'tan(pk) < 0',
'acos(-1) < pk',
'asin(1) < pk',
'atan(3.14) < pk',
'atan2(3.14, pk) < 1',
'exp(pk) < 10',
'ln(pk) <= 1',
'log(3, pk) <= 1',
'log10(pk) < 0.5',
'round(3.14) <= pk',
'round(0.314,1) * 10 = pk',
'floor(3.14) <= pk',
'ceil(3.14) <= pk',
'pk < pi()',
'round(cnt / 66.67) <= 2',
'floor(cnt / 66.67) <= 2',
'ceil(cnt / 66.67) <= 2',
'pk < pi() / 2',
'pk = char(51)',
'pk = coalesce(NULL,3,4)',
'lower(name) = \'apple\'',
'upper(name) = \'APPLE\'',
'name = trim(\' Apple \')',
'x($geometry) < -70',
'y($geometry) > 70',
'xmin($geometry) < -70',
'ymin($geometry) > 70',
'xmax($geometry) < -70',
'ymax($geometry) > 70',
'disjoint($geometry,geom_from_wkt( \'Polygon ((-72.2 66.1, -65.2 66.1, -65.2 72.0, -72.2 72.0, -72.2 66.1))\'))',
'intersects($geometry,geom_from_wkt( \'Polygon ((-72.2 66.1, -65.2 66.1, -65.2 72.0, -72.2 72.0, -72.2 66.1))\'))',
'contains(geom_from_wkt( \'Polygon ((-72.2 66.1, -65.2 66.1, -65.2 72.0, -72.2 72.0, -72.2 66.1))\'),$geometry)',
'distance($geometry,geom_from_wkt( \'Point (-70 70)\')) > 7',
'intersects($geometry,geom_from_gml( \'<gml:Polygon srsName="EPSG:4326"><gml:outerBoundaryIs><gml:LinearRing><gml:coordinates>-72.2,66.1 -65.2,66.1 -65.2,72.0 -72.2,72.0 -72.2,66.1</gml:coordinates></gml:LinearRing></gml:outerBoundaryIs></gml:Polygon>\'))',
'x($geometry) < -70',
'y($geometry) > 79',
'xmin($geometry) < -70',
'ymin($geometry) < 76',
'xmax($geometry) > -68',
'ymax($geometry) > 80',
'area($geometry) > 10',
'perimeter($geometry) < 12',
'relate($geometry,geom_from_wkt( \'Polygon ((-68.2 82.1, -66.95 82.1, -66.95 79.05, -68.2 79.05, -68.2 82.1))\')) = \'FF2FF1212\'',
'relate($geometry,geom_from_wkt( \'Polygon ((-68.2 82.1, -66.95 82.1, -66.95 79.05, -68.2 79.05, -68.2 82.1))\'), \'****F****\')',
'crosses($geometry,geom_from_wkt( \'Linestring (-68.2 82.1, -66.95 82.1, -66.95 79.05)\'))',
'overlaps($geometry,geom_from_wkt( \'Polygon ((-68.2 82.1, -66.95 82.1, -66.95 79.05, -68.2 79.05, -68.2 82.1))\'))',
'within($geometry,geom_from_wkt( \'Polygon ((-75.1 76.1, -75.1 81.6, -68.8 81.6, -68.8 76.1, -75.1 76.1))\'))',
'overlaps(translate($geometry,-1,-1),geom_from_wkt( \'Polygon ((-75.1 76.1, -75.1 81.6, -68.8 81.6, -68.8 76.1, -75.1 76.1))\'))',
'overlaps(buffer($geometry,1),geom_from_wkt( \'Polygon ((-75.1 76.1, -75.1 81.6, -68.8 81.6, -68.8 76.1, -75.1 76.1))\'))',
'intersects(centroid($geometry),geom_from_wkt( \'Polygon ((-74.4 78.2, -74.4 79.1, -66.8 79.1, -66.8 78.2, -74.4 78.2))\'))',
'intersects(point_on_surface($geometry),geom_from_wkt( \'Polygon ((-74.4 78.2, -74.4 79.1, -66.8 79.1, -66.8 78.2, -74.4 78.2))\'))',
'"dt" <= format_date(make_datetime(2020, 5, 4, 12, 13, 14), \'yyyy-MM-dd hh:mm:ss\')',
'"dt" < format_date(make_date(2020, 5, 4), \'yyyy-MM-dd hh:mm:ss\')',
'"dt" = format_date(to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\'),\'yyyy-MM-dd hh:mm:ss\')',
'"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
'to_time("time") >= make_time(12, 14, 14)',
'to_time("time") = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')',
'to_datetime("dt", \'yyyy-MM-dd hh:mm:ss\') + make_interval(days:=1) <= make_datetime(2020, 5, 4, 12, 13, 14)',
'to_datetime("dt", \'yyyy-MM-dd hh:mm:ss\') + make_interval(days:=0.01) <= make_datetime(2020, 5, 4, 12, 13, 14)'
])
return filters
def partiallyCompiledFilters(self):
return set(['name = \'Apple\'',
'name = \'apple\'',
'\"NaMe\" = \'Apple\'',
'name LIKE \'Apple\'',
'name LIKE \'aPple\'',
'name LIKE \'Ap_le\'',
'name LIKE \'Ap\\_le\'',
'"name"="name2"'])
def testRepack(self):
vl = QgsVectorLayer('{}|layerid=0'.format(self.repackfile), 'test', 'ogr')
ids = [f.id() for f in vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk=1'))]
vl.selectByIds(ids)
self.assertEqual(vl.selectedFeatureIds(), ids)
self.assertEqual(vl.featureCount(), 5)
self.assertTrue(vl.startEditing())
self.assertTrue(vl.deleteFeature(3))
self.assertTrue(vl.commitChanges())
self.assertTrue(vl.selectedFeatureCount() == 0 or vl.selectedFeatures()[0]['pk'] == 1)
def testUpdateMode(self):
""" Test that on-the-fly re-opening in update/read-only mode works """
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
caps = vl.dataProvider().capabilities()
self.assertTrue(caps & QgsVectorDataProvider.AddFeatures)
self.assertTrue(caps & QgsVectorDataProvider.DeleteFeatures)
self.assertTrue(caps & QgsVectorDataProvider.ChangeAttributeValues)
self.assertTrue(caps & QgsVectorDataProvider.AddAttributes)
self.assertTrue(caps & QgsVectorDataProvider.DeleteAttributes)
self.assertTrue(caps & QgsVectorDataProvider.CreateSpatialIndex)
self.assertTrue(caps & QgsVectorDataProvider.SelectAtId)
self.assertTrue(caps & QgsVectorDataProvider.ChangeGeometries)
# self.assertTrue(caps & QgsVectorDataProvider.ChangeFeatures)
# We should be really opened in read-only mode even if write capabilities are declared
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-only")
# Unbalanced call to leaveUpdateMode()
self.assertFalse(vl.dataProvider().leaveUpdateMode())
# Test that startEditing() / commitChanges() plays with enterUpdateMode() / leaveUpdateMode()
self.assertTrue(vl.startEditing())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
self.assertTrue(vl.dataProvider().isValid())
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-only")
self.assertTrue(vl.dataProvider().isValid())
# Manual enterUpdateMode() / leaveUpdateMode() with 2 depths
self.assertTrue(vl.dataProvider().enterUpdateMode())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
caps = vl.dataProvider().capabilities()
self.assertTrue(caps & QgsVectorDataProvider.AddFeatures)
f = QgsFeature()
f.setAttributes([200])
f.setGeometry(QgsGeometry.fromWkt('Point (2 49)'))
(ret, feature_list) = vl.dataProvider().addFeatures([f])
self.assertTrue(ret)
fid = feature_list[0].id()
features = [f_iter for f_iter in vl.getFeatures(QgsFeatureRequest().setFilterFid(fid))]
values = [f_iter['pk'] for f_iter in features]
self.assertEqual(values, [200])
got_geom = [f_iter.geometry() for f_iter in features][0].constGet()
self.assertEqual((got_geom.x(), got_geom.y()), (2.0, 49.0))
self.assertTrue(vl.dataProvider().changeGeometryValues({fid: QgsGeometry.fromWkt('Point (3 50)')}))
self.assertTrue(vl.dataProvider().changeAttributeValues({fid: {0: 100}}))
features = [f_iter for f_iter in vl.getFeatures(QgsFeatureRequest().setFilterFid(fid))]
values = [f_iter['pk'] for f_iter in features]
got_geom = [f_iter.geometry() for f_iter in features][0].constGet()
self.assertEqual((got_geom.x(), got_geom.y()), (3.0, 50.0))
self.assertTrue(vl.dataProvider().deleteFeatures([fid]))
# Check that it has really disappeared
osgeo.gdal.PushErrorHandler('CPLQuietErrorHandler')
features = [f_iter for f_iter in vl.getFeatures(QgsFeatureRequest().setFilterFid(fid))]
osgeo.gdal.PopErrorHandler()
self.assertEqual(features, [])
self.assertTrue(vl.dataProvider().addAttributes([QgsField("new_field", QVariant.Int, "integer")]))
self.assertTrue(vl.dataProvider().deleteAttributes([len(vl.dataProvider().fields()) - 1]))
self.assertTrue(vl.startEditing())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
self.assertTrue(vl.dataProvider().enterUpdateMode())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
self.assertTrue(vl.dataProvider().leaveUpdateMode())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
self.assertTrue(vl.dataProvider().leaveUpdateMode())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-only")
# Test that update mode will be implicitly enabled if doing an action
# that requires update mode
(ret, _) = vl.dataProvider().addFeatures([QgsFeature()])
self.assertTrue(ret)
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "read-write")
def testUpdateModeFailedReopening(self):
''' Test that methods on provider don't crash after a failed reopening '''
# Windows doesn't like removing files opened by OGR, whatever
# their open mode, so that makes it hard to test
if sys.platform == 'win32':
return
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
os.unlink(datasource)
self.assertFalse(vl.dataProvider().enterUpdateMode())
self.assertFalse(vl.dataProvider().enterUpdateMode())
self.assertEqual(vl.dataProvider().property("_debug_open_mode"), "invalid")
self.assertFalse(vl.dataProvider().isValid())
self.assertEqual(len([f for f in vl.dataProvider().getFeatures()]), 0)
self.assertEqual(len(vl.dataProvider().subLayers()), 0)
self.assertFalse(vl.dataProvider().setSubsetString('TRUE'))
(ret, _) = vl.dataProvider().addFeatures([QgsFeature()])
self.assertFalse(ret)
self.assertFalse(vl.dataProvider().deleteFeatures([1]))
self.assertFalse(vl.dataProvider().addAttributes([QgsField()]))
self.assertFalse(vl.dataProvider().deleteAttributes([1]))
self.assertFalse(vl.dataProvider().changeGeometryValues({0: QgsGeometry.fromWkt('Point (3 50)')}))
self.assertFalse(vl.dataProvider().changeAttributeValues({0: {0: 0}}))
self.assertFalse(vl.dataProvider().createSpatialIndex())
self.assertFalse(vl.dataProvider().createAttributeIndex(0))
def testreloadData(self):
''' Test reloadData() '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl1 = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
vl2 = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
self.assertTrue(vl1.startEditing())
self.assertTrue(vl1.deleteAttributes([1]))
self.assertTrue(vl1.commitChanges())
self.assertEqual(len(vl1.fields()) + 1, len(vl2.fields()))
# Reload
vl2.reload()
# And now check that fields are up-to-date
self.assertEqual(len(vl1.fields()), len(vl2.fields()))
def testRenameAttributes(self):
''' Test renameAttributes() '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
provider = vl.dataProvider()
# bad rename
self.assertFalse(provider.renameAttributes({-1: 'not_a_field'}))
self.assertFalse(provider.renameAttributes({100: 'not_a_field'}))
# already exists
self.assertFalse(provider.renameAttributes({2: 'cnt'}))
# rename one field
self.assertTrue(provider.renameAttributes({2: 'newname'}))
self.assertEqual(provider.fields().at(2).name(), 'newname')
vl.updateFields()
fet = next(vl.getFeatures())
self.assertEqual(fet.fields()[2].name(), 'newname')
# rename two fields
self.assertTrue(provider.renameAttributes({2: 'newname2', 3: 'another'}))
self.assertEqual(provider.fields().at(2).name(), 'newname2')
self.assertEqual(provider.fields().at(3).name(), 'another')
vl.updateFields()
fet = next(vl.getFeatures())
self.assertEqual(fet.fields()[2].name(), 'newname2')
self.assertEqual(fet.fields()[3].name(), 'another')
# close file and reopen, then recheck to confirm that changes were saved to file
del vl
vl = None
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
provider = vl.dataProvider()
self.assertEqual(provider.fields().at(2).name(), 'newname2')
self.assertEqual(provider.fields().at(3).name(), 'another')
fet = next(vl.getFeatures())
self.assertEqual(fet.fields()[2].name(), 'newname2')
self.assertEqual(fet.fields()[3].name(), 'another')
def testDeleteGeometry(self):
''' Test changeGeometryValues() with a null geometry '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
self.assertTrue(vl.dataProvider().changeGeometryValues({0: QgsGeometry()}))
vl = None
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
fet = next(vl.getFeatures())
self.assertFalse(fet.hasGeometry())
def testDeleteShapes(self):
''' Test fix for #11007 '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
feature_count = vl.featureCount()
# Start an iterator that will open a new connection
iterator = vl.getFeatures()
next(iterator)
# Delete a feature
self.assertTrue(vl.startEditing())
self.assertTrue(vl.deleteFeature(1))
self.assertTrue(vl.commitChanges())
# Test the content of the shapefile while it is still opened
ds = osgeo.ogr.Open(datasource)
# Test repacking has been done
self.assertTrue(ds.GetLayer(0).GetFeatureCount() == feature_count - 1)
ds = None
# Delete another feature while in update mode
self.assertTrue(2 == 2)
vl.dataProvider().enterUpdateMode()
vl.dataProvider().deleteFeatures([0])
# Test that repacking has not been done (since in update mode)
ds = osgeo.ogr.Open(datasource)
self.assertTrue(ds.GetLayer(0).GetFeatureCount() == feature_count - 1)
ds = None
# Test that repacking was performed when leaving updateMode
vl.dataProvider().leaveUpdateMode()
ds = osgeo.ogr.Open(datasource)
self.assertTrue(ds.GetLayer(0).GetFeatureCount() == feature_count - 2)
ds = None
vl = None
def testDontRepackOnReload(self):
''' Test fix for #18421 '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
feature_count = vl.featureCount()
# Start an iterator that will open a new connection
iterator = vl.getFeatures()
next(iterator)
# Delete another feature while in update mode
vl.dataProvider().enterUpdateMode()
vl.dataProvider().reloadData()
vl.dataProvider().deleteFeatures([0])
# Test that repacking has not been done (since in update mode)
ds = osgeo.ogr.Open(datasource)
self.assertTrue(ds.GetLayer(0).GetFeatureCount() == feature_count)
ds = None
vl = None
def testRepackUnderFileLocks(self):
''' Test fix for #15570 and #15393 '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
feature_count = vl.featureCount()
# Keep a file descriptor opened on the .dbf, .shp and .shx
f_shp = open(os.path.join(tmpdir, 'shapefile.shp'), 'rb')
f_shx = open(os.path.join(tmpdir, 'shapefile.shx'), 'rb')
f_dbf = open(os.path.join(tmpdir, 'shapefile.dbf'), 'rb')
# Delete a feature
self.assertTrue(vl.startEditing())
self.assertTrue(vl.deleteFeature(1))
# Commit changes and check no error is emitted
cbk = ErrorReceiver()
vl.dataProvider().raiseError.connect(cbk.receiveError)
self.assertTrue(vl.commitChanges())
self.assertIsNone(cbk.msg)
vl = None
del f_shp
del f_shx
del f_dbf
# Test repacking has been done
ds = osgeo.ogr.Open(datasource)
self.assertTrue(ds.GetLayer(0).GetFeatureCount(), feature_count - 1)
ds = None
def testRepackAtFirstSave(self):
''' Test fix for #15407 '''
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
ds = osgeo.ogr.Open(datasource)
lyr = ds.GetLayer(0)
original_feature_count = lyr.GetFeatureCount()
lyr.DeleteFeature(2)
ds = None
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
self.assertTrue(vl.featureCount(), original_feature_count)
# Edit a feature (attribute change only)
self.assertTrue(vl.startEditing())
self.assertTrue(vl.dataProvider().changeAttributeValues({0: {0: 100}}))
# Commit changes and check no error is emitted
cbk = ErrorReceiver()
vl.dataProvider().raiseError.connect(cbk.receiveError)
self.assertTrue(vl.commitChanges())
self.assertIsNone(cbk.msg)
self.assertTrue(vl.featureCount(), original_feature_count - 1)
vl = None
# Test repacking has been done
ds = osgeo.ogr.Open(datasource)
self.assertTrue(ds.GetLayer(0).GetFeatureCount(), original_feature_count - 1)
ds = None
def testOpenWithFilter(self):
file_path = os.path.join(TEST_DATA_DIR, 'provider', 'shapefile.shp')
uri = '{}|layerid=0|subset="name" = \'Apple\''.format(file_path)
options = QgsDataProvider.ProviderOptions()
# ensure that no longer required ogr SQL layers are correctly cleaned up
# we need to run this twice for the incorrect cleanup asserts to trip,
# since they are triggered only when fetching an existing layer from the ogr
# connection pool
for i in range(2):
vl = QgsVectorLayer(uri)
self.assertTrue(vl.isValid(), 'Layer not valid, iteration {}'.format(i + 1))
self.assertEqual(vl.featureCount(), 1)
f = next(vl.getFeatures())
self.assertEqual(f['name'], 'Apple')
# force close of data provider
vl.setDataSource('', 'test', 'ogr', options)
def testEncoding(self):
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'iso-8859-1.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'ISO-8859-1')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'iso-8859-1_ldid.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'ISO-8859-1')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'latin1.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'ISO-8859-1')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'utf8.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'UTF-8')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'windows-1252.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'windows-1252')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'windows-1252_ldid.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'windows-1252')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
if int(gdal.VersionInfo('VERSION_NUM')) >= GDAL_COMPUTE_VERSION(3, 1, 0):
# correct autodetection of vsizip based shapefiles depends on GDAL 3.1
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'windows-1252.zip')
vl = QgsVectorLayer('/vsizip/{}'.format(file_path))
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().encoding(), 'windows-1252')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
file_path = os.path.join(TEST_DATA_DIR, 'shapefile', 'system_encoding.shp')
vl = QgsVectorLayer(file_path)
self.assertTrue(vl.isValid())
# no encoding hints, so it should default to UTF-8 (which is wrong for this particular file, but the correct guess to make first!)
self.assertEqual(vl.dataProvider().encoding(), 'UTF-8')
self.assertNotEqual(next(vl.getFeatures())[1], 'äöü')
# set to correct encoding
vl.dataProvider().setEncoding('ISO-8859-1')
self.assertEqual(vl.dataProvider().encoding(), 'ISO-8859-1')
self.assertEqual(next(vl.getFeatures())[1], 'äöü')
def testCreateAttributeIndex(self):
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().capabilities() & QgsVectorDataProvider.CreateAttributeIndex)
self.assertFalse(vl.dataProvider().createAttributeIndex(-1))
self.assertFalse(vl.dataProvider().createAttributeIndex(100))
self.assertTrue(vl.dataProvider().createAttributeIndex(1))
def testCreateSpatialIndex(self):
tmpdir = tempfile.mkdtemp()
self.dirs_to_cleanup.append(tmpdir)
srcpath = os.path.join(TEST_DATA_DIR, 'provider')
for file in glob.glob(os.path.join(srcpath, 'shapefile.*')):
shutil.copy(os.path.join(srcpath, file), tmpdir)
datasource = os.path.join(tmpdir, 'shapefile.shp')
vl = QgsVectorLayer('{}|layerid=0'.format(datasource), 'test', 'ogr')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().capabilities() & QgsVectorDataProvider.CreateSpatialIndex)
self.assertTrue(vl.dataProvider().createSpatialIndex())
def testSubSetStringEditable_bug17795_but_with_modified_behavior(self):
"""Test that a layer is still editable after setting a subset"""
testPath = TEST_DATA_DIR + '/' + 'lines.shp'
isEditable = QgsVectorDataProvider.ChangeAttributeValues
vl = QgsVectorLayer(testPath, 'subset_test', 'ogr')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().capabilities() & isEditable)
vl = QgsVectorLayer(testPath, 'subset_test', 'ogr')
vl.setSubsetString('')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().capabilities() & isEditable)
vl = QgsVectorLayer(testPath, 'subset_test', 'ogr')
vl.setSubsetString('"Name" = \'Arterial\'')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().capabilities() & isEditable)
vl.setSubsetString('')
self.assertTrue(vl.dataProvider().capabilities() & isEditable)
def testSubsetStringExtent_bug17863(self):
"""Check that the extent is correct when applied in the ctor and when
modified after a subset string is set """
def _lessdigits(s):
return re.sub(r'(\d+\.\d{3})\d+', r'\1', s)
testPath = TEST_DATA_DIR + '/' + 'points.shp'
subSetString = '"Class" = \'Biplane\''
subSet = '|layerid=0|subset=%s' % subSetString
# unfiltered
vl = QgsVectorLayer(testPath, 'test', 'ogr')
self.assertTrue(vl.isValid())
unfiltered_extent = _lessdigits(vl.extent().toString())
del(vl)
# filter after construction ...
subSet_vl2 = QgsVectorLayer(testPath, 'test', 'ogr')
self.assertEqual(_lessdigits(subSet_vl2.extent().toString()), unfiltered_extent)
# ... apply filter now!
subSet_vl2.setSubsetString(subSetString)
self.assertEqual(subSet_vl2.subsetString(), subSetString)
self.assertNotEqual(_lessdigits(subSet_vl2.extent().toString()), unfiltered_extent)
filtered_extent = _lessdigits(subSet_vl2.extent().toString())
del(subSet_vl2)
# filtered in constructor
subSet_vl = QgsVectorLayer(testPath + subSet, 'subset_test', 'ogr')
self.assertEqual(subSet_vl.subsetString(), subSetString)
self.assertTrue(subSet_vl.isValid())
# This was failing in bug 17863
self.assertEqual(_lessdigits(subSet_vl.extent().toString()), filtered_extent)
self.assertNotEqual(_lessdigits(subSet_vl.extent().toString()), unfiltered_extent)
def testMalformedSubsetStrings(self):
"""Test that invalid where clauses always return false"""
testPath = TEST_DATA_DIR + '/' + 'lines.shp'
vl = QgsVectorLayer(testPath, 'subset_test', 'ogr')
self.assertTrue(vl.isValid())
self.assertTrue(vl.setSubsetString(''))
self.assertTrue(vl.setSubsetString('"Name" = \'Arterial\''))
self.assertTrue(vl.setSubsetString('select * from lines where "Name" = \'Arterial\''))
self.assertFalse(vl.setSubsetString('this is invalid sql'))
self.assertFalse(vl.setSubsetString('select * from lines where "NonExistentField" = \'someValue\''))
self.assertFalse(vl.setSubsetString('select * from lines where "Name" = \'Arte...'))
self.assertFalse(vl.setSubsetString('select * from lines where "Name" in (\'Arterial\', \'Highway\' '))
self.assertFalse(vl.setSubsetString('select * from NonExistentTable'))
self.assertFalse(vl.setSubsetString('select NonExistentField from lines'))
self.assertFalse(vl.setSubsetString('"NonExistentField" = \'someValue\''))
self.assertFalse(vl.setSubsetString('"Name" = \'Arte...'))
self.assertFalse(vl.setSubsetString('"Name" in (\'Arterial\', \'Highway\' '))
self.assertTrue(vl.setSubsetString(''))
def testMultipatch(self):
"""Check that we can deal with multipatch shapefiles, returned natively by OGR as GeometryCollection of TIN"""
testPath = TEST_DATA_DIR + '/' + 'multipatch.shp'
vl = QgsVectorLayer(testPath, 'test', 'ogr')
self.assertTrue(vl.isValid())
self.assertEqual(vl.wkbType(), QgsWkbTypes.MultiPolygonZ)
f = next(vl.getFeatures())
self.assertEqual(f.geometry().wkbType(), QgsWkbTypes.MultiPolygonZ)
self.assertEqual(f.geometry().constGet().asWkt(),
'MultiPolygonZ (((0 0 0, 0 1 0, 1 1 0, 0 0 0)),((0 0 0, 1 1 0, 1 0 0, 0 0 0)),((0 0 0, 0 -1 0, 1 -1 0, 0 0 0)),((0 0 0, 1 -1 0, 1 0 0, 0 0 0)))')
def testShzSupport(self):
''' Test support for single layer compressed shapefiles (.shz) '''
if int(osgeo.gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(3, 1, 0):
return
tmpfile = os.path.join(self.basetestpath, 'testShzSupport.shz')
ds = osgeo.ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(tmpfile)
lyr = ds.CreateLayer('testShzSupport', geom_type=osgeo.ogr.wkbPoint)
lyr.CreateField(osgeo.ogr.FieldDefn('attr', osgeo.ogr.OFTInteger))
f = osgeo.ogr.Feature(lyr.GetLayerDefn())
f.SetField('attr', 1)
f.SetGeometry(osgeo.ogr.CreateGeometryFromWkt('POINT(0 0)'))
lyr.CreateFeature(f)
f = None
ds = None
vl = QgsVectorLayer(tmpfile, 'test', 'ogr')
self.assertTrue(vl.isValid())
self.assertEqual(vl.wkbType(), QgsWkbTypes.Point)
f = next(vl.getFeatures())
assert f['attr'] == 1
self.assertEqual(f.geometry().constGet().asWkt(), 'Point (0 0)')
self.assertTrue(vl.startEditing())
self.assertTrue(vl.changeAttributeValue(f.id(), 0, -1))
self.assertTrue(vl.commitChanges())
f = next(vl.getFeatures())
assert f['attr'] == -1
# Check DataItem
registry = QgsApplication.dataItemProviderRegistry()
ogrprovider = next(provider for provider in registry.providers() if provider.name() == 'OGR')
item = ogrprovider.createDataItem(tmpfile, None)
self.assertTrue(item.uri().endswith('testShzSupport.shz'))
def testShpZipSupport(self):
''' Test support for multi layer compressed shapefiles (.shp.zip) '''
if int(osgeo.gdal.VersionInfo('VERSION_NUM')) < GDAL_COMPUTE_VERSION(3, 1, 0):
return
tmpfile = os.path.join(self.basetestpath, 'testShpZipSupport.shp.zip')
ds = osgeo.ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(tmpfile)
lyr = ds.CreateLayer('layer1', geom_type=osgeo.ogr.wkbPoint)
lyr.CreateField(osgeo.ogr.FieldDefn('attr', osgeo.ogr.OFTInteger))
f = osgeo.ogr.Feature(lyr.GetLayerDefn())
f.SetField('attr', 1)
f.SetGeometry(osgeo.ogr.CreateGeometryFromWkt('POINT(0 0)'))
lyr.CreateFeature(f)
f = None
lyr = ds.CreateLayer('layer2', geom_type=osgeo.ogr.wkbMultiLineString)
lyr.CreateField(osgeo.ogr.FieldDefn('attr', osgeo.ogr.OFTInteger))
f = osgeo.ogr.Feature(lyr.GetLayerDefn())
f.SetField('attr', 2)
f.SetGeometry(osgeo.ogr.CreateGeometryFromWkt('LINESTRING(0 0,1 1)'))
lyr.CreateFeature(f)
f = None
ds = None
vl1 = QgsVectorLayer(tmpfile + '|layername=layer1', 'test', 'ogr')
vl2 = QgsVectorLayer(tmpfile + '|layername=layer2', 'test', 'ogr')
self.assertTrue(vl1.isValid())
self.assertTrue(vl2.isValid())
self.assertEqual(vl1.wkbType(), QgsWkbTypes.Point)
self.assertEqual(vl2.wkbType(), QgsWkbTypes.MultiLineString)
f1 = next(vl1.getFeatures())
f2 = next(vl2.getFeatures())
assert f1['attr'] == 1
self.assertEqual(f1.geometry().constGet().asWkt(), 'Point (0 0)')
assert f2['attr'] == 2
self.assertEqual(f2.geometry().constGet().asWkt(), 'MultiLineString ((0 0, 1 1))')
self.assertTrue(vl1.startEditing())
self.assertTrue(vl2.startEditing())
self.assertTrue(vl1.changeAttributeValue(f1.id(), 0, -1))
self.assertTrue(vl2.changeAttributeValue(f2.id(), 0, -2))
self.assertTrue(vl1.commitChanges())
self.assertTrue(vl2.commitChanges())
f = next(vl1.getFeatures())
assert f['attr'] == -1
f = next(vl2.getFeatures())
assert f['attr'] == -2
# Check DataItem
registry = QgsApplication.dataItemProviderRegistry()
ogrprovider = next(provider for provider in registry.providers() if provider.name() == 'OGR')
item = ogrprovider.createDataItem(tmpfile, None)
children = item.createChildren()
self.assertEqual(len(children), 2)
uris = sorted([children[i].uri() for i in range(2)])
self.assertIn('testShpZipSupport.shp.zip|layername=layer1', uris[0])
self.assertIn('testShpZipSupport.shp.zip|layername=layer2', uris[1])
gdalprovider = next(provider for provider in registry.providers() if provider.name() == 'GDAL')
item = gdalprovider.createDataItem(tmpfile, None)
assert not item
    def testWriteShapefileWithSingleConversion(self):
        """Check writing geometries from a POLYGON ESRI shapefile does not
        convert to multi when "forceSinglePartGeometryType" options is TRUE
        also checks failing cases.
        OGR provider always report MULTI for POLYGON and LINESTRING, but if we set
        the import option "forceSinglePartGeometryType" the writer must respect the
        actual single-part type if the features in the data provider are actually single
        and not multi.
        """
        # Build a single-part polygon memory layer to seed the shapefile.
        ml = QgsVectorLayer(
            ('Polygon?crs=epsg:4326&field=id:int'),
            'test',
            'memory')
        provider = ml.dataProvider()
        ft = QgsFeature()
        ft.setGeometry(QgsGeometry.fromWkt('Polygon ((0 0, 0 1, 1 1, 1 0, 0 0))'))
        ft.setAttributes([1])
        res, features = provider.addFeatures([ft])
        dest_file_name = os.path.join(self.basetestpath, 'multipart.shp')
        write_result, error_message = QgsVectorLayerExporter.exportLayer(ml,
                                                                         dest_file_name,
                                                                         'ogr',
                                                                         ml.crs(),
                                                                         False,
                                                                         {"driverName": "ESRI Shapefile"}
                                                                         )
        self.assertEqual(write_result, QgsVectorLayerExporter.NoError, error_message)
        # Open the newly created layer
        shapefile_layer = QgsVectorLayer(dest_file_name)
        # Export to GPKG forcing single-part: result must stay single-part
        # even though the OGR shapefile provider reports MULTI.
        dest_singlepart_file_name = os.path.join(self.basetestpath, 'singlepart.gpkg')
        write_result, error_message = QgsVectorLayerExporter.exportLayer(shapefile_layer,
                                                                         dest_singlepart_file_name,
                                                                         'ogr',
                                                                         shapefile_layer.crs(),
                                                                         False,
                                                                         {
                                                                             "forceSinglePartGeometryType": True,
                                                                             "driverName": "GPKG",
                                                                         })
        self.assertEqual(write_result, QgsVectorLayerExporter.NoError, error_message)
        # Load result layer and check that it's NOT MULTI
        single_layer = QgsVectorLayer(dest_singlepart_file_name)
        self.assertTrue(single_layer.isValid())
        self.assertTrue(QgsWkbTypes.isSingleType(single_layer.wkbType()))
        # Now save the shapfile layer into a gpkg with no force options
        dest_multipart_file_name = os.path.join(self.basetestpath, 'multipart.gpkg')
        write_result, error_message = QgsVectorLayerExporter.exportLayer(shapefile_layer,
                                                                         dest_multipart_file_name,
                                                                         'ogr',
                                                                         shapefile_layer.crs(),
                                                                         False,
                                                                         {
                                                                             "forceSinglePartGeometryType": False,
                                                                             "driverName": "GPKG",
                                                                         })
        self.assertEqual(write_result, QgsVectorLayerExporter.NoError, error_message)
        # Load result layer and check that it's MULTI
        multi_layer = QgsVectorLayer(dest_multipart_file_name)
        self.assertTrue(multi_layer.isValid())
        self.assertTrue(QgsWkbTypes.isMultiType(multi_layer.wkbType()))
        # Failing case: add a real multi to the shapefile and try to force to single
        self.assertTrue(shapefile_layer.startEditing())
        ft = QgsFeature()
        ft.setGeometry(QgsGeometry.fromWkt('MultiPolygon (((0 0, 0 1, 1 1, 1 0, 0 0)), ((-10 -10,-10 -9,-9 -9,-10 -10)))'))
        ft.setAttributes([2])
        self.assertTrue(shapefile_layer.addFeatures([ft]))
        self.assertTrue(shapefile_layer.commitChanges())
        dest_multipart_failure_file_name = os.path.join(self.basetestpath, 'multipart_failure.gpkg')
        write_result, error_message = QgsVectorLayerExporter.exportLayer(shapefile_layer,
                                                                         dest_multipart_failure_file_name,
                                                                         'ogr',
                                                                         shapefile_layer.crs(),
                                                                         False,
                                                                         {
                                                                             "forceSinglePartGeometryType": True,
                                                                             "driverName": "GPKG",
                                                                         })
        # NOTE(review): this re-asserts multi_layer (already checked above);
        # it was presumably meant to inspect the failed export result — confirm.
        self.assertTrue(QgsWkbTypes.isMultiType(multi_layer.wkbType()))
        self.assertEqual(write_result, QgsVectorLayerExporter.ErrFeatureWriteFailed, "Failed to transform a feature with ID '1' to single part. Writing stopped.")
    def testReadingLayerGeometryTypes(self):
        """Check how each OGR layer geometry type maps to a QGIS wkb type.

        Each tuple is (OGR layer type, WKT written, expected QGIS wkb type,
        expected WKT read back).  Single-part linestrings/polygons are
        promoted to MULTI by the OGR provider for shapefiles.
        """
        tests = [(osgeo.ogr.wkbPoint, 'Point (0 0)', QgsWkbTypes.Point, 'Point (0 0)'),
                 (osgeo.ogr.wkbPoint25D, 'Point Z (0 0 1)', QgsWkbTypes.PointZ, 'PointZ (0 0 1)'),
                 (osgeo.ogr.wkbPointM, 'Point M (0 0 1)', QgsWkbTypes.PointM, 'PointM (0 0 1)'),
                 (osgeo.ogr.wkbPointZM, 'Point ZM (0 0 1 2)', QgsWkbTypes.PointZM, 'PointZM (0 0 1 2)'),
                 (osgeo.ogr.wkbLineString, 'LineString (0 0, 1 1)', QgsWkbTypes.MultiLineString, 'MultiLineString ((0 0, 1 1))'),
                 (osgeo.ogr.wkbLineString25D, 'LineString Z (0 0 10, 1 1 10)', QgsWkbTypes.MultiLineStringZ, 'MultiLineStringZ ((0 0 10, 1 1 10))'),
                 (osgeo.ogr.wkbLineStringM, 'LineString M (0 0 10, 1 1 10)', QgsWkbTypes.MultiLineStringM, 'MultiLineStringM ((0 0 10, 1 1 10))'),
                 (osgeo.ogr.wkbLineStringZM, 'LineString ZM (0 0 10 20, 1 1 10 20)', QgsWkbTypes.MultiLineStringZM, 'MultiLineStringZM ((0 0 10 20, 1 1 10 20))'),
                 (osgeo.ogr.wkbPolygon, 'Polygon ((0 0,0 1,1 1,0 0))', QgsWkbTypes.MultiPolygon, 'MultiPolygon (((0 0, 0 1, 1 1, 0 0)))'),
                 (osgeo.ogr.wkbPolygon25D, 'Polygon Z ((0 0 10, 0 1 10, 1 1 10, 0 0 10))', QgsWkbTypes.MultiPolygonZ, 'MultiPolygonZ (((0 0 10, 0 1 10, 1 1 10, 0 0 10)))'),
                 (osgeo.ogr.wkbPolygonM, 'Polygon M ((0 0 10, 0 1 10, 1 1 10, 0 0 10))', QgsWkbTypes.MultiPolygonM, 'MultiPolygonM (((0 0 10, 0 1 10, 1 1 10, 0 0 10)))'),
                 (osgeo.ogr.wkbPolygonZM, 'Polygon ZM ((0 0 10 20, 0 1 10 20, 1 1 10 20, 0 0 10 20))', QgsWkbTypes.MultiPolygonZM, 'MultiPolygonZM (((0 0 10 20, 0 1 10 20, 1 1 10 20, 0 0 10 20)))'),
                 (osgeo.ogr.wkbMultiPoint, 'MultiPoint (0 0,1 1)', QgsWkbTypes.MultiPoint, 'MultiPoint ((0 0),(1 1))'),
                 (osgeo.ogr.wkbMultiPoint25D, 'MultiPoint Z ((0 0 10), (1 1 10))', QgsWkbTypes.MultiPointZ, 'MultiPointZ ((0 0 10),(1 1 10))'),
                 (osgeo.ogr.wkbMultiPointM, 'MultiPoint M ((0 0 10), (1 1 10))', QgsWkbTypes.MultiPointM, 'MultiPointM ((0 0 10),(1 1 10))'),
                 (osgeo.ogr.wkbMultiPointZM, 'MultiPoint ZM ((0 0 10 20), (1 1 10 20))', QgsWkbTypes.MultiPointZM, 'MultiPointZM ((0 0 10 20),(1 1 10 20))'),
                 (osgeo.ogr.wkbMultiLineString, 'MultiLineString ((0 0, 1 1))', QgsWkbTypes.MultiLineString, 'MultiLineString ((0 0, 1 1))'),
                 (osgeo.ogr.wkbMultiLineString25D, 'MultiLineString Z ((0 0 10, 1 1 10))', QgsWkbTypes.MultiLineStringZ, 'MultiLineStringZ ((0 0 10, 1 1 10))'),
                 (osgeo.ogr.wkbMultiLineStringM, 'MultiLineString M ((0 0 10, 1 1 10))', QgsWkbTypes.MultiLineStringM, 'MultiLineStringM ((0 0 10, 1 1 10))'),
                 (osgeo.ogr.wkbMultiLineStringZM, 'MultiLineString ZM ((0 0 10 20, 1 1 10 20))', QgsWkbTypes.MultiLineStringZM, 'MultiLineStringZM ((0 0 10 20, 1 1 10 20))'),
                 (osgeo.ogr.wkbMultiPolygon, 'MultiPolygon (((0 0,0 1,1 1,0 0)))', QgsWkbTypes.MultiPolygon, 'MultiPolygon (((0 0, 0 1, 1 1, 0 0)))'),
                 (osgeo.ogr.wkbMultiPolygon25D, 'MultiPolygon Z (((0 0 10, 0 1 10, 1 1 10, 0 0 10)))', QgsWkbTypes.MultiPolygonZ, 'MultiPolygonZ (((0 0 10, 0 1 10, 1 1 10, 0 0 10)))'),
                 (osgeo.ogr.wkbMultiPolygonM, 'MultiPolygon M (((0 0 10, 0 1 10, 1 1 10, 0 0 10)))', QgsWkbTypes.MultiPolygonM, 'MultiPolygonM (((0 0 10, 0 1 10, 1 1 10, 0 0 10)))'),
                 (osgeo.ogr.wkbMultiPolygonZM, 'MultiPolygon ZM (((0 0 10 20, 0 1 10 20, 1 1 10 20, 0 0 10 20)))', QgsWkbTypes.MultiPolygonZM, 'MultiPolygonZM (((0 0 10 20, 0 1 10 20, 1 1 10 20, 0 0 10 20)))'),
                 ]
        for ogr_type, wkt, qgis_type, expected_wkt in tests:
            # Create a one-feature shapefile of the given OGR type, then
            # re-open it through the QGIS OGR provider and compare.
            filename = 'testPromoteToMulti'
            tmpfile = os.path.join(self.basetestpath, filename)
            ds = osgeo.ogr.GetDriverByName('ESRI Shapefile').CreateDataSource(tmpfile)
            lyr = ds.CreateLayer(filename, geom_type=ogr_type)
            f = osgeo.ogr.Feature(lyr.GetLayerDefn())
            f.SetGeometry(osgeo.ogr.CreateGeometryFromWkt(wkt))
            lyr.CreateFeature(f)
            ds = None
            vl = QgsVectorLayer(tmpfile, 'test', 'ogr')
            self.assertTrue(vl.isValid())
            self.assertEqual(vl.wkbType(), qgis_type)
            f = next(vl.getFeatures())
            self.assertEqual(f.geometry().constGet().asWkt(), expected_wkt)
            # Release the layer before deleting the datasource on disk.
            del vl
            osgeo.ogr.GetDriverByName('ESRI Shapefile').DeleteDataSource(tmpfile)
    def testEncoding(self):
        """ Test that CP852 shapefile is read/written correctly """
        # Work on a throwaway copy so the source test data stays pristine.
        tmpdir = tempfile.mkdtemp()
        self.dirs_to_cleanup.append(tmpdir)
        for file in glob.glob(os.path.join(TEST_DATA_DIR, 'test_852.*')):
            shutil.copy(os.path.join(TEST_DATA_DIR, file), tmpdir)
        datasource = os.path.join(tmpdir, 'test_852.shp')
        vl = QgsVectorLayer(datasource, 'test')
        self.assertTrue(vl.isValid())
        # The CP852-encoded attribute must round-trip to the right unicode.
        self.assertEqual([f.attributes() for f in vl.dataProvider().getFeatures()], [['abcŐ']])
        f = QgsFeature()
        f.setAttributes(['abcŐabcŐabcŐ'])
        self.assertTrue(vl.dataProvider().addFeature(f))
        # read it back in
        vl = QgsVectorLayer(datasource, 'test')
        self.assertTrue(vl.isValid())
        self.assertEqual([f.attributes() for f in vl.dataProvider().getFeatures()], [['abcŐ'], ['abcŐabcŐabcŐ']])
def testSkipFeatureCountOnFeatureCount(self):
"""Test QgsDataProvider.SkipFeatureCount on featureCount()"""
testPath = TEST_DATA_DIR + '/' + 'lines.shp'
provider = QgsProviderRegistry.instance().createProvider('ogr', testPath, QgsDataProvider.ProviderOptions(), QgsDataProvider.SkipFeatureCount)
self.assertTrue(provider.isValid())
self.assertEqual(provider.featureCount(), QgsVectorDataProvider.UnknownCount)
def testSkipFeatureCountOnSubLayers(self):
"""Test QgsDataProvider.SkipFeatureCount on subLayers()"""
datasource = os.path.join(TEST_DATA_DIR, 'shapefile')
provider = QgsProviderRegistry.instance().createProvider('ogr', datasource, QgsDataProvider.ProviderOptions(), QgsDataProvider.SkipFeatureCount)
self.assertTrue(provider.isValid())
sublayers = provider.subLayers()
self.assertTrue(len(sublayers) > 1)
self.assertEqual(sublayers[0].split(QgsDataProvider.sublayerSeparator())[2], '-1')
def testLayersOnSameOGRLayerWithAndWithoutFilter(self):
"""Test fix for https://github.com/qgis/QGIS/issues/43361"""
file_path = os.path.join(TEST_DATA_DIR, 'provider', 'shapefile.shp')
uri = '{}|layerId=0|subset="name" = \'Apple\''.format(file_path)
options = QgsDataProvider.ProviderOptions()
vl1 = QgsVectorLayer(uri, 'vl1', 'ogr')
vl2 = QgsVectorLayer(uri, 'vl2', 'ogr')
vl3 = QgsVectorLayer('{}|layerId=0'.format(file_path), 'vl3', 'ogr')
self.assertEqual(vl1.featureCount(), 1)
vl1_extent = QgsGeometry.fromRect(vl1.extent())
self.assertEqual(vl2.featureCount(), 1)
vl2_extent = QgsGeometry.fromRect(vl2.extent())
self.assertEqual(vl3.featureCount(), 5)
vl3_extent = QgsGeometry.fromRect(vl3.extent())
reference = QgsGeometry.fromRect(QgsRectangle(-68.2, 70.8, -68.2, 70.8))
assert QgsGeometry.compare(vl1_extent.asPolygon()[0], reference.asPolygon()[0],
0.00001), 'Expected {}, got {}'.format(reference.asWkt(), vl1_extent.asWkt())
assert QgsGeometry.compare(vl2_extent.asPolygon()[0], reference.asPolygon()[0],
0.00001), 'Expected {}, got {}'.format(reference.asWkt(), vl2_extent.asWkt())
reference = QgsGeometry.fromRect(QgsRectangle(-71.123, 66.33, -65.32, 78.3))
assert QgsGeometry.compare(vl3_extent.asPolygon()[0], reference.asPolygon()[0],
0.00001), 'Expected {}, got {}'.format(reference.asWkt(), vl3_extent.asWkt())
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
yongshengwang/hue | build/env/lib/python2.7/site-packages/guppy-0.1.10-py2.7-linux-x86_64.egg/guppy/heapy/test/test_Path.py | 37 | 26034 | #._cv_part guppy.heapy.test.test_Path
from guppy.heapy.test import support
import sys, unittest
class TestCase(support.TestCase):
    # Shared base for the Path tests: exposes self.Path and helpers that
    # compare relation/shortest-path string renderings against expectations.
    def setUp(self):
        support.TestCase.setUp(self)
        self.Path = self.heapy.Path
    # Check that the relation from src to dst renders as relstr.
    # relstr None means "just print it" (interactive debugging aid).
    # A rendered '<a,b,...>' form is a multi-relation; then relstr only
    # needs to match one of its comma-separated parts.
    def chkrel(self, src, dst, relstr=None, clas=None):
        rel = self.relation(src, dst)
        if clas is not None:
            self.assert_(isinstance(rel, clas))
        if relstr is None:
            print rel
        else:
            sr = str(rel)
            if sr.startswith('<') and not relstr.startswith('<'):
                self.assert_( sr.endswith('>') )
                sr = sr[1:-1].split(',')
                self.assert_(relstr in sr)
            else:
                self.aseq(sr, relstr)
    # Check the relation from src to each named attribute value;
    # expected rendering is '%s.<attr>'.
    def chkrelattr(self, src, *attrs):
        for attr in attrs:
            self.chkrel(src, getattr(src, attr), '%s.'+attr)
    # Check the shortest path(s) from src to dst against expect
    # (string for a single path, list for several).
    def chkpath(self, src, dst, expect=None):
        rel = self.shpaths(dst, src)
        if expect is None:
            print rel
        else:
            li = rel.aslist()
            if len(li) == 1: li = li[0]
            self.aseq(str(li), str(expect), -1)
    def relation(self, src, dst):
        return self.Path.relation(src, dst)
    # Shortest paths to dst, optionally restricted to start from src;
    # both are wrapped in iso() identity sets first.
    def shpaths(self, dst, src=None, *args, **kwds):
        #return self.Path.shpaths(dst, src, *args, **kwds)
        dst = self.iso(dst)
        if src is not None:
            src = self.iso(src)
        return dst.get_shpaths(src, *args, **kwds)
class RelationTestCase(TestCase):
    # Test relations from standard types and some simple paths
    # (Removed a dead first definition of test_list_relation whose checks
    # were all commented out: it was silently shadowed by the second
    # definition of the same name further down in this class.)
    def test_cell_relation(self):
        # A closure cell relates to its contained value via ob_ref.
        cellvalue = []
        def f():
            return cellvalue
        self.chkrel(f.func_closure[0], cellvalue, '%s->ob_ref')
    def test_class_relation(self):
        # Test old-style classes
        class T:
            tvar = []
        class U:
            uvar = []
        class V(U,T):
            vvar = []
        self.chkrelattr(V, '__name__', '__dict__', '__bases__', 'vvar')
        # The relation method doesn't look in base classes -
        # I suppose it doesn't need to. This would be a test in that case:
        # self.chkrel(V, V.uvar, '%s.uvar')
        # Currently, only the path is found:
        self.chkpath(V, V.uvar, "%s.__bases__[0].__dict__['uvar']")
        self.chkpath(V, V.tvar, "%s.__bases__[1].__dict__['tvar']")
        self.chkpath(V, V.vvar, "%s.__dict__['vvar']")
    def test_code_relation(self):
        def f():
            a = 3
            return self, a
        co = f.func_code
        self.chkpath(co, 3, '%s.co_consts[1]') # xxx brittle test but catches a bug
                                               # commented in notes Sep 27 2004
        self.chkrelattr(co, 'co_code', 'co_consts', 'co_names', 'co_varnames',
                        'co_freevars', 'co_cellvars', 'co_filename', 'co_name',
                        'co_lnotab')
    #B
    def test_dict_relation(self):
        k1 = 'k1'
        k2 = 'k2'
        v1 = 'v1'
        v2 = 'v2'
        k3 = tuple(range(100))
        v3 = tuple(range(100, 200))
        x = {k1:v1, k2:v2, k3:v3}
        self.chkrel(x, v1, "%s['k1']")
        self.chkrel(x, v2, "%s['k2']")
        self.chkrel(x, v3, "%s[(0, 1, 2, 3, 4, 5, ...)]")
        ks = [str(self.relation(x, k1)),
              str(self.relation(x, k2)),
              str(self.relation(x, k3))]
        ks.sort()
        self.aseq(ks, ['%s.keys()[0]', '%s.keys()[1]', '%s.keys()[2]'])
    def test_dictproxy_relation(self):
        v1 = 'v1'
        class T(object):
            k1 = v1
        x = T.__dict__
        self.chkpath(x, v1, "%s->dict['k1']")
        self.chkrel(x, v1, "%s['k1']")
    def test_frame_relation(self):
        try:
            1/0
        except:
            type, value, traceback = sys.exc_info()
        f = traceback.tb_frame
        f.f_trace = lambda : None
        f.f_exc_type = []
        f.f_exc_value = []
        f.f_exc_traceback = []
        self.chkrelattr(f,'f_back', 'f_code', 'f_builtins', 'f_globals',
                        'f_trace', 'f_exc_type', 'f_exc_value', 'f_exc_traceback',
                        'f_locals')
        a = []
        # The representation of local variables is how they may be accessed
        # - not how they are really stored.
        # xxx this may be confusing/lack information?
        # The information is available in the relation object class,
        # it is just not represented with str()...
        self.chkrel(f, a, "%s.f_locals['a']", clas=self.Path.R_LOCAL_VAR)
        x = []
        z = []
        def func(x, y=3):
            try:
                1/0
            except:
                type, value, traceback = sys.exc_info()
            frame = traceback.tb_frame
            return self, frame, z
        _, frame, __ = func(0)
        del _, __
        self.chkrel(frame, self, "%s.f_locals ['self']", clas=self.Path.R_CELL)
        self.chkrel(f, x, "%s.f_locals['x']", clas=self.Path.R_LOCAL_VAR)
        self.chkrel(f, z, "%s.f_locals ['z']", clas=self.Path.R_CELL)
        # self becomes both a local var and a cell var, since it is an argument.
        self.chkrel(f, self, "<%s.f_locals['self'],%s.f_locals ['self']>")
        # Stack variables doesn't work (Because ceval.c doesn't update
        # the f_stacktop index.) so the corresponding part of frame_relate is not tested.
    #B
    def test_function_relation(self):
        def f(x, y=3):
            return self
        f.a = []
        self.chkrelattr(f, 'func_code', 'func_globals', 'func_defaults',
                        'func_closure', 'func_doc', 'func_name', 'func_dict',
                        'a')
    def test_instance_relation(self):
        # Test 'traditional' class instance
        class T:
            tvar = []
        t = T()
        self.chkrelattr(t, '__class__', '__dict__')
        t.a = []
        self.chkrelattr(t, 'a')
        # No direct relation for class variables - as noted in test_object_relation
        self.chkpath(t, t.tvar, "%s.__class__.__dict__['tvar']")
        class U:
            uvar = []
        class V(U, T):
            vvar = []
        v = V()
        self.chkpath(v, v.uvar, "%s.__class__.__bases__[0].__dict__['uvar']")
        self.chkpath(v, v.tvar, "%s.__class__.__bases__[1].__dict__['tvar']")
        self.chkpath(v, v.vvar, "%s.__class__.__dict__['vvar']")
    def test_instancemethod_relation(self):
        class T:
            def f(x):
                pass
        self.chkrelattr(T.f, 'im_func', 'im_class')
        t = T()
        self.chkrelattr(t.f, 'im_func', 'im_class', 'im_self')
    def test_list_relation(self):
        v1 = 'v1'
        v2 = 'v2'
        v3 = range(100, 200)
        x = [v1, v2, v3]
        self.chkrel(x, v1, '%s[0]')
        self.chkrel(x, v2, '%s[1]')
        self.chkrel(x, v3, '%s[2]')
    #
    def test_meth_relation(self):
        x = []
        #self.chkrel(x.append, x, '%s->m_self')
        self.chkrel(x.append, x, '%s.__self__')
    def test_module_relation(self):
        self.chkrelattr(unittest, '__dict__', 'TestCase')
    def test_nodegraph_relation(self):
        a = 0
        b = 1
        rl = [a, b]
        rg = self.heapy.heapyc.NodeGraph([(a, rl), (b, rl)])
        self.chkrel(rg, a, '%s->edges[0].src')
        self.chkrel(rg, b, '%s->edges[1].src')
        self.chkrel(rg, rl, '<%s->edges[0].tgt,%s->edges[1].tgt>')
        self.chkpath(rg, a, '%s->edges[0].src')
        self.chkpath(rg, rl, ['%s->edges[0].tgt', '%s->edges[1].tgt'])
    def test_nodeset_relation(self):
        from guppy.sets import immnodeset, mutnodeset
        if 0:
            # This is hard to get to work accross different architectures
            # Noted Jan 17 2006
            x = [0, 1, 'a', 'b']
            x.sort(lambda a, b: cmp(id(a), id(b)))
        else:
            # This is a relaxed variant, still tests SOME thing!
            x = ['a']
        for s in (immnodeset(x), mutnodeset(x)):
            for i in range(len(x)):
                self.chkrel(s, x[i], 'list(%%s)[%s]'%i)
    def test_object_relation(self):
        class T(object):
            __slots__ = 'a', 'b'
        t = T()
        a = []
        t.a = a
        b = []
        t.b = b
        #self.chkrel(t, T, 'type(%s)')
        self.chkrel(t, T, '%s->ob_type')
        self.chkrelattr(t, 'a', 'b')
        # We shouldn't have a __dict__ here - just make sure this is the case
        self.failUnlessRaises(AttributeError, lambda:t.__dict__)
        class U(T):
            pass
        u = U()
        u.a = a
        self.chkpath(u, T, "%s->ob_type.__base__")
        self.chkrel(u, a, '%s.a')
        c = []
        u.c = c
        self.chkrel(u, c, '%s.c')
        self.chkrel(u, u.__dict__, '%s.__dict__')
        class V(U):
            pass
        v = V()
        v.c = c
        self.chkrelattr(v, '__dict__')
        class W(V):
            __slots__ = 'c', 'd', 'b'
            pass
        w = W()
        w.a = a
        w.b = b
        w.c = c
        w.d = []
        w.e = []
        self.chkrelattr(w, '__dict__', 'a', 'b', 'c', 'd', 'e')
        self.chkpath(w, w.a, '%s.a')
        self.chkpath(w, w.b, '%s.b')
        self.chkpath(w, w.c, '%s.c')
        self.chkpath(w, w.d, '%s.d')
        self.chkpath(w, w.e, "%s.__dict__['e']")
        class R(object):
            rvar = []
        class S(R, T):
            svar = []
        s = S()
        s.a = a
        s.b = b
        s.c = c
        self.chkrelattr(s, '__dict__', 'a', 'b', 'c')
        self.chkpath(s, s.a, '%s.a')
        self.chkpath(s, s.b, '%s.b')
        self.chkpath(s, s.c, "%s.__dict__['c']")
        # Class variables are not directly related- should they be that?
        # Possibly, but the compression could as well be done in Python.
        # We just check that we can get the path.
        self.chkpath(s, s.svar, "%s->ob_type.__dict__['svar']")
        self.chkpath(s, s.rvar, ["%s->ob_type.__bases__[0].__dict__['rvar']",
                                 "%s->ob_type.__mro__[1].__dict__['rvar']"])
        self.chkpath(s, s.__slots__, "%s->ob_type.__base__.__dict__['__slots__']")
    def test_traceback_relation(self):
        try:
            def g():
                1/0
            g()
        except:
            type, value, traceback = sys.exc_info()
        self.chkrelattr(traceback, 'tb_next', 'tb_frame')
    def test_tuple_relation(self):
        v1 = 'v1'
        v2 = 'v2'
        v3 = range(100, 200)
        x = (v1, v2, v3)
        self.chkrel(x, v1, '%s[0]')
        self.chkrel(x, v2, '%s[1]')
        self.chkrel(x, v3, '%s[2]')
    def test_type_relation(self):
        name = 'T'
        base = object
        bases = (base,)
        # Renamed local from 'dict' to 'dct' to avoid shadowing the builtin.
        dct = {'__slots__':('a','b')}
        T = type(name, bases, dct)
        # tp_dict can't be directly tested since .__dict__ returns a proxy
        # and the dict passed is not used directly.
        # We test it indirectly by getting a path through it.
        self.chkpath(T, T.a, "%s.__dict__['a']")
        # The C-struct __slots__ field can't be tested directly
        # This just tests the ordinary attribute
        self.chkpath(T, T.__slots__, "%s.__dict__['__slots__']")
        self.chkrelattr(T, '__mro__', '__base__', '__bases__')
        # tp_cache and tp_subclasses can also not be tested directly
        # We could try use referrers if it worked
        # print V.referents(T).reprobj.select('TOC=="dict"')
        # Inheritance is tested via test_object_relation()
class RootTestCase(TestCase):
def test_1(self):
import sys, __builtin__
root = self.View.root
# Interpreter attributes
rel = str(self.relation(root, sys.modules))
self.assert_(eval(rel % 'root') is sys.modules)
self.aseq(rel, '%s.i0_modules')
rel = str(self.relation(root, sys.__dict__))
self.assert_(eval(rel % 'root') is sys.__dict__)
self.aseq(rel, '%s.i0_sysdict')
rel = str(self.relation(root, __builtin__.__dict__))
self.assert_(eval(rel % 'root') is __builtin__.__dict__)
self.aseq(rel, '%s.i0_builtins')
if sys.version >= "2.3.3": # The version I saw them; they may have come earlier
for name in "codec_search_path", "codec_search_cache", "codec_error_registry":
attr = "i0_%s"%name
rel = str(self.relation(root, getattr(root, attr)))
self.aseq(rel, '%%s.%s'%attr)
# Thread attributes
try:
1/0
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
for name in 'exc_type', 'exc_value', 'exc_traceback':
rel = str(self.relation(root, eval(name)))
self.asis(eval(rel % 'root') , eval(name))
# There are more, untested, attributes, but the code is farily regular...
# More complication is to do with frames which I concentrate on for now.
# We need to find out what level we are at - count to lowest frame
level = 0
frame = exc_traceback.tb_frame
#print self.relation(root, frame)
#print self.relation(root, exc_type)
while frame.f_back:
frame = frame.f_back
level += 1
rel = str(self.relation(root, frame))
self.assert_(rel.endswith('_f0'))
rel = str(self.relation(root, exc_traceback.tb_frame))
import re
self.asis( eval(rel%'root'), exc_traceback.tb_frame)
self.assert_(rel.endswith('_f%d'%level))
def test_thread(self):
try:
import thread
except ImportError:
print 'threading not enabled - skipping test'
return
root = self.View.root
def task(self):
try:
1/0
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
self.exc_traceback = exc_traceback
self.sync = 1
while self.sync:
pass
self.sync = 1
self.sync = 0
thread.start_new_thread(task, (self,))
while not self.sync:
pass
exc_traceback = self.exc_traceback
rel = str(self.relation(root, exc_traceback))
self.asis(eval(rel%'root'), exc_traceback)
self.sync = 0
while not self.sync:
pass
def task(self):
self.test_1()
self.sync = 1
self.sync = 0
thread.start_new_thread(task, (self,))
while not self.sync:
pass
def _test_secondary_interpreter(self):
try:
import thread
except ImportError:
print 'threading not enabled - skipping test'
return
import_remote = """\
import sys
import thread
import time
def task():
time.sleep(0.5)
thread.start_new_thread(task, ())
self.sysdict = sys.__dict__
self.sync = 1
while self.sync:
pass
# print 'done'
"""
self.sync = 0
thid = self.heapy.heapyc.interpreter(import_remote, {'self':self})
root = self.View.root
import sys
sysdict = sys.__dict__
rel = str(self.relation(root, sysdict))
self.aseq(rel, '%s.i0_sysdict')
while not self.sync:
pass
rel = str(self.relation(root, self.sysdict))
self.aseq(rel, '%s.i1_sysdict')
self.sync = 0
def test_rootframe(self):
# assert 0 # to do
pass
class PathTestCase(TestCase):
    # Tests of shortest-path enumeration over synthetic graphs.
    def makegraph(self, width, length):
        # Generate a structure which will yield a high number
        # of shortest paths.
        # Returns a pair src, dst which are connected via a noncyclic graph
        # with many edges.
        # The length of each path (all shortest), number of edges will be length
        # The number of nodes will be 2 + width * (length - 1)
        # The number of paths will be
        # width ** length, if width >= 1 and length >= 1
        dst = []
        # (Removed a dead first construction of 'ls' that was immediately
        # overwritten by the assignment below.)
        ls = [dst] * width
        for i in range(length-1):
            xs = []
            for j in range(width):
                ys = []
                xs.append(ys)
                for k in range(width):
                    ys.append(ls[k])
            ls = xs
        src = ls
        return src, dst
    def chkgraph(self, width, length, expect=None):
        src, dst = self.makegraph(width, length)
        self.chkpath(src, dst, expect)
    def test_path(self):
        dst = 'dst'
        self.chkpath([dst], dst, '%s[0]')
        self.chkpath([[], dst], dst, '%s[1]')
        self.chkpath([dst, dst], dst, "['%s[0]', '%s[1]']")
        self.chkpath([[dst,0], dst, [dst,2]], dst, "%s[1]")
        self.chkpath([[dst,0], [dst,2]], dst, "['%s[0][0]', '%s[1][0]']")
        # (Removed an unused 'src, dst = self.makegraph(1, 1)' whose
        # results were never referenced; chkgraph builds its own graphs.)
        self.chkgraph(1, 1, '%s[0]')
        self.chkgraph(1, 2, '%s[0][0]')
        self.chkgraph(2, 1, ['%s[0]', '%s[1]'])
        self.chkgraph(3, 2, ['%s[0][0]', '%s[0][1]', '%s[0][2]',
                             '%s[1][0]', '%s[1][1]', '%s[1][2]',
                             '%s[2][0]', '%s[2][1]', '%s[2][2]'])
    def test_numpaths(self):
        # numpaths must equal width ** length for these graphs.
        for (width, length) in [(2, 1), (7, 3), (3, 7), (10, 20)]:
            src, dst = self.makegraph(width, length)
            p = self.shpaths(dst, src)
            self.aseq( p.numpaths, width**length)
    def test_iter(self):
        src, dst = self.makegraph(2, 2)
        p = self.shpaths(dst, src)
        it = iter(p)
        ss = []
        for i in it:
            ss.append(str(i))
        ss.sort()
        self.aseq(ss, ['%s[0][0]', '%s[0][1]', '%s[1][0]', '%s[1][1]'])
        # Check that we can get some of the first values from the iterator
        # of a graph with an astronomical number of paths.
        width = 11
        length = 13
        numpaths = 20
        src, dst = self.makegraph(width, length)
        p = self.shpaths(dst, src)
        it = iter(p)
        for i in range(numpaths):
            path = it.next()
            sp = str(path)
            div, mod = divmod(i, width)
            self.aseq(sp, '%s'+'[0]'*(length-2)+'[%d][%d]'%(div, mod))
        # Check that the iterator works even if the graph initially
        # would yield astronomical numbers of dead ends.
        # (The initial algorithm took astronomically long time.)
        osrc = src
        #osrc = [[],[]]
        src, dst = self.makegraph(width, length)
        src[0] = osrc
        p = self.shpaths(dst, src)
        it = iter(p)
        for i in range(numpaths):
            path = it.next()
            sp = str(path)
            div, mod = divmod(i, width)
            self.aseq(sp, '%s[1]'+'[0]'*(length-3)+'[%d][%d]'%(div, mod))
            #print sp
        # Test iterating with a negative start and a large positive start
        numfromend = width / 2
        for it in [p.iter(-numfromend), p.iter(p.numpaths-numfromend)]:
            for i, path in enumerate(it):
                sp = str(path)
                self.aseq(sp, '%s'+('[%d]'%(width-1))*(length-1)+'[%d]'%(width-numfromend+i))
        # Test iterating with start and stop
        start = 5
        stop = 25
        i = start
        for path in p.iter(start, stop):
            sp = str(path)
            div, mod = divmod(i, width)
            self.aseq(sp, '%s[1]'+'[0]'*(length-3)+'[%d][%d]'%(div, mod))
            self.aseq(path.index, i)
            i += 1
        self.aseq(i, stop)
    def test_str(self):
        # Make sure large number of paths will yield reasonable representations
        width = 11
        length = 4
        src, dst = self.makegraph(width, length)
        p = self.shpaths(dst, src)
        p.maxpaths = 1
        self.aseq(str(p), " 0: Src[0][0][0][0]\n<... 14640 more paths ...>")
        p.maxpaths = 2
        self.aseq(str(p), " 0: Src[0][0][0][0]\n 1: Src[0][0][0][1]\n<... 14639 more paths ...>")
    def test_printing(self):
        # Test the pretty-printing and moreing methods
        from StringIO import StringIO
        output = StringIO()
        self.Path.output = output
        width = 11
        length = 4
        src, dst = self.makegraph(width, length)
        p = self.shpaths(dst, src)
        p.maxpaths = 2
        p.pp()
        p.more()
        self.aseq( output.getvalue(),"""\
 0: Src[0][0][0][0]
 1: Src[0][0][0][1]
<... 14639 more paths ...>
 2: Src[0][0][0][2]
 3: Src[0][0][0][3]
<... 14637 more paths ...>
""" )
    def test_subscript(self):
        # Test subscripting
        width = 3
        length = 40
        src, dst = self.makegraph(width, length)
        p = self.shpaths(dst, src)
        np = width**length
        self.aseq(np, p.numpaths)
        #p[0].pp(p.output)
        self.aseq(str(p[0]), '%s'+'[0]'*length)
        self.aseq(str(p[-np]), '%s'+'[0]'*length)
        self.aseq(str(p[width-1]), '%s'+'[0]'*(length-1) + '[%d]'%(width-1))
        self.aseq(str(p[width]), '%s'+'[0]'*(length-2) + '[1][0]')
        self.aseq(str(p[width+1]), '%s'+'[0]'*(length-2) + '[1][1]')
        self.aseq(str(p[np-1]), '%s'+('[%d]'%(width-1))*length)
        self.aseq(str(p[-1]), '%s'+('[%d]'%(width-1))*length)
        self.failUnlessRaises(IndexError, lambda:p[np])
        self.failUnlessRaises(IndexError, lambda:p[-np-1])
class MultiTestCase(TestCase):
    # Exact-output test of multi-relation pretty printing.
    def test_pp(self):
        # Test printing of multi relations
        self.Path.output = self.Path._root.StringIO.StringIO()
        iso = self.iso
        dst = [[],[]]
        src = iso(dst[:]*2)
        dst = [iso(x) for x in dst]
        # First with default Src/Dst names, then with custom names A/B.
        p = self.Path.shpgraph(dst, src)
        p.pp()
        p = self.Path.shpgraph(dst, src, srcname='A',dstname='B')
        p.pp()
        self.aseq(self.Path.output.getvalue(), """\
--- Dst[0] ---
 0: Src[0]
 1: Src[2]
--- Dst[1] ---
 0: Src[1]
 1: Src[3]
--- B[0] ---
 0: A[0]
 1: A[2]
--- B[1] ---
 0: A[1]
 1: A[3]
""")
class AvoidTestCase(TestCase):
    # Tests of finding alternative paths by avoiding edges of earlier ones.
    def test_1(self):
        # Test that we can find new paths by avoiding edges
        # selected from previously found paths.
        # First we generate a graph with paths of various lengths...
        src = ['src']
        a = src
        for i in range(3):
            b = ['b%d'%i]
            c = ['c%d'%i,b]
            a.append(b)
            a.append(c)
            a = b
        dst = a
        p = self.shpaths(dst, src)
        # Each entry: levels to avoid -> expected (longer) replacement path.
        for avoid, result in [
            ([], '%s[1][1][1]'),
            ([0], '%s[2][1][1][1]'),
            ([1], '%s[1][2][1][1]'),
            ([2], '%s[1][1][2][1]'),
            ([0, 1], '%s[2][1][2][1][1]'),
            ([1, 2], '%s[1][2][1][2][1]'),
            # ([1, -1], '%s[1][2][1][2][1]'),
            ([0, 2], '%s[2][1][1][2][1]'),
            ([0, 1, 2], '%s[2][1][2][1][2][1]'),
            ([2, 1, 0], '%s[2][1][2][1][2][1]'),
            ]:
            result = result%' 0: Src'
            # Find new path by avoiding edges from the original path
            q = self.shpaths(dst, src, avoid_edges=p.edges_at(*avoid))
            self.aseq(str(q), result)
            # Find the same path but via a direct method
            q = p.copy_but_avoid_edges_at_levels(*avoid)
            self.aseq(str(q), result)
            # The same, but via a shorter method name
            q = p.avoided(*avoid)
            self.aseq(str(q), result)
        # Test that the avoided set is carried on to copies
        q = p.avoided(0).avoided(2)
        self.aseq(str(q), ' 0: Src[2][1][2][1][1]')
class NewTestCase(TestCase):
    def test_1(self):
        # Regression tests for several historical shortest-path bugs;
        # all output is collected in a StringIO and compared at the end.
        import sys
        o = self.python.StringIO.StringIO()
        iso = self.iso
        x = iso(sys.__dict__)
        print >>o, x.shpaths
        # This used to include a path via parameter avoid_edges
        # which was confusing
        print >>o, x.shpaths.avoided(0)
        # repr() used to be quite useless. I have it now defined as .pp(),
        # but without trailin newline.
        print >>o, repr(x.shpaths)
        print >>o, repr(x.shpaths)
        # The shpaths object could sometimes disturb a shpath calculation
        # because dst was free in it.
        x = []
        y = [[[x]]]
        sp = iso(x).get_shpaths(iso(y))
        print >>o, sp
        y.append(sp)
        print >>o, iso(x).get_shpaths(iso(y))
        # Test that the shortest paths to a set of objects, is the shortest
        # paths to those that can be reached by the shortest paths, only
        x = []
        y = [x]
        z = [y]
        print >>o, iso(x, y).get_shpaths(iso(z))
        if 0: # feature is dropped, for now at least. Nov 4 2005
            # Test that the shortest path to an abstract set of objects,
            # is the shortest paths to all the closest such objects,
            # and that the time to calculate this doesn't need to involve
            # an entire heap traversal to find all such objects
            clock = self.python.time.clock
            import gc
            gc.collect()
            t = clock()
            x = str(iso(x, y).get_shpaths(iso(z)))
            fast = clock() - t
            gc.collect()
            t = clock()
            x = str((iso() | list).get_shpaths(iso(z)))
            slow = clock() - t
            # Originally, it was 16 times slower to use an abstract set
            # Now, it's about 2.5;
            # print slow/fast # has been seen printing 2.17 to 3.25
            # we test with some margin
            self.assert_(slow < 5 * fast)
        # Test that we can relate objects that inherits from a class and object
        # (Used to segfault)
        class C:
            pass
        class O(C, object):
            __slots__ = 'x',
        ob = O()
        ob.x = x
        print >>o, iso(x).get_shpaths(iso(ob))
        # Test that generalization to a set of sources makes some sense
        # The shortest paths are from the closest sources
        # Hack to make a constant address rendering, for test comparison.
        # This doesn't change anything permanently.
        # XXX come up with an official way to do this.
        summary_str = self.heapy.UniSet.summary_str
        str_address = lambda x:'<address>'
        str_address._idpart_header = getattr(summary_str.str_address, '_idpart_header', None)
        str_address._idpart_sortrender = getattr(summary_str.str_address, '_idpart_sortrender', None)
        summary_str.str_address = str_address
        S = iso()
        shp = iso(x).get_shpaths(iso(y, z))
        print >>o, shp
        print >>o, repr(shp)
        for p in shp:
            S = S ^ p.src
        self.aseq(S, iso(y))
        shp = iso(x).get_shpaths(iso(ob, y, z))
        print >>o, str(shp)
        print >>o, repr(shp)
        S = iso()
        for i, p in enumerate(shp):
            S = S ^ p.src
            self.aseq(p.src, shp[i].src)
        self.aseq(S, iso(ob, y))
        # Test that the iter can be restarted
        # even after multiple sources handling was added
        it = iter(shp)
        a = list(it)
        it.isatend = 0
        b = list(it)
        self.aseq( str(a),str(b))
        self.aseq( o.getvalue(), """\
 0: hpy().Root.i0_sysdict
 0: Src.i0_modules['sys'].__dict__
 0: hpy().Root.i0_sysdict
 0: hpy().Root.i0_sysdict
 0: Src[0][0][0]
 0: Src[0][0][0]
 0: Src[0]
 0: Src.x
 0: <1 list: <address>*1>[0]
 0: <1 list: <address>*1>[0]
 0: <1 list: <address>*1>[0]
 1: <1 __main__.O: <address>>.x
 0: <1 list: <address>*1>[0]
 1: <1 __main__.O: <address>>.x
""".replace('__main__', self.__module__))
    def test_2(self):
        # To assist interactivity, the `more` attribute is defined to
        # return an object whose repr() gives the next batch of lines,
        # and which has a similar `more` attribute of its own.
        # This test pins the exact paging behaviour.
        o = self.python.StringIO.StringIO()  # capture printed output for comparison
        iso = self.iso
        dst = []
        src = [dst]*20  # 20 references to dst -> 20 distinct shortest paths
        # repr() of a fresh result shows the first 10 paths plus a
        # "<... 10 more paths ...>" marker (see expected output below).
        print >>o, repr(iso(dst).get_shpaths(iso(src)))
        # .more on a fresh result prints paths 10-19.
        print >>o, repr(iso(dst).get_shpaths(iso(src)).more)
        p = iso(dst).get_shpaths(iso(src))
        # .more behaves the same on a result bound to a name.
        print >>o, repr(p.more)
        self.aseq(o.getvalue(),"""\
 0: Src[0]
 1: Src[1]
 2: Src[2]
 3: Src[3]
 4: Src[4]
 5: Src[5]
 6: Src[6]
 7: Src[7]
 8: Src[8]
 9: Src[9]
<... 10 more paths ...>
10: Src[10]
11: Src[11]
12: Src[12]
13: Src[13]
14: Src[14]
15: Src[15]
16: Src[16]
17: Src[17]
18: Src[18]
19: Src[19]
10: Src[10]
11: Src[11]
12: Src[12]
13: Src[13]
14: Src[14]
15: Src[15]
16: Src[16]
17: Src[17]
18: Src[18]
19: Src[19]
""")
def test_empty(self):
# Test empty paths
iso = self.iso
dst = []
self.assert_( len(list(iso(dst).get_shpaths(iso()))) == 0)
    def test_3(self):
        """Regression checks around ShortestPath internals and hiding."""
        # Test that Edges is not included in the shortest path
        iso = self.iso
        dst = []
        shp = iso(dst).shpaths
        del dst
        self.assert_('Edges' not in str( shp.avoided(0) ))
        #print shp.avoided(0)
        # A path-result object referenced from the source is itself hidden:
        # replacing the only edge with the result leaves zero paths.
        dst = []
        src = [dst]
        shp = iso(dst).get_shpaths(iso(src))
        src[0] = shp
        dst = iso(dst)
        src = iso(src)
        self.assert_( dst.get_shpaths(src).numpaths == 0)
        # Test the sets attribute
        dst = []
        src = [dst]
        dst = iso(dst)
        src = iso(src)
        self.aseq( dst.get_shpaths(src).sets, (src, dst))
        # Test that srs doesn't disturb the path calculation
        class C:
            pass
        c = C()
        cd = iso(c.__dict__)
        p = cd.shpaths
        repr(p)
        del c
        # After the owning object dies, a recomputed result renders empty.
        q = cd.shpaths
        self.aseq( repr(q).strip(), "")
        del p, q
        # Test that internals of ShortestPath are hidden in general
        # (via hiding_env), to consistent result when used interactively,
        # as commented on in notes.txt per Nov 30 2004.
        dst = []
        src = [[[[dst]]]]
        d = iso(dst)
        s = iso(src)
        p = d.get_shpaths(s)
        self.aseq( str(p), " 0: Src[0][0][0][0]")
        src.append(p)
        p._XX_ = dst # A shorter path, but it should be hidden
        self.aseq( str(d.get_shpaths(s)), " 0: Src[0][0][0][0]")
        # Test what .more prints finally
        self.aseq( str(p.more), '<No more paths>')
        # Test that .top is idempotent
        self.asis(p.more.top.top, p)
def run_test(case, debug=0):
    """Run a single TestCase class through the shared support harness.

    ``debug`` is forwarded unchanged to ``support.run_unittest``.
    """
    support.run_unittest(case, debug)
def test_main(debug=0):
    """Run every test case in this module, in declaration order."""
    cases = (
        NewTestCase,
        RelationTestCase,
        RootTestCase,
        PathTestCase,
        MultiTestCase,
        AvoidTestCase,
    )
    for case in cases:
        run_test(case, debug)
if __name__ == "__main__":
    # Allow the test module to be run directly as a script.
    test_main()
| apache-2.0 |
wshallum/ansible | lib/ansible/modules/monitoring/stackdriver.py | 23 | 7320 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Machine-readable metadata consumed by Ansible tooling: development
# status and support level of this module.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
DOCUMENTATION = '''
module: stackdriver
short_description: Send code deploy and annotation events to stackdriver
description:
- Send code deploy and annotation events to Stackdriver
version_added: "1.6"
author: "Ben Whaley (@bwhaley)"
options:
key:
description:
- API key.
required: true
default: null
event:
description:
- The type of event to send, either annotation or deploy
choices: ['annotation', 'deploy']
required: false
default: null
revision_id:
description:
- The revision of the code that was deployed. Required for deploy events
required: false
default: null
deployed_by:
description:
- The person or robot responsible for deploying the code
required: false
default: "Ansible"
deployed_to:
description:
- "The environment code was deployed to. (ie: development, staging, production)"
required: false
default: null
repository:
description:
- The repository (or project) deployed
required: false
default: null
msg:
description:
- The contents of the annotation message, in plain text. Limited to 256 characters. Required for annotation.
required: false
default: null
annotated_by:
description:
- The person or robot who the annotation should be attributed to.
required: false
default: "Ansible"
level:
description:
- one of INFO/WARN/ERROR, defaults to INFO if not supplied. May affect display.
choices: ['INFO', 'WARN', 'ERROR']
required: false
default: 'INFO'
instance_id:
description:
- id of an EC2 instance that this event should be attached to, which will limit the contexts where this event is shown
required: false
default: null
event_epoch:
description:
- "Unix timestamp of where the event should appear in the timeline, defaults to now. Be careful with this."
required: false
default: null
'''
EXAMPLES = '''
- stackdriver:
key: AAAAAA
event: deploy
deployed_to: production
deployed_by: leeroyjenkins
repository: MyWebApp
revision_id: abcd123
- stackdriver:
key: AAAAAA
event: annotation
msg: Greetings from Ansible
annotated_by: leeroyjenkins
level: WARN
instance_id: i-abcd1234
'''
# ===========================================
# Stackdriver module specific support methods.
#
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
# Let snippet from module_utils/basic.py return a proper error in this case
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import fetch_url
def send_deploy_event(module, key, revision_id, deployed_by='Ansible', deployed_to=None, repository=None):
    """Post a deploy event to the Stackdriver event gateway.

    ``revision_id`` and ``deployed_by`` are always sent; ``deployed_to``
    and ``repository`` are included only when truthy.
    """
    endpoint = "https://event-gateway.stackdriver.com/v1/deployevent"
    payload = {
        'revision_id': revision_id,
        'deployed_by': deployed_by,
    }
    if deployed_to:
        payload['deployed_to'] = deployed_to
    if repository:
        payload['repository'] = repository
    return do_send_request(module, endpoint, payload, key)
def send_annotation_event(module, key, msg, annotated_by='Ansible', level=None, instance_id=None, event_epoch=None):
    """Post an annotation event to the Stackdriver event gateway.

    ``msg`` is always sent (as ``message``); every other field is
    included only when truthy.
    """
    endpoint = "https://event-gateway.stackdriver.com/v1/annotationevent"
    payload = {'message': msg}
    if annotated_by:
        payload['annotated_by'] = annotated_by
    if level:
        payload['level'] = level
    if instance_id:
        payload['instance_id'] = instance_id
    if event_epoch:
        payload['event_epoch'] = event_epoch
    return do_send_request(module, endpoint, payload, key)
def do_send_request(module, url, params, key):
    """POST ``params`` as JSON to ``url``, authenticating via ``key``.

    Fails the module when the gateway does not answer HTTP 200.
    """
    body = json.dumps(params)
    headers = {
        'Content-Type': 'application/json',
        'x-stackdriver-apikey': key,
    }
    response, info = fetch_url(module, url, headers=headers, data=body, method='POST')
    if info['status'] != 200:
        module.fail_json(msg="Unable to send msg: %s" % info['msg'])
# ===========================================
# Module execution.
#
def main():
    """Ansible entry point: validate parameters and dispatch the event.

    ``deploy`` events require ``revision_id``; ``annotation`` events
    require ``msg``.  argument_spec cannot express per-event requirements,
    so they are verified manually below.
    """
    module = AnsibleModule(
        argument_spec=dict(
            key=dict(required=True),
            event=dict(required=True, choices=['deploy', 'annotation']),
            msg=dict(),
            revision_id=dict(),
            annotated_by=dict(default='Ansible'),
            level=dict(default='INFO', choices=['INFO', 'WARN', 'ERROR']),
            instance_id=dict(),
            event_epoch=dict(),
            deployed_by=dict(default='Ansible'),
            deployed_to=dict(),
            repository=dict(),
        ),
        supports_check_mode=True
    )
    key = module.params["key"]
    event = module.params["event"]
    # Annotation params
    msg = module.params["msg"]
    annotated_by = module.params["annotated_by"]
    level = module.params["level"]
    instance_id = module.params["instance_id"]
    event_epoch = module.params["event_epoch"]
    # Deploy params
    revision_id = module.params["revision_id"]
    deployed_by = module.params["deployed_by"]
    deployed_to = module.params["deployed_to"]
    repository = module.params["repository"]
    if event == 'deploy':
        if not revision_id:
            module.fail_json(msg="revision_id required for deploy events")
        try:
            send_deploy_event(module, key, revision_id, deployed_by, deployed_to, repository)
        except Exception:
            e = get_exception()
            # Message typo fixed: "sent" -> "send".
            module.fail_json(msg="unable to send deploy event: %s" % e)
    if event == 'annotation':
        if not msg:
            module.fail_json(msg="msg required for annotation events")
        try:
            send_annotation_event(module, key, msg, annotated_by, level, instance_id, event_epoch)
        except Exception:
            e = get_exception()
            # Message typo fixed: "sent" -> "send".
            module.fail_json(msg="unable to send annotation event: %s" % e)
    changed = True
    module.exit_json(changed=changed, deployed_by=deployed_by)
if __name__ == '__main__':
    # Standard Ansible module entry point.
    main()
| gpl-3.0 |
mdanielwork/intellij-community | python/lib/Lib/site-packages/django/core/management/commands/dumpdata.py | 249 | 8960 | from django.core.exceptions import ImproperlyConfigured
from django.core.management.base import BaseCommand, CommandError
from django.core import serializers
from django.db import connections, router, DEFAULT_DB_ALIAS
from django.utils.datastructures import SortedDict
from optparse import make_option
class Command(BaseCommand):
    """Implements ``manage.py dumpdata``: serialize DB contents as a fixture."""
    option_list = BaseCommand.option_list + (
        make_option('--format', default='json', dest='format',
            help='Specifies the output serialization format for fixtures.'),
        make_option('--indent', default=None, dest='indent', type='int',
            help='Specifies the indent level to use when pretty-printing output'),
        make_option('--database', action='store', dest='database',
            default=DEFAULT_DB_ALIAS, help='Nominates a specific database to load '
                'fixtures into. Defaults to the "default" database.'),
        make_option('-e', '--exclude', dest='exclude',action='append', default=[],
            help='An appname or appname.ModelName to exclude (use multiple --exclude to exclude multiple apps/models).'),
        make_option('-n', '--natural', action='store_true', dest='use_natural_keys', default=False,
            help='Use natural keys if they are available.'),
        make_option('-a', '--all', action='store_true', dest='use_base_manager', default=False,
            help="Use Django's base manager to dump all models stored in the database, including those that would otherwise be filtered or modified by a custom manager."),
    )
    help = ("Output the contents of the database as a fixture of the given "
            "format (using each model's default manager unless --all is "
            "specified).")
    args = '[appname appname.ModelName ...]'
    def handle(self, *app_labels, **options):
        """Collect the requested models and return their serialized form.

        ``app_labels`` may name whole apps (``appname``) or single models
        (``appname.ModelName``); with no labels every installed app is
        dumped.  Raises CommandError for unknown apps/models/formats or
        on serialization failure.
        """
        from django.db.models import get_app, get_apps, get_models, get_model
        format = options.get('format','json')
        indent = options.get('indent',None)
        using = options.get('database', DEFAULT_DB_ALIAS)
        connection = connections[using]
        excludes = options.get('exclude',[])
        show_traceback = options.get('traceback', False)
        use_natural_keys = options.get('use_natural_keys', False)
        use_base_manager = options.get('use_base_manager', False)
        # Split --exclude values into excluded apps vs excluded models
        # ("appname.ModelName" excludes one model, "appname" a whole app).
        excluded_apps = set()
        excluded_models = set()
        for exclude in excludes:
            if '.' in exclude:
                app_label, model_name = exclude.split('.', 1)
                model_obj = get_model(app_label, model_name)
                if not model_obj:
                    raise CommandError('Unknown model in excludes: %s' % exclude)
                excluded_models.add(model_obj)
            else:
                try:
                    app_obj = get_app(exclude)
                    excluded_apps.add(app_obj)
                except ImproperlyConfigured:
                    raise CommandError('Unknown app in excludes: %s' % exclude)
        # Build an ordered {app: model list} mapping; a value of None
        # means "all models of that app".
        if len(app_labels) == 0:
            app_list = SortedDict((app, None) for app in get_apps() if app not in excluded_apps)
        else:
            app_list = SortedDict()
            for label in app_labels:
                try:
                    app_label, model_label = label.split('.')
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    model = get_model(app_label, model_label)
                    if model is None:
                        raise CommandError("Unknown model: %s.%s" % (app_label, model_label))
                    if app in app_list.keys():
                        if app_list[app] and model not in app_list[app]:
                            app_list[app].append(model)
                    else:
                        app_list[app] = [model]
                except ValueError:
                    # This is just an app - no model qualifier
                    app_label = label
                    try:
                        app = get_app(app_label)
                    except ImproperlyConfigured:
                        raise CommandError("Unknown application: %s" % app_label)
                    if app in excluded_apps:
                        continue
                    app_list[app] = None
        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)
        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)
        # Now collate the objects to be serialized.
        objects = []
        for model in sort_dependencies(app_list.items()):
            if model in excluded_models:
                continue
            if not model._meta.proxy and router.allow_syncdb(using, model):
                if use_base_manager:
                    objects.extend(model._base_manager.using(using).all())
                else:
                    objects.extend(model._default_manager.using(using).all())
        try:
            return serializers.serialize(format, objects, indent=indent,
                        use_natural_keys=use_natural_keys)
        except Exception, e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
def sort_dependencies(app_list):
    """Sort a list of (app, model list) pairs into a single list of models.

    The single list of models is sorted so that any model with a natural key
    is serialized before a normal model, and any model with a natural key
    dependency has its dependencies serialized first.

    Raises CommandError when circular dependencies make such an ordering
    impossible.
    """
    from django.db.models import get_model, get_models
    # Phase 1: compute the dependency list for every model to be dumped.
    model_dependencies = []
    models = set()
    for app, model_list in app_list:
        if model_list is None:
            model_list = get_models(app)
        for model in model_list:
            models.add(model)
            # Add any explicitly defined dependencies.
            if hasattr(model, 'natural_key'):
                deps = getattr(model.natural_key, 'dependencies', [])
                # Always build a fresh list so the appends below can never
                # mutate the model's declared ``dependencies`` attribute
                # in place (possible before when the attribute was empty).
                deps = [get_model(*d.split('.')) for d in deps]
            else:
                deps = []
            # Implicit dependencies: any FK or M2M relation pointing at a
            # model that defines a natural key.
            for field in model._meta.fields:
                if hasattr(field.rel, 'to'):
                    rel_model = field.rel.to
                    if hasattr(rel_model, 'natural_key'):
                        deps.append(rel_model)
            for field in model._meta.many_to_many:
                rel_model = field.rel.to
                if hasattr(rel_model, 'natural_key'):
                    deps.append(rel_model)
            model_dependencies.append((model, deps))
    model_dependencies.reverse()
    # Phase 2: repeatedly sweep the pending models, promoting every model
    # whose dependencies are all satisfied.  A full sweep that promotes
    # nothing means the remaining models form a dependency cycle.
    model_list = []
    while model_dependencies:
        skipped = []
        changed = False
        while model_dependencies:
            model, deps = model_dependencies.pop()
            # A dependency is satisfied when it is already serialized, or
            # is not part of this serialization run at all.
            if all(d not in models or d in model_list for d in deps):
                model_list.append(model)
                changed = True
            else:
                skipped.append((model, deps))
        if not changed:
            raise CommandError("Can't resolve dependencies for %s in serialized app list." %
                ', '.join('%s.%s' % (model._meta.app_label, model._meta.object_name)
                for model, deps in sorted(skipped, key=lambda obj: obj[0].__name__))
            )
        model_dependencies = skipped
    return model_list
| apache-2.0 |
m42e/jirash | deps/requests/packages/charade/euctwfreq.py | 3133 | 34872 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# EUCTW frequency table
# Converted from big5 work
# by Taiwan's Mandarin Promotion Council
# <http:#www.edu.tw:81/mandr/>
# 128 --> 0.42261
# 256 --> 0.57851
# 512 --> 0.74851
# 1024 --> 0.89384
# 2048 --> 0.97583
#
# Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98
# Random Distribution Ration = 512/(5401-512)=0.105
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR
# Typical distribution ratio: about 25% of the ideal ratio computed in the
# comments above, still much higher than the random distribution ratio.
EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75
# Char to FreqOrder table: number of entries in the mapping that follows.
EUCTW_TABLE_SIZE = 8102
EUCTWCharToFreqOrder = (
1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742
3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758
1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774
63,7312,7313, 317,1614, 75, 222, 159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790
3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806
4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822
7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838
630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854
179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870
995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886
2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902
1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918
3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934
706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950
1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966
3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982
2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998
437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014
3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030
1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046
7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062
266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078
7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094
1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110
32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126
188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142
3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 439, 38,7339,1063,7340, 794, # 3158
3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174
324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190
2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206
2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222
314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238
287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254
3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270
1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286
1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302
1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318
2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334
265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350
4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366
1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382
7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398
2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414
383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430
98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446
523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462
710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478
7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494
379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526
585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542
690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558
7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574
1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590
544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606
3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622
4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638
3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654
279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670
610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686
1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702
4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718
3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734
3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750
2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766
7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782
3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798
7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814
1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830
2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846
1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862
78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 369,1274,2194,2175,1837,4338, # 3878
1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894
4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910
3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926
534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942
165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958
626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974
2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990
7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006
1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022
2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038
1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054
1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070
7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086
7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102
7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118
3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134
4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150
1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166
7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182
2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198
7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214
3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246
7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262
2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278
7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294
862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310
4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326
2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342
7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358
3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374
2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390
2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406
294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422
2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438
1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454
1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470
2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486
1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502
7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518
7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534
2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550
4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566
1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582
7511,2026,4386,3534,7512, 501,7513,4123, 594,3431,2165,1821,3535,3432,3536,3192, # 4598
829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614
4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630
375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646
2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662
444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678
1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694
1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710
730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726
3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742
3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758
1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774
3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790
7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806
7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822
1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838
2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854
1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870
3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886
2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902
3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918
2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934
4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966
3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982
97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998
3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014
424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030
3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046
3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062
3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078
1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094
7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110
199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126
7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142
1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158
391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174
4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190
3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206
397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222
2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238
2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254
3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270
1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286
4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302
2600, 161,1178,4156,1982, 987,4423,1101,4157, 631,3943,1157,3198,2420,1343,1241, # 5318
1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334
1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350
2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366
3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382
1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398
7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414
1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430
4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446
1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462
135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478
1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494
3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510
3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526
2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542
1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558
4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574
660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590
7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606
2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622
3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638
4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654
790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670
7662,3349,3041,3451, 511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686
7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702
1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718
4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734
3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750
2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766
3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782
3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798
2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814
1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830
4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846
3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862
3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878
2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894
4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910
7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926
3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942
2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958
3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974
1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990
2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006
3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022
4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054
2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070
7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086
1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102
2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118
1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134
3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150
4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166
2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182
3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198
3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214
2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230
4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246
2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262
3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278
4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294
7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310
3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326
194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342
1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358
4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374
1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390
4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 52,7767,3047,1796,7768,7769, # 6406
7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422
510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438
7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454
2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470
1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486
1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502
3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518
509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534
552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550
478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566
3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582
2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598
751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614
7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630
1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646
3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662
7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678
1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694
7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710
4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726
1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742
2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758
2253, 574,3822,1603, 295,1535, 705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774
4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790
802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806
809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822
3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838
3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854
1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870
2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886
7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902
1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918
1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934
3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950
919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966
1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982
4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998
7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014
2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030
3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046
516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062
1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078
2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094
2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110
7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142
7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158
2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174
2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190
1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206
4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222
3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238
3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254
4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270
4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286
2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302
2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318
7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334
4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350
7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366
2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382
1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398
3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414
4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430
2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446
120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462
2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494
2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510
2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526
4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542
7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558
1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574
3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590
7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606
1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622
8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638
2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654
8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670
2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686
2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702
8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718
8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734
8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750
408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766
8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782
4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798
3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814
8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830
1173, 288,2311, 454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846
8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862
425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878
1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894
479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910
4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926
1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942
4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958
1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974
433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990
3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006
4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022
8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038
938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054
3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070
890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086
2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102
#Everything below this point is of no interest for detection purposes
2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118
2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134
8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150
8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166
8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198
8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214
8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230
8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246
8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262
8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278
8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294
8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310
8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326
8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342
8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358
8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374
8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390
8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406
8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422
8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438
8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454
8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470
8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486
8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502
8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518
8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550
8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566
8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582
8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598
8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614
8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630
8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646
8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662
8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678
8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694
8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710
8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726
8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742
# flake8: noqa
| mit |
levigross/pyscanner | mytests/django/core/cache/backends/locmem.py | 80 | 4491 | "Thread-safe in-memory cache backend."
from __future__ import with_statement
import time
try:
import cPickle as pickle
except ImportError:
import pickle
from django.core.cache.backends.base import BaseCache
from django.utils.synch import RWLock
# Global in-memory store of cache data. Keyed by name, to provide
# multiple named local memory caches.
_caches = {}
_expire_info = {}
_locks = {}
class LocMemCache(BaseCache):
def __init__(self, name, params):
BaseCache.__init__(self, params)
global _caches, _expire_info, _locks
self._cache = _caches.setdefault(name, {})
self._expire_info = _expire_info.setdefault(name, {})
self._lock = _locks.setdefault(name, RWLock())
def add(self, key, value, timeout=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
exp = self._expire_info.get(key)
if exp is None or exp <= time.time():
try:
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
self._set(key, pickled, timeout)
return True
except pickle.PickleError:
pass
return False
def get(self, key, default=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.reader():
exp = self._expire_info.get(key)
if exp is None:
return default
elif exp > time.time():
try:
pickled = self._cache[key]
return pickle.loads(pickled)
except pickle.PickleError:
return default
with self._lock.writer():
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return default
def _set(self, key, value, timeout=None):
if len(self._cache) >= self._max_entries:
self._cull()
if timeout is None:
timeout = self.default_timeout
self._cache[key] = value
self._expire_info[key] = time.time() + timeout
def set(self, key, value, timeout=None, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
try:
pickled = pickle.dumps(value, pickle.HIGHEST_PROTOCOL)
self._set(key, pickled, timeout)
except pickle.PickleError:
pass
def incr(self, key, delta=1, version=None):
value = self.get(key, version=version)
if value is None:
raise ValueError("Key '%s' not found" % key)
new_value = value + delta
key = self.make_key(key, version=version)
with self._lock.writer():
try:
pickled = pickle.dumps(new_value, pickle.HIGHEST_PROTOCOL)
self._cache[key] = pickled
except pickle.PickleError:
pass
return new_value
def has_key(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.reader():
exp = self._expire_info.get(key)
if exp is None:
return False
elif exp > time.time():
return True
with self._lock.writer():
try:
del self._cache[key]
del self._expire_info[key]
except KeyError:
pass
return False
def _cull(self):
if self._cull_frequency == 0:
self.clear()
else:
doomed = [k for (i, k) in enumerate(self._cache) if i % self._cull_frequency == 0]
for k in doomed:
self._delete(k)
def _delete(self, key):
try:
del self._cache[key]
except KeyError:
pass
try:
del self._expire_info[key]
except KeyError:
pass
def delete(self, key, version=None):
key = self.make_key(key, version=version)
self.validate_key(key)
with self._lock.writer():
self._delete(key)
def clear(self):
self._cache.clear()
self._expire_info.clear()
# For backwards compatibility
class CacheClass(LocMemCache):
pass
| mit |
Work4Labs/lettuce | tests/integration/lib/Django-1.2.5/django/contrib/sites/tests.py | 139 | 2138 | from django.conf import settings
from django.contrib.sites.models import Site, RequestSite, get_current_site
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpRequest
from django.test import TestCase
class SitesFrameworkTests(TestCase):
    """Tests for the sites framework: the Site manager's caching behaviour
    and get_current_site()'s Site/RequestSite selection.

    Fix: replaced the deprecated unittest alias ``self.assert_`` with
    ``self.assertTrue`` (``assert_`` is a deprecated alias of ``assertTrue``).
    """

    def setUp(self):
        # Ensure a Site row matching settings.SITE_ID exists, and force the
        # sites app to be treated as installed, restoring the flag in
        # tearDown().
        Site(id=settings.SITE_ID, domain="example.com", name="example.com").save()
        self.old_Site_meta_installed = Site._meta.installed
        Site._meta.installed = True

    def tearDown(self):
        Site._meta.installed = self.old_Site_meta_installed

    def test_site_manager(self):
        # Make sure that get_current() does not return a deleted Site object.
        s = Site.objects.get_current()
        self.assertTrue(isinstance(s, Site))
        s.delete()
        self.assertRaises(ObjectDoesNotExist, Site.objects.get_current)

    def test_site_cache(self):
        # After updating a Site object (e.g. via the admin), we shouldn't return a
        # bogus value from the SITE_CACHE.
        site = Site.objects.get_current()
        self.assertEqual(u"example.com", site.name)
        s2 = Site.objects.get(id=settings.SITE_ID)
        s2.name = "Example site"
        s2.save()
        site = Site.objects.get_current()
        self.assertEqual(u"Example site", site.name)

    def test_get_current_site(self):
        # Test that the correct Site object is returned when the framework
        # is installed and a matching Site exists.
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        site = get_current_site(request)
        self.assertTrue(isinstance(site, Site))
        self.assertEqual(site.id, settings.SITE_ID)

        # Test that an exception is raised if the sites framework is installed
        # but there is no matching Site
        site.delete()
        self.assertRaises(ObjectDoesNotExist, get_current_site, request)

        # A RequestSite is returned if the sites framework is not installed
        Site._meta.installed = False
        site = get_current_site(request)
        self.assertTrue(isinstance(site, RequestSite))
        self.assertEqual(site.name, u"example.com")
| gpl-3.0 |
TathagataChakraborti/resource-conflicts | PLANROB-2015/seq-sat-lama/py2.5/lib/python2.5/distutils/bcppcompiler.py | 85 | 15086 | """distutils.bcppcompiler
Contains BorlandCCompiler, an implementation of the abstract CCompiler class
for the Borland C++ compiler.
"""
# This implementation by Lyle Johnson, based on the original msvccompiler.py
# module and using the directions originally published by Gordon Williams.
# XXX looks like there's a LOT of overlap between these two classes:
# someone should sit down and factor out the common code as
# WindowsCCompiler! --GPW
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: bcppcompiler.py 37828 2004-11-10 22:23:15Z loewis $"
import sys, os
from distutils.errors import \
DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError, UnknownFileError
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.file_util import write_file
from distutils.dep_util import newer
from distutils import log
class BCPPCompiler(CCompiler) :
"""Concrete class that implements an interface to the Borland C/C++
compiler, as defined by the CCompiler abstract class.
"""
compiler_type = 'bcpp'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = _c_extensions + _cpp_extensions
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__ (self,
verbose=0,
dry_run=0,
force=0):
CCompiler.__init__ (self, verbose, dry_run, force)
# These executables are assumed to all be in the path.
# Borland doesn't seem to use any special registry settings to
# indicate their installation locations.
self.cc = "bcc32.exe"
self.linker = "ilink32.exe"
self.lib = "tlib.exe"
self.preprocess_options = None
self.compile_options = ['/tWM', '/O2', '/q', '/g0']
self.compile_options_debug = ['/tWM', '/Od', '/q', '/g0']
self.ldflags_shared = ['/Tpd', '/Gn', '/q', '/x']
self.ldflags_shared_debug = ['/Tpd', '/Gn', '/q', '/x']
self.ldflags_static = []
self.ldflags_exe = ['/Gn', '/q', '/x']
self.ldflags_exe_debug = ['/Gn', '/q', '/x','/r']
# -- Worker methods ------------------------------------------------
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
macros, objects, extra_postargs, pp_opts, build = \
self._setup_compile(output_dir, macros, include_dirs, sources,
depends, extra_postargs)
compile_opts = extra_preargs or []
compile_opts.append ('-c')
if debug:
compile_opts.extend (self.compile_options_debug)
else:
compile_opts.extend (self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
# XXX why do the normpath here?
src = os.path.normpath(src)
obj = os.path.normpath(obj)
# XXX _setup_compile() did a mkpath() too but before the normpath.
# Is it possible to skip the normpath?
self.mkpath(os.path.dirname(obj))
if ext == '.res':
# This is already a binary file -- skip it.
continue # the 'for' loop
if ext == '.rc':
# This needs to be compiled to a .res file -- do it now.
try:
self.spawn (["brcc32", "-fo", obj, src])
except DistutilsExecError, msg:
raise CompileError, msg
continue # the 'for' loop
# The next two are both for the real compiler.
if ext in self._c_extensions:
input_opt = ""
elif ext in self._cpp_extensions:
input_opt = "-P"
else:
# Unknown file type -- no extra options. The compiler
# will probably fail, but let it just in case this is a
# file the compiler recognizes even if we don't.
input_opt = ""
output_opt = "-o" + obj
# Compiler command line syntax is: "bcc32 [options] file(s)".
# Note that the source file names must appear at the end of
# the command line.
try:
self.spawn ([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs + [src])
except DistutilsExecError, msg:
raise CompileError, msg
return objects
# compile ()
def create_static_lib (self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
(objects, output_dir) = self._fix_object_args (objects, output_dir)
output_filename = \
self.library_filename (output_libname, output_dir=output_dir)
if self._need_link (objects, output_filename):
lib_args = [output_filename, '/u'] + objects
if debug:
pass # XXX what goes here?
try:
self.spawn ([self.lib] + lib_args)
except DistutilsExecError, msg:
raise LibError, msg
else:
log.debug("skipping %s (up-to-date)", output_filename)
# create_static_lib ()
def link (self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
# XXX this ignores 'build_temp'! should follow the lead of
# msvccompiler.py
(objects, output_dir) = self._fix_object_args (objects, output_dir)
(libraries, library_dirs, runtime_library_dirs) = \
self._fix_lib_args (libraries, library_dirs, runtime_library_dirs)
if runtime_library_dirs:
log.warn("I don't know what to do with 'runtime_library_dirs': %s",
str(runtime_library_dirs))
if output_dir is not None:
output_filename = os.path.join (output_dir, output_filename)
if self._need_link (objects, output_filename):
# Figure out linker args based on type of target.
if target_desc == CCompiler.EXECUTABLE:
startup_obj = 'c0w32'
if debug:
ld_args = self.ldflags_exe_debug[:]
else:
ld_args = self.ldflags_exe[:]
else:
startup_obj = 'c0d32'
if debug:
ld_args = self.ldflags_shared_debug[:]
else:
ld_args = self.ldflags_shared[:]
# Create a temporary exports file for use by the linker
if export_symbols is None:
def_file = ''
else:
head, tail = os.path.split (output_filename)
modname, ext = os.path.splitext (tail)
temp_dir = os.path.dirname(objects[0]) # preserve tree structure
def_file = os.path.join (temp_dir, '%s.def' % modname)
contents = ['EXPORTS']
for sym in (export_symbols or []):
contents.append(' %s=_%s' % (sym, sym))
self.execute(write_file, (def_file, contents),
"writing %s" % def_file)
# Borland C++ has problems with '/' in paths
objects2 = map(os.path.normpath, objects)
# split objects in .obj and .res files
# Borland C++ needs them at different positions in the command line
objects = [startup_obj]
resources = []
for file in objects2:
(base, ext) = os.path.splitext(os.path.normcase(file))
if ext == '.res':
resources.append(file)
else:
objects.append(file)
for l in library_dirs:
ld_args.append("/L%s" % os.path.normpath(l))
ld_args.append("/L.") # we sometimes use relative paths
# list of object files
ld_args.extend(objects)
# XXX the command-line syntax for Borland C++ is a bit wonky;
# certain filenames are jammed together in one big string, but
# comma-delimited. This doesn't mesh too well with the
# Unix-centric attitude (with a DOS/Windows quoting hack) of
# 'spawn()', so constructing the argument list is a bit
# awkward. Note that doing the obvious thing and jamming all
# the filenames and commas into one argument would be wrong,
# because 'spawn()' would quote any filenames with spaces in
# them. Arghghh!. Apparently it works fine as coded...
# name of dll/exe file
ld_args.extend([',',output_filename])
# no map file and start libraries
ld_args.append(',,')
for lib in libraries:
# see if we find it and if there is a bcpp specific lib
# (xxx_bcpp.lib)
libfile = self.find_library_file(library_dirs, lib, debug)
if libfile is None:
ld_args.append(lib)
# probably a BCPP internal library -- don't warn
else:
# full name which prefers bcpp_xxx.lib over xxx.lib
ld_args.append(libfile)
# some default libraries
ld_args.append ('import32')
ld_args.append ('cw32mt')
# def file for export symbols
ld_args.extend([',',def_file])
# add resource files
ld_args.append(',')
ld_args.extend(resources)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath (os.path.dirname (output_filename))
try:
self.spawn ([self.linker] + ld_args)
except DistutilsExecError, msg:
raise LinkError, msg
else:
log.debug("skipping %s (up-to-date)", output_filename)
# link ()
# -- Miscellaneous methods -----------------------------------------
    def find_library_file (self, dirs, lib, debug=0):
        """Search `dirs` for a static library named `lib`.

        Returns the full path of the first existing candidate, or None if
        no candidate exists in any directory.  (Python 2 / distutils code.)
        """
        # List of effective library names to try, in order of preference:
        # xxx_bcpp.lib is better than xxx.lib
        # and xxx_d.lib is better than xxx.lib if debug is set
        #
        # The "_bcpp" suffix is to handle a Python installation for people
        # with multiple compilers (primarily Distutils hackers, I suspect
        # ;-). The idea is they'd have one static library for each
        # compiler they care about, since (almost?) every Windows compiler
        # seems to have a different format for static libraries.
        if debug:
            dlib = (lib + "_d")
            try_names = (dlib + "_bcpp", lib + "_bcpp", dlib, lib)
        else:
            try_names = (lib + "_bcpp", lib)
        # NOTE: for-else -- the else arm runs only when the loop finishes
        # without hitting the `return libfile` inside.
        for dir in dirs:
            for name in try_names:
                libfile = os.path.join(dir, self.library_filename(name))
                if os.path.exists(libfile):
                    return libfile
        else:
            # Oops, didn't find it in *any* of 'dirs'
            return None
    # overwrite the one from CCompiler to support rc and res-files
    def object_filenames (self,
                          source_filenames,
                          strip_dir=0,
                          output_dir=''):
        """Map source filenames to the object filenames the build produces.

        Like CCompiler.object_filenames, but additionally accepts '.rc'
        (compiled to '.res') and '.res' (passed through unchanged) inputs.
        Raises UnknownFileError for any other unrecognized extension.
        """
        if output_dir is None: output_dir = ''
        obj_names = []
        for src_name in source_filenames:
            # use normcase to make sure '.rc' is really '.rc' and not '.RC'
            (base, ext) = os.path.splitext (os.path.normcase(src_name))
            if ext not in (self.src_extensions + ['.rc','.res']):
                raise UnknownFileError, \
                      "unknown file type '%s' (from '%s')" % \
                      (ext, src_name)
            if strip_dir:
                base = os.path.basename (base)
            if ext == '.res':
                # these can go unchanged
                obj_names.append (os.path.join (output_dir, base + ext))
            elif ext == '.rc':
                # these need to be compiled to .res-files
                obj_names.append (os.path.join (output_dir, base + '.res'))
            else:
                obj_names.append (os.path.join (output_dir,
                                                base + self.obj_extension))
        return obj_names
    # object_filenames ()
    def preprocess (self,
                    source,
                    output_file=None,
                    macros=None,
                    include_dirs=None,
                    extra_preargs=None,
                    extra_postargs=None):
        """Run Borland's cpp32.exe preprocessor over `source`.

        Skips the run when `output_file` exists and is newer than `source`
        (unless self.force).  Raises CompileError if cpp32.exe fails.
        (Python 2 / distutils code.)
        """
        (_, macros, include_dirs) = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = ['cpp32.exe'] + pp_opts
        if output_file is not None:
            pp_args.append('-o' + output_file)
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)
        # We need to preprocess: either we're being forced to, or the
        # source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError, msg:
                print msg
                raise CompileError, msg
    # preprocess()
| mit |
caseylucas/ansible-modules-core | system/mount.py | 17 | 12052 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Red Hat, inc
# Written by Seth Vidal
# based on the mount modules from salt and puppet
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: mount
short_description: Control active and configured mount points
description:
- This module controls active and configured mount points in C(/etc/fstab).
version_added: "0.6"
options:
name:
description:
- "path to the mount point, eg: C(/mnt/files)"
required: true
src:
description:
- device to be mounted on I(name). Required when C(state=present) or C(state=mounted)
required: false
default: null
fstype:
description:
- file-system type. Required when C(state=present) or C(state=mounted)
required: false
default: null
opts:
description:
- mount options (see fstab(5))
required: false
default: null
dump:
description:
- "dump (see fstab(5)), Note that if nulled, C(state=present) will cease to work and duplicate entries will be made with subsequent runs."
required: false
default: 0
passno:
description:
- "passno (see fstab(5)), Note that if nulled, C(state=present) will cease to work and duplicate entries will be made with subsequent runs."
required: false
default: 0
state:
description:
- If C(mounted) or C(unmounted), the device will be actively mounted or unmounted as needed and appropriately configured in I(fstab).
- C(absent) and C(present) only deal with I(fstab) but will not affect current mounting.
- If specifying C(mounted) and the mount point is not present, the mount point will be created. Similarly.
- Specifying C(absent) will remove the mount point directory.
required: true
choices: [ "present", "absent", "mounted", "unmounted" ]
fstab:
description:
- file to use instead of C(/etc/fstab). You shouldn't use that option
unless you really know what you are doing. This might be useful if
you need to configure mountpoints in a chroot environment.
required: false
default: /etc/fstab
author:
- Ansible Core Team
- Seth Vidal
'''
EXAMPLES = '''
# Mount DVD read-only
- mount: name=/mnt/dvd src=/dev/sr0 fstype=iso9660 opts=ro state=present
# Mount up device by label
- mount: name=/srv/disk src='LABEL=SOME_LABEL' fstype=ext4 state=present
# Mount up device by UUID
- mount: name=/home src='UUID=b3e48f45-f933-4c8e-a700-22a159ec9077' fstype=xfs opts=noatime state=present
'''
def write_fstab(lines, dest):
    """Overwrite *dest* with the given fstab lines and flush to disk."""
    with open(dest, 'w') as out:
        for entry in lines:
            out.write(entry)
        out.flush()
def _escape_fstab(v):
""" escape space (040), ampersand (046) and backslash (134) which are invalid in fstab fields """
if isinstance(v, int):
return v
else:
return v.replace('\\', '\\134').replace(' ', '\\040').replace('&', '\\046')
def set_mount(module, **kwargs):
    """Add or update the fstab entry for kwargs['name'].

    Expected kwargs: name, src, fstype and optionally opts, dump, passno,
    fstab (defaults below).  Returns a (name, changed) tuple; the file is
    only rewritten when a change is needed and not in check mode.
    """
    # kwargs: name, src, fstype, opts, dump, passno, state, fstab=/etc/fstab
    args = dict(
        opts = 'defaults',
        dump = '0',
        passno = '0',
        fstab = '/etc/fstab'
    )
    args.update(kwargs)
    new_line = '%(src)s %(name)s %(fstype)s %(opts)s %(dump)s %(passno)s\n'
    to_write = []
    exists = False
    changed = False
    # .items() instead of the Python-2-only .iteritems(); works on 2 and 3.
    escaped_args = dict([(k, _escape_fstab(v)) for k, v in args.items()])
    # Read through a context manager so the handle is closed deterministically
    # (the original code leaked the open file object).
    with open(args['fstab'], 'r') as fstab_file:
        fstab_lines = fstab_file.readlines()
    for line in fstab_lines:
        if not line.strip():
            to_write.append(line)
            continue
        if line.strip().startswith('#'):
            to_write.append(line)
            continue
        if len(line.split()) != 6:
            # not a well-formed 6-field fstab entry -- not ours, leave it be
            to_write.append(line)
            continue
        ld = {}
        ld['src'], ld['name'], ld['fstype'], ld['opts'], ld['dump'], ld['passno'] = line.split()
        if ld['name'] != escaped_args['name']:
            to_write.append(line)
            continue
        # it exists - now see if what we have is different
        exists = True
        for t in ('src', 'fstype', 'opts', 'dump', 'passno'):
            if ld[t] != escaped_args[t]:
                changed = True
                ld[t] = escaped_args[t]
        if changed:
            to_write.append(new_line % ld)
        else:
            to_write.append(line)
    if not exists:
        to_write.append(new_line % escaped_args)
        changed = True
    if changed and not module.check_mode:
        write_fstab(to_write, args['fstab'])
    return (args['name'], changed)
def unset_mount(module, **kwargs):
    """Remove the fstab entry whose mount point matches kwargs['name'].

    Other mount fields are accepted but only the name is used for
    matching.  Returns a (name, changed) tuple; the file is only
    rewritten when an entry was dropped and not in check mode.
    """
    # kwargs: name, src, fstype, opts, dump, passno, state, fstab=/etc/fstab
    # NOTE(review): opts defaults to 'default' here vs 'defaults' in
    # set_mount; harmless since opts is unused for matching, but inconsistent.
    args = dict(
        opts = 'default',
        dump = '0',
        passno = '0',
        fstab = '/etc/fstab'
    )
    args.update(kwargs)
    to_write = []
    changed = False
    escaped_name = _escape_fstab(args['name'])
    # Read through a context manager so the handle is closed deterministically
    # (the original code leaked the open file object).
    with open(args['fstab'], 'r') as fstab_file:
        fstab_lines = fstab_file.readlines()
    for line in fstab_lines:
        if not line.strip():
            to_write.append(line)
            continue
        if line.strip().startswith('#'):
            to_write.append(line)
            continue
        if len(line.split()) != 6:
            # not a well-formed 6-field fstab entry -- not ours, leave it be
            to_write.append(line)
            continue
        ld = {}
        ld['src'], ld['name'], ld['fstype'], ld['opts'], ld['dump'], ld['passno'] = line.split()
        if ld['name'] != escaped_name:
            to_write.append(line)
            continue
        # matching entry found: drop it (do not append) and mark changed
        changed = True
    if changed and not module.check_mode:
        write_fstab(to_write, args['fstab'])
    return (args['name'], changed)
def mount(module, **kwargs):
    """ mount up a path or remount if needed

    Returns (0, '') on success or (rc, stdout+stderr) on failure.
    """
    # kwargs: name, src, fstype, opts, dump, passno, state, fstab=/etc/fstab
    # NOTE(review): opts defaults to 'default' here vs 'defaults' in
    # set_mount -- harmless because opts is not used by this function.
    args = dict(
        opts = 'default',
        dump = '0',
        passno = '0',
        fstab = '/etc/fstab'
    )
    args.update(kwargs)
    mount_bin = module.get_bin_path('mount')
    name = kwargs['name']
    cmd = [ mount_bin, ]
    # already mounted -> ask for a remount instead of a fresh mount
    if ismount(name):
        cmd += [ '-o', 'remount', ]
    # presumably '-F <fstab>' selects an alternate fstab on FreeBSD's
    # mount(8) -- confirm against that platform's man page
    if get_platform().lower() == 'freebsd':
        cmd += [ '-F', args['fstab'], ]
    cmd += [ name, ]
    rc, out, err = module.run_command(cmd)
    if rc == 0:
        return 0, ''
    else:
        return rc, out+err
def umount(module, **kwargs):
    """Unmount the path given as kwargs['name'].

    Returns (0, '') on success or (rc, stdout+stderr) on failure.
    """
    cmd = [module.get_bin_path('umount'), kwargs['name']]
    rc, out, err = module.run_command(cmd)
    if rc != 0:
        return rc, out + err
    return 0, ''
def main():
    """Module entry point: dispatch on 'state' to manage /etc/fstab
    entries and the live mount state of the named path."""
    module = AnsibleModule(
        argument_spec = dict(
            state = dict(required=True, choices=['present', 'absent', 'mounted', 'unmounted']),
            name = dict(required=True),
            opts = dict(default=None),
            passno = dict(default=None, type='str'),
            dump = dict(default=None),
            src = dict(required=False),
            fstype = dict(required=False),
            fstab = dict(default='/etc/fstab')
        ),
        supports_check_mode=True,
        required_if = (
            ['state', 'mounted', ['src', 'fstype']],
            ['state', 'present', ['src', 'fstype']]
        )
    )
    changed = False
    rc = 0
    # Only forward parameters the user actually supplied, so the helper
    # functions can apply their own defaults for the rest.
    args = {'name': module.params['name']}
    if module.params['src'] is not None:
        args['src'] = module.params['src']
    if module.params['fstype'] is not None:
        args['fstype'] = module.params['fstype']
    if module.params['passno'] is not None:
        args['passno'] = module.params['passno']
    if module.params['opts'] is not None:
        args['opts'] = module.params['opts']
    if module.params['dump'] is not None:
        args['dump'] = module.params['dump']
    if module.params['fstab'] is not None:
        args['fstab'] = module.params['fstab']
    # if fstab file does not exist, we first need to create it. This mainly
    # happens when fstab option is passed to the module.
    if not os.path.exists(args['fstab']):
        if not os.path.exists(os.path.dirname(args['fstab'])):
            os.makedirs(os.path.dirname(args['fstab']))
        open(args['fstab'],'a').close()
    # absent == remove from fstab and unmounted
    # unmounted == do not change fstab state, but unmount
    # present == add to fstab, do not change mount state
    # mounted == add to fstab if not there and make sure it is mounted, if it has changed in fstab then remount it
    state = module.params['state']
    name = module.params['name']
    if state == 'absent':
        name, changed = unset_mount(module, **args)
        if changed and not module.check_mode:
            if ismount(name):
                res,msg = umount(module, **args)
                if res:
                    module.fail_json(msg="Error unmounting %s: %s" % (name, msg))
            # the mount point directory itself is removed as well
            if os.path.exists(name):
                try:
                    os.rmdir(name)
                except (OSError, IOError):
                    e = get_exception()
                    module.fail_json(msg="Error rmdir %s: %s" % (name, str(e)))
        module.exit_json(changed=changed, **args)
    if state == 'unmounted':
        if ismount(name):
            if not module.check_mode:
                res,msg = umount(module, **args)
                if res:
                    module.fail_json(msg="Error unmounting %s: %s" % (name, msg))
            changed = True
        module.exit_json(changed=changed, **args)
    if state in ['mounted', 'present']:
        if state == 'mounted':
            # create the mount point directory if needed
            if not os.path.exists(name) and not module.check_mode:
                try:
                    os.makedirs(name)
                except (OSError, IOError):
                    e = get_exception()
                    module.fail_json(msg="Error making dir %s: %s" % (name, str(e)))
        name, changed = set_mount(module, **args)
        if state == 'mounted':
            res = 0
            if ismount(name):
                if changed and not module.check_mode:
                    res,msg = mount(module, **args)
            elif 'bind' in args.get('opts', []):
                changed = True
                # bind mounts don't show via ismount(); scan `mount -l`
                # output to see whether this exact bind is already active
                cmd = 'mount -l'
                rc, out, err = module.run_command(cmd)
                allmounts = out.split('\n')
                for mounts in allmounts[:-1]:
                    arguments = mounts.split()
                    if arguments[0] == args['src'] and arguments[2] == args['name'] and arguments[4] == args['fstype']:
                        changed = False
                if changed:
                    res,msg = mount(module, **args)
            else:
                changed = True
                if not module.check_mode:
                    res,msg = mount(module, **args)
            if res:
                module.fail_json(msg="Error mounting %s: %s" % (name, msg))
        module.exit_json(changed=changed, **args)
    module.fail_json(msg='Unexpected position reached')
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ismount import *
main()
| gpl-3.0 |
sestrella/ansible | test/units/modules/network/onyx/test_onyx_vlan.py | 68 | 3956 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch
from ansible.modules.network.onyx import onyx_vlan
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxVlanModule(TestOnyxModule):
    """Unit tests for the onyx_vlan module: verify which CLI commands are
    generated for each vlan operation, with all device access mocked."""

    module = onyx_vlan

    def setUp(self):
        # Patch config retrieval, config loading and OS version lookup so
        # no real device is contacted during the tests.
        super(TestOnyxVlanModule, self).setUp()
        self.mock_get_config = patch.object(
            onyx_vlan.OnyxVlanModule, "_get_vlan_config")
        self.get_config = self.mock_get_config.start()
        self.mock_load_config = patch(
            'ansible.module_utils.network.onyx.onyx.load_config')
        self.load_config = self.mock_load_config.start()
        self.mock_get_version = patch.object(
            onyx_vlan.OnyxVlanModule, "_get_os_version")
        self.get_version = self.mock_get_version.start()

    def tearDown(self):
        # Stop every patch started in setUp.
        super(TestOnyxVlanModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_get_version.stop()

    def load_fixtures(self, commands=None, transport='cli'):
        # All tests run against the same canned device config fixture.
        config_file = 'onyx_vlan_show.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None
        self.get_version.return_value = "3.6.5000"

    def test_vlan_no_change(self):
        """An existing vlan with no attribute change produces no commands."""
        set_module_args(dict(vlan_id=20))
        self.execute_module(changed=False)

    def test_vlan_remove_name(self):
        """Setting an empty name removes the configured vlan name."""
        set_module_args(dict(vlan_id=10, name=''))
        commands = ['vlan 10 no name']
        self.execute_module(changed=True, commands=commands)

    def test_vlan_change_name(self):
        """Changing the vlan name emits a 'vlan <id> name' command."""
        set_module_args(dict(vlan_id=10, name='test-test'))
        commands = ['vlan 10 name test-test']
        self.execute_module(changed=True, commands=commands)

    def test_vlan_create(self):
        """Creating a new vlan enters and exits vlan config mode."""
        set_module_args(dict(vlan_id=30))
        commands = ['vlan 30', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_vlan_create_with_name(self):
        """Creating a vlan with a name also sets the name."""
        set_module_args(dict(vlan_id=30, name='test-test'))
        commands = ['vlan 30', 'exit', 'vlan 30 name test-test']
        self.execute_module(changed=True, commands=commands)

    def test_vlan_remove(self):
        """state=absent on an existing vlan emits 'no vlan'."""
        set_module_args(dict(vlan_id=20, state='absent'))
        commands = ['no vlan 20']
        self.execute_module(changed=True, commands=commands)

    def test_vlan_remove_not_exist(self):
        """state=absent on a missing vlan is a no-op."""
        set_module_args(dict(vlan_id=30, state='absent'))
        self.execute_module(changed=False)

    def test_vlan_aggregate(self):
        """Aggregate creates only the vlans that don't already exist."""
        aggregate = list()
        aggregate.append(dict(vlan_id=30))
        aggregate.append(dict(vlan_id=20))
        set_module_args(dict(aggregate=aggregate))
        commands = ['vlan 30', 'exit']
        self.execute_module(changed=True, commands=commands)

    def test_vlan_aggregate_purge(self):
        """purge=True additionally removes vlans absent from the aggregate."""
        aggregate = list()
        aggregate.append(dict(vlan_id=30))
        aggregate.append(dict(vlan_id=20))
        set_module_args(dict(aggregate=aggregate, purge=True))
        commands = ['vlan 30', 'exit', 'no vlan 10', 'no vlan 1']
        self.execute_module(changed=True, commands=commands)
| gpl-3.0 |
sklam/numba | numba/np/npyfuncs.py | 2 | 58451 | """Codegen for functions used as kernels in NumPy functions
Typically, the kernels of several ufuncs that can't map directly to
Python builtins
"""
import math
from llvmlite.llvmpy import core as lc
from numba.core.imputils import impl_ret_untracked
from numba.core import typing, types, errors, lowering, cgutils
from numba.np import npdatetime
from numba.cpython import cmathimpl, mathimpl, numbers
# some NumPy constants. Note that we could generate some of them using
# the math library, but having the values copied from npy_math seems to
# yield more accurate results
_NPY_LOG2E = 1.442695040888963407359924681001892137 # math.log(math.e, 2)
_NPY_LOG10E = 0.434294481903251827651128918916605082 # math.log(math.e, 10)
_NPY_LOGE2 = 0.693147180559945309417232121458176568 # math.log(2)
def _check_arity_and_homogeneity(sig, args, arity, return_type = None):
"""checks that the following are true:
- args and sig.args have arg_count elements
- all input types are homogeneous
- return type is 'return_type' if provided, otherwise it must be
homogeneous with the input types.
"""
assert len(args) == arity
assert len(sig.args) == arity
ty = sig.args[0]
if return_type is None:
return_type = ty
# must have homogeneous args
if not( all(arg==ty for arg in sig.args) and sig.return_type == return_type):
import inspect
fname = inspect.currentframe().f_back.f_code.co_name
msg = '{0} called with invalid types: {1}'.format(fname, sig)
assert False, msg
def _call_func_by_name_with_cast(context, builder, sig, args,
                                 func_name, ty=types.float64):
    """Call external function *func_name* after casting all arguments to
    *ty*, then cast its result back to ``sig.return_type``.

    It is quite common in NumPy to implement a loop as a call to the
    double version of a function wrapped in casts; this helper
    facilitates that.
    """
    mod = builder.module
    lty = context.get_argument_type(ty)
    fnty = lc.Type.function(lty, [lty]*len(sig.args))
    fn = cgutils.insert_pure_function(mod, fnty, name=func_name)
    cast_args = [context.cast(builder, arg, argty, ty)
                 for arg, argty in zip(args, sig.args) ]
    result = builder.call(fn, cast_args)
    # BUGFIX: the call result has type `ty` (the function returns lty),
    # not necessarily float64, so the cast back to the return type must
    # name `ty` as the source type.  The old hard-coded types.float64 was
    # only correct for the default value of `ty`.
    return context.cast(builder, result, ty, sig.return_type)
def _dispatch_func_by_name_type(context, builder, sig, args, table, user_name):
    """Look up an external function name in *table* keyed by the first
    argument type and emit a call to it, handling the by-pointer calling
    convention for complex types.  Raises LoweringError when *table* has
    no entry for the type."""
    # for most cases the functions are homogeneous on all their types.
    # this code dispatches on the first argument type as it is the most useful
    # for our uses (all cases but ldexp are homogeneous in all types, and
    # dispatching on the first argument type works of ldexp as well)
    #
    # assumes that the function pointed by func_name has the type
    # signature sig (but needs translation to llvm types).
    ty = sig.args[0]
    try:
        func_name = table[ty]
    except KeyError as e:
        msg = "No {0} function for real type {1}".format(user_name, str(e))
        raise errors.LoweringError(msg)
    mod = builder.module
    if ty in types.complex_domain:
        # In numba struct types are always passed by pointer. So the call has to
        # be transformed from "result = func(ops...)" to "func(&result, ops...).
        # note that the result value pointer as first argument is the convention
        # used by numba.
        # First, prepare the return value
        out = context.make_complex(builder, ty)
        ptrargs = [cgutils.alloca_once_value(builder, arg)
                   for arg in args]
        call_args = [out._getpointer()] + ptrargs
        # get_value_as_argument for struct types like complex allocate stack space
        # and initialize with the value, the return value is the pointer to that
        # allocated space (ie: pointer to a copy of the value in the stack).
        # get_argument_type returns a pointer to the struct type in consonance.
        call_argtys = [ty] + list(sig.args)
        call_argltys = [context.get_value_type(ty).as_pointer()
                        for ty in call_argtys]
        fnty = lc.Type.function(lc.Type.void(), call_argltys)
        # Note: the function isn't pure here (it writes to its pointer args)
        fn = mod.get_or_insert_function(fnty, name=func_name)
        builder.call(fn, call_args)
        retval = builder.load(call_args[0])
    else:
        argtypes = [context.get_argument_type(aty) for aty in sig.args]
        restype = context.get_argument_type(sig.return_type)
        fnty = lc.Type.function(restype, argtypes)
        fn = cgutils.insert_pure_function(mod, fnty, name=func_name)
        retval = context.call_external_function(builder, fn, sig.args, args)
    return retval
########################################################################
# Division kernels inspired by NumPy loops.c.src code
#
# The builtins are not applicable as they rely on a test for zero in the
# denominator. If it is zero the appropriate exception is raised.
# In NumPy, a division by zero does not raise an exception, but instead
# generated a known value. Note that a division by zero in any of the
# operations of a vector may raise an exception or issue a warning
# depending on the np.seterr configuration. This is not supported
# right now (and in any case, it won't be handled by these functions
# either)

def np_int_sdiv_impl(context, builder, sig, args):
    """Signed integer floor division with NumPy semantics: division by
    zero (and MIN_INT // -1, which would trap in hardware) yields 0, and
    the quotient is adjusted towards -inf when operand signs differ."""
    # based on the actual code in NumPy loops.c.src for signed integer types
    _check_arity_and_homogeneity(sig, args, 2)
    num, den = args
    ty = sig.args[0]  # any arg type will do, homogeneous
    ZERO = context.get_constant(ty, 0)
    MINUS_ONE = context.get_constant(ty, -1)
    # bit pattern 1 << (width-1) is the most negative value of the type
    MIN_INT = context.get_constant(ty, 1 << (den.type.width-1))
    den_is_zero = builder.icmp(lc.ICMP_EQ, ZERO, den)
    den_is_minus_one = builder.icmp(lc.ICMP_EQ, MINUS_ONE, den)
    num_is_min_int = builder.icmp(lc.ICMP_EQ, MIN_INT, num)
    could_cause_sigfpe = builder.and_(den_is_minus_one, num_is_min_int)
    force_zero = builder.or_(den_is_zero, could_cause_sigfpe)
    with builder.if_else(force_zero, likely=False) as (then, otherwise):
        with then:
            bb_then = builder.basic_block
        with otherwise:
            bb_otherwise = builder.basic_block
            div = builder.sdiv(num, den)
            mod = builder.srem(num, den)
            num_gt_zero = builder.icmp(lc.ICMP_SGT, num, ZERO)
            den_gt_zero = builder.icmp(lc.ICMP_SGT, den, ZERO)
            not_same_sign = builder.xor(num_gt_zero, den_gt_zero)
            mod_not_zero = builder.icmp(lc.ICMP_NE, mod, ZERO)
            needs_fixing = builder.and_(not_same_sign, mod_not_zero)
            # truncating sdiv rounds towards 0; subtract 1 to floor it
            fix_value = builder.select(needs_fixing, MINUS_ONE, ZERO)
            result_otherwise = builder.add(div, fix_value)
    result = builder.phi(ZERO.type)
    result.add_incoming(ZERO, bb_then)
    result.add_incoming(result_otherwise, bb_otherwise)
    return result
def np_int_srem_impl(context, builder, sig, args):
    """Signed integer remainder with NumPy (Python) semantics: the result
    takes the sign of the denominator, and x % 0 yields 0."""
    # based on the actual code in NumPy loops.c.src for signed integers
    _check_arity_and_homogeneity(sig, args, 2)
    num, den = args
    ty = sig.args[0]  # any arg type will do, homogeneous
    ZERO = context.get_constant(ty, 0)
    den_not_zero = builder.icmp(lc.ICMP_NE, ZERO, den)
    bb_no_if = builder.basic_block
    with cgutils.if_unlikely(builder, den_not_zero):
        bb_if = builder.basic_block
        mod = builder.srem(num,den)
        num_gt_zero = builder.icmp(lc.ICMP_SGT, num, ZERO)
        den_gt_zero = builder.icmp(lc.ICMP_SGT, den, ZERO)
        not_same_sign = builder.xor(num_gt_zero, den_gt_zero)
        mod_not_zero = builder.icmp(lc.ICMP_NE, mod, ZERO)
        needs_fixing = builder.and_(not_same_sign, mod_not_zero)
        # srem truncates towards 0; add `den` to flip to the denominator's sign
        fix_value = builder.select(needs_fixing, den, ZERO)
        final_mod = builder.add(fix_value, mod)
    result = builder.phi(ZERO.type)
    result.add_incoming(ZERO, bb_no_if)
    result.add_incoming(final_mod, bb_if)
    return result
def np_int_sdivrem_impl(context, builder, sig, args):
    """Signed integer divmod: pair the floor-division and remainder kernels."""
    quot_sig = sig.return_type[0](*sig.args)
    rem_sig = sig.return_type[1](*sig.args)
    quotient = np_int_sdiv_impl(context, builder, quot_sig, args)
    remainder = np_int_srem_impl(context, builder, rem_sig, args)
    return context.make_tuple(builder, sig.return_type, [quotient, remainder])
def np_int_udiv_impl(context, builder, sig, args):
    """Unsigned integer division with NumPy semantics: x // 0 yields 0
    instead of trapping."""
    _check_arity_and_homogeneity(sig, args, 2)
    num, den = args
    ty = sig.args[0]  # any arg type will do, homogeneous
    ZERO = context.get_constant(ty, 0)
    div_by_zero = builder.icmp(lc.ICMP_EQ, ZERO, den)
    with builder.if_else(div_by_zero, likely=False) as (then, otherwise):
        with then:
            # division by zero
            bb_then = builder.basic_block
        with otherwise:
            # divide!
            div = builder.udiv(num, den)
            bb_otherwise = builder.basic_block
    result = builder.phi(ZERO.type)
    result.add_incoming(ZERO, bb_then)
    result.add_incoming(div, bb_otherwise)
    return result
def np_int_urem_impl(context, builder, sig, args):
    """Unsigned integer remainder with NumPy semantics: x % 0 yields 0."""
    # based on the actual code in NumPy loops.c.src for signed integers
    _check_arity_and_homogeneity(sig, args, 2)
    num, den = args
    ty = sig.args[0]  # any arg type will do, homogeneous
    ZERO = context.get_constant(ty, 0)
    den_not_zero = builder.icmp(lc.ICMP_NE, ZERO, den)
    bb_no_if = builder.basic_block
    with cgutils.if_unlikely(builder, den_not_zero):
        bb_if = builder.basic_block
        mod = builder.urem(num,den)
    result = builder.phi(ZERO.type)
    result.add_incoming(ZERO, bb_no_if)
    result.add_incoming(mod, bb_if)
    return result
def np_int_udivrem_impl(context, builder, sig, args):
    """Unsigned integer divmod: pair the division and remainder kernels."""
    quot_sig = sig.return_type[0](*sig.args)
    rem_sig = sig.return_type[1](*sig.args)
    quotient = np_int_udiv_impl(context, builder, quot_sig, args)
    remainder = np_int_urem_impl(context, builder, rem_sig, args)
    return context.make_tuple(builder, sig.return_type, [quotient, remainder])
# implementation of int_fmod is in fact the same as the unsigned remainder,
# that is: srem with a special case returning 0 when the denominator is 0.
# (alias, not a wrapper: both names refer to the same function object)
np_int_fmod_impl = np_int_urem_impl
def np_real_div_impl(context, builder, sig, args):
    """Floating-point division; a plain fdiv already matches NumPy's
    NaN/Inf/-Inf behaviour for division by zero."""
    # in NumPy real div has the same semantics as an fdiv for generating
    # NANs, INF and NINF
    _check_arity_and_homogeneity(sig, args, 2)
    return builder.fdiv(*args)
def np_real_mod_impl(context, builder, sig, args):
    """Floating-point remainder with the sign of the denominator
    (NumPy's `remainder`, i.e. Python % semantics)."""
    # note: this maps to NumPy remainder, which has the same semantics as Python
    # based on code in loops.c.src
    _check_arity_and_homogeneity(sig, args, 2)
    in1, in2 = args
    ty = sig.args[0]
    ZERO = context.get_constant(ty, 0.0)
    res = builder.frem(in1, in2)
    res_ne_zero = builder.fcmp(lc.FCMP_ONE, res, ZERO)
    den_lt_zero = builder.fcmp(lc.FCMP_OLT, in2, ZERO)
    res_lt_zero = builder.fcmp(lc.FCMP_OLT, res, ZERO)
    # frem keeps the numerator's sign; add the denominator once when the
    # nonzero result and denominator disagree in sign
    needs_fixing = builder.and_(res_ne_zero,
                                builder.xor(den_lt_zero, res_lt_zero))
    fix_value = builder.select(needs_fixing, in2, ZERO)
    return builder.fadd(res, fix_value)
def np_real_fmod_impl(context, builder, sig, args):
    """NumPy fmod on reals: C-style remainder, i.e. a plain frem."""
    _check_arity_and_homogeneity(sig, args, 2)
    dividend, divisor = args
    return builder.frem(dividend, divisor)
def _fabs(context, builder, arg):
    """Emit a branchless floating-point absolute value for *arg*."""
    zero = lc.Constant.real(arg.type, 0.0)
    negated = builder.fsub(zero, arg)
    is_negative = builder.fcmp(lc.FCMP_OLT, arg, zero)
    # select(cond, a, b): -arg when negative, arg otherwise
    return builder.select(is_negative, negated, arg)
def np_complex_div_impl(context, builder, sig, args):
    """Lower NumPy complex division using Smith's algorithm.

    Scales by the larger-magnitude component of the denominator to limit
    intermediate overflow/underflow; division by 0+0j falls through to
    fdiv so NaN/Inf results come out as NumPy expects.
    """
    # Extracted from numpy/core/src/umath/loops.c.src,
    # inspired by complex_div_impl
    # variables named coherent with loops.c.src
    # This is implemented using the approach described in
    #   R.L. Smith. Algorithm 116: Complex division.
    #   Communications of the ACM, 5(8):435, 1962
    in1, in2 = [context.make_complex(builder, sig.args[0], value=arg)
                for arg in args]
    in1r = in1.real  # numerator.real
    in1i = in1.imag  # numerator.imag
    in2r = in2.real  # denominator.real
    in2i = in2.imag  # denominator.imag
    ftype = in1r.type
    assert all([i.type==ftype for i in [in1r, in1i, in2r, in2i]]), "mismatched types"
    out = context.make_helper(builder, sig.return_type)
    ZERO = lc.Constant.real(ftype, 0.0)
    ONE = lc.Constant.real(ftype, 1.0)
    # if abs(denominator.real) >= abs(denominator.imag)
    in2r_abs = _fabs(context, builder, in2r)
    in2i_abs = _fabs(context, builder, in2i)
    in2r_abs_ge_in2i_abs = builder.fcmp(lc.FCMP_OGE, in2r_abs, in2i_abs)
    with builder.if_else(in2r_abs_ge_in2i_abs) as (then, otherwise):
        with then:
            # if abs(denominator.real) == 0 and abs(denominator.imag) == 0
            in2r_is_zero = builder.fcmp(lc.FCMP_OEQ, in2r_abs, ZERO)
            in2i_is_zero = builder.fcmp(lc.FCMP_OEQ, in2i_abs, ZERO)
            in2_is_zero = builder.and_(in2r_is_zero, in2i_is_zero)
            with builder.if_else(in2_is_zero) as (inn_then, inn_otherwise):
                with inn_then:
                    # division by 0.
                    # fdiv generates the appropriate NAN/INF/NINF
                    out.real = builder.fdiv(in1r, in2r_abs)
                    out.imag = builder.fdiv(in1i, in2i_abs)
                with inn_otherwise:
                    # general case for:
                    # abs(denominator.real) > abs(denominator.imag)
                    rat = builder.fdiv(in2i, in2r)
                    # scl = 1.0/(in2r + in2i*rat)
                    tmp1 = builder.fmul(in2i, rat)
                    tmp2 = builder.fadd(in2r, tmp1)
                    scl = builder.fdiv(ONE, tmp2)
                    # out.real = (in1r + in1i*rat)*scl
                    # out.imag = (in1i - in1r*rat)*scl
                    tmp3 = builder.fmul(in1i, rat)
                    tmp4 = builder.fmul(in1r, rat)
                    tmp5 = builder.fadd(in1r, tmp3)
                    tmp6 = builder.fsub(in1i, tmp4)
                    out.real = builder.fmul(tmp5, scl)
                    out.imag = builder.fmul(tmp6, scl)
        with otherwise:
            # general case for:
            # abs(denominator.imag) > abs(denominator.real)
            rat = builder.fdiv(in2r, in2i)
            # scl = 1.0/(in2i + in2r*rat)
            tmp1 = builder.fmul(in2r, rat)
            tmp2 = builder.fadd(in2i, tmp1)
            scl = builder.fdiv(ONE, tmp2)
            # out.real = (in1r*rat + in1i)*scl
            # out.imag = (in1i*rat - in1r)*scl
            tmp3 = builder.fmul(in1r, rat)
            tmp4 = builder.fmul(in1i, rat)
            tmp5 = builder.fadd(tmp3, in1i)
            tmp6 = builder.fsub(tmp4, in1r)
            out.real = builder.fmul(tmp5, scl)
            out.imag = builder.fmul(tmp6, scl)
    return out._getvalue()
########################################################################
# NumPy logaddexp

def np_real_logaddexp_impl(context, builder, sig, args):
    """Lower np.logaddexp via the npy_logaddexp* C helper functions."""
    _check_arity_and_homogeneity(sig, args, 2)
    # dispatch on the (homogeneous) floating-point argument type
    impl_names = {
        types.float32: 'npy_logaddexpf',
        types.float64: 'npy_logaddexp',
    }
    return _dispatch_func_by_name_type(context, builder, sig, args,
                                       impl_names, 'logaddexp')
########################################################################
# NumPy logaddexp2

def np_real_logaddexp2_impl(context, builder, sig, args):
    """Lower np.logaddexp2 via the npy_logaddexp2* C helper functions."""
    _check_arity_and_homogeneity(sig, args, 2)
    # dispatch on the (homogeneous) floating-point argument type
    impl_names = {
        types.float32: 'npy_logaddexp2f',
        types.float64: 'npy_logaddexp2',
    }
    return _dispatch_func_by_name_type(context, builder, sig, args,
                                       impl_names, 'logaddexp2')
########################################################################
# true div kernels

def np_int_truediv_impl(context, builder, sig, args):
    """Integer true division: cast both operands to float64 and fdiv,
    so the result is always a double (NumPy semantics)."""
    # in NumPy we don't check for 0 denominator... fdiv handles div by
    # 0 in the way NumPy expects..
    # integer truediv always yields double
    num, den = args
    lltype = num.type
    assert all(i.type==lltype for i in args), "must have homogeneous types"
    numty, denty = sig.args
    num = context.cast(builder, num, numty, types.float64)
    den = context.cast(builder, den, denty, types.float64)
    return builder.fdiv(num,den)
########################################################################
# floor div kernels

def np_real_floor_div_impl(context, builder, sig, args):
    """Floating-point floor division: a real division followed by floor.

    np_real_floor_impl is defined elsewhere in this module.
    """
    res = np_real_div_impl(context, builder, sig, args)
    s = typing.signature(sig.return_type, sig.return_type)
    return np_real_floor_impl(context, builder, s, (res,))
def np_real_divmod_impl(context, builder, sig, args):
    """np.divmod on reals: (floor quotient, Python-style remainder) tuple."""
    quot_sig = sig.return_type[0](*sig.args)
    rem_sig = sig.return_type[1](*sig.args)
    quotient = np_real_floor_div_impl(context, builder, quot_sig, args)
    remainder = np_real_mod_impl(context, builder, rem_sig, args)
    return context.make_tuple(builder, sig.return_type, [quotient, remainder])
def np_complex_floor_div_impl(context, builder, sig, args):
    """Lower NumPy complex floor division.

    The complex floor used here is floor(real part) with a zero
    imaginary part, so only the real quotient is ever computed.
    """
    # this is based on the complex floor divide in Numpy's loops.c.src
    # This is basically a full complex division with a complex floor
    # applied.
    # The complex floor seems to be defined as the real floor applied
    # with the real part and zero in the imaginary part. Fully developed
    # so it avoids computing anything related to the imaginary result.
    float_kind = sig.args[0].underlying_float
    floor_sig = typing.signature(float_kind, float_kind)
    in1, in2 = [context.make_complex(builder, sig.args[0], value=arg)
                for arg in args]
    in1r = in1.real
    in1i = in1.imag
    in2r = in2.real
    in2i = in2.imag
    ftype = in1r.type
    assert all([i.type==ftype for i in [in1r, in1i, in2r, in2i]]), "mismatched types"
    ZERO = lc.Constant.real(ftype, 0.0)
    out = context.make_helper(builder, sig.return_type)
    out.imag = ZERO
    # same Smith-style scaling as np_complex_div_impl: branch on which
    # denominator component has the larger magnitude
    in2r_abs = _fabs(context, builder, in2r)
    in2i_abs = _fabs(context, builder, in2i)
    in2r_abs_ge_in2i_abs = builder.fcmp(lc.FCMP_OGE, in2r_abs, in2i_abs)
    with builder.if_else(in2r_abs_ge_in2i_abs) as (then, otherwise):
        with then:
            rat = builder.fdiv(in2i, in2r)
            # out.real = floor((in1r+in1i*rat)/(in2r + in2i*rat))
            tmp1 = builder.fmul(in1i, rat)
            tmp2 = builder.fmul(in2i, rat)
            tmp3 = builder.fadd(in1r, tmp1)
            tmp4 = builder.fadd(in2r, tmp2)
            tmp5 = builder.fdiv(tmp3, tmp4)
            out.real = np_real_floor_impl(context, builder, floor_sig, (tmp5,))
        with otherwise:
            rat = builder.fdiv(in2r, in2i)
            # out.real = floor((in1i + in1r*rat)/(in2i + in2r*rat))
            tmp1 = builder.fmul(in1r, rat)
            tmp2 = builder.fmul(in2r, rat)
            tmp3 = builder.fadd(in1i, tmp1)
            tmp4 = builder.fadd(in2i, tmp2)
            tmp5 = builder.fdiv(tmp3, tmp4)
            out.real = np_real_floor_impl(context, builder, floor_sig, (tmp5,))
    return out._getvalue()
########################################################################
# numpy power funcs

def np_complex_power_impl(context, builder, sig, args):
    """Complex power: delegate to the shared complex_power lowering."""
    _check_arity_and_homogeneity(sig, args, 2)
    return numbers.complex_power_impl(context, builder, sig, args)
########################################################################
# numpy greatest common denominator
def np_gcd_impl(context, builder, sig, args):
    """Greatest common divisor of two integers; delegates to the math.gcd lowering."""
    _check_arity_and_homogeneity(sig, args, 2)
    return mathimpl.gcd_impl(context, builder, sig, args)
########################################################################
# numpy lowest common multiple
def np_lcm_impl(context, builder, sig, args):
    """Lowest common multiple of two integers.

    Compiles a small pure-Python kernel with numba itself. The kernel
    returns 0 when the first operand is 0 and otherwise a non-negative
    result via abs(); it relies on np.gcd for the reduction.
    """
    import numpy as np
    xty, yty = sig.args
    # Homogeneous signature expected: both inputs and the output share a type.
    assert xty == yty == sig.return_type
    def lcm(a, b):
        """
        Like gcd, heavily cribbed from Julia.
        """
        return 0 if a == 0 else abs(a * (b // np.gcd(b, a)))
    # Removed the unused `x, y = args` unpacking present previously; the
    # raw `args` are passed straight to compile_internal.
    res = context.compile_internal(builder, lcm, sig, args)
    return impl_ret_untracked(context, builder, sig.return_type, res)
########################################################################
# Numpy style complex sign
def np_complex_sign_impl(context, builder, sig, args):
    """Lower np.sign for a complex operand.

    The result has a zero imaginary part and a real part of 1.0, -1.0,
    0.0 or NaN, chosen by lexicographically comparing the operand
    against complex zero.
    """
    # equivalent to complex sign in NumPy's sign
    # but implemented via selects, balancing the 4 cases.
    _check_arity_and_homogeneity(sig, args, 1)
    op = args[0]
    ty = sig.args[0]
    float_ty = ty.underlying_float
    # Candidate real parts of the result.
    ZERO = context.get_constant(float_ty, 0.0)
    ONE = context.get_constant(float_ty, 1.0)
    MINUS_ONE = context.get_constant(float_ty, -1.0)
    NAN = context.get_constant(float_ty, float('nan'))
    # `result` starts as complex zero; it doubles as the comparison
    # operand below and as the returned value.
    result = context.make_complex(builder, ty)
    result.real = ZERO
    result.imag = ZERO
    cmp_sig = typing.signature(types.boolean, *[ty] * 2)
    cmp_args = [op, result._getvalue()]
    # Compare op against complex zero with NumPy's lexicographic ordering.
    arg1_ge_arg2 = np_complex_ge_impl(context, builder, cmp_sig, cmp_args)
    arg1_eq_arg2 = np_complex_eq_impl(context, builder, cmp_sig, cmp_args)
    arg1_lt_arg2 = np_complex_lt_impl(context, builder, cmp_sig, cmp_args)
    # ge branch: 0 when op == 0, else 1.  Non-ge branch: -1 when op < 0,
    # else NaN (unordered case).
    real_when_ge = builder.select(arg1_eq_arg2, ZERO, ONE)
    real_when_nge = builder.select(arg1_lt_arg2, MINUS_ONE, NAN)
    result.real = builder.select(arg1_ge_arg2, real_when_ge, real_when_nge)
    return result._getvalue()
########################################################################
# Numpy rint

def np_real_rint_impl(context, builder, sig, args):
    """Round to nearest integral value via the llvm.rint intrinsic."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.call_fp_intrinsic(builder, 'llvm.rint', args)


def np_complex_rint_impl(context, builder, sig, args):
    """Complex rint: apply real rint to the real and imaginary parts
    independently (matching NumPy's funcs.inc.src definition)."""
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    fsig = typing.signature(flt_ty, flt_ty)
    src = context.make_complex(builder, cplx_ty, value=args[0])
    res = context.make_complex(builder, cplx_ty)
    res.real = np_real_rint_impl(context, builder, fsig, [src.real])
    res.imag = np_real_rint_impl(context, builder, fsig, [src.imag])
    return res._getvalue()
########################################################################
# NumPy exp
def np_real_exp_impl(context, builder, sig, args):
    """Real exponential; delegates to the math.exp lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.exp_impl(context, builder, sig, args)
def np_complex_exp_impl(context, builder, sig, args):
    """Complex exponential; delegates to the cmath.exp lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return cmathimpl.exp_impl(context, builder, sig, args)
########################################################################
# NumPy exp2

def np_real_exp2_impl(context, builder, sig, args):
    """Real 2**x via the npymath exp2 functions."""
    _check_arity_and_homogeneity(sig, args, 1)
    fn_by_type = {
        types.float32: 'npy_exp2f',
        types.float64: 'npy_exp2',
    }
    return _dispatch_func_by_name_type(context, builder, sig, args,
                                       fn_by_type, 'exp2')


def np_complex_exp2_impl(context, builder, sig, args):
    """Complex 2**z computed as exp(z * ln 2)."""
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    ln2 = context.get_constant(flt_ty, _NPY_LOGE2)
    z = context.make_complex(builder, cplx_ty, value=args[0])
    scaled = context.make_complex(builder, cplx_ty)
    scaled.real = builder.fmul(ln2, z.real)
    scaled.imag = builder.fmul(ln2, z.imag)
    return np_complex_exp_impl(context, builder, sig, [scaled._getvalue()])
########################################################################
# NumPy log
def np_real_log_impl(context, builder, sig, args):
    """Real natural logarithm; delegates to the math.log lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.log_impl(context, builder, sig, args)
def np_complex_log_impl(context, builder, sig, args):
    """Complex natural logarithm; delegates to the cmath.log lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return cmathimpl.log_impl(context, builder, sig, args)
########################################################################
# NumPy log2

def np_real_log2_impl(context, builder, sig, args):
    """Real base-2 logarithm via the npymath log2 functions."""
    _check_arity_and_homogeneity(sig, args, 1)
    fn_by_type = {
        types.float32: 'npy_log2f',
        types.float64: 'npy_log2',
    }
    return _dispatch_func_by_name_type(context, builder, sig, args,
                                       fn_by_type, 'log2')


def np_complex_log2_impl(context, builder, sig, args):
    """Complex base-2 logarithm computed as log(z) * log2(e)."""
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    natural = np_complex_log_impl(context, builder, sig, args)
    res = context.make_complex(builder, cplx_ty, value=natural)
    scale = context.get_constant(flt_ty, _NPY_LOG2E)
    res.real = builder.fmul(scale, res.real)
    res.imag = builder.fmul(scale, res.imag)
    return res._getvalue()
########################################################################
# NumPy log10

def np_real_log10_impl(context, builder, sig, args):
    """Real base-10 logarithm; delegates to the math.log10 lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.log10_impl(context, builder, sig, args)


def np_complex_log10_impl(context, builder, sig, args):
    """Complex base-10 logarithm computed as log(z) * log10(e)."""
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    natural = np_complex_log_impl(context, builder, sig, args)
    res = context.make_complex(builder, cplx_ty, value=natural)
    scale = context.get_constant(flt_ty, _NPY_LOG10E)
    res.real = builder.fmul(scale, res.real)
    res.imag = builder.fmul(scale, res.imag)
    return res._getvalue()
########################################################################
# NumPy expm1

def np_real_expm1_impl(context, builder, sig, args):
    """Real exp(x) - 1; delegates to the math.expm1 lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.expm1_impl(context, builder, sig, args)


def np_complex_expm1_impl(context, builder, sig, args):
    """Complex exp(z) - 1, following NumPy's nc_expm1 (funcs.inc.src):

        real = exp(zr)*cos(zi) - 1,  imag = exp(zr)*sin(zi)
    """
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    fsig = typing.signature(flt_ty, flt_ty)
    NEG_ONE = context.get_constant(flt_ty, -1.0)
    z = context.make_complex(builder, cplx_ty, value=args[0])
    mag = np_real_exp_impl(context, builder, fsig, [z.real])
    res = context.make_complex(builder, cplx_ty)
    cos_i = np_real_cos_impl(context, builder, fsig, [z.imag])
    sin_i = np_real_sin_impl(context, builder, fsig, [z.imag])
    res.imag = builder.fmul(mag, sin_i)
    res.real = builder.fadd(builder.fmul(mag, cos_i), NEG_ONE)
    return res._getvalue()
########################################################################
# NumPy log1p

def np_real_log1p_impl(context, builder, sig, args):
    """Real log(1 + x); delegates to the math.log1p lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.log1p_impl(context, builder, sig, args)


def np_complex_log1p_impl(context, builder, sig, args):
    """Complex log(1 + z), based on NumPy's nc_log1p (funcs.inc.src):

        real = log(hypot(zr + 1, zi)),  imag = atan2(zi, zr + 1)
    """
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    fsig1 = typing.signature(flt_ty, flt_ty)
    fsig2 = typing.signature(flt_ty, flt_ty, flt_ty)
    ONE = context.get_constant(flt_ty, 1.0)
    z = context.make_complex(builder, cplx_ty, value=args[0])
    res = context.make_complex(builder, cplx_ty)
    shifted_r = builder.fadd(z.real, ONE)
    mag = np_real_hypot_impl(context, builder, fsig2,
                             [shifted_r, z.imag])
    res.imag = np_real_atan2_impl(context, builder, fsig2,
                                  [z.imag, shifted_r])
    res.real = np_real_log_impl(context, builder, fsig1, [mag])
    return res._getvalue()
########################################################################
# NumPy sqrt
def np_real_sqrt_impl(context, builder, sig, args):
    """Real square root; delegates to the math.sqrt lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.sqrt_impl(context, builder, sig, args)
def np_complex_sqrt_impl(context, builder, sig, args):
    """Complex square root; delegates to the cmath.sqrt lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return cmathimpl.sqrt_impl(context, builder, sig, args)
########################################################################
# NumPy square

def np_int_square_impl(context, builder, sig, args):
    """Integer x*x."""
    _check_arity_and_homogeneity(sig, args, 1)
    val = args[0]
    return builder.mul(val, val)


def np_real_square_impl(context, builder, sig, args):
    """Floating-point x*x."""
    _check_arity_and_homogeneity(sig, args, 1)
    val = args[0]
    return builder.fmul(val, val)


def np_complex_square_impl(context, builder, sig, args):
    """Complex z*z via the generic complex multiply lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    ret_ty = sig.return_type
    mul_sig = typing.signature(ret_ty, ret_ty, ret_ty)
    return numbers.complex_mul_impl(context, builder, mul_sig,
                                    [args[0], args[0]])
########################################################################
# NumPy reciprocal

def np_int_reciprocal_impl(context, builder, sig, args):
    """Integer reciprocal (1/x).

    Based on the implementation in NumPy's loops.c.src: the operand is
    promoted to double, divided into 1.0, and the quotient is cast back
    to the integer type.  (An unused `binary_sig` local was removed.)
    """
    _check_arity_and_homogeneity(sig, args, 1)
    ty = sig.return_type
    in_as_float = context.cast(builder, args[0], ty, types.float64)
    ONE = context.get_constant(types.float64, 1)
    result_as_float = builder.fdiv(ONE, in_as_float)
    return context.cast(builder, result_as_float, types.float64, ty)


def np_real_reciprocal_impl(context, builder, sig, args):
    """Floating-point reciprocal (1.0 / x)."""
    _check_arity_and_homogeneity(sig, args, 1)
    ONE = context.get_constant(sig.return_type, 1.0)
    return builder.fdiv(ONE, args[0])
def np_complex_reciprocal_impl(context, builder, sig, args):
    """Lower np.reciprocal (1/z) for a complex operand.

    Smith's method: branch on which component of z is larger in
    magnitude and scale by their ratio, so the intermediate products do
    not overflow.
    """
    # based on the implementation in loops.c.src
    # Basically the same Smith method used for division, but with
    # the numerator substitued by 1.0
    _check_arity_and_homogeneity(sig, args, 1)
    ty = sig.args[0]
    float_ty = ty.underlying_float
    ZERO = context.get_constant(float_ty, 0.0)
    ONE = context.get_constant(float_ty, 1.0)
    in1 = context.make_complex(builder, ty, value=args[0])
    out = context.make_complex(builder, ty)
    in1r = in1.real
    in1i = in1.imag
    in1r_abs = _fabs(context, builder, in1r)
    in1i_abs = _fabs(context, builder, in1i)
    in1i_abs_le_in1r_abs = builder.fcmp(lc.FCMP_OLE, in1i_abs, in1r_abs)
    with builder.if_else(in1i_abs_le_in1r_abs) as (then, otherwise):
        with then:
            # |imag| <= |real|:  r = imag/real, d = real + imag*r
            # result = (1/d, -r/d)
            r = builder.fdiv(in1i, in1r)
            tmp0 = builder.fmul(in1i, r)
            d = builder.fadd(in1r, tmp0)
            inv_d = builder.fdiv(ONE, d)
            minus_r = builder.fsub(ZERO, r)
            out.real = inv_d
            out.imag = builder.fmul(minus_r, inv_d)
        with otherwise:
            # |imag| > |real|:  r = real/imag, d = real*r + imag
            # result = (r/d, -1/d)
            r = builder.fdiv(in1r, in1i)
            tmp0 = builder.fmul(in1r, r)
            d = builder.fadd(tmp0, in1i)
            inv_d = builder.fdiv(ONE, d)
            out.real = builder.fmul(r, inv_d)
            out.imag = builder.fsub(ZERO, inv_d)
    return out._getvalue()
########################################################################
# NumPy sin
def np_real_sin_impl(context, builder, sig, args):
    """Real sine; delegates to the math.sin lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.sin_impl(context, builder, sig, args)
def np_complex_sin_impl(context, builder, sig, args):
    """Complex sine; delegates to the cmath.sin lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return cmathimpl.sin_impl(context, builder, sig, args)
########################################################################
# NumPy cos
def np_real_cos_impl(context, builder, sig, args):
    """Real cosine; delegates to the math.cos lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.cos_impl(context, builder, sig, args)
def np_complex_cos_impl(context, builder, sig, args):
    """Complex cosine; delegates to the cmath.cos lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return cmathimpl.cos_impl(context, builder, sig, args)
########################################################################
# NumPy tan
def np_real_tan_impl(context, builder, sig, args):
    """Real tangent; delegates to the math.tan lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.tan_impl(context, builder, sig, args)
########################################################################
# NumPy asin
def np_real_asin_impl(context, builder, sig, args):
    """Real arcsine; delegates to the math.asin lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.asin_impl(context, builder, sig, args)
########################################################################
# NumPy acos
def np_real_acos_impl(context, builder, sig, args):
    """Real arccosine; delegates to the math.acos lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.acos_impl(context, builder, sig, args)
########################################################################
# NumPy atan
def np_real_atan_impl(context, builder, sig, args):
    """Real arctangent; delegates to the math.atan lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.atan_impl(context, builder, sig, args)
########################################################################
# NumPy atan2
def np_real_atan2_impl(context, builder, sig, args):
    """Two-argument arctangent; delegates to the math.atan2 lowering."""
    _check_arity_and_homogeneity(sig, args, 2)
    return mathimpl.atan2_float_impl(context, builder, sig, args)
########################################################################
# NumPy hypot
def np_real_hypot_impl(context, builder, sig, args):
    """Euclidean norm of two reals; delegates to the math.hypot lowering."""
    _check_arity_and_homogeneity(sig, args, 2)
    return mathimpl.hypot_float_impl(context, builder, sig, args)
########################################################################
# NumPy sinh

def np_real_sinh_impl(context, builder, sig, args):
    """Real hyperbolic sine; delegates to the math.sinh lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.sinh_impl(context, builder, sig, args)


def np_complex_sinh_impl(context, builder, sig, args):
    """Complex hyperbolic sine, translated from NumPy's funcs.inc.src
    (npymath provides no complex sinh):

        sinh(zr + zi*j) = cos(zi)*sinh(zr) + sin(zi)*cosh(zr)*j
    """
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    fsig = typing.signature(flt_ty, flt_ty)
    z = context.make_complex(builder, cplx_ty, args[0])
    res = context.make_complex(builder, cplx_ty)
    sin_i = np_real_sin_impl(context, builder, fsig, [z.imag])
    cos_i = np_real_cos_impl(context, builder, fsig, [z.imag])
    sinh_r = np_real_sinh_impl(context, builder, fsig, [z.real])
    cosh_r = np_real_cosh_impl(context, builder, fsig, [z.real])
    res.real = builder.fmul(cos_i, sinh_r)
    res.imag = builder.fmul(sin_i, cosh_r)
    return res._getvalue()
########################################################################
# NumPy cosh

def np_real_cosh_impl(context, builder, sig, args):
    """Real hyperbolic cosine; delegates to the math.cosh lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.cosh_impl(context, builder, sig, args)


def np_complex_cosh_impl(context, builder, sig, args):
    """Complex hyperbolic cosine, translated from NumPy's funcs.inc.src
    (npymath provides no complex cosh):

        cosh(zr + zi*j) = cos(zi)*cosh(zr) + sin(zi)*sinh(zr)*j
    """
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    flt_ty = cplx_ty.underlying_float
    fsig = typing.signature(flt_ty, flt_ty)
    z = context.make_complex(builder, cplx_ty, args[0])
    res = context.make_complex(builder, cplx_ty)
    cos_i = np_real_cos_impl(context, builder, fsig, [z.imag])
    cosh_r = np_real_cosh_impl(context, builder, fsig, [z.real])
    sin_i = np_real_sin_impl(context, builder, fsig, [z.imag])
    sinh_r = np_real_sinh_impl(context, builder, fsig, [z.real])
    res.real = builder.fmul(cos_i, cosh_r)
    res.imag = builder.fmul(sin_i, sinh_r)
    return res._getvalue()
########################################################################
# NumPy tanh
def np_real_tanh_impl(context, builder, sig, args):
    """Real hyperbolic tangent; delegates to the math.tanh lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.tanh_impl(context, builder, sig, args)
def np_complex_tanh_impl(context, builder, sig, args):
    """Lower np.tanh for a complex operand.

    Computes tanh(z) = sinh(z)/cosh(z), fully expanded into real
    operations.
    """
    # npymath does not provide complex tan functions. The code
    # in funcs.inc.src for tanh is translated here...
    _check_arity_and_homogeneity(sig, args, 1)
    ty = sig.args[0]
    fty = ty.underlying_float
    fsig1 = typing.signature(*[fty]*2)
    ONE = context.get_constant(fty, 1.0)
    x = context.make_complex(builder, ty, args[0])
    out = context.make_complex(builder, ty)
    xr = x.real
    xi = x.imag
    # Building blocks: sin/cos of the imaginary part, sinh/cosh of the
    # real part.
    si = np_real_sin_impl(context, builder, fsig1, [xi])
    ci = np_real_cos_impl(context, builder, fsig1, [xi])
    shr = np_real_sinh_impl(context, builder, fsig1, [xr])
    chr_ = np_real_cosh_impl(context, builder, fsig1, [xr])
    # Components of sinh(z) = rs + is_*j and cosh(z) = rc + ic*j.
    rs = builder.fmul(ci, shr)
    is_ = builder.fmul(si, chr_)
    rc = builder.fmul(ci, chr_)
    ic = builder.fmul(si, shr) # note: opposite sign from code in funcs.inc.src
    # Divide sinh(z) by cosh(z): multiply by the conjugate of the
    # denominator and scale by 1 / (rc^2 + ic^2).
    sqr_rc = builder.fmul(rc, rc)
    sqr_ic = builder.fmul(ic, ic)
    d = builder.fadd(sqr_rc, sqr_ic)
    inv_d = builder.fdiv(ONE, d)
    rs_rc = builder.fmul(rs, rc)
    is_ic = builder.fmul(is_, ic)
    is_rc = builder.fmul(is_, rc)
    rs_ic = builder.fmul(rs, ic)
    numr = builder.fadd(rs_rc, is_ic)
    numi = builder.fsub(is_rc, rs_ic)
    out.real = builder.fmul(numr, inv_d)
    out.imag = builder.fmul(numi, inv_d)
    return out._getvalue()
########################################################################
# NumPy asinh
def np_real_asinh_impl(context, builder, sig, args):
    """Real inverse hyperbolic sine; delegates to the math.asinh lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.asinh_impl(context, builder, sig, args)
########################################################################
# NumPy acosh
def np_real_acosh_impl(context, builder, sig, args):
    """Real inverse hyperbolic cosine; delegates to the math.acosh lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.acosh_impl(context, builder, sig, args)
def np_complex_acosh_impl(context, builder, sig, args):
    """Complex inverse hyperbolic cosine, translated from NumPy's
    funcs.inc.src (npymath provides no complex acosh):

        acosh(z) = log(z + sqrt(z + 1) * sqrt(z - 1))
    """
    _check_arity_and_homogeneity(sig, args, 1)
    cplx_ty = sig.args[0]
    bin_sig = typing.signature(cplx_ty, cplx_ty, cplx_ty)
    ONE = context.get_constant_generic(builder, cplx_ty, 1.0 + 0.0j)
    z = args[0]
    z_plus_one = numbers.complex_add_impl(context, builder, bin_sig,
                                          [z, ONE])
    z_minus_one = numbers.complex_sub_impl(context, builder, bin_sig,
                                           [z, ONE])
    sqrt_plus = np_complex_sqrt_impl(context, builder, sig, [z_plus_one])
    sqrt_minus = np_complex_sqrt_impl(context, builder, sig, [z_minus_one])
    sqrt_prod = numbers.complex_mul_impl(context, builder, bin_sig,
                                         [sqrt_plus, sqrt_minus])
    log_arg = numbers.complex_add_impl(context, builder, bin_sig,
                                       [z, sqrt_prod])
    return np_complex_log_impl(context, builder, sig, [log_arg])
########################################################################
# NumPy atanh
def np_real_atanh_impl(context, builder, sig, args):
    """Real inverse hyperbolic tangent; delegates to the math.atanh lowering."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.atanh_impl(context, builder, sig, args)
########################################################################
# NumPy floor
def np_real_floor_impl(context, builder, sig, args):
    """Round toward negative infinity via the llvm.floor intrinsic."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.call_fp_intrinsic(builder, 'llvm.floor', args)
########################################################################
# NumPy ceil
def np_real_ceil_impl(context, builder, sig, args):
    """Round toward positive infinity via the llvm.ceil intrinsic."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.call_fp_intrinsic(builder, 'llvm.ceil', args)
########################################################################
# NumPy trunc
def np_real_trunc_impl(context, builder, sig, args):
    """Round toward zero via the llvm.trunc intrinsic."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.call_fp_intrinsic(builder, 'llvm.trunc', args)
########################################################################
# NumPy fabs
def np_real_fabs_impl(context, builder, sig, args):
    """Floating-point absolute value via the llvm.fabs intrinsic."""
    _check_arity_and_homogeneity(sig, args, 1)
    return mathimpl.call_fp_intrinsic(builder, 'llvm.fabs', args)
########################################################################
# NumPy style predicates
# For real and integer types rely on numbers... but complex ordering in
# NumPy is lexicographic (while Python does not provide ordering).

def np_complex_ge_impl(context, builder, sig, args):
    """Lexicographic complex >=, equivalent to NumPy's CGE macro:
    (xr > yr && !isnan(xi) && !isnan(yi)) || (xr == yr && xi >= yi)
    """
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    cplx_ty = sig.args[0]
    lhs, rhs = [context.make_complex(builder, cplx_ty, value=a)
                for a in args]
    real_gt = builder.fcmp(lc.FCMP_OGT, lhs.real, rhs.real)
    imags_ordered = builder.fcmp(lc.FCMP_ORD, lhs.imag, rhs.imag)
    real_eq = builder.fcmp(lc.FCMP_OEQ, lhs.real, rhs.real)
    imag_ge = builder.fcmp(lc.FCMP_OGE, lhs.imag, rhs.imag)
    return builder.or_(builder.and_(real_gt, imags_ordered),
                       builder.and_(real_eq, imag_ge))


def np_complex_le_impl(context, builder, sig, args):
    """Lexicographic complex <=, equivalent to NumPy's CLE macro:
    (xr < yr && !isnan(xi) && !isnan(yi)) || (xr == yr && xi <= yi)
    """
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    cplx_ty = sig.args[0]
    lhs, rhs = [context.make_complex(builder, cplx_ty, value=a)
                for a in args]
    real_lt = builder.fcmp(lc.FCMP_OLT, lhs.real, rhs.real)
    imags_ordered = builder.fcmp(lc.FCMP_ORD, lhs.imag, rhs.imag)
    real_eq = builder.fcmp(lc.FCMP_OEQ, lhs.real, rhs.real)
    imag_le = builder.fcmp(lc.FCMP_OLE, lhs.imag, rhs.imag)
    return builder.or_(builder.and_(real_lt, imags_ordered),
                       builder.and_(real_eq, imag_le))


def np_complex_gt_impl(context, builder, sig, args):
    """Lexicographic complex >, equivalent to NumPy's CGT macro:
    (xr > yr && !isnan(xi) && !isnan(yi)) || (xr == yr && xi > yi)
    """
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    cplx_ty = sig.args[0]
    lhs, rhs = [context.make_complex(builder, cplx_ty, value=a)
                for a in args]
    real_gt = builder.fcmp(lc.FCMP_OGT, lhs.real, rhs.real)
    imags_ordered = builder.fcmp(lc.FCMP_ORD, lhs.imag, rhs.imag)
    real_eq = builder.fcmp(lc.FCMP_OEQ, lhs.real, rhs.real)
    imag_gt = builder.fcmp(lc.FCMP_OGT, lhs.imag, rhs.imag)
    return builder.or_(builder.and_(real_gt, imags_ordered),
                       builder.and_(real_eq, imag_gt))
def np_complex_lt_impl(context, builder, sig, args):
    """Lexicographic complex <, equivalent to NumPy's CLT macro:
    (xr < yr && !isnan(xi) && !isnan(yi)) || (xr == yr && xi < yi)
    """
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    cplx_ty = sig.args[0]
    lhs, rhs = [context.make_complex(builder, cplx_ty, value=a)
                for a in args]
    real_lt = builder.fcmp(lc.FCMP_OLT, lhs.real, rhs.real)
    imags_ordered = builder.fcmp(lc.FCMP_ORD, lhs.imag, rhs.imag)
    real_eq = builder.fcmp(lc.FCMP_OEQ, lhs.real, rhs.real)
    imag_lt = builder.fcmp(lc.FCMP_OLT, lhs.imag, rhs.imag)
    return builder.or_(builder.and_(real_lt, imags_ordered),
                       builder.and_(real_eq, imag_lt))


def np_complex_eq_impl(context, builder, sig, args):
    """Complex equality, equivalent to NumPy's CEQ macro:
    xr == yr && xi == yi
    """
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    cplx_ty = sig.args[0]
    lhs, rhs = [context.make_complex(builder, cplx_ty, value=a)
                for a in args]
    real_eq = builder.fcmp(lc.FCMP_OEQ, lhs.real, rhs.real)
    imag_eq = builder.fcmp(lc.FCMP_OEQ, lhs.imag, rhs.imag)
    return builder.and_(real_eq, imag_eq)


def np_complex_ne_impl(context, builder, sig, args):
    """Complex inequality, equivalent to NumPy's CNE macro:
    xr != yr || xi != yi
    (unordered compares, so NaN components compare as unequal)
    """
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    cplx_ty = sig.args[0]
    lhs, rhs = [context.make_complex(builder, cplx_ty, value=a)
                for a in args]
    real_ne = builder.fcmp(lc.FCMP_UNE, lhs.real, rhs.real)
    imag_ne = builder.fcmp(lc.FCMP_UNE, lhs.imag, rhs.imag)
    return builder.or_(real_ne, imag_ne)
########################################################################
# NumPy logical algebra
# these are made generic for all types for now, assuming that
# cgutils.is_true works in the underlying types.

def _complex_is_true(context, builder, ty, val):
    """Truthiness of a complex value: true if either component is non-zero."""
    cplx = context.make_complex(builder, ty, value=val)
    real_true = cgutils.is_true(builder, cplx.real)
    imag_true = cgutils.is_true(builder, cplx.imag)
    return builder.or_(real_true, imag_true)


def np_logical_and_impl(context, builder, sig, args):
    """Logical AND of the truthiness of the two operands."""
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    return builder.and_(cgutils.is_true(builder, args[0]),
                        cgutils.is_true(builder, args[1]))


def np_complex_logical_and_impl(context, builder, sig, args):
    """Logical AND for complex operands."""
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    return builder.and_(
        _complex_is_true(context, builder, sig.args[0], args[0]),
        _complex_is_true(context, builder, sig.args[1], args[1]))


def np_logical_or_impl(context, builder, sig, args):
    """Logical OR of the truthiness of the two operands."""
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    return builder.or_(cgutils.is_true(builder, args[0]),
                       cgutils.is_true(builder, args[1]))


def np_complex_logical_or_impl(context, builder, sig, args):
    """Logical OR for complex operands."""
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    return builder.or_(
        _complex_is_true(context, builder, sig.args[0], args[0]),
        _complex_is_true(context, builder, sig.args[1], args[1]))


def np_logical_xor_impl(context, builder, sig, args):
    """Logical XOR of the truthiness of the two operands."""
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    return builder.xor(cgutils.is_true(builder, args[0]),
                       cgutils.is_true(builder, args[1]))


def np_complex_logical_xor_impl(context, builder, sig, args):
    """Logical XOR for complex operands."""
    _check_arity_and_homogeneity(sig, args, 2, return_type=types.boolean)
    return builder.xor(
        _complex_is_true(context, builder, sig.args[0], args[0]),
        _complex_is_true(context, builder, sig.args[1], args[1]))


def np_logical_not_impl(context, builder, sig, args):
    """Logical NOT: true when the operand is falsy."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return cgutils.is_false(builder, args[0])


def np_complex_logical_not_impl(context, builder, sig, args):
    """Logical NOT for a complex operand."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    truth = _complex_is_true(context, builder, sig.args[0], args[0])
    return builder.not_(truth)
########################################################################
# NumPy style max/min
#
# NumPy has two families of max/min functions: maximum/minimum and
# fmax/fmin.  They differ only in how NaNs are handled, so the
# distinction matters only for float/complex inputs; the integer
# implementations are shared.  For booleans, maximum is equivalent to
# `or` and minimum to `and`.  Datetime support will go elsewhere.

def np_int_smax_impl(context, builder, sig, args):
    """Signed integer maximum."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    return builder.select(builder.icmp(lc.ICMP_SGE, a, b), a, b)


def np_int_umax_impl(context, builder, sig, args):
    """Unsigned integer maximum."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    return builder.select(builder.icmp(lc.ICMP_UGE, a, b), a, b)


def np_real_maximum_impl(context, builder, sig, args):
    """Float maximum that propagates NaNs (prefers the NaN in arg1)."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    a_nan = builder.fcmp(lc.FCMP_UNO, a, a)
    either_nan = builder.fcmp(lc.FCMP_UNO, a, b)
    nan_choice = builder.select(a_nan, a, b)
    a_ge_b = builder.fcmp(lc.FCMP_OGE, a, b)
    ordered_choice = builder.select(a_ge_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)


def np_real_fmax_impl(context, builder, sig, args):
    """Float maximum that prefers non-NaN operands."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    b_nan = builder.fcmp(lc.FCMP_UNO, b, b)
    either_nan = builder.fcmp(lc.FCMP_UNO, a, b)
    nan_choice = builder.select(b_nan, a, b)
    a_ge_b = builder.fcmp(lc.FCMP_OGE, a, b)
    ordered_choice = builder.select(a_ge_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)
def np_complex_maximum_impl(context, builder, sig, args):
    """Complex maximum with NaN propagation.

    Uses NumPy's lexicographic complex ordering.  If either operand is a
    NaN, a NaN is returned; per NumPy's docs, when both are NaNs the one
    in the first argument wins.
    """
    _check_arity_and_homogeneity(sig, args, 2)
    cplx_ty = sig.args[0]
    isnan_sig = typing.signature(types.boolean, cplx_ty)
    cmp_sig = typing.signature(types.boolean, cplx_ty, cplx_ty)
    a, b = args
    a_nan = np_complex_isnan_impl(context, builder, isnan_sig, [a])
    b_nan = np_complex_isnan_impl(context, builder, isnan_sig, [b])
    either_nan = builder.or_(a_nan, b_nan)
    nan_choice = builder.select(a_nan, a, b)
    a_ge_b = np_complex_ge_impl(context, builder, cmp_sig, args)
    ordered_choice = builder.select(a_ge_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)


def np_complex_fmax_impl(context, builder, sig, args):
    """Complex maximum that prefers non-NaN operands.

    Uses NumPy's lexicographic complex ordering; per NumPy's docs, when
    both arguments are NaNs the one in the first argument is returned.
    """
    _check_arity_and_homogeneity(sig, args, 2)
    cplx_ty = sig.args[0]
    isnan_sig = typing.signature(types.boolean, cplx_ty)
    cmp_sig = typing.signature(types.boolean, cplx_ty, cplx_ty)
    a, b = args
    a_nan = np_complex_isnan_impl(context, builder, isnan_sig, [a])
    b_nan = np_complex_isnan_impl(context, builder, isnan_sig, [b])
    either_nan = builder.or_(a_nan, b_nan)
    nan_choice = builder.select(b_nan, a, b)
    a_ge_b = np_complex_ge_impl(context, builder, cmp_sig, args)
    ordered_choice = builder.select(a_ge_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)
def np_int_smin_impl(context, builder, sig, args):
    """Signed integer minimum."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    return builder.select(builder.icmp(lc.ICMP_SLE, a, b), a, b)


def np_int_umin_impl(context, builder, sig, args):
    """Unsigned integer minimum."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    return builder.select(builder.icmp(lc.ICMP_ULE, a, b), a, b)


def np_real_minimum_impl(context, builder, sig, args):
    """Float minimum that propagates NaNs (prefers the NaN in arg1)."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    a_nan = builder.fcmp(lc.FCMP_UNO, a, a)
    either_nan = builder.fcmp(lc.FCMP_UNO, a, b)
    nan_choice = builder.select(a_nan, a, b)
    a_le_b = builder.fcmp(lc.FCMP_OLE, a, b)
    ordered_choice = builder.select(a_le_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)


def np_real_fmin_impl(context, builder, sig, args):
    """Float minimum that prefers non-NaN operands."""
    _check_arity_and_homogeneity(sig, args, 2)
    a, b = args
    a_nan = builder.fcmp(lc.FCMP_UNO, a, a)
    either_nan = builder.fcmp(lc.FCMP_UNO, a, b)
    nan_choice = builder.select(a_nan, b, a)
    a_le_b = builder.fcmp(lc.FCMP_OLE, a, b)
    ordered_choice = builder.select(a_le_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)
def np_complex_minimum_impl(context, builder, sig, args):
    """Complex minimum with NaN propagation.

    Uses NumPy's lexicographic complex ordering.  If either operand is a
    NaN, a NaN is returned; per NumPy's docs, when both are NaNs the one
    in the first argument wins.
    """
    _check_arity_and_homogeneity(sig, args, 2)
    cplx_ty = sig.args[0]
    isnan_sig = typing.signature(types.boolean, cplx_ty)
    cmp_sig = typing.signature(types.boolean, cplx_ty, cplx_ty)
    a, b = args
    a_nan = np_complex_isnan_impl(context, builder, isnan_sig, [a])
    b_nan = np_complex_isnan_impl(context, builder, isnan_sig, [b])
    either_nan = builder.or_(a_nan, b_nan)
    nan_choice = builder.select(a_nan, a, b)
    a_le_b = np_complex_le_impl(context, builder, cmp_sig, args)
    ordered_choice = builder.select(a_le_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)


def np_complex_fmin_impl(context, builder, sig, args):
    """Complex minimum that prefers non-NaN operands.

    Uses NumPy's lexicographic complex ordering; per NumPy's docs, when
    both arguments are NaNs the one in the first argument is returned.
    """
    _check_arity_and_homogeneity(sig, args, 2)
    cplx_ty = sig.args[0]
    isnan_sig = typing.signature(types.boolean, cplx_ty)
    cmp_sig = typing.signature(types.boolean, cplx_ty, cplx_ty)
    a, b = args
    a_nan = np_complex_isnan_impl(context, builder, isnan_sig, [a])
    b_nan = np_complex_isnan_impl(context, builder, isnan_sig, [b])
    either_nan = builder.or_(a_nan, b_nan)
    nan_choice = builder.select(b_nan, a, b)
    a_le_b = np_complex_le_impl(context, builder, cmp_sig, args)
    ordered_choice = builder.select(a_le_b, a, b)
    return builder.select(either_nan, nan_choice, ordered_choice)
########################################################################
# NumPy floating point misc
def np_int_isnan_impl(context, builder, sig, args):
    """isnan for integers: integers are never nan, so always false."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return cgutils.false_bit
def np_real_isnan_impl(context, builder, sig, args):
    """isnan for reals: delegate to the math lowering helper."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return mathimpl.is_nan(builder, args[0])
def np_complex_isnan_impl(context, builder, sig, args):
    """isnan for complex values: delegate to the cmath lowering helper."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    x, = args
    ty, = sig.args
    # unpack the raw value into a complex struct proxy for the helper
    complex_val = context.make_complex(builder, ty, value=x)
    return cmathimpl.is_nan(builder, complex_val)
def np_int_isfinite_impl(context, builder, sig, args):
    """isfinite for integers: integers are always finite, so always true."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return cgutils.true_bit
def np_datetime_isfinite_impl(context, builder, sig, args):
    """isfinite for datetimes: finite iff the value is not NaT."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return builder.icmp_unsigned('!=', args[0], npdatetime.NAT)
def np_datetime_isnat_impl(context, builder, sig, args):
    """isnat for datetimes: true iff the value equals the NaT sentinel."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return builder.icmp_signed('==', args[0], npdatetime.NAT)
def np_real_isfinite_impl(context, builder, sig, args):
    """isfinite for reals: delegate to the math lowering helper."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return mathimpl.is_finite(builder, args[0])
def np_complex_isfinite_impl(context, builder, sig, args):
    """isfinite for complex values: delegate to the cmath lowering helper."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    x, = args
    ty, = sig.args
    # unpack the raw value into a complex struct proxy for the helper
    complex_val = context.make_complex(builder, ty, value=x)
    return cmathimpl.is_finite(builder, complex_val)
def np_int_isinf_impl(context, builder, sig, args):
    """isinf for integers: integers are never infinite, so always false."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return cgutils.false_bit
def np_real_isinf_impl(context, builder, sig, args):
    """isinf for reals: delegate to the math lowering helper."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    return mathimpl.is_inf(builder, args[0])
def np_complex_isinf_impl(context, builder, sig, args):
    """isinf for complex values: delegate to the cmath lowering helper."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    x, = args
    ty, = sig.args
    # unpack the raw value into a complex struct proxy for the helper
    complex_val = context.make_complex(builder, ty, value=x)
    return cmathimpl.is_inf(builder, complex_val)
def np_real_signbit_impl(context, builder, sig, args):
    """signbit for reals: call the numba C helper, convert its int to bool."""
    _check_arity_and_homogeneity(sig, args, 1, return_type=types.boolean)
    # select the C helper matching the float width
    dispatch_table = {
        types.float32: 'numba_signbitf',
        types.float64: 'numba_signbit',
    }
    # the helper returns a C int; nonzero means the sign bit is set
    inner_sig = typing.signature(types.intc, *sig.args)
    int_res = _dispatch_func_by_name_type(context, builder, inner_sig, args,
                                          dispatch_table, 'signbit')
    bool_res = builder.icmp_unsigned('!=', int_res, int_res.type(0))
    return bool_res
def np_real_copysign_impl(context, builder, sig, args):
    """copysign for reals: delegate to the math lowering helper."""
    _check_arity_and_homogeneity(sig, args, 2)
    return mathimpl.copysign_float_impl(context, builder, sig, args)
def np_real_nextafter_impl(context, builder, sig, args):
    """nextafter for reals: dispatch to the width-matched npy C helper."""
    _check_arity_and_homogeneity(sig, args, 2)
    dispatch_table = {
        types.float32: 'npy_nextafterf',
        types.float64: 'npy_nextafter',
    }
    return _dispatch_func_by_name_type(context, builder, sig, args,
                                       dispatch_table, 'nextafter')
def np_real_spacing_impl(context, builder, sig, args):
    """spacing for reals: dispatch to the width-matched npy C helper."""
    _check_arity_and_homogeneity(sig, args, 1)
    dispatch_table = {
        types.float32: 'npy_spacingf',
        types.float64: 'npy_spacing',
    }
    return _dispatch_func_by_name_type(context, builder, sig, args,
                                       dispatch_table, 'spacing')
def np_real_ldexp_impl(context, builder, sig, args):
    """ldexp for reals: scale x1 by 2**x2 via the math lowering helper.

    Unlike most ufunc lowerings the arguments are not homogeneous: the
    exponent may arrive as an 'i' or an 'l' and is first cast to C int.
    """
    # this one is slightly different to other ufuncs.
    # arguments are not homogeneous and second arg may come as
    # an 'i' or an 'l'.
    # the function expects the second argument to be have a C int type
    x1, x2 = args
    ty1, ty2 = sig.args
    # note that types.intc should be equivalent to int_ that is
    # 'NumPy's default int')
    x2 = context.cast(builder, x2, ty2, types.intc)
    f_fi_sig = typing.signature(ty1, ty1, types.intc)
    return mathimpl.ldexp_impl(context, builder, f_fi_sig, (x1, x2))
| bsd-2-clause |
tuffz/pi-weather-app | Carthage/Checkouts/realm-cocoa/Realm/ObjectStore/external/catch/scripts/releaseNotes.py | 25 | 1666 | import os
import re
import urllib2
import json
from scriptCommon import catchPath
from scriptCommon import runAndCapture
# Matches an issue reference like "#123" anywhere in a line, capturing the
# text before it, the issue number, and the remainder of the line.
issueNumberRe = re.compile( r'(.*?)#([0-9]*)([^0-9]?.*)' )
rootPath = os.path.join( catchPath, 'include/' )
versionPath = os.path.join( rootPath, "internal/catch_version.hpp" )
# The two most recent commits touching the version header delimit the
# release range; list every commit message between them.
hashes = runAndCapture( ['git', 'log', '-2', '--format="%H"', versionPath] )
lines = runAndCapture( ['git', 'log', hashes[1] + ".." + hashes[0], catchPath] )
prevLine = ""
messages = []
dates = []
issues = {}
def getIssueTitle( issueNumber ):
    """Fetch the title of a GitHub issue; return a placeholder on failure.

    The original handlers read ``except e:`` which catches an *undefined
    name* ``e`` and would raise NameError on any failure; catch concrete
    exception types instead so failures degrade to a placeholder string.
    """
    try:
        s = urllib2.urlopen("https://api.github.com/repos/philsquared/catch/issues/" + issueNumber ).read()
    except Exception:
        # network error, HTTP error, bad URL, ...
        return "#HTTP Error#"
    try:
        j = json.loads( s )
        return j["title"]
    except (ValueError, KeyError):
        # malformed JSON or a response without a "title" field
        return "#JSON Error#"
# Walk the commit log line by line, collecting commit dates and messages
# while expanding "#123" references into '#123 ("issue title")'.
for line in lines:
    if line.startswith( "commit"):
        pass
    elif line.startswith( "Author:"):
        pass
    elif line.startswith( "Date:"):
        dates.append( line[5:].lstrip() )
        pass
    elif line == "" and prevLine == "":
        # collapse runs of blank lines
        pass
    else:
        prevLine = line
        match = issueNumberRe.match( line )
        line2 = ""
        # a line may contain several issue references; expand each in turn
        while match:
            issueNumber = match.group(2)
            issue = '#{0} ("{1}")'.format( issueNumber, getIssueTitle( issueNumber ) )
            line2 = line2 + match.group(1) + issue
            match = issueNumberRe.match( match.group(3) )
        if line2 == "":
            messages.append( line )
        else:
            messages.append( line2 )
# NOTE: Python 2 print statements - this script predates Python 3.
print "All changes between {0} and {1}:\n".format( dates[-1], dates[0] )
for line in messages:
    print line
| mit |
nrejack/redi | test/TestSortElementTree.py | 2 | 7241 | #!/usr/bin/env python
# Contributors:
# Christopher P. Barnes <senrabc@gmail.com>
# Andrei Sura: github.com/indera
# Mohan Das Katragadda <mohan.das142@gmail.com>
# Philip Chase <philipbchase@gmail.com>
# Ruchi Vivek Desai <ruchivdesai@gmail.com>
# Taeber Rapczak <taeber@ufl.edu>
# Nicholas Rejack <nrejack@ufl.edu>
# Josh Hanna <josh@hanna.io>
# Copyright (c) 2014-2015, University of Florida
# All rights reserved.
#
# Distributed under the BSD 3-Clause License
# For full text of the BSD 3-Clause License see http://opensource.org/licenses/BSD-3-Clause
import unittest
import tempfile
import os
import logging
from lxml import etree
from redi import redi
class TestSortElementTree(unittest.TestCase):
def setUp(self):
redi.configure_logging('.')
#redi.logger.setLevel(logging.DEBUG)
# un-sorted XML file
self.unsorted = """<?xml version="1.0" encoding="UTF-8"?>
<study>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-03 00:00:00</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-01 00:00:00</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-01 00:12:01</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-02 00:00:00</DATE_TIME_STAMP>
</subject>
</study>
"""
# we expect the following sorted tree
self.sorted_tree_keep_all_false = """<?xml version="1.0" encoding="UTF-8"?>
<study>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-01 00:00:00</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-02 00:00:00</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-03 00:00:00</DATE_TIME_STAMP>
</subject>
</study>
"""
self.sorted_tree_keep_all_true = """<?xml version="1.0" encoding="UTF-8"?>
<study>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-01 00:00:00</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-01 00:12:01</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-02 00:00:00</DATE_TIME_STAMP>
</subject>
<subject>
<NAME>PLATELET COUNT</NAME>
<ORD_VALUE>123</ORD_VALUE>
<STUDY_ID>999-0262</STUDY_ID>
<redcapFormName>cbc</redcapFormName>
<loinc_code>component_A</loinc_code>
<DATE_TIME_STAMP>2013-12-03 00:00:00</DATE_TIME_STAMP>
</subject>
</study>
"""
self.dirpath = tempfile.mkdtemp()
# def test_sort_elementtree(self):
# tree_to_sort = etree.ElementTree(etree.fromstring(self.unsorted))
# # make the original test work
# redi.sort_element_tree(tree_to_sort, self.dirpath)
#
# # TODO: create a way to test if --keep-all is True
# # test the keep all results functionality
# # redi.sort_element_tree(tree_to_sort, self.dirpath, True)
#
# par = etree.XMLParser(remove_blank_text = True)
# clean_expect = etree.XML(self.sorted_tree, parser=par)
# clean_result = etree.XML(etree.tostring(tree_to_sort), parser=par)
# self.assertEqual(etree.tostring(clean_expect), etree.tostring(clean_result))
def test_sort_elementtree_keep_all_true(self):
tree_to_sort = etree.ElementTree(etree.fromstring(self.unsorted))
# make the original test work
redi.sort_element_tree(tree_to_sort, self.dirpath, True)
# TODO: create a way to test if --keep-all is True
# test the keep all results functionality
# redi.sort_element_tree(tree_to_sort, self.dirpath, True)
# then the log should NOT!! have the line "Remove duplicate result using key:"
par = etree.XMLParser(remove_blank_text = True)
clean_expect = etree.XML(self.sorted_tree_keep_all_true, parser=par)
clean_result = etree.XML(etree.tostring(tree_to_sort), parser=par)
self.assertEqual(etree.tostring(clean_expect), etree.tostring(clean_result))
def test_sort_elementtree_keep_all_false(self):
tree_to_sort = etree.ElementTree(etree.fromstring(self.unsorted))
# make the original test work
redi.sort_element_tree(tree_to_sort, self.dirpath, False)
# TODO: create a way to test if --keep-all is false
# test the keep all results functionality
# redi.sort_element_tree(tree_to_sort, self.dirpath, False)
# then the log should have the line "Remove duplicate result using key:"
par = etree.XMLParser(remove_blank_text = True)
clean_expect = etree.XML(self.sorted_tree_keep_all_false, parser=par)
clean_result = etree.XML(etree.tostring(tree_to_sort), parser=par)
self.assertEqual(etree.tostring(clean_expect), etree.tostring(clean_result))
def tearDown(self):
try:
os.unlink(os.path.join(self.dirpath,
"rawDataSortedBeforeCompression.xml"))
except:
print("setUp failed to unlink "\
"file \'rawDataSortedBeforeCompression\'.xml")
try:
os.rmdir(self.dirpath)
except OSError:
raise LogException("Folder \'{}\' is not empty, hence cannot "\
"be deleted.".format(self.dirpath))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
inversesquarelaw/invent | tictactoe.py | 1 | 6240 | #Tic Tac Toe
import random
def drawBoard(board):
    """Print the tic-tac-toe board.

    `board` is a 10-item list of single-character strings; index 0 is
    unused and rows are shown top (cells 7-9) to bottom (cells 1-3),
    mirroring a numeric keypad.
    """
    for row_start in (7, 4, 1):
        print(' | |')
        print(' ' + ' | '.join(board[row_start:row_start + 3]))
        print(' | |')
        if row_start != 1:
            print('-----------')
def inputPlayerLetter():
    """Prompt until the player picks X or O.

    Returns a two-item list: the player's letter first, then the
    computer's letter.
    """
    letter = ''
    while letter not in ('X', 'O'):
        print('Do you want to be X or O?')
        letter = input().upper()
    return ['X', 'O'] if letter == 'X' else ['O', 'X']
def whoGoesFirst():
    """Randomly pick which side moves first: 'computer' or 'player'."""
    return 'computer' if random.randint(0, 1) == 0 else 'player'
def playAgain():
    """Ask for a rematch; True when the answer starts with 'y'."""
    print('Do you want to play again? (yes or no)')
    answer = input().lower()
    return answer.startswith('y')
def makeMove(board, letter, move):
    # Place `letter` ('X' or 'O') on `board` at cell `move` (1-9), in place.
    board[move] = letter
def isWinner(bo, le):
    """Return True if letter `le` occupies any winning line on board `bo`."""
    # All eight winning index triples: three rows, three columns,
    # and the two diagonals (board is keypad-numbered 1-9).
    winning_lines = (
        (7, 8, 9), (4, 5, 6), (1, 2, 3),   # rows, top to bottom
        (1, 4, 7), (2, 5, 8), (3, 6, 9),   # columns
        (1, 5, 9), (3, 5, 7),              # diagonals
    )
    return any(bo[a] == le and bo[b] == le and bo[c] == le
               for a, b, c in winning_lines)
def getBoardCopy(board):
    """Return a shallow copy of the board list.

    Replaces the original element-by-element append loop with the
    idiomatic list() constructor; the result is identical.
    """
    return list(board)
#checks if it is a valid move
def isSpaceFree(board, move):
    """Return True if cell `move` is unclaimed.

    A free cell holds ' ' or its own digit label '1'-'9'. Cells are
    single characters, so a membership test replaces the original
    nine-way `or` chain with identical behavior.
    """
    return board[move] in (' ', '1', '2', '3', '4', '5', '6', '7', '8', '9')
def getPlayerMove(board):
    """Prompt until the player enters a digit 1-9 naming a free cell."""
    valid_entries = '1 2 3 4 5 6 7 8 9'.split()
    move = ' '
    while move not in valid_entries or not isSpaceFree(board, int(move)):
        print('What is your next move? (1-9)')
        move = input()
    return int(move)
def chooseRandomMoveFromList(board, movesList):
    """Return a random free move from `movesList`, or None if none is free.

    The manual filter-and-append loop is replaced by a list
    comprehension; behavior is unchanged.
    """
    possibleMoves = [i for i in movesList if isSpaceFree(board, i)]
    if possibleMoves:
        return random.choice(possibleMoves)
    return None
def getComputerMove(board, computerLetter):
    """Pick the computer's move using a fixed strategy.

    Priority order: win now, block the player's win, take a corner,
    take the center, then take a side square.
    """
    playerLetter = 'O' if computerLetter == 'X' else 'X'

    # 1. Take any square that wins immediately.
    for candidate in range(1, 10):
        boardCopy = getBoardCopy(board)
        if isSpaceFree(boardCopy, candidate):
            makeMove(boardCopy, computerLetter, candidate)
            if isWinner(boardCopy, computerLetter):
                return candidate

    # 2. Block the player's immediate win.
    for candidate in range(1, 10):
        boardCopy = getBoardCopy(board)
        if isSpaceFree(boardCopy, candidate):
            makeMove(boardCopy, playerLetter, candidate)
            if isWinner(boardCopy, playerLetter):
                return candidate

    # 3. Prefer a free corner.
    corner = chooseRandomMoveFromList(board, [1, 3, 7, 9])
    if corner is not None:
        return corner

    # 4. Then the center.
    if isSpaceFree(board, 5):
        return 5

    # 5. Fall back to a side square.
    return chooseRandomMoveFromList(board, [2, 4, 6, 8])
def isBoardFull(board):
    """Return True when no cell 1-9 is free."""
    return not any(isSpaceFree(board, i) for i in range(1, 10))
print('Welcome to Tic Tac Toe!')
# Outer loop: one full game per iteration, until the player declines a rematch.
while True:
    #reset the board
    theBoard = [' '] * 10
    # label each cell with its own index so free squares display 1-9
    for i in range(len(theBoard)):
        theBoard[i] = str(i)
    playerLetter, computerLetter = inputPlayerLetter()
    turn = whoGoesFirst()
    print('The ' + turn + ' will go first.')
    gameIsPlaying = True
    # Inner loop: alternate turns until a win or a full board.
    while gameIsPlaying:
        if turn == 'player':
            #Player's turn.
            drawBoard(theBoard)
            move = getPlayerMove(theBoard)
            makeMove(theBoard, playerLetter, move)
            if isWinner(theBoard, playerLetter):
                drawBoard(theBoard)
                print('Hooray! You have won the game!')
                gameIsPlaying = False
            else:
                if isBoardFull(theBoard):
                    drawBoard(theBoard)
                    print('The game is a tie!')
                    break
                else:
                    turn = 'computer'
        else:
            #computer's turn
            move = getComputerMove(theBoard, computerLetter)
            makeMove(theBoard, computerLetter, move)
            if isWinner(theBoard, computerLetter):
                drawBoard(theBoard)
                print('The computer has beaten you! You lose.')
                gameIsPlaying = False
            else:
                if isBoardFull(theBoard):
                    drawBoard(theBoard)
                    print('The game is a tie!')
                    break
                else:
                    turn = 'player'
    if not playAgain():
        break
| gpl-2.0 |
ehashman/oh-mainline | vendor/packages/webtest/tests/test_debugapp.py | 15 | 6309 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
import sys
import six
import webtest
from webtest.debugapp import debug_app
from webtest.compat import PY3
from webtest.compat import to_bytes
from webtest.compat import print_stderr
from webtest.app import AppError
from tests.compat import unittest
import webbrowser
def test_print_unicode():
    # Smoke test: print_stderr must cope with non-ASCII text.
    print_stderr('°C')
class TestTesting(unittest.TestCase):
    """End-to-end tests of webtest.TestApp against the bundled debug_app.

    Fix over the original: test_errors used ``assert(False, "...")``,
    which asserts a non-empty tuple and therefore always passes; it is
    replaced with ``self.fail(...)`` so the test can actually fail.
    """

    def setUp(self):
        self.app = webtest.TestApp(debug_app)

    def test_url_class(self):
        # Any object whose __str__ yields a path is accepted as a URL.
        class U:
            def __str__(self):
                return '/'
        res = self.app.get(U())
        self.assertEqual(res.status_int, 200)

    def test_testing(self):
        res = self.app.get('/')
        self.assertEqual(res.status_int, 200)
        self.assertEqual(res.headers['content-type'], 'text/plain')
        self.assertEqual(res.content_type, 'text/plain')
        res = self.app.request('/', method='GET')
        self.assertEqual(res.status_int, 200)
        self.assertEqual(res.headers['content-type'], 'text/plain')
        self.assertEqual(res.content_type, 'text/plain')
        res = self.app.head('/')
        self.assertEqual(res.status_int, 200)
        self.assertEqual(res.headers['content-type'], 'text/plain')
        self.assertTrue(res.content_length > 0)
        # HEAD responses carry headers but no body
        self.assertEqual(res.body, to_bytes(''))
        res = self.app.head('/', xhr=True)
        self.assertEqual(res.status_int, 200)

    def test_post_unicode(self):
        res = self.app.post(
            '/', params=dict(a='é'),
            content_type='application/x-www-form-urlencoded;charset=utf8')
        res.mustcontain('a=%C3%A9')

    def test_post_unicode_body(self):
        res = self.app.post(
            '/', params='é',
            content_type='text/plain; charset=utf8')
        self.assertTrue(res.body.endswith(b'\xc3\xa9'))
        res.mustcontain('é')

    def test_post_params(self):
        res = self.app.post('/', params=dict(a=1))
        res.mustcontain('a=1')
        res = self.app.post('/', params=[('a', '1')])
        res.mustcontain('a=1')
        res = self.app.post_json('/', params=dict(a=1))
        res.mustcontain('{"a": 1}')
        res = self.app.post_json('/', params=False)
        res.mustcontain('false')

    def test_put_params(self):
        res = self.app.put('/', params=dict(a=1))
        res.mustcontain('a=1')
        res = self.app.put_json('/', params=dict(a=1))
        res.mustcontain('{"a": 1}')
        res = self.app.put_json('/', params=False)
        res.mustcontain('false')

    def test_delete_params(self):
        res = self.app.delete('/', params=dict(a=1))
        res.mustcontain('a=1')
        res = self.app.delete_json('/', params=dict(a=1))
        res.mustcontain('{"a": 1}')

    def test_options(self):
        res = self.app.options('/')
        self.assertEqual(res.status_int, 200)

    def test_exception(self):
        self.assertRaises(Exception, self.app.get, '/?error=t')
        self.assertRaises(webtest.AppError, self.app.get,
                          '/?status=404%20Not%20Found')

    def test_bad_content_type(self):
        resp = self.app.get('/')
        self.assertRaises(AttributeError, lambda: resp.json)
        resp = self.app.get('/?header-content-type=application/json')
        self.assertRaises(AttributeError, lambda: resp.pyquery)
        self.assertRaises(AttributeError, lambda: resp.lxml)
        self.assertRaises(AttributeError, lambda: resp.xml)

    def test_app_from_config_file(self):
        config = os.path.join(os.path.dirname(__file__), 'deploy.ini')
        app = webtest.TestApp('config:%s#main' % config)
        resp = app.get('/')
        self.assertEqual(resp.status_int, 200)

    def test_errors(self):
        try:
            self.app.get('/?errorlog=somelogs')
            # the original `assert(False, "...")` asserted a non-empty
            # tuple, which is always true; fail() actually reports the
            # missing exception
            self.fail("An AppError should be raised")
        except AppError:
            e = sys.exc_info()[1]
            assert six.text_type(e) \
                == "Application had errors logged:\nsomelogs"

    def test_request_obj(self):
        res = self.app.get('/')
        res = self.app.request(res.request)

    def test_showbrowser(self):
        # Stub out webbrowser.open_new so no real browser is launched.
        self.filename = ''

        def open_new(f):
            self.filename = f

        webbrowser.open_new = open_new
        res = self.app.get('/')
        res.showbrowser()
        assert self.filename.startswith('file://'), self.filename

    def test_303(self):
        res = self.app.get('/?status=302%20Redirect&header-location=/foo')
        self.assertEqual(res.status_int, 302)
        print(res.location)
        self.assertEqual(res.location, 'http://localhost/foo', res)
        self.assertEqual(res.headers['location'], 'http://localhost/foo')
        res = res.follow()
        self.assertEqual(res.request.url, 'http://localhost/foo')
        self.assertIn('Response: 200 OK', str(res))
        self.assertIn('200 OK', repr(res))
        self.app.get('/?status=303%20redirect', status='3*')

    def test_204(self):
        self.app.post('/?status=204%20OK')

    def test_404(self):
        self.app.get('/?status=404%20Not%20Found', status=404)
        self.assertRaises(webtest.AppError, self.app.get, '/', status=404)

    def test_print_stderr(self):
        res = self.app.get('/')
        res.charset = 'utf-8'
        res.text = '°C'
        print_stderr(str(res))
        res.charset = None
        print_stderr(str(res))

    def test_app_error(self):
        res = self.app.get('/')
        res.charset = 'utf-8'
        res.text = '°C'
        AppError('%s %s %s %s', res.status, '', res.request.url, res)
        res.charset = None
        AppError('%s %s %s %s', res.status, '', res.request.url, res)

    def test_exception_repr(self):
        res = self.app.get('/')
        res.charset = 'utf-8'
        res.text = '°C'
        if not PY3:
            unicode(AssertionError(res))
        str(AssertionError(res))
        res.charset = None
        if not PY3:
            unicode(AssertionError(res))
        str(AssertionError(res))

    def test_fake_dict(self):
        # params only needs an items() method, not a real dict
        class FakeDict(object):
            def items(self):
                return [('a', '10'), ('a', '20')]
        self.app.post('/params', params=FakeDict())
| agpl-3.0 |
hassaanm/stock-trading | src/pybrain/rl/environments/ode/tasks/ccrl.py | 5 | 15166 | __author__ = 'Frank Sehnke, sehnke@in.tum.de'
from pybrain.rl.environments import EpisodicTask
from pybrain.rl.environments.ode.sensors import SpecificBodyPositionSensor
from scipy import tanh, zeros, array, random, sqrt, asarray
#Basic class for all ccrl tasks
class CCRLTask(EpisodicTask):
    """Base class for the CCRL arm grasping tasks (episodic ODE env).

    Builds sensor/actor normalization limits, registers body-position
    sensors for the target object and the hand, and drives the joints
    through a simple PID controller in performAction.
    """
    def __init__(self, env):
        EpisodicTask.__init__(self, env)
        #Overall maximal tourque - is multiplied with relative max tourque for individual joint.
        self.maxPower = 100.0
        self.reward_history = []
        self.count = 0 #timestep counter
        self.epiLen = 1500 #suggestet episodic length for normal Johnnie tasks
        self.incLearn = 0 #counts the task resets for incrementall learning
        self.env.FricMu = 20.0 #We need higher friction for CCRL
        self.env.dt = 0.002 #We also need more timly resolution
        # normalize standard sensors to (-1, 1)
        self.sensor_limits = []
        #Angle sensors
        for i in range(self.env.actLen):
            self.sensor_limits.append((self.env.cLowList[i], self.env.cHighList[i]))
        # Joint velocity sensors
        for i in range(self.env.actLen):
            self.sensor_limits.append((-20, 20))
        #Norm all actor dimensions to (-1, 1)
        self.actor_limits = [(-1, 1)] * env.actLen
        self.oldAction = zeros(env.actLen, float)
        # dist holds 9 hand-to-target features, recomputed in getObservation
        self.dist = zeros(9, float)
        self.dif = array([0.0, 0.0, 0.0])
        self.target = array([-6.5, 1.75, -10.5])
        self.grepRew = 0.0
        self.tableFlag = 0.0
        self.env.addSensor(SpecificBodyPositionSensor(['objectP00'], "glasPos"))
        self.env.addSensor(SpecificBodyPositionSensor(['palmLeft'], "palmPos"))
        self.env.addSensor(SpecificBodyPositionSensor(['fingerLeft1'], "finger1Pos"))
        self.env.addSensor(SpecificBodyPositionSensor(['fingerLeft2'], "finger2Pos"))
        #we changed sensors so we need to update environments sensorLength variable
        self.env.obsLen = len(self.env.getSensors())
        #normalization for the task spezific sensors
        for i in range(self.env.obsLen - 2 * self.env.actLen):
            self.sensor_limits.append((-4, 4))
        self.actor_limits = None
    def getObservation(self):
        """ a filtered mapping to getSample of the underlying environment. """
        sensors = self.env.getSensors()
        #Sensor hand to target object
        for i in range(3):
            self.dist[i] = ((sensors[self.env.obsLen - 9 + i] + sensors[self.env.obsLen - 6 + i] + sensors[self.env.obsLen - 3 + i]) / 3.0 - (sensors[self.env.obsLen - 12 + i] + self.dif[i])) * 4.0 #sensors[self.env.obsLen-12+i]
        #Sensor hand angle to horizontal plane X-Axis
        for i in range(3):
            self.dist[i + 3] = (sensors[self.env.obsLen - 3 + i] - sensors[self.env.obsLen - 6 + i]) * 5.0
        #Sensor hand angle to horizontal plane Y-Axis
        for i in range(3):
            self.dist[i + 6] = ((sensors[self.env.obsLen - 3 + i] + sensors[self.env.obsLen - 6 + i]) / 2.0 - sensors[self.env.obsLen - 9 + i]) * 10.0
        if self.sensor_limits:
            sensors = self.normalize(sensors)
        # observation = normalized sensors (minus the 4 position sensors)
        # + 9 distance features + the previous action
        sens = []
        for i in range(self.env.obsLen - 12):
            sens.append(sensors[i])
        for i in range(9):
            sens.append(self.dist[i])
        for i in self.oldAction:
            sens.append(i)
        return sens
    def performAction(self, action):
        #Filtered mapping towards performAction of the underlying environment
        #The standard CCRL task uses a PID controller to controll directly angles instead of forces
        #This makes most tasks much simpler to learn
        self.oldAction = action
        #Grasping as reflex depending on the distance to target - comment in for more easy grasping
        if abs(abs(self.dist[:3]).sum())<2.0: action[15]=1.0 #self.grepRew=action[15]*.01
        else: action[15]=-1.0 #self.grepRew=action[15]*-.03
        isJoints=array(self.env.getSensorByName('JointSensor')) #The joint angles
        isSpeeds=array(self.env.getSensorByName('JointVelocitySensor')) #The joint angular velocitys
        act=(action+1.0)/2.0*(self.env.cHighList-self.env.cLowList)+self.env.cLowList #norm output to action intervall
        action=tanh((act-isJoints-0.9*isSpeeds*self.env.tourqueList)*16.0)*self.maxPower*self.env.tourqueList #simple PID
        EpisodicTask.performAction(self, action)
        #self.env.performAction(action)
    def isFinished(self):
        #returns true if episode timesteps has reached episode length and resets the task
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            self.count += 1
            return False
    def res(self):
        #sets counter and history back, increases incremental counter
        self.count = 0
        self.incLearn += 1
        self.reward_history.append(self.getTotalReward())
        self.tableFlag = 0.0
    def getReward(self):
        #rewarded for approaching the object
        dis = sqrt((self.dist[0:3] ** 2).sum())
        return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
#Learn to grasp a glas at a fixed location
class CCRLGlasTask(CCRLTask):
    """Learn to grasp a glass at a fixed location (shorter episodes)."""
    def __init__(self, env):
        CCRLTask.__init__(self, env)
        self.dif = array([0.0, 0.0, 0.0])
        self.epiLen = 1000 #suggestet episodic length for normal Johnnie tasks
    def isFinished(self):
        #returns true if episode timesteps has reached episode length and resets the task
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            self.count += 1
            return False
    def getReward(self):
        # grip bonus when at least two grasp contacts are registered
        if self.env.glasSum >= 2: grip = 1000.0
        else: grip = 0.0
        if self.env.tableSum > 0: self.tableFlag = -1.0
        # NOTE(review): the next line assigns a *local* `tableFlag`, not
        # `self.tableFlag` - a dead store; presumably a reset of
        # self.tableFlag was intended. TODO confirm before changing.
        else: tableFlag = 0.0
        self.dist[3] = 0.0
        self.dist[8] = 0.0
        # inverse-distance shaping terms for position (dis) and pose (nig)
        dis = 100.0/((self.dist[:3] ** 2).sum()+0.1)
        nig = 10.0/((self.dist[3:] ** 2).sum()+0.1)
        if self.env.stepCounter == self.epiLen: print "Grip:", grip, "Dis:", dis, "Nig:", nig, "Table:", self.tableFlag
        return (10 + grip + nig + dis + self.tableFlag) / float(self.epiLen) #-dis
        #else:
        #    return (25.0 - dis) / float(self.epiLen) + (grip / nig - float(self.env.tableSum)) * 0.1 #+self.grepRew (10.0-dis)/float(self.epiLen)+
#Learn to grasp a plate at a fixed location
class CCRLPlateTask(CCRLTask):
    """Learn to grasp a plate at a fixed location (offset grasp point)."""
    def __init__(self, env):
        CCRLTask.__init__(self, env)
        # grasp-point offset relative to the object position sensor
        self.dif = array([0.0, 0.2, 0.8])
        self.epiLen = 1000 #suggestet episodic length for normal Johnnie tasks
    def isFinished(self):
        #returns true if episode timesteps has reached episode length and resets the task
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            # place the object once, on the first step of the episode
            if self.count == 1: self.pertGlasPos(0)
            self.count += 1
            return False
    def pertGlasPos(self, num):
        # single fixed placement for this task
        if num == 0: self.env.pert = asarray([0.0, 0.0, 0.5])
    def getReward(self):
        if self.env.glasSum >= 2: grip = 1.0
        else: grip = 0.0
        if self.env.tableSum > 0: self.tableFlag = 10.0
        #self.dist[4]=0.0
        #self.dist[8]=0.0
        dis = sqrt((self.dist[0:3] ** 2).sum())
        # big terminal reward on the last step, shaping reward otherwise
        if self.count == self.epiLen:
            return 25.0 + grip - dis - self.tableFlag #/nig
        else:
            return (25.0 - dis) / float(self.epiLen) + (grip - float(self.env.tableSum)) * 0.1 #/nig -(1.0+self.oldAction[15])
#Learn to grasp a glas at 5 different locations
class CCRLGlasVarTask(CCRLGlasTask):
    """Learn to grasp a glass at 5 different fixed locations per episode."""
    def __init__(self, env):
        CCRLGlasTask.__init__(self, env)
        self.epiLen = 5000 #suggestet episodic length for normal Johnnie tasks
    def isFinished(self):
        #returns true if episode timesteps has reached episode length and resets the task
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            # the episode is split into 5 segments; the environment is
            # reset and the glass re-placed at each segment boundary
            if self.count == 1:
                self.pertGlasPos(0)
            if self.count == self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(1)
            if self.count == 2 * self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(2)
            if self.count == 3 * self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(3)
            if self.count == 4 * self.epiLen / 5 + 1:
                self.env.reset()
                self.pertGlasPos(4)
            self.count += 1
            return False
    def pertGlasPos(self, num):
        # the 5 fixed glass placements, indexed by segment number
        if num == 0: self.env.pert = asarray([1.0, 0.0, 0.5])
        if num == 1: self.env.pert = asarray([-1.0, 0.0, 0.5])
        if num == 2: self.env.pert = asarray([1.0, 0.0, 0.0])
        if num == 3: self.env.pert = asarray([-1.0, 0.0, 0.0])
        if num == 4: self.env.pert = asarray([0.0, 0.0, 0.25])
    def getReward(self):
        if self.env.glasSum >= 2: grip = 1.0
        else: grip = 0.0
        if self.env.tableSum > 0: self.tableFlag = 10.0
        self.dist[3] = 0.0
        self.dist[8] = 0.0
        dis = sqrt((self.dist ** 2).sum())
        nig = (abs(self.dist[4]) + 1.0)
        # terminal-style reward at each segment boundary, shaping otherwise
        if self.count == self.epiLen or self.count == self.epiLen / 5 or self.count == 2 * self.epiLen / 5 or self.count == 3 * self.epiLen / 5 or self.count == 4 * self.epiLen / 5:
            return 25.0 + grip / nig - dis - self.tableFlag #/nig
        else:
            return (25.0 - dis) / float(self.epiLen) + (grip / nig - float(self.env.tableSum)) * 0.1 #/nig
#Learn to grasp a glas at random locations
class CCRLGlasVarRandTask(CCRLGlasVarTask):
    """Variant of CCRLGlasVarTask with uniformly random glass placement."""
    def pertGlasPos(self, num):
        # `num` is ignored: x drawn from [-1, 1), z from [0.5, 1.0)
        self.env.pert = asarray([random.random()*2.0 - 1.0, 0.0, random.random()*0.5 + 0.5])
#Some experimental stuff
class CCRLPointTask(CCRLGlasVarTask):
    """Experimental: reach a target point with a restructured observation."""
    def __init__(self, env):
        CCRLGlasVarTask.__init__(self, env)
        self.epiLen = 1000 #suggestet episodic length for normal Johnnie tasks
    def isFinished(self):
        #returns true if episode timesteps has reached episode length and resets the task
        if self.count > self.epiLen:
            self.res()
            return True
        else:
            # place the target once, on the first step of the episode
            if self.count == 1:
                self.pertGlasPos(0)
            self.count += 1
            return False
    def getObservation(self):
        """ a filtered mapping to getSample of the underlying environment. """
        sensors = self.env.getSensors()
        sensSort = []
        #Angle and angleVelocity
        for i in range(32):
            sensSort.append(sensors[i])
        #Angles wanted (old action)
        for i in self.oldAction:
            sensSort.append(i)
        #Hand position
        for i in range(3):
            sensSort.append((sensors[38 + i] + sensors[41 + i]) / 2)
        #Hand orientation (Hack - make correkt!!!!)
        sensSort.append((sensors[38] - sensors[41]) / 2 - sensors[35]) #pitch
        sensSort.append((sensors[38 + 1] - sensors[41 + 1]) / 2 - sensors[35 + 1]) #yaw
        sensSort.append((sensors[38 + 1] - sensors[41 + 1])) #roll
        #Target position
        for i in range(3):
            sensSort.append(self.target[i])
        #Target orientation
        for i in range(3):
            sensSort.append(0.0)
        #Object type (start with random)
        sensSort.append(float(random.randint(-1, 1))) #roll
        #normalisation
        if self.sensor_limits:
            sensors = self.normalize(sensors)
        # final observation: 32 normalized sensors + 29 task features
        sens = []
        for i in range(32):
            sens.append(sensors[i])
        for i in range(29):
            sens.append(sensSort[i + 32])
        #calc dist to target
        self.dist = array([(sens[54] - sens[48]), (sens[55] - sens[49]), (sens[56] - sens[50]), sens[51], sens[52], sens[53], sens[15]])
        return sens
    def pertGlasPos(self, num):
        if num == 0: self.target = asarray([0.0, 0.0, 1.0])
        self.env.pert = self.target.copy()
        # shift the stored target into the observation coordinate frame
        self.target = self.target.copy() + array([-6.5, 1.75, -10.5])
    def getReward(self):
        # reward approach to the target point, penalize table contact
        dis = sqrt((self.dist ** 2).sum())
        return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
class CCRLPointVarTask(CCRLPointTask):
    """Pointing task with a variable target.

    The episode is split into two halves: at the midpoint the environment is
    reset and the target moved to a second position. A terminal bonus is paid
    at the end of each half-episode.
    """
    def __init__(self, env):
        CCRLPointTask.__init__(self, env)
        self.epiLen = 2000  # suggested episode length for this task
    def isFinished(self):
        # Returns True (and resets the task) once the episode length is exceeded.
        if self.count > self.epiLen:
            self.res()  # NOTE(review): res() presumably resets task state — defined on a base class outside this view; confirm
            return True
        else:
            if self.count == 1:
                self.pertGlasPos(0)
            if self.count == self.epiLen / 2 + 1:
                # Halfway through: restart the environment with the second target.
                self.env.reset()
                self.pertGlasPos(1)
            self.count += 1
            return False
    def getObservation(self):
        """ a filtered mapping to getSample of the underlying environment. """
        sensors = self.env.getSensors()
        sensSort = []
        # Joint angles and angle velocities (raw here; normalized copies used below).
        for i in range(32):
            sensSort.append(sensors[i])
        # Angles wanted (the previous action).
        for i in self.oldAction:
            sensSort.append(i)
        # Hand position: midpoint of the two gripper-point sensor triples.
        for i in range(3):
            sensSort.append((sensors[38 + i] + sensors[41 + i]) / 2)
        # Hand orientation (acknowledged hack — not a proper rotation).
        sensSort.append((sensors[38] - sensors[41]) / 2 - sensors[35]) #pitch
        sensSort.append((sensors[38 + 1] - sensors[41 + 1]) / 2 - sensors[35 + 1]) #yaw
        sensSort.append((sensors[38 + 1] - sensors[41 + 1])) #roll — NOTE(review): same difference as the yaw term; looks like a copy/paste slip, confirm
        # Target position.
        for i in range(3):
            sensSort.append(self.target[i])
        # Target orientation (fixed upright).
        for i in range(3):
            sensSort.append(0.0)
        # Object type (randomized every observation).
        sensSort.append(float(random.randint(-1, 1)))
        # Only the 32 joint sensors get normalized; the 29 task features are used as-is.
        if self.sensor_limits:
            sensors = self.normalize(sensors)
        sens = []
        for i in range(32):
            sens.append(sensors[i])
        for i in range(29):
            sens.append(sensSort[i + 32])
        # As in the base task, but positions are weighted 10x and the joint
        # term is offset by 1.0.
        self.dist = array([(sens[54] - sens[48]) * 10.0, (sens[55] - sens[49]) * 10.0, (sens[56] - sens[50]) * 10.0, sens[51], sens[52], sens[53], 1.0 + sens[15]])
        return sens
    def pertGlasPos(self, num):
        # Five candidate target positions; only 0 and 1 are used by isFinished.
        if num == 0: self.target = asarray([1.0, 0.0, 1.0])
        if num == 1: self.target = asarray([-1.0, 0.0, 1.0])
        if num == 2: self.target = asarray([1.0, 0.0, 0.0])
        if num == 3: self.target = asarray([-1.0, 0.0, 0.0])
        if num == 4: self.target = asarray([0.0, 0.0, 0.5])
        self.env.pert = self.target.copy()
        # Shift from perturbation space into world/sensor coordinates.
        self.target = self.target.copy() + array([-6.5, 1.75, -10.5])
    def getReward(self):
        # Dense shaping every step, plus a terminal bonus at the end of each half.
        dis = sqrt((self.dist ** 2).sum())
        subEpi = self.epiLen / 2
        if self.count == self.epiLen or self.count == subEpi:
            return (25.0 - dis) / 2.0
        else:
            return (25.0 - dis) / float(self.epiLen) - float(self.env.tableSum) * 0.1
| apache-2.0 |
tek/amino | amino/case.py | 1 | 4782 | from types import SimpleNamespace
from typing import Type, Generic, Any, TypeVar, Callable, Tuple, _GenericAlias, get_type_hints
import inspect
from amino.util.string import snake_case
from amino import ADT, Map, List, Lists, do, Do, Either, Try, Just, Nothing, Maybe
from amino.algebra import Algebra
from amino.logging import module_log
log = module_log()
Alg = TypeVar('Alg', bound=Algebra)
C = TypeVar('C', bound=Alg)
A = TypeVar('A')
B = TypeVar('B')
class CaseMeta(type):
    """Metaclass for Case: wires up algebra dispatch at class-creation time.

    When a subclass is declared with the ``alg=SomeAlgebra`` keyword, a
    dispatch function over that algebra's variants is built once and stored
    on the new class as the ``case`` attribute.
    """
    def __new__(cls: type, name: str, bases: tuple, namespace: SimpleNamespace, alg: Type[Alg]=None, **kw: Any) -> type:
        new_cls = super().__new__(cls, name, bases, namespace, **kw)
        if alg is None:
            return new_cls
        new_cls.case = case_dispatch(new_cls, alg)
        return new_cls
class Case(Generic[Alg, B], metaclass=CaseMeta):
    """Base class for type-dispatching functions over an algebra (ADT).

    Subclasses are declared as ``class MyCase(Case[MyAlg, Result], alg=MyAlg)``
    with one handler method per variant; the metaclass installs the combined
    dispatcher as the ``case`` attribute.
    """
    @classmethod
    def match(cls, scrutinee: Alg, *a: Any, **kw: Any) -> B:
        # Convenience entry point: construct an instance and dispatch once.
        return cls().case(scrutinee, *a, **kw)
    def __call__(self, scrutinee: Alg, *a: Any, **kw: Any) -> B:
        # Instances behave like plain functions of the scrutinee.
        return self.case(scrutinee, *a, **kw)
def normalize_type(tpe: Type[C]) -> Type[C]:
    """Strip a generic alias down to its runtime class (e.g. List[int] -> list).

    Plain classes, which have no ``__origin__``, are returned unchanged.
    """
    origin = getattr(tpe, '__origin__', None)
    return tpe if origin is None else origin
def case_list(
        cls: Type[Case[C, B]],
        variants: List[Type[C]],
        alg: Type[C],
        has_default: bool,
) -> Map[Type[C], Callable[[Case[C, B]], B]]:
    """Resolve one handler method of *cls* for every variant of the algebra.

    A method of *cls* counts as a handler when the type annotation of its
    first non-self parameter is the algebra type or a subtype of it (plain
    classes and generic aliases both accepted). Returns one handler per
    variant, in variant order; a variant with no matching handler gets a
    fallback that raises unless *has_default* is set.
    """
    @do(Maybe[Tuple[Type[C], Callable[[Case[C, B]], B]]])
    def is_handler(name: str, f: Callable) -> Do:
        # ``do``-wrapped coroutines keep the original function in __do_original.
        effective = getattr(f, '__do_original', f)
        hints = yield Try(get_type_hints, effective).to_maybe
        spec = yield Try(inspect.getfullargspec, effective).to_maybe
        # args[0] is self; the scrutinee parameter is args[1].
        param_name = yield Lists.wrap(spec.args).lift(1)
        param_type = yield Map(hints).lift(param_name)
        # Accept plain classes and generic aliases whose origin is in the algebra.
        # NOTE(review): both accepting branches yield the same value; kept as-is.
        yield (
            Just((normalize_type(param_type), f))
            if isinstance(param_type, type) and issubclass(param_type, alg) else
            Just((normalize_type(param_type), f))
            if isinstance(param_type, _GenericAlias) and issubclass(param_type.__origin__, alg) else
            Nothing
        )
    handlers = Lists.wrap(inspect.getmembers(cls, inspect.isfunction)).flat_map2(is_handler)
    def find_handler(variant: Type[C]) -> Callable[[Case[C, B]], B]:
        # Raising is deferred into the fallback so a case_default can still win.
        def not_found() -> None:
            if not has_default:
                raise Exception(f'no case defined for {variant} on {cls.__name__}')
        def match_handler(tpe: type, f: Callable) -> Maybe[Callable[[Case[C, B]], B]]:
            return Just(f) if issubclass(tpe, variant) else Nothing
        return handlers.find_map2(match_handler).get_or_else(not_found)
    return variants.map(find_handler)
# TODO determine default case from param type being the ADT
def case_dispatch(cls: Type[Case[C, B]], alg: Type[C]) -> Callable[[Case[C, B], C], B]:
    """Build the dispatcher that the metaclass stores as ``cls.case``.

    Handlers are resolved once, ordered by each variant's __algebra_index__,
    so dispatch at call time is a plain index lookup on the scrutinee. A
    scrutinee without an index, or out of range, falls through to the class's
    ``case_default`` (if defined) or raises TypeError.
    """
    def error(func: Case[C, B], variant: Alg, *a: Any, **kw: Any) -> None:
        raise TypeError(f'no case defined for {variant} on {cls.__name__}')
    default = getattr(cls, 'case_default', error)
    has_default = default is not error
    cases = case_list(cls, alg.__algebra_variants__.sort_by(lambda a: a.__algebra_index__), alg, has_default)
    length = cases.length
    def case(func: Case[C, B], scrutinee: C, *a: Any, **kw: Any) -> B:
        index = getattr(scrutinee, '__algebra_index__', None)
        handler = (cases[index] or default) if index is not None and index < length else default
        return handler(func, scrutinee, *a, **kw)
    return case
class CaseRecMeta(type):
    """Metaclass for CaseRec: installs the algebra dispatcher, like CaseMeta."""
    def __new__(cls, name: str, bases: tuple, namespace: dict, alg: Type[Alg]=None, **kw: Any) -> type:
        new_cls = super().__new__(cls, name, bases, namespace, **kw)
        if alg is None:
            return new_cls
        new_cls.case = case_dispatch(new_cls, alg)
        return new_cls
class CaseRec(Generic[Alg, A], metaclass=CaseRecMeta):
    """Tail-recursive variant of Case: a call produces a Rec step for the trampoline."""
    def __call__(self, scrutinee: Alg, *a: Any, **kw: Any) -> 'Rec[Alg, A]':
        return Rec(self, scrutinee, a, kw)
class RecStep(Generic[Alg, A], ADT['RecStep[Alg, A]']):
    """ADT of trampoline steps: either a further Rec call or a terminal Term."""
    pass
class Rec(Generic[Alg, A], RecStep[Alg, A]):
    """A deferred CaseRec invocation: the dispatcher plus the arguments to apply."""
    def __init__(self, func: CaseRec[Alg, A], scrutinee: Alg, args: list, kwargs: dict) -> None:
        self.func = func  # the CaseRec dispatcher to re-enter
        self.scrutinee = scrutinee  # value to dispatch on next
        self.args = args
        self.kwargs = kwargs
    def eval(self) -> A:
        # Run the trampoline to completion.
        return eval_case_rec(self)
class Term(Generic[Alg, A], RecStep[Alg, A]):
    """Terminal trampoline step carrying the final result."""
    def __init__(self, result: A) -> None:
        self.result = result
def eval_case_rec(step: Rec[Alg, A]) -> A:
    """Trampoline loop: apply Rec steps until a Term is produced.

    Keeps stack depth constant regardless of how many steps a CaseRec chains,
    which is the point of the CaseRec machinery.
    """
    while True:
        step = step.func.case(step.scrutinee, *step.args, **step.kwargs)
        if isinstance(step, Term):
            return step.result
        elif not isinstance(step, Rec):
            raise Exception(f'invalid result in CaseRec step: {step}')
__all__ = ('Case', 'CaseRec', 'Term')
| mit |
evilzone/Artificial-Intelligence | search.py | 1 | 7805 | # search.py
# ---------
# Licensing Information: Please do not distribute or publish solutions to this
# project. You are free to use and extend these projects for educational
# purposes. The Pacman AI projects were developed at UC Berkeley, primarily by
# John DeNero (denero@cs.berkeley.edu) and Dan Klein (klein@cs.berkeley.edu).
# For more info, see http://inst.eecs.berkeley.edu/~cs188/sp09/pacman.html
"""
In search.py, you will implement generic search algorithms which are called
by Pacman agents (in searchAgents.py).
"""
import util
class SearchProblem:
    """
    This class outlines the structure of a search problem, but doesn't implement
    any of the methods (in object-oriented terminology: an abstract class).

    You do not need to change anything in this class, ever.
    """
    def getStartState(self):
        """
        Returns the start state for the search problem
        """
        # Abstract: util.raiseNotDefined() aborts with a "method not implemented" message.
        util.raiseNotDefined()
    def isGoalState(self, state):
        """
        state: Search state

        Returns True if and only if the state is a valid goal state
        """
        util.raiseNotDefined()
    def getSuccessors(self, state):
        """
        state: Search state

        For a given state, this should return a list of triples,
        (successor, action, stepCost), where 'successor' is a
        successor to the current state, 'action' is the action
        required to get there, and 'stepCost' is the incremental
        cost of expanding to that successor
        """
        util.raiseNotDefined()
    def getCostOfActions(self, actions):
        """
        actions: A list of actions to take

        This method returns the total cost of a particular sequence of actions. The sequence must
        be composed of legal moves
        """
        util.raiseNotDefined()
def tinyMazeSearch(problem):
    """
    Returns a sequence of moves that solves tinyMaze.  For any other
    maze, the sequence of moves will be incorrect, so only use this for tinyMaze
    """
    from game import Directions
    south = Directions.SOUTH
    west = Directions.WEST
    return [south, south, west, south, west, west, south, west]
def depthFirstSearch(problem):
    """
    Search the deepest nodes in the search tree first [p 85].

    Graph-search DFS: each state is expanded at most once; returns the list
    of actions leading from the start state to a goal state.

    Implementation notes: the fringe holds records of the form
    [parent_state, state, action, step_cost]; ``visit`` records the order of
    expansion so the action path can be reconstructed backwards through
    parent links once a goal is popped.

    Fixed defects: removed the unused ``from game import Actions``, the
    commented-out debugging code, and the unreachable ``util.raiseNotDefined()``
    that followed the return statement.
    """
    soln = []    # actions, collected goal-first and reversed at the end
    explr = []   # closed set of already-expanded states
    visit = []   # expansion order, used for path reconstruction
    fringe = util.Stack()
    fringe.push([None, problem.getStartState(), '', 0])
    while not fringe.isEmpty():
        node = parent, state, dirctn, cost = fringe.pop()
        if problem.isGoalState(state):
            visit.append(node)
            soln.append(node[2])
            break
        if state not in explr:
            for successor in problem.getSuccessors(state):
                fringe.push([state, successor[0], successor[1], successor[2]])
            visit.append(node)
            explr.append(state)
    # Walk the visit list backwards: each accepted node must be the parent of
    # the previously accepted one; skipped entries were dead-end expansions.
    parentNode = visit.pop()
    while len(visit) != 1:
        curNode = visit.pop()
        while curNode[1] != parentNode[0]:
            curNode = visit.pop()
            if curNode[0] is None:
                break
        parentNode = curNode
        soln.append(curNode[2])
    return soln[::-1]
def breadthFirstSearch(problem):
    """
    Search the shallowest nodes in the search tree first. [p 81]

    Graph-search BFS: identical bookkeeping to depthFirstSearch but with a
    FIFO fringe, so the first goal popped is reached in the fewest actions.

    Fixed defect: removed the unreachable ``util.raiseNotDefined()`` that
    followed the return statement.
    """
    soln = []    # actions, collected goal-first and reversed at the end
    explr = []   # closed set of already-expanded states
    visit = []   # expansion order, used for path reconstruction
    fringe = util.Queue()
    fringe.push([None, problem.getStartState(), '', 0])
    while not fringe.isEmpty():
        node = parent, state, dirctn, cost = fringe.pop()
        if problem.isGoalState(state):
            visit.append(node)
            soln.append(node[2])
            break
        if state not in explr:
            for successor in problem.getSuccessors(state):
                fringe.push([state, successor[0], successor[1], successor[2]])
            visit.append(node)
            explr.append(state)
    # Reconstruct the action sequence by chasing parent links backwards.
    parentNode = visit.pop()
    while len(visit) != 1:
        curNode = visit.pop()
        while curNode[1] != parentNode[0]:
            curNode = visit.pop()
            if curNode[0] is None:
                break
        parentNode = curNode
        soln.append(curNode[2])
    return soln[::-1]
def uniformCostSearch(problem):
    """
    Search the node of least total cost first.

    Dijkstra-style graph search: the fringe is a priority queue keyed on the
    cumulative path cost g(n). A popped state is expanded only if it was
    never expanded before at an equal-or-lower cost.

    Fixed defects: removed the commented-out debugging prints and the
    unreachable ``util.raiseNotDefined()`` that followed the return statement.
    """
    soln = []
    explr = []   # (state, cost-at-expansion) pairs
    visit = []   # expansion order, used for path reconstruction
    fringe = util.PriorityQueue()
    fringe.push([None, problem.getStartState(), '', 0], 0)
    while not fringe.isEmpty():
        expand = True
        node = parent, state, dirctn, cost = fringe.pop()
        if problem.isGoalState(state):
            visit.append(node)
            soln.append(node[2])
            break
        # Skip states already expanded at an equal or cheaper cost.
        for vState, vCost in explr:
            if state == vState and cost >= vCost:
                expand = False
        if expand:
            for successor in problem.getSuccessors(state):
                # Stored cost is cumulative g(successor), also used as priority.
                fringe.push([state, successor[0], successor[1], cost + successor[2]], cost + successor[2])
            visit.append(node)
            explr.append((state, cost))
    # Reconstruct the action sequence by chasing parent links backwards.
    parentNode = visit.pop()
    while len(visit) != 1:
        curNode = visit.pop()
        while curNode[1] != parentNode[0]:
            curNode = visit.pop()
            if curNode[0] is None:
                break
        parentNode = curNode
        soln.append(curNode[2])
    return soln[::-1]
def nullHeuristic(state, problem=None):
    """The trivial admissible heuristic: estimates zero remaining cost from
    any state to the nearest goal of the provided SearchProblem."""
    return 0
def aStarSearch(problem, heuristic=nullHeuristic):
    """
    Search the node that has the lowest combined cost and heuristic first.

    A* graph search: fringe priority is f(n) = g(n) + h(n).

    Fixed defect: the heuristic was evaluated at the *parent* state
    (``heuristic(state, problem)``) when pushing successors, so the queue was
    not ordered by f of the node being inserted; it is now evaluated at the
    successor. Also removed the unreachable ``util.raiseNotDefined()``.
    """
    soln = []
    explr = []   # (state, cost-at-expansion) pairs
    visit = []   # expansion order, used for path reconstruction
    fringe = util.PriorityQueue()
    node = [None, problem.getStartState(), '', 0]
    fringe.push(node, heuristic(node[1], problem))
    while not fringe.isEmpty():
        expand = True
        node = parent, state, dirctn, cost = fringe.pop()
        if problem.isGoalState(state):
            visit.append(node)
            soln.append(node[2])
            break
        # Skip states already expanded at an equal or cheaper cost.
        for vState, vCost in explr:
            if state == vState and cost >= vCost:
                expand = False
        if expand:
            for successor in problem.getSuccessors(state):
                # Stored cost stays pure g; priority is g(successor) + h(successor).
                fringe.push([state, successor[0], successor[1], cost + successor[2]],
                            cost + successor[2] + heuristic(successor[0], problem))
            visit.append(node)
            explr.append((state, cost))
    # Reconstruct the action sequence by chasing parent links backwards.
    parentNode = visit.pop()
    while len(visit) != 1:
        curNode = visit.pop()
        while curNode[1] != parentNode[0]:
            curNode = visit.pop()
            if curNode[0] is None:
                break
        parentNode = curNode
        soln.append(curNode[2])
    return soln[::-1]
# Abbreviations
bfs = breadthFirstSearch
dfs = depthFirstSearch
astar = aStarSearch
ucs = uniformCostSearch | gpl-2.0 |
michaelld/gnuradio | gr-dtv/python/dtv/atsc_rx.py | 9 | 2500 | #!/usr/bin/env /usr/bin/python
#
# Copyright 2014 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
from __future__ import absolute_import
from __future__ import unicode_literals
from gnuradio import gr, filter, analog
from .atsc_rx_filter import *
class atsc_rx(gr.hier_block2):
    """ATSC receiver hierarchical block: complex baseband in, MPEG2-TS bytes out.

    Chains filtering, pilot PLL, equalization and the full ATSC decode
    pipeline (Viterbi, deinterleave, Reed-Solomon, derandomize, depad).

    NOTE(review): ``dtv`` is not imported in this file's visible import list;
    presumably it arrives via ``from .atsc_rx_filter import *`` — confirm.
    """
    def __init__(self, input_rate, sps):
        # input_rate: sample rate of the incoming complex stream
        # sps: samples per symbol after the receiver filter
        gr.hier_block2.__init__(self, "atsc_rx",
                                gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
                                gr.io_signature(1, 1, gr.sizeof_char))      # Output signature

        # ATSC receiver filter/interpolator
        rx_filt = atsc_rx_filter(input_rate, sps)

        # Lock on to pilot tone, shift to DC, then discard Q channel
        output_rate = ATSC_SYMBOL_RATE*sps
        pll = dtv.atsc_fpll(output_rate)

        # Remove pilot tone now at DC
        dcr = filter.dc_blocker_ff(4096)

        # Normalize signal to proper constellation amplitude
        agc = analog.agc_ff(1e-5, 4.0)

        # Synchronize bit and segment timing
        btl = dtv.atsc_sync(output_rate)

        # Check for correct field sync
        fsc = dtv.atsc_fs_checker()

        # Equalize channel using training sequences
        equ = dtv.atsc_equalizer()

        # Remove convolutional trellis coding
        vit = dtv.atsc_viterbi_decoder()

        # Remove convolutional interleaving
        dei = dtv.atsc_deinterleaver()

        # Reed-Solomon decode
        rsd = dtv.atsc_rs_decoder()

        # Derandomize MPEG2-TS packet
        der = dtv.atsc_derandomizer()

        # Remove padding from packet
        dep = dtv.atsc_depad()

        # Connect pipeline
        self.connect(self, rx_filt, pll, dcr, agc, btl, fsc, equ)
        self.connect(equ, vit, dei, rsd, der, dep, self)
| gpl-3.0 |
utkbansal/kuma | vendor/packages/translate/convert/po2csv.py | 25 | 3743 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2003-2006 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""Convert Gettext PO localization files to Comma-Separated Value (.csv) files.
See: http://docs.translatehouse.org/projects/translate-toolkit/en/latest/commands/csv2po.html
for examples and usage instructions.
"""
from translate.storage import csvl10n, po
class po2csv:
    """Converts the units of a Gettext PO store into rows of a CSV store."""

    def convertcomments(self, inputunit):
        """Collapse the unit's source-code locations into one space-separated field."""
        return " ".join(inputunit.getlocations())

    def convertunit(self, inputunit):
        """Build a csvunit from a PO unit; header and blank units map to None."""
        csvunit = csvl10n.csvunit()
        if inputunit.isheader() or inputunit.isblank():
            return None
        csvunit.location = self.convertcomments(inputunit)
        csvunit.source = inputunit.source
        csvunit.target = inputunit.target
        return csvunit

    def convertplurals(self, inputunit):
        """Extract the first plural form of a PO plural unit.

        Only the first plural form is converted, so languages with multiple
        plurals lose information; single-plural units are skipped (None).
        """
        if len(inputunit.target.strings) == 1:  # no plural forms present
            return None
        csvunit = csvl10n.csvunit()
        csvunit.location = self.convertcomments(inputunit)
        csvunit.source = inputunit.source.strings[1]
        csvunit.target = inputunit.target.strings[1]
        return csvunit

    def convertstore(self, inputstore, columnorder=None):
        """Convert every unit (and its first plural form) of *inputstore*."""
        fieldnames = ['location', 'source', 'target'] if columnorder is None else columnorder
        outputstore = csvl10n.csvfile(fieldnames=fieldnames)
        for unit in inputstore.units:
            converted = self.convertunit(unit)
            if converted is not None:
                outputstore.addunit(converted)
            if unit.hasplural():
                plural = self.convertplurals(unit)
                if plural is not None:
                    outputstore.addunit(plural)
        return outputstore
def convertcsv(inputfile, outputfile, templatefile, columnorder=None):
    """reads in inputfile using po, converts using po2csv, writes to outputfile"""
    # templatefile is unused but required by the converter framework's signature.
    inputstore = po.pofile(inputfile)
    if inputstore.isempty():
        return 0
    outputstore = po2csv().convertstore(inputstore, columnorder)
    outputfile.write(str(outputstore))
    return 1
def main(argv=None):
    """Command-line entry point wiring po2csv into the converter framework."""
    from translate.convert import convert
    # Map input extension -> (output extension, conversion function).
    formats = {"po": ("csv", convertcsv)}
    parser = convert.ConvertOptionParser(formats, description=__doc__)
    parser.add_option("", "--columnorder", dest="columnorder", default=None,
                      help="specify the order and position of columns (location,source,target)")
    parser.passthrough.append("columnorder")
    parser.run(argv)
| mpl-2.0 |
KitKatXperience/platform_external_chromium_org | chrome/test/functional/prefetch.py | 79 | 4170 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This functional test spawns a web server, and runs chrome to point
at that web server.
The content served contains prefetch requests, and the tests assert that the
webserver logs reflect that.
Run like any functional test:
$ python chrome/test/functional/prefetch.py
in a repo with a built pyautolib
The import of multiprocessing implies python 2.6 is required
"""
import os
import time
import multiprocessing
import Queue
import string
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import pyauto_functional # Must be imported before pyauto
import pyauto
# this class handles IPC retrieving server "logs" from our integral
# server. Each test should clear() the log, and then run asserts on
# the retrieval list.
# at startup, the server puts an int in the queue which is its port,
# we store that for subsequent tests
class ServerLog:
  """Drains the web server's queue into a retrieval log for the test.

  The server's first queue item is its port (an int); every later item is
  the path of a resource the server handled.
  """
  def __init__(self, queue):
    self.clear()
    self.port = None
    self.queue = queue

  def clear(self):
    """Forget all previously recorded retrievals."""
    self.log = {}

  def _readQueue(self):
    """Drain any pending queue items into self.port / self.log."""
    try:
      while True:
        item = self.queue.get(False)
        if isinstance(item, int):
          self.port = item
        else:
          self.log[item] = True
    except Queue.Empty:
      return

  def getPort(self):
    """Return the server's port, draining the queue if it is not yet known."""
    if not self.port:
      self._readQueue()
    return self.port

  def isRetrieved(self, path):
    """True if the server has served *path*; None otherwise."""
    self._readQueue()
    return self.log.get(path)
#
# The next few classes run a simple web server that returns log information
# via a multiprocessing.Queue.
#
class AbstractPrefetchServerHandler(BaseHTTPRequestHandler):
  """Handler serving the prefetch test pages.

  ``content`` maps request path -> (status code, body). The origin page
  declares a static <link rel="prefetch"> and injects a dynamic one from
  JavaScript on load; both targets are near-empty documents whose retrieval
  the test later asserts. Subclasses must provide ``queue`` (see
  run_web_server); every requested path is pushed onto it.
  """
  content = {
      "prefetch-origin.html":
      (200, """<html><head>
<link rel="prefetch" href="static-prefetch-target.html">
<script type="text/javascript">
function changeParagraph()
{
  var newPara = document.createElement("p");
  newPara.innerHTML =
    "<link rel=\\"prefetch\\" href=\\"dynamic-prefetch-target.html\\">" +
    "<p>This paragraph contains a dynamic link prefetch.  " +
    "The target of this prefetch is " +
    "<a href=\\"dynamic-prefetch-target.html\\">this document.</a>";
  var para = document.getElementById("p1");
  document.body.insertBefore(newPara,para);
}
</script>
</head>
<body onload="changeParagraph()">
<p id="p1">This is a document that contains a link prefetch.  The target of
that prefetch is <a href="static-prefetch-target.html">this document.</a>
</body>"""),
      "static-prefetch-target.html":
      (200, "<html><head></head><body>empty</body>"),
      "dynamic-prefetch-target.html":
      (200, "<html><head></head><body>empty</body>")}

  def do_GET(self):
    # Record the request path (without the leading '/') for ServerLog.
    self.queue.put(self.path[1:])
    try:
      response_code, response = self.content[self.path[1:]]
      self.send_response(response_code)
      self.end_headers()
      self.wfile.write(response)
    except KeyError:
      self.send_response(404)
      self.end_headers()
def run_web_server(queue_arg):
  """Run the prefetch test HTTP server forever on an OS-assigned port.

  The chosen port is published as the first item on *queue_arg* so the test
  process can discover the server.

  Fixed defect: the port was previously put on the module-global ``queue``
  rather than the queue actually passed in, which silently broke any caller
  supplying a different queue object.
  """
  class PrefetchServerHandler(AbstractPrefetchServerHandler):
    queue = queue_arg
  server = HTTPServer(('', 0), PrefetchServerHandler)
  queue_arg.put(server.server_port)
  server.serve_forever()
#
# Here's the test itself
#
queue = multiprocessing.Queue()
server_log = ServerLog(queue)
class PrefetchTest(pyauto.PyUITest):
  """Testcase for Prefetching"""
  def testBasic(self):
    # Fresh log so earlier requests cannot satisfy the assertions below.
    server_log.clear()
    url = "http://localhost:%d/prefetch-origin.html" % server_log.getPort()
    self.NavigateToURL(url)
    self.assertEqual(True, server_log.isRetrieved("prefetch-origin.html"))
    time.sleep(0.1)  # required since prefetches occur after onload
    self.assertEqual(True, server_log.isRetrieved(
        "static-prefetch-target.html"))
    self.assertEqual(True, server_log.isRetrieved(
        "dynamic-prefetch-target.html"))
if __name__ == '__main__':
web_server = multiprocessing.Process(target=run_web_server,args=(queue,))
web_server.daemon = True
web_server.start()
pyauto_functional.Main()
| bsd-3-clause |
dsyang/buck | programs/test_buck_tool.py | 8 | 3189 | # Copyright 2016-present Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import unittest
from buck_tool import CommandLineArgs
class TestCommandLineArgs(unittest.TestCase):
    """Tests for CommandLineArgs parsing of buck invocations.

    CommandLineArgs splits argv into the buck subcommand, global (pre-command)
    options, and command-specific options, and decides whether the invocation
    is help-only (no build required) and/or a version query.
    """
    def test_empty_command(self):
        args = CommandLineArgs(["buck"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help(), "With no arguments should show help")

    def test_single_command(self):
        args = CommandLineArgs(["buck", "clean"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, [])
        self.assertFalse(args.is_help())

    def test_global_short_help(self):
        args = CommandLineArgs(["buck", "-h"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, ["-h"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help())

    def test_global_help(self):
        args = CommandLineArgs(["buck", "--help"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, ["--help"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help())

    def test_global_version(self):
        args = CommandLineArgs(["buck", "--version"])
        self.assertEqual(args.command, None)
        self.assertEqual(args.buck_options, ["--version"])
        self.assertEqual(args.command_options, [])
        self.assertTrue(args.is_help(), "--version does not require a build")
        self.assertTrue(args.is_version())

    def test_command_help(self):
        # --help after a command belongs to that command.
        args = CommandLineArgs(["buck", "clean", "--help"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, [])
        self.assertEqual(args.command_options, ["--help"])
        self.assertTrue(args.is_help())

    def test_help_command(self):
        # A global --help before a command is recorded but does not make the
        # whole invocation help-only.
        args = CommandLineArgs(["buck", "--help", "clean"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, ["--help"])
        self.assertEqual(args.command_options, [])
        self.assertFalse(args.is_help(), "Global --help ignored with command")

    def test_command_all(self):
        # Mixed global and command options: first non-option wins as command.
        args = CommandLineArgs(["buck", "--help", "--version", "clean", "--help", "all"])
        self.assertEqual(args.command, "clean")
        self.assertEqual(args.buck_options, ["--help", "--version"])
        self.assertEqual(args.command_options, ["--help", "all"])
        self.assertTrue(args.is_help())
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
annegentle/magnum | magnum/tests/unit/objects/test_objects.py | 12 | 16335 | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import gettext
import iso8601
import netaddr
from oslo_utils import timeutils
from oslo_versionedobjects import fields
from magnum.common import context as magnum_context
from magnum.common import exception
from magnum.objects import base
from magnum.objects import utils
from magnum.tests import base as test_base
gettext.install('magnum')
@base.MagnumObjectRegistry.register
class MyObj(base.MagnumObject):
    """Minimal registered object used to exercise the versioned-object machinery."""
    VERSION = '1.0'

    fields = {'foo': fields.IntegerField(),
              'bar': fields.StringField(),
              'missing': fields.StringField(),
              }

    def obj_load_attr(self, attrname):
        # Lazy-load hook: any unset attribute materializes as 'loaded!'.
        setattr(self, attrname, 'loaded!')

    @base.remotable_classmethod
    def query(cls, context):
        # Returns a fully-populated instance with no pending changes.
        obj = cls(context)
        obj.foo = 1
        obj.bar = 'bar'
        obj.obj_reset_changes()
        return obj

    @base.remotable
    def marco(self, context):
        return 'polo'

    @base.remotable
    def update_test(self, context):
        # Mutates bar differently depending on the caller's project, so tests
        # can verify which context a remotable call actually ran under.
        if context.project_id == 'alternate':
            self.bar = 'alternate-context'
        else:
            self.bar = 'updated'

    @base.remotable
    def save(self, context):
        self.obj_reset_changes()

    @base.remotable
    def refresh(self, context):
        self.foo = 321
        self.bar = 'refreshed'
        self.obj_reset_changes()

    @base.remotable
    def modify_save_modify(self, context):
        # Leaves foo dirty after save so tests can check change tracking.
        self.bar = 'meow'
        self.save()
        self.foo = 42
class MyObj2(object):
    """Impostor class: reports MyObj's name without being registered as it."""
    @classmethod
    def obj_name(cls):
        return 'MyObj'

    @base.remotable_classmethod
    def get(cls, *args, **kwargs):
        pass
class TestSubclassedObject(MyObj):
    """MyObj subclass adding one field, for testing field inheritance/merging."""
    fields = {'new_field': fields.StringField()}
class TestUtils(test_base.TestCase):
    """Unit tests for the coercion/serialization helpers in magnum.objects.utils."""

    def test_datetime_or_none(self):
        naive_dt = datetime.datetime.now()
        dt = timeutils.parse_isotime(timeutils.isotime(naive_dt))
        self.assertEqual(utils.datetime_or_none(dt), dt)
        # isotime drops microseconds and attaches UTC, hence the replace().
        self.assertEqual(utils.datetime_or_none(dt),
                         naive_dt.replace(tzinfo=iso8601.iso8601.Utc(),
                                          microsecond=0))
        self.assertIsNone(utils.datetime_or_none(None))
        self.assertRaises(ValueError, utils.datetime_or_none, 'foo')

    def test_datetime_or_str_or_none(self):
        dts = timeutils.isotime()
        dt = timeutils.parse_isotime(dts)
        self.assertEqual(utils.datetime_or_str_or_none(dt), dt)
        self.assertIsNone(utils.datetime_or_str_or_none(None))
        self.assertEqual(utils.datetime_or_str_or_none(dts), dt)
        self.assertRaises(ValueError, utils.datetime_or_str_or_none, 'foo')

    def test_int_or_none(self):
        self.assertEqual(utils.int_or_none(1), 1)
        self.assertEqual(utils.int_or_none('1'), 1)
        self.assertIsNone(utils.int_or_none(None))
        self.assertRaises(ValueError, utils.int_or_none, 'foo')

    def test_str_or_none(self):
        # NOTE(review): Obj is unused in this test; looks like leftover scaffolding.
        class Obj(object):
            pass
        self.assertEqual(utils.str_or_none('foo'), 'foo')
        self.assertEqual(utils.str_or_none(1), '1')
        self.assertIsNone(utils.str_or_none(None))

    def test_ip_or_none(self):
        # ip_or_none(version) returns a converter for that IP version.
        ip4 = netaddr.IPAddress('1.2.3.4', 4)
        ip6 = netaddr.IPAddress('1::2', 6)
        self.assertEqual(utils.ip_or_none(4)('1.2.3.4'), ip4)
        self.assertEqual(utils.ip_or_none(6)('1::2'), ip6)
        self.assertIsNone(utils.ip_or_none(4)(None))
        self.assertIsNone(utils.ip_or_none(6)(None))
        self.assertRaises(netaddr.AddrFormatError, utils.ip_or_none(4), 'foo')
        self.assertRaises(netaddr.AddrFormatError, utils.ip_or_none(6), 'foo')

    def test_dt_serializer(self):
        # dt_serializer('bar') makes foo() serialize the 'bar' attribute.
        class Obj(object):
            foo = utils.dt_serializer('bar')

        obj = Obj()
        obj.bar = timeutils.parse_isotime('1955-11-05T00:00:00Z')
        self.assertEqual('1955-11-05T00:00:00Z', obj.foo())
        obj.bar = None
        self.assertIsNone(obj.foo())
        obj.bar = 'foo'
        self.assertRaises(AttributeError, obj.foo)

    def test_dt_deserializer(self):
        dt = timeutils.parse_isotime('1955-11-05T00:00:00Z')
        self.assertEqual(utils.dt_deserializer(None, timeutils.isotime(dt)),
                         dt)
        self.assertIsNone(utils.dt_deserializer(None, None))
        self.assertRaises(ValueError, utils.dt_deserializer, None, 'foo')
class _TestObject(object):
    """Tests exercising the MagnumObject field/serialization contract.

    NOTE(review): the leading underscore keeps the test runner from
    collecting this class directly; it is presumably mixed into concrete
    TestCase subclasses elsewhere in this file (which supply MyObj,
    self.context, assertRemotes(), etc.) -- confirm.
    """

    def test_hydration_type_error(self):
        """obj_from_primitive() rejects field values of the wrong type."""
        primitive = {'magnum_object.name': 'MyObj',
                     'magnum_object.namespace': 'magnum',
                     'magnum_object.version': '1.5',
                     'magnum_object.data': {'foo': 'a'}}
        # 'foo' holds an integer field; the string 'a' cannot be coerced.
        self.assertRaises(ValueError, MyObj.obj_from_primitive, primitive)

    def test_hydration(self):
        """obj_from_primitive() rebuilds an object from its primitive form."""
        primitive = {'magnum_object.name': 'MyObj',
                     'magnum_object.namespace': 'magnum',
                     'magnum_object.version': '1.5',
                     'magnum_object.data': {'foo': 1}}
        obj = MyObj.obj_from_primitive(primitive)
        self.assertEqual(1, obj.foo)

    def test_hydration_bad_ns(self):
        """Primitives from a foreign namespace are refused."""
        primitive = {'magnum_object.name': 'MyObj',
                     'magnum_object.namespace': 'foo',
                     'magnum_object.version': '1.5',
                     'magnum_object.data': {'foo': 1}}
        self.assertRaises(exception.UnsupportedObjectError,
                          MyObj.obj_from_primitive, primitive)

    def test_dehydration(self):
        """obj_to_primitive() emits the name/namespace/version/data keys.

        NOTE(review): version '1.5' here vs '1.0' in the primitives expected
        by test_loaded_in_primitive/test_base_attributes -- verify which
        matches MyObj.VERSION.
        """
        expected = {'magnum_object.name': 'MyObj',
                    'magnum_object.namespace': 'magnum',
                    'magnum_object.version': '1.5',
                    'magnum_object.data': {'foo': 1}}
        obj = MyObj(self.context)
        obj.foo = 1
        # Reset changes so no 'magnum_object.changes' key is emitted.
        obj.obj_reset_changes()
        self.assertEqual(expected, obj.obj_to_primitive())

    def test_get_updates(self):
        """obj_get_changes() tracks modified fields until reset.

        NOTE(review): duplicate of test_get_changes() below; one of the two
        could be removed.
        """
        obj = MyObj(self.context)
        self.assertEqual({}, obj.obj_get_changes())
        obj.foo = 123
        self.assertEqual({'foo': 123}, obj.obj_get_changes())
        obj.bar = 'test'
        self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes())
        obj.obj_reset_changes()
        self.assertEqual({}, obj.obj_get_changes())

    def test_object_property(self):
        """Fields passed to the constructor are readable as attributes."""
        obj = MyObj(self.context, foo=1)
        self.assertEqual(1, obj.foo)

    def test_object_property_type_error(self):
        """Assigning a badly-typed value to a field raises ValueError."""
        obj = MyObj(self.context)

        def fail():
            obj.foo = 'a'
        self.assertRaises(ValueError, fail)

    def test_load(self):
        """Reading an unset field lazy-loads it (here, bar -> 'loaded!')."""
        obj = MyObj(self.context)
        self.assertEqual('loaded!', obj.bar)

    def test_load_in_base(self):
        """The base class's attribute loader raises NotImplementedError."""
        class Foo(base.MagnumObject):
            fields = {'foobar': fields.IntegerField()}
        obj = Foo(self.context)
        # NOTE(danms): Can't use assertRaisesRegexp() because of py26
        raised = False
        try:
            obj.foobar
        except NotImplementedError as ex:
            raised = True
        self.assertTrue(raised)
        # NOTE(review): 'ex' is referenced outside its except block; Python 3
        # unbinds the name when the block exits, so this line would raise
        # NameError there. The message check should move into the handler
        # if py3 support is intended.
        self.assertTrue('foobar' in str(ex))

    def test_loaded_in_primitive(self):
        """Lazy-loaded fields appear in the primitive and in 'changes'."""
        obj = MyObj(self.context)
        obj.foo = 1
        obj.obj_reset_changes()
        self.assertEqual('loaded!', obj.bar)
        expected = {'magnum_object.name': 'MyObj',
                    'magnum_object.namespace': 'magnum',
                    'magnum_object.version': '1.0',
                    'magnum_object.changes': ['bar'],
                    'magnum_object.data': {'foo': 1,
                                           'bar': 'loaded!'}}
        self.assertEqual(expected, obj.obj_to_primitive())

    def test_changes_in_primitive(self):
        """'magnum_object.changes' round-trips through primitive form."""
        obj = MyObj(self.context)
        obj.foo = 123
        self.assertEqual(set(['foo']), obj.obj_what_changed())
        primitive = obj.obj_to_primitive()
        self.assertTrue('magnum_object.changes' in primitive)
        obj2 = MyObj.obj_from_primitive(primitive)
        self.assertEqual(set(['foo']), obj2.obj_what_changed())
        obj2.obj_reset_changes()
        self.assertEqual(set(), obj2.obj_what_changed())

    def test_unknown_objtype(self):
        """Looking up an unregistered object class name fails."""
        self.assertRaises(exception.UnsupportedObjectError,
                          base.MagnumObject.obj_class_from_name, 'foo', '1.0')

    def test_with_alternate_context(self):
        """A remotable call uses the context passed to it, not the one the
        object was created with."""
        context1 = magnum_context.RequestContext('foo', 'foo')
        context2 = magnum_context.RequestContext('bar', project_id='alternate')
        obj = MyObj.query(context1)
        obj.update_test(context2)
        self.assertEqual('alternate-context', obj.bar)
        self.assertRemotes()

    def test_orphaned_object(self):
        """Remotable methods fail on an object whose context was dropped."""
        obj = MyObj.query(self.context)
        obj._context = None
        self.assertRaises(exception.OrphanedObjectError,
                          obj.update_test)
        self.assertRemotes()

    def test_changed_1(self):
        """update_test() adds 'bar' to the changed set; 'foo' stays dirty."""
        obj = MyObj.query(self.context)
        obj.foo = 123
        self.assertEqual(set(['foo']), obj.obj_what_changed())
        obj.update_test(self.context)
        self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
        self.assertEqual(123, obj.foo)
        self.assertRemotes()

    def test_changed_2(self):
        """save() clears the changed set."""
        obj = MyObj.query(self.context)
        obj.foo = 123
        self.assertEqual(set(['foo']), obj.obj_what_changed())
        obj.save()
        self.assertEqual(set([]), obj.obj_what_changed())
        self.assertEqual(123, obj.foo)
        self.assertRemotes()

    def test_changed_3(self):
        """refresh() clears changes and overwrites local modifications."""
        obj = MyObj.query(self.context)
        obj.foo = 123
        self.assertEqual(set(['foo']), obj.obj_what_changed())
        obj.refresh()
        self.assertEqual(set([]), obj.obj_what_changed())
        self.assertEqual(321, obj.foo)
        self.assertEqual('refreshed', obj.bar)
        self.assertRemotes()

    def test_changed_4(self):
        """modify_save_modify() saves mid-way; only post-save changes remain."""
        obj = MyObj.query(self.context)
        obj.bar = 'something'
        self.assertEqual(set(['bar']), obj.obj_what_changed())
        obj.modify_save_modify(self.context)
        self.assertEqual(set(['foo']), obj.obj_what_changed())
        self.assertEqual(42, obj.foo)
        self.assertEqual('meow', obj.bar)
        self.assertRemotes()

    def test_static_result(self):
        """A remotable method may return a plain (non-object) result."""
        obj = MyObj.query(self.context)
        self.assertEqual('bar', obj.bar)
        result = obj.marco()
        self.assertEqual('polo', result)
        self.assertRemotes()

    def test_updates(self):
        """Attribute updates made remotely are reflected locally."""
        obj = MyObj.query(self.context)
        self.assertEqual(1, obj.foo)
        obj.update_test()
        self.assertEqual('updated', obj.bar)
        self.assertRemotes()

    def test_base_attributes(self):
        """created_at/updated_at serialize as isotime strings."""
        dt = datetime.datetime(1955, 11, 5)
        obj = MyObj(self.context)
        obj.created_at = dt
        obj.updated_at = dt
        expected = {'magnum_object.name': 'MyObj',
                    'magnum_object.namespace': 'magnum',
                    'magnum_object.version': '1.0',
                    'magnum_object.changes':
                        ['created_at', 'updated_at'],
                    'magnum_object.data':
                        {'created_at': timeutils.isotime(dt),
                         'updated_at': timeutils.isotime(dt)}
                    }
        actual = obj.obj_to_primitive()
        # magnum_object.changes is built from a set and order is undefined
        self.assertEqual(sorted(expected['magnum_object.changes']),
                         sorted(actual['magnum_object.changes']))
        del expected['magnum_object.changes'], actual['magnum_object.changes']
        self.assertEqual(expected, actual)

    def test_contains(self):
        """'in' reports whether a field is currently set on the object."""
        obj = MyObj(self.context)
        self.assertFalse('foo' in obj)
        obj.foo = 1
        self.assertTrue('foo' in obj)
        self.assertFalse('does_not_exist' in obj)

    def test_obj_attr_is_set(self):
        """obj_attr_is_set() distinguishes set, unset, and unknown fields."""
        obj = MyObj(self.context, foo=1)
        self.assertTrue(obj.obj_attr_is_set('foo'))
        self.assertFalse(obj.obj_attr_is_set('bar'))
        # Unknown attribute names raise rather than returning False.
        self.assertRaises(AttributeError, obj.obj_attr_is_set, 'bang')

    def test_get(self):
        """get() honors set values, defaults, and lazy loading."""
        obj = MyObj(self.context, foo=1)
        # Foo has value, should not get the default
        self.assertEqual(obj.get('foo', 2), 1)
        # Foo has value, should return the value without error
        self.assertEqual(obj.get('foo'), 1)
        # Bar is not loaded, so we should get the default
        self.assertEqual(obj.get('bar', 'not-loaded'), 'not-loaded')
        # Bar without a default should lazy-load
        self.assertEqual(obj.get('bar'), 'loaded!')
        # Bar now has a default, but loaded value should be returned
        self.assertEqual(obj.get('bar', 'not-loaded'), 'loaded!')
        # Invalid attribute should raise AttributeError
        self.assertRaises(AttributeError, obj.get, 'nothing')
        # ...even with a default
        self.assertRaises(AttributeError, obj.get, 'nothing', 3)

    def test_object_inheritance(self):
        """Subclasses inherit the parent's fields plus their own."""
        base_fields = list(base.MagnumObject.fields.keys())
        myobj_fields = ['foo', 'bar', 'missing'] + base_fields
        myobj3_fields = ['new_field']
        self.assertTrue(issubclass(TestSubclassedObject, MyObj))
        self.assertEqual(len(myobj_fields), len(MyObj.fields))
        self.assertEqual(set(myobj_fields), set(MyObj.fields.keys()))
        self.assertEqual(len(myobj_fields) + len(myobj3_fields),
                         len(TestSubclassedObject.fields))
        self.assertEqual(set(myobj_fields) | set(myobj3_fields),
                         set(TestSubclassedObject.fields.keys()))

    def test_get_changes(self):
        """obj_get_changes() tracks modified fields until reset.

        NOTE(review): identical to test_get_updates() above.
        """
        obj = MyObj(self.context)
        self.assertEqual({}, obj.obj_get_changes())
        obj.foo = 123
        self.assertEqual({'foo': 123}, obj.obj_get_changes())
        obj.bar = 'test'
        self.assertEqual({'foo': 123, 'bar': 'test'}, obj.obj_get_changes())
        obj.obj_reset_changes()
        self.assertEqual({}, obj.obj_get_changes())

    def test_obj_fields(self):
        """obj_fields includes declared fields plus obj_extra_fields."""
        class TestObj(base.MagnumObject):
            fields = {'foo': fields.IntegerField()}
            obj_extra_fields = ['bar']

            @property
            def bar(self):
                return 'this is bar'

        obj = TestObj(self.context)
        self.assertEqual(set(['created_at', 'updated_at', 'foo', 'bar']),
                         set(obj.obj_fields))

    def test_obj_constructor(self):
        """Constructor kwargs set fields and mark them as changed."""
        obj = MyObj(self.context, foo=123, bar='abc')
        self.assertEqual(123, obj.foo)
        self.assertEqual('abc', obj.bar)
        self.assertEqual(set(['foo', 'bar']), obj.obj_what_changed())
class TestObjectSerializer(test_base.TestCase):
    """Tests for MagnumObjectSerializer entity (de)serialization."""

    def test_object_serialization(self):
        """A single object round-trips and regains the caller's context."""
        ser = base.MagnumObjectSerializer()
        obj = MyObj(self.context)
        primitive = ser.serialize_entity(self.context, obj)
        self.assertTrue('magnum_object.name' in primitive)
        obj2 = ser.deserialize_entity(self.context, primitive)
        self.assertIsInstance(obj2, MyObj)
        # Deserialization must re-attach the context it was given.
        self.assertEqual(self.context, obj2._context)

    def test_object_serialization_iterables(self):
        """Lists, tuples and sets of objects serialize element-wise."""
        ser = base.MagnumObjectSerializer()
        obj = MyObj(self.context)
        for iterable in (list, tuple, set):
            thing = iterable([obj])
            primitive = ser.serialize_entity(self.context, thing)
            self.assertEqual(1, len(primitive))
            for item in primitive:
                # Serialized members must be primitives, not live objects.
                self.assertFalse(isinstance(item, base.MagnumObject))
            thing2 = ser.deserialize_entity(self.context, primitive)
            self.assertEqual(1, len(thing2))
            for item in thing2:
                self.assertIsInstance(item, MyObj)
| apache-2.0 |
ahsquared/arc | arc-assets/themes/ut-thehill/node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/xcode.py | 526 | 54812 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import gyp.common
import gyp.xcodeproj_file
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific.  The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files.  The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific.  INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'

# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'

_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'

# Mapping of generic gyp variable names to their Xcode-specific expansions,
# consumed by the gyp input machinery when this generator is selected.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.dylib',
  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
  # It is specific to each build environment.  It is only guaranteed to exist
  # and be constant within the context of a project, corresponding to a single
  # input file.  Some build environments may allow their intermediate directory
  # to be shared on a wider scale, but this is not guaranteed.
  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
  'OS': 'mac',
  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
  'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
  'CONFIGURATION_NAME': '$(CONFIGURATION)',
}

# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  # 'mac_framework_dirs', input already handles _dirs endings.
]

# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
  'mac_bundle',
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  'mac_xctest_bundle',
  'xcode_create_dependents_test_runner',
]

# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
]

# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS.  This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
  '$(SDKROOT)/usr/lib',
  '$(SDKROOT)/usr/local/lib',
])
def CreateXCConfigurationList(configuration_names):
  """Build an XCConfigurationList with one XCBuildConfiguration per name.

  When no names are supplied, a single 'Default' configuration is created.
  The first name in the list becomes the default configuration.
  """
  names = configuration_names if configuration_names else ['Default']
  config_list = gyp.xcodeproj_file.XCConfigurationList(
      {'buildConfigurations': []})
  for name in names:
    config_list.AppendProperty(
        'buildConfigurations',
        gyp.xcodeproj_file.XCBuildConfiguration({'name': name}))
  config_list.SetProperty('defaultConfigurationName', names[0])
  return config_list
class XcodeProject(object):
  """Wraps one generated .xcodeproj bundle: the PBXProject object graph, the
  path it will be written to, and the gyp build file dict it was built from.
  """

  def __init__(self, gyp_path, path, build_file_dict):
    """Create the PBXProject for |gyp_path| and ensure |path| (the
    .xcodeproj directory) exists on disk.
    """
    self.gyp_path = gyp_path
    self.path = path
    self.project = gyp.xcodeproj_file.PBXProject(path=path)
    projectDirPath = gyp.common.RelativePath(
                         os.path.dirname(os.path.abspath(self.gyp_path)),
                         os.path.dirname(path) or '.')
    self.project.SetProperty('projectDirPath', projectDirPath)
    self.project_file = \
        gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
    self.build_file_dict = build_file_dict

    # TODO(mark): add destructor that cleans up self.path if created_dir is
    # True and things didn't complete successfully.  Or do something even
    # better with "try"?

    # created_dir records whether WE created the .xcodeproj dir, so Write()
    # knows whether it may remove it on failure.
    self.created_dir = False
    try:
      os.makedirs(self.path)
      self.created_dir = True
    except OSError, e:
      if e.errno != errno.EEXIST:
        raise

  def Finalize1(self, xcode_targets, serialize_all_tests):
    """First finalization pass: set project-wide build settings, order the
    targets as they appeared in the input, create 'Run ...' test-runner
    targets for targets with 'run_as', and synthesize 'All' /
    'Run All Tests' aggregate targets.

    xcode_targets: dict mapping qualified gyp target names to XCTarget
        objects already attached to this project.
    serialize_all_tests: when true, wrap each test-runner script in a
        flock-based mutex so only one test runs at a time.
    """
    # Collect a list of all of the build configuration names used by the
    # various targets in the file.  It is very heavily advised to keep each
    # target in an entire project (even across multiple project files) using
    # the same set of configuration names.
    configurations = []
    for xct in self.project.GetProperty('targets'):
      xccl = xct.GetProperty('buildConfigurationList')
      xcbcs = xccl.GetProperty('buildConfigurations')
      for xcbc in xcbcs:
        name = xcbc.GetProperty('name')
        if name not in configurations:
          configurations.append(name)

    # Replace the XCConfigurationList attached to the PBXProject object with
    # a new one specifying all of the configuration names used by the various
    # targets.
    try:
      xccl = CreateXCConfigurationList(configurations)
      self.project.SetProperty('buildConfigurationList', xccl)
    except:
      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
      raise

    # The need for this setting is explained above where _intermediate_var is
    # defined.  The comments below about wanting to avoid project-wide build
    # settings apply here too, but this needs to be set on a project-wide basis
    # so that files relative to the _intermediate_var setting can be displayed
    # properly in the Xcode UI.
    #
    # Note that for configuration-relative files such as anything relative to
    # _intermediate_var, for the purposes of UI tree view display, Xcode will
    # only resolve the configuration name once, when the project file is
    # opened.  If the active build configuration is changed, the project file
    # must be closed and reopened if it is desired for the tree view to update.
    # This is filed as Apple radar 6588391.
    xccl.SetBuildSetting(_intermediate_var,
                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
    xccl.SetBuildSetting(_shared_intermediate_var,
                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')

    # Set user-specified project-wide build settings and config files.  This
    # is intended to be used very sparingly.  Really, almost everything should
    # go into target-specific build settings sections.  The project-wide
    # settings are only intended to be used in cases where Xcode attempts to
    # resolve variable references in a project context as opposed to a target
    # context, such as when resolving sourceTree references while building up
    # the tree tree view for UI display.
    # Any values set globally are applied to all configurations, then any
    # per-configuration values are applied.
    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
      xccl.SetBuildSetting(xck, xcv)
    if 'xcode_config_file' in self.build_file_dict:
      config_ref = self.project.AddOrGetFileInRootGroup(
          self.build_file_dict['xcode_config_file'])
      xccl.SetBaseConfiguration(config_ref)
    build_file_configurations = self.build_file_dict.get('configurations', {})
    if build_file_configurations:
      for config_name in configurations:
        build_file_configuration_named = \
            build_file_configurations.get(config_name, {})
        if build_file_configuration_named:
          xcc = xccl.ConfigurationNamed(config_name)
          for xck, xcv in build_file_configuration_named.get('xcode_settings',
                                                             {}).iteritems():
            xcc.SetBuildSetting(xck, xcv)
          if 'xcode_config_file' in build_file_configuration_named:
            config_ref = self.project.AddOrGetFileInRootGroup(
                build_file_configurations[config_name]['xcode_config_file'])
            xcc.SetBaseConfiguration(config_ref)

    # Sort the targets based on how they appeared in the input.
    # TODO(mark): Like a lot of other things here, this assumes internal
    # knowledge of PBXProject - in this case, of its "targets" property.

    # ordinary_targets are ordinary targets that are already in the project
    # file.  run_test_targets are the targets that run unittests and should be
    # used for the Run All Tests target.  support_targets are the action/rule
    # targets used by GYP file targets, just kept for the assert check.
    ordinary_targets = []
    run_test_targets = []
    support_targets = []

    # targets is full list of targets in the project.
    targets = []

    # does the it define it's own "all"?
    has_custom_all = False

    # targets_for_all is the list of ordinary_targets that should be listed
    # in this project's "All" target.  It includes each non_runtest_target
    # that does not have suppress_wildcard set.
    targets_for_all = []

    for target in self.build_file_dict['targets']:
      target_name = target['target_name']
      toolset = target['toolset']
      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
                                                    toolset)
      xcode_target = xcode_targets[qualified_target]
      # Make sure that the target being added to the sorted list is already in
      # the unsorted list.
      assert xcode_target in self.project._properties['targets']
      targets.append(xcode_target)
      ordinary_targets.append(xcode_target)
      if xcode_target.support_target:
        support_targets.append(xcode_target.support_target)
        targets.append(xcode_target.support_target)

      if not int(target.get('suppress_wildcard', False)):
        targets_for_all.append(xcode_target)

      if target_name.lower() == 'all':
        has_custom_all = True;

      # If this target has a 'run_as' attribute, add its target to the
      # targets, and add it to the test targets.
      if target.get('run_as'):
        # Make a target to run something.  It should have one
        # dependency, the parent xcode target.
        xccl = CreateXCConfigurationList(configurations)
        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
              'name': 'Run ' + target_name,
              'productName': xcode_target.GetProperty('productName'),
              'buildConfigurationList': xccl,
            },
            parent=self.project)
        run_target.AddDependency(xcode_target)

        # Assemble the shell script for the run target: optional cd,
        # optional environment exports, then the command itself.
        command = target['run_as']
        script = ''
        if command.get('working_directory'):
          script = script + 'cd "%s"\n' % \
                   gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                       command.get('working_directory'))

        if command.get('environment'):
          script = script + "\n".join(
            ['export %s="%s"' %
             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
             for (key, val) in command.get('environment').iteritems()]) + "\n"

        # Some test end up using sockets, files on disk, etc. and can get
        # confused if more then one test runs at a time.  The generator
        # flag 'xcode_serialize_all_test_runs' controls the forcing of all
        # tests serially.  It defaults to True.  To get serial runs this
        # little bit of python does the same as the linux flock utility to
        # make sure only one runs at a time.
        command_prefix = ''
        if serialize_all_tests:
          command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """

        # If we were unable to exec for some reason, we want to exit
        # with an error, and fixup variable references to be shell
        # syntax instead of xcode syntax.
        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
                 gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                     gyp.common.EncodePOSIXShellList(command.get('action')))

        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })
        run_target.AppendProperty('buildPhases', ssbp)

        # Add the run target to the project file.
        targets.append(run_target)
        run_test_targets.append(run_target)
        xcode_target.test_runner = run_target

    # Make sure that the list of targets being replaced is the same length as
    # the one replacing it, but allow for the added test runner targets.
    assert len(self.project._properties['targets']) == \
      len(ordinary_targets) + len(support_targets)

    self.project._properties['targets'] = targets

    # Get rid of unnecessary levels of depth in groups like the Source group.
    self.project.RootGroupsTakeOverOnlyChildren(True)

    # Sort the groups nicely.  Do this after sorting the targets, because the
    # Products group is sorted based on the order of the targets.
    self.project.SortGroups()

    # Create an "All" target if there's more than one target in this project
    # file and the project didn't define its own "All" target.  Put a generated
    # "All" target first so that people opening up the project for the first
    # time will build everything by default.
    if len(targets_for_all) > 1 and not has_custom_all:
      xccl = CreateXCConfigurationList(configurations)
      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'All',
          },
          parent=self.project)

      for target in targets_for_all:
        all_target.AddDependency(target)

      # TODO(mark): This is evil because it relies on internal knowledge of
      # PBXProject._properties.  It's important to get the "All" target first,
      # though.
      self.project._properties['targets'].insert(0, all_target)

    # The same, but for run_test_targets.
    if len(run_test_targets) > 1:
      xccl = CreateXCConfigurationList(configurations)
      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name': 'Run All Tests',
          },
          parent=self.project)
      for run_test_target in run_test_targets:
        run_all_tests_target.AddDependency(run_test_target)

      # Insert after the "All" target, which must exist if there is more than
      # one run_test_target.
      self.project._properties['targets'].insert(1, run_all_tests_target)

  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
    """Second finalization pass: build dependents-test-runner targets,
    refresh cross-project references, and assign stable object IDs.

    Must run after Finalize1 on ALL projects, because cross-project
    reference updates depend on every project's final target ordering.
    """
    # Finalize2 needs to happen in a separate step because the process of
    # updating references to other projects depends on the ordering of targets
    # within remote project files.  Finalize1 is responsible for sorting duty,
    # and once all project files are sorted, Finalize2 can come in and update
    # these references.

    # To support making a "test runner" target that will run all the tests
    # that are direct dependents of any given target, we look for
    # xcode_create_dependents_test_runner being set on an Aggregate target,
    # and generate a second target that will run the tests runners found under
    # the marked target.
    for bf_tgt in self.build_file_dict['targets']:
      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
        tgt_name = bf_tgt['target_name']
        toolset = bf_tgt['toolset']
        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
                                                      tgt_name, toolset)
        xcode_target = xcode_targets[qualified_target]
        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
          # Collect all the run test targets.
          all_run_tests = []
          pbxtds = xcode_target.GetProperty('dependencies')
          for pbxtd in pbxtds:
            pbxcip = pbxtd.GetProperty('targetProxy')
            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
            # test_runner is attached to targets by Finalize1 when they
            # have a 'run_as' clause.
            if hasattr(dependency_xct, 'test_runner'):
              all_run_tests.append(dependency_xct.test_runner)

          # Directly depend on all the runners as they depend on the target
          # that builds them.
          if len(all_run_tests) > 0:
            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
                  'name': 'Run %s Tests' % tgt_name,
                  'productName': tgt_name,
                },
                parent=self.project)
            for run_test_target in all_run_tests:
              run_all_target.AddDependency(run_test_target)

            # Insert the test runner after the related target.
            idx = self.project._properties['targets'].index(xcode_target)
            self.project._properties['targets'].insert(idx + 1, run_all_target)

    # Update all references to other projects, to make sure that the lists of
    # remote products are complete.  Otherwise, Xcode will fill them in when
    # it opens the project file, which will result in unnecessary diffs.
    # TODO(mark): This is evil because it relies on internal knowledge of
    # PBXProject._other_pbxprojects.
    for other_pbxproject in self.project._other_pbxprojects.keys():
      self.project.AddOrGetProjectReference(other_pbxproject)

    self.project.SortRemoteProductReferences()

    # Give everything an ID.
    self.project_file.ComputeIDs()

    # Make sure that no two objects in the project file have the same ID.  If
    # multiple objects wind up with the same ID, upon loading the file, Xcode
    # will only recognize one object (the last one in the file?) and the
    # results are unpredictable.
    self.project_file.EnsureNoIDCollisions()

  def Write(self):
    """Serialize the project to <self.path>/project.pbxproj, writing via a
    temporary file and renaming over the old file only when content changed.
    """
    # Write the project file to a temporary location first.  Xcode watches for
    # changes to the project file and presents a UI sheet offering to reload
    # the project when it does change.  However, in some cases, especially when
    # multiple projects are open or when Xcode is busy, things don't work so
    # seamlessly.  Sometimes, Xcode is able to detect that a project file has
    # changed but can't unload it because something else is referencing it.
    # To mitigate this problem, and to avoid even having Xcode present the UI
    # sheet when an open project is rewritten for inconsequential changes, the
    # project file is written to a temporary file in the xcodeproj directory
    # first.  The new temporary file is then compared to the existing project
    # file, if any.  If they differ, the new file replaces the old; otherwise,
    # the new project file is simply deleted.  Xcode properly detects a file
    # being renamed over an open project file as a change and so it remains
    # able to present the "project file changed" sheet under this system.
    # Writing to a temporary file first also avoids the possible problem of
    # Xcode rereading an incomplete project file.
    (output_fd, new_pbxproj_path) = \
        tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
                         dir=self.path)

    try:
      output_file = os.fdopen(output_fd, 'wb')
      self.project_file.Print(output_file)
      output_file.close()

      pbxproj_path = os.path.join(self.path, 'project.pbxproj')

      same = False
      try:
        same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
      except OSError, e:
        # ENOENT simply means there is no previous project file.
        if e.errno != errno.ENOENT:
          raise

      if same:
        # The new file is identical to the old one, just get rid of the new
        # one.
        os.unlink(new_pbxproj_path)
      else:
        # The new file is different from the old one, or there is no old one.
        # Rename the new file to the permanent name.
        #
        # tempfile.mkstemp uses an overly restrictive mode, resulting in a
        # file that can only be read by the owner, regardless of the umask.
        # There's no reason to not respect the umask here, which means that
        # an extra hoop is required to fetch it and reset the new file's mode.
        #
        # No way to get the umask without setting a new one?  Set a safe one
        # and then set it back to the old value.
        umask = os.umask(077)
        os.umask(umask)

        os.chmod(new_pbxproj_path, 0666 & ~umask)
        os.rename(new_pbxproj_path, pbxproj_path)

    except Exception:
      # Don't leave turds behind.  In fact, if this code was responsible for
      # creating the xcodeproj directory, get rid of that too.
      os.unlink(new_pbxproj_path)
      if self.created_dir:
        shutil.rmtree(self.path, True)
      raise
def AddSourceToTarget(source, type, pbxp, xct):
  """Attach |source| to the appropriate build phase of target |xct|.

  Compilable sources go into the Sources phase, linkable artifacts into the
  Frameworks phase; anything else (or anything on a 'none'-type target) is
  merely added to the project's root group via |pbxp| so it shows up in the
  file tree without being built.
  """
  # TODO(mark): Perhaps source_extensions and library_extensions can be made a
  # little bit fancier.
  source_extensions = ('c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's')

  # .o is conceptually more of a "source" than a "library," but Xcode thinks
  # of "sources" as things to compile and "libraries" (or "frameworks") as
  # things to link with.  Adding an object file to an Xcode target's
  # frameworks phase works properly.
  library_extensions = ('a', 'dylib', 'framework', 'o')

  ext = posixpath.splitext(posixpath.basename(source))[1]
  ext = ext[1:].lower() if ext else ext

  if type != 'none' and ext in source_extensions:
    xct.SourcesPhase().AddFile(source)
  elif type != 'none' and ext in library_extensions:
    xct.FrameworksPhase().AddFile(source)
  else:
    # Files that aren't added to a sources or frameworks build phase can still
    # go into the project file, just not as part of a build phase.
    pbxp.AddOrGetFileInRootGroup(source)
def AddResourceToTarget(resource, pbxp, xct):
  """Add |resource| to the Resources build phase of target |xct|."""
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  xct.ResourcesPhase().AddFile(resource)
def AddHeaderToTarget(header, pbxp, xct, is_public):
  """Add |header| to the Headers build phase of target |xct|, tagged with
  the Public (is_public true) or Private (is_public false) attribute."""
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  visibility = 'Public' if is_public else 'Private'
  settings = '{ATTRIBUTES = (%s, ); }' % visibility
  xct.HeadersPhase().AddFile(header, settings)
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')


def ExpandXcodeVariables(string, expansions):
  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

  In some rare cases, it is appropriate to expand Xcode variables when a
  project file is generated.  For any substring $(VAR) in string, if VAR is a
  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
  Any $(VAR) substring in string for which VAR is not a key in the expansions
  dict will remain in the returned string.
  """
  # findall always returns a list (never None), so no None check is needed.
  matches = _xcode_variable_re.findall(string)
  # Process matches right-to-left so earlier replacements cannot disturb the
  # text of later (leftward) matches.
  matches.reverse()
  for (to_replace, variable) in matches:
    if variable not in expansions:
      continue
    # Use plain string replacement rather than re.sub: the expansion value is
    # data, and re.sub would interpret backslashes / group references in it
    # as replacement-template syntax, corrupting values that contain them.
    string = string.replace(to_replace, expansions[variable])
  return string
_xcode_define_re = re.compile(r'([\\\"\' ])')


def EscapeXcodeDefine(s):
  """Backslash-escape spaces, quotes, and backslashes in a -D define value.

  Xcode splits unescaped defines on spaces and interprets quote and
  backslash literals; escaping each such character keeps the value intact.
  The define must not simply be quoted, or Xcode would no longer expand
  variables such as $(inherited) inside it.
  """
  return _xcode_define_re.sub(r'\\\1', s)
def PerformBuild(data, configurations, params):
  """Invoke xcodebuild on the generated project(s), once per configuration.

  data: mapping of gyp build-file path -> build-file dict.
  configurations: list of configuration names to build.
  params: generator params; only 'options' (suffix, generator_output) is used.
  """
  options = params['options']

  for build_file, build_file_dict in data.iteritems():
    (build_file_root, build_file_ext) = os.path.splitext(build_file)
    if build_file_ext != '.gyp':
      continue
    xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
    if options.generator_output:
      xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)

  # NOTE(review): this loop is outside the loop above, so xcodebuild is only
  # invoked on the LAST .xcodeproj computed -- presumably fine for the
  # single-project case, but verify if multi-project builds are expected.
  for config in configurations:
    arguments = ['xcodebuild', '-project', xcodeproj_path]
    arguments += ['-configuration', config]
    print "Building [%s]: %s" % (config, arguments)
    subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
project_version = generator_flags.get('xcode_project_version', None)
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
xcode_projects[build_file] = xcp
pbxp = xcp.project
if parallel_builds:
pbxp.SetProperty('attributes',
{'BuildIndependentTargetsInParallel': 'YES'})
if project_version:
xcp.project_file.SetXcodeVersion(project_version)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
main_group = pbxp.GetProperty('mainGroup')
build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
main_group.AppendChild(build_group)
for included_file in build_file_dict['included_files']:
build_group.AddOrGetFileByPath(included_file, False)
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
[build_file, target_name, toolset] = \
gyp.common.ParseQualifiedTarget(qualified_target)
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in xcode build (target %s)' %
qualified_target)
configuration_names = [spec['default_configuration']]
for configuration_name in sorted(spec['configurations'].keys()):
if configuration_name not in configuration_names:
configuration_names.append(configuration_name)
xcp = xcode_projects[build_file]
pbxp = xcp.project
# Set up the configurations for the target according to the list of names
# supplied.
xccl = CreateXCConfigurationList(configuration_names)
# Create an XCTarget subclass object for the target. The type with
# "+bundle" appended will be used if the target has "mac_bundle" set.
# loadable_modules not in a mac_bundle are mapped to
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
}
target_properties = {
'buildConfigurationList': xccl,
'name': target_name,
}
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
if type != 'none':
type_bundle_key = type
if is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
elif is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
except KeyError, e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
else:
xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
target_properties['productName'] = target_product_name
xct = xctarget_type(target_properties, parent=pbxp,
force_outdir=spec.get('product_dir'),
force_prefix=spec.get('product_prefix'),
force_extension=spec.get('product_extension'))
pbxp.AppendProperty('targets', xct)
xcode_targets[qualified_target] = xct
xcode_target_to_target_dict[xct] = spec
spec_actions = spec.get('actions', [])
spec_rules = spec.get('rules', [])
# Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not
# type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
if type != 'none' and (spec_actions or spec_rules):
support_xccl = CreateXCConfigurationList(configuration_names);
support_target_properties = {
'buildConfigurationList': support_xccl,
'name': target_name + ' Support',
}
if target_product_name:
support_target_properties['productName'] = \
target_product_name + ' Support'
support_xct = \
gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
parent=pbxp)
pbxp.AppendProperty('targets', support_xct)
xct.AddDependency(support_xct)
# Hang the support target off the main target so it can be tested/found
# by the generator during Finalize.
xct.support_target = support_xct
prebuild_index = 0
# Add custom shell script phases for "actions" sections.
for action in spec_actions:
# There's no need to write anything into the script to ensure that the
# output directories already exist, because Xcode will look at the
# declared outputs and automatically ensure that they exist for us.
# Do we have a message to print when this action runs?
message = action.get('message')
if message:
message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
else:
message = ''
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(action['action'])
# Convert Xcode-type variable references to sh-compatible environment
# variable references.
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
action_string)
script = ''
# Include the optional message
if message_sh:
script += message_sh + '\n'
# Be sure the script runs in exec, and that if exec fails, the script
# exits signalling an error.
script += 'exec ' + action_string_sh + '\nexit 1\n'
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': action['inputs'],
'name': 'Action "' + action['action_name'] + '"',
'outputPaths': action['outputs'],
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']:
AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']:
AddResourceToTarget(output, pbxp, xct)
# tgt_mac_bundle_resources holds the list of bundle resources so
# the rule processing can check against it.
if is_bundle:
tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
else:
tgt_mac_bundle_resources = []
# Add custom shell script phases driving "make" for "rules" sections.
#
# Xcode's built-in rule support is almost powerful enough to use directly,
# but there are a few significant deficiencies that render them unusable.
# There are workarounds for some of its inadequacies, but in aggregate,
# the workarounds added complexity to the generator, and some workarounds
# actually require input files to be crafted more carefully than I'd like.
# Consequently, until Xcode rules are made more capable, "rules" input
# sections will be handled in Xcode output by shell script build phases
# performed prior to the compilation phase.
#
# The following problems with Xcode rules were found. The numbers are
# Apple radar IDs. I hope that these shortcomings are addressed, I really
# liked having the rules handled directly in Xcode during the period that
# I was prototyping this.
#
# 6588600 Xcode compiles custom script rule outputs too soon, compilation
# fails. This occurs when rule outputs from distinct inputs are
# interdependent. The only workaround is to put rules and their
# inputs in a separate target from the one that compiles the rule
# outputs. This requires input file cooperation and it means that
# process_outputs_as_sources is unusable.
# 6584932 Need to declare that custom rule outputs should be excluded from
# compilation. A possible workaround is to lie to Xcode about a
# rule's output, giving it a dummy file it doesn't know how to
# compile. The rule action script would need to touch the dummy.
# 6584839 I need a way to declare additional inputs to a custom rule.
# A possible workaround is a shell script phase prior to
# compilation that touches a rule's primary input files if any
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
# variables. This is a low-prioroty problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
rules_by_ext[rule['extension']] = rule
# First, some definitions:
#
# A "rule source" is a file that was listed in a target's "sources"
# list and will have a rule applied to it on the basis of matching the
# rule's "extensions" attribute. Rule sources are direct inputs to
# rules.
#
# Rule definitions may specify additional inputs in their "inputs"
# attribute. These additional inputs are used for dependency tracking
# purposes.
#
# A "concrete output" is a rule output with input-dependent variables
# resolved. For example, given a rule with:
# 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
# if the target's "sources" list contained "one.ext" and "two.ext",
# the "concrete output" for rule input "two.ext" would be "two.cc". If
# a rule specifies multiple outputs, each input file that the rule is
# applied to will have the same number of concrete outputs.
#
# If any concrete outputs are outdated or missing relative to their
# corresponding rule_source or to any specified additional input, the
# rule action must be performed to generate the concrete outputs.
# concrete_outputs_by_rule_source will have an item at the same index
# as the rule['rule_sources'] that it corresponds to. Each item is a
# list of all of the concrete outputs for the rule_source.
concrete_outputs_by_rule_source = []
# concrete_outputs_all is a flat list of all concrete outputs that this
# rule is able to produce, given the known set of input files
# (rule_sources) that apply to it.
concrete_outputs_all = []
# messages & actions are keyed by the same indices as rule['rule_sources']
# and concrete_outputs_by_rule_source. They contain the message and
# action to perform after resolving input-dependent variables. The
# message is optional, in which case None is stored for each rule source.
messages = []
actions = []
for rule_source in rule.get('rule_sources', []):
rule_source_dirname, rule_source_basename = \
posixpath.split(rule_source)
(rule_source_root, rule_source_ext) = \
posixpath.splitext(rule_source_basename)
# These are the same variable names that Xcode uses for its own native
# rule support. Because Xcode's rule engine is not being used, they
# need to be expanded as they are written to the makefile.
rule_input_dict = {
'INPUT_FILE_BASE': rule_source_root,
'INPUT_FILE_SUFFIX': rule_source_ext,
'INPUT_FILE_NAME': rule_source_basename,
'INPUT_FILE_PATH': rule_source,
'INPUT_FILE_DIRNAME': rule_source_dirname,
}
concrete_outputs_for_this_rule_source = []
for output in rule.get('outputs', []):
# Fortunately, Xcode and make both use $(VAR) format for their
# variables, so the expansion is the only transformation necessary.
# Any remaning $(VAR)-type variables in the string can be given
# directly to make, which will pick up the correct settings from
# what Xcode puts into the environment.
concrete_output = ExpandXcodeVariables(output, rule_input_dict)
concrete_outputs_for_this_rule_source.append(concrete_output)
# Add all concrete outputs to the project.
pbxp.AddOrGetFileInRootGroup(concrete_output)
concrete_outputs_by_rule_source.append( \
concrete_outputs_for_this_rule_source)
concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
# TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resource, do so.
was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
for output in concrete_outputs_for_this_rule_source:
AddResourceToTarget(output, pbxp, xct)
# Do we have a message to print when this rule runs?
message = rule.get('message')
if message:
message = gyp.common.EncodePOSIXShellArgument(message)
message = ExpandXcodeVariables(message, rule_input_dict)
messages.append(message)
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(rule['action'])
action = ExpandXcodeVariables(action_string, rule_input_dict)
actions.append(action)
if len(concrete_outputs_all) > 0:
# TODO(mark): There's a possibilty for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
makefile = open(makefile_path, 'wb')
# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
makefile.write('all: \\\n')
for concrete_output_index in \
xrange(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Otherwise, "all" could just contain the entire list of
# concrete_outputs_all.
concrete_output = \
concrete_outputs_by_rule_source[concrete_output_index][0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (concrete_output, eol))
for (rule_source, concrete_outputs, message, action) in \
zip(rule['rule_sources'], concrete_outputs_by_rule_source,
messages, actions):
makefile.write('\n')
# Add a rule that declares it can build each concrete output of a
# rule source. Collect the names of the directories that are
# required.
concrete_output_dirs = []
for concrete_output_index in xrange(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
else:
bol = ' '
makefile.write('%s%s \\\n' % (bol, concrete_output))
concrete_output_dir = posixpath.dirname(concrete_output)
if (concrete_output_dir and
concrete_output_dir not in concrete_output_dirs):
concrete_output_dirs.append(concrete_output_dir)
makefile.write(' : \\\n')
# The prerequisites for this rule are the rule source itself and
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
for prerequisite_index in xrange(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (prerequisite, eol))
# Make sure that output directories exist before executing the rule
# action.
if len(concrete_output_dirs) > 0:
makefile.write('\t@mkdir -p "%s"\n' %
'" "'.join(concrete_output_dirs))
# The rule message and action have already had the necessary variable
# substitutions performed.
if message:
# Mark it with note: so Xcode picks it up in build output.
makefile.write('\t@echo note: %s\n' % message)
makefile.write('\t%s\n' % action)
makefile.close()
# It might be nice to ensure that needed output directories exist
# here rather than in each target in the Makefile, but that wouldn't
# work if there ever was a concrete output that had an input-dependent
# variable anywhere other than in the leaf position.
# Don't declare any inputPaths or outputPaths. If they're present,
# Xcode will provide a slight optimization by only running the script
# phase if any output is missing or outdated relative to any input.
# Unfortunately, it will also assume that all outputs are touched by
# the script, and if the outputs serve as files in a compilation
# phase, they will be unconditionally rebuilt. Since make might not
# rebuild everything that could be declared here as an output, this
# extra compilation activity is unnecessary. With inputPaths and
# outputPaths not supplied, make will always be called, but it knows
# enough to not do anything when everything is up-to-date.
# To help speed things up, pass -j COUNT to make so it does some work
# in parallel. Don't use ncpus because Xcode will build ncpus targets
# in parallel and if each target happens to have a rules step, there
# would be ncpus^2 things going. With a machine that has 2 quad-core
# Xeons, a build can quickly run out of processes based on
# scheduling/other tasks, and randomly failing builds are no good.
script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'name': 'Rule "' + rule['rule_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# Extra rule inputs also go into the project file. Concrete outputs were
# already added when they were computed.
groups = ['inputs', 'inputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for group in groups:
for item in rule.get(group, []):
pbxp.AddOrGetFileInRootGroup(item)
# Add "sources".
for source in spec.get('sources', []):
(source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
AddSourceToTarget(source, type, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources" and "mac_framework_private_headers" if
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies".
pbxcp_dict = {}
for copy_group in spec.get('copies', []):
dest = copy_group['destination']
if dest[0] not in ('/', '$'):
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
pbxcp = pbxcp_dict.get(dest, None)
if pbxcp is None:
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
'name': 'Copy to ' + copy_group['destination']
},
parent=xct)
pbxcp.SetDestination(dest)
# TODO(mark): The usual comment about this knowing too much about
# gyp.xcodeproj_file internals applies.
xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
pbxcp.AddFile(file)
# Excluded files can also go into the project file.
if not skip_excluded_files:
for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
'mac_framework_private_headers']:
excluded_key = key + '_excluded'
for item in spec.get(excluded_key, []):
pbxp.AddOrGetFileInRootGroup(item)
# So can "inputs" and "outputs" sections of "actions" groups.
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for action in spec.get('actions', []):
for group in groups:
for item in action.get(group, []):
# Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
# sources.
if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
pbxp.AddOrGetFileInRootGroup(item)
for postbuild in spec.get('postbuilds', []):
action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
script = 'exec ' + action_string_sh + '\nexit 1\n'
# Make the postbuild step depend on the output of ld or ar from this
# target. Apparently putting the script step after the link step isn't
# sufficient to ensure proper ordering in all cases. With an input
# declared but no outputs, the script step should run every time, as
# desired.
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
xct.AppendProperty('buildPhases', ssbp)
# Add dependencies before libraries, because adding a dependency may imply
# adding a library. It's preferable to keep dependencies listed first
# during a link phase so that they can override symbols that would
# otherwise be provided by libraries, which will usually include system
# libraries. On some systems, ld is finicky and even requires the
# libraries to be ordered in such a way that unresolved symbols in
# earlier-listed libraries may only be resolved by later-listed libraries.
# The Mac linker doesn't work that way, but other platforms do, and so
# their linker invocations need to be constructed in this way. There's
# no compelling reason for Xcode's linker invocations to differ.
if 'dependencies' in spec:
for dependency in spec['dependencies']:
xct.AddDependency(xcode_targets[dependency])
# The support project also gets the dependencies (in case they are
# needed for the actions/rules to work).
if support_xct:
support_xct.AddDependency(xcode_targets[dependency])
if 'libraries' in spec:
for library in spec['libraries']:
xct.FrameworksPhase().AddFile(library)
# Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
# I wish Xcode handled this automatically.
library_dir = posixpath.dirname(library)
if library_dir not in xcode_standard_library_dirs and (
not xct.HasBuildSetting(_library_search_paths_var) or
library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
xct.AppendBuildSetting(_library_search_paths_var, library_dir)
for configuration_name in configuration_names:
configuration = spec['configurations'][configuration_name]
xcbc = xct.ConfigurationNamed(configuration_name)
for include_dir in configuration.get('mac_framework_dirs', []):
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
for library_dir in configuration.get('library_dirs', []):
if library_dir not in xcode_standard_library_dirs and (
not xcbc.HasBuildSetting(_library_search_paths_var) or
library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
configuration['xcode_config_file'])
xcbc.SetBaseConfiguration(config_ref)
build_files = []
for build_file, build_file_dict in data.iteritems():
if build_file.endswith('.gyp'):
build_files.append(build_file)
for build_file in build_files:
xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
for build_file in build_files:
xcode_projects[build_file].Finalize2(xcode_targets,
xcode_target_to_target_dict)
for build_file in build_files:
xcode_projects[build_file].Write()
| gpl-2.0 |
johnobrien/PyPractice | pipeg/TkUtil/TextEdit.py | 4 | 3400 | #!/usr/bin/env python3
# Copyright © 2012-13 Qtrac Ltd. All rights reserved.
# This program or module is free software: you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version. It is provided for
# educational purposes and is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# This module is a simplification and adaptation of the standard
# library's ScrolledText module.
import os
import sys
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__),
"..")))
import tkinter as tk
import tkinter.ttk as ttk
import TkUtil.Scrollbar
class TextEdit(ttk.Frame):
    """A tk.Text widget packaged with horizontal and vertical scrollbars.

    Keyword arguments are forwarded to the inner tk.Text, not to the
    outer ttk.Frame (configure the frame afterwards if needed).
    Attribute access is delegated as follows:

        textEdit.method() or textEdit.text.method() -> textEdit.text.method()
        textEdit.yscrollbar.method() -> textEdit.yscrollbar.method()
        textEdit.frame.method() -> textEdit.method()

    Exceptions: private methods always resolve on the frame, as do
    methods the frame itself defines (e.g. bind(), cget(), config());
    for those, call textEdit.text.method() explicitly when the text
    widget is wanted.
    """

    def __init__(self, master=None, **kwargs):
        super().__init__(master)
        # Alias so callers can uniformly write textEdit.frame.
        self.frame = self
        self.text = tk.Text(self, **kwargs)
        # One scrollbar per axis, each driving the text widget's view.
        self.yscrollbar = TkUtil.Scrollbar.Scrollbar(
            self, command=self.text.yview, orient=tk.VERTICAL)
        self.xscrollbar = TkUtil.Scrollbar.Scrollbar(
            self, command=self.text.xview, orient=tk.HORIZONTAL)
        # Keep the scrollbar thumbs in sync with the visible region.
        self.text.configure(yscrollcommand=self.yscrollbar.set)
        self.text.configure(xscrollcommand=self.xscrollbar.set)
        # Grid layout: text in the stretchable top-left cell, vertical
        # bar to its right, horizontal bar underneath.
        self.text.grid(row=0, column=0, sticky=(tk.N, tk.S, tk.W, tk.E))
        self.yscrollbar.grid(row=0, column=1, sticky=(tk.N, tk.S))
        self.xscrollbar.grid(row=1, column=0, sticky=(tk.W, tk.E))
        # Let the text cell absorb all extra space on resize.
        self.grid_rowconfigure(0, weight=1)
        self.grid_columnconfigure(0, weight=1)

    def __getattr__(self, name):
        """Delegate lookups that fail on the frame to the text widget.

        __getattr__ is called only when normal attribute lookup fails,
        so frame attributes and methods always win.
        """
        return getattr(self.text, name)
if __name__ == "__main__":
    # Manual smoke test: only open a Tk window when run from a terminal;
    # otherwise (e.g. imported under a test harness) just report success.
    if sys.stdout.isatty():
        application = tk.Tk()
        application.title("TextEdit")
        textEdit = TextEdit(application, wrap=tk.NONE)
        textEdit.pack(fill=tk.BOTH, expand=True)
        def check():
            # Exercise the delegation rules: frame methods, scrollbar
            # methods, and tk.Text methods reached via __getattr__.
            textEdit.frame.config(borderwidth=2)
            print("frame", textEdit.frame.cget("borderwidth"))
            print("yscrollbar", textEdit.yscrollbar.fraction(5, 5))
            textEdit.insert("end",
                    "This is a test of the method delegation.\n" * 20)
            # Both spellings must reach tk.Text.index().
            print("text", textEdit.text.index(tk.INSERT))
            print("text", textEdit.index(tk.INSERT))
            textEdit.text.focus()
        # Run the checks shortly after the event loop starts.
        application.after(50, check)
        application.mainloop()
    else:
        print("Loaded OK")
| mit |
dintorf/PythonProjects | Zombit Infection/zombit_infection.py | 1 | 1195 | def answer(population, x, y, strength):
# your code here
rows = len(population)
cols = len(population[0])
patients_unchecked = [(x,y)]
patients_checked = []
# function to loop through unchecked patients
def next():
for patient in patients_unchecked:
yield patient
for x, y in next():
patient_z = population[y][x]
if patient_z <= strength:
# append patient location to unchecked patients
patients_checked.append((x,y))
# infect patient in population
population[y][x] = -1
# check bounds of adjacent patients
# if in bounds, add patient location to unchecked patients
if x-1 >= 0 and (x-1, y) not in patients_checked:
patients_unchecked.append((x-1, y))
if x+1 < cols and (x+1, y) not in patients_checked:
patients_unchecked.append((x+1, y,))
if y-1 >= 0 and (x, y-1) not in patients_checked:
patients_unchecked.append((x, y-1))
if y+1 < rows and (x, y+1) not in patients_checked:
patients_unchecked.append((x, y+1))
return population
| mit |
springcoil/luigi | test/target_test.py | 27 | 6472 | # -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
from helpers import unittest
import luigi.target
import luigi.format
class TestException(Exception):
    """Exception raised deliberately to exercise error-handling paths."""
class TargetTest(unittest.TestCase):
    """Checks on the abstract luigi.target.Target base class."""

    def test_cannot_instantiate(self):
        """The abstract base class itself must refuse instantiation."""
        with self.assertRaises(TypeError):
            luigi.target.Target()

    def test_abstract_subclass(self):
        """A subclass implementing none of the abstract methods is still abstract."""
        class ExistsLessTarget(luigi.target.Target):
            pass
        with self.assertRaises(TypeError):
            ExistsLessTarget()

    def test_instantiate_subclass(self):
        """A subclass providing exists() and open() can be instantiated."""
        class GoodTarget(luigi.target.Target):
            def exists(self):
                return True
            def open(self, mode):
                return None
        GoodTarget()
class FileSystemTargetTestMixin(object):
    """Shared behavioural tests for file-system-like Target classes.
    Any Target that takes bytes (python2: str) should pass these tests.
    In addition, a test verifying the method `exists` should be added.
    """
    def create_target(self, format=None):
        # Factory hook: concrete test classes must return a fresh target.
        raise NotImplementedError()
    def assertCleanUp(self, tmp_path=''):
        # Hook: subclasses may assert that the temporary path is gone.
        pass
    def test_atomicity(self):
        # The target must not exist while the writer is open; it appears
        # only once the writer has been closed.
        target = self.create_target()
        fobj = target.open("w")
        self.assertFalse(target.exists())
        fobj.close()
        self.assertTrue(target.exists())
    def test_readback(self):
        # Data written through the target reads back unchanged.
        target = self.create_target()
        origdata = 'lol\n'
        fobj = target.open("w")
        fobj.write(origdata)
        fobj.close()
        fobj = target.open('r')
        data = fobj.read()
        self.assertEqual(origdata, data)
    def test_unicode_obj(self):
        # Unicode text round-trips as well.
        target = self.create_target()
        origdata = u'lol\n'
        fobj = target.open("w")
        fobj.write(origdata)
        fobj.close()
        fobj = target.open('r')
        data = fobj.read()
        self.assertEqual(origdata, data)
    def test_with_close(self):
        # Leaving the `with` block commits the write and cleans up the
        # temporary path, if the implementation uses one.
        target = self.create_target()
        with target.open('w') as fobj:
            tp = getattr(fobj, 'tmp_path', '')
            fobj.write('hej\n')
        self.assertCleanUp(tp)
        self.assertTrue(target.exists())
    def test_with_exception(self):
        # An exception inside the `with` block must abort the write: the
        # temporary path is cleaned up and the target does not exist.
        target = self.create_target()
        a = {}
        def foo():
            with target.open('w') as fobj:
                fobj.write('hej\n')
                a['tp'] = getattr(fobj, 'tmp_path', '')
                raise TestException('Test triggered exception')
        self.assertRaises(TestException, foo)
        self.assertCleanUp(a['tp'])
        self.assertFalse(target.exists())
    def test_del(self):
        # Deleting an open writer without closing it discards the write.
        t = self.create_target()
        p = t.open('w')
        print('test', file=p)
        tp = getattr(p, 'tmp_path', '')
        del p
        self.assertCleanUp(tp)
        self.assertFalse(t.exists())
    def test_write_cleanup_no_close(self):
        # A writer that merely goes out of scope (garbage collected, not
        # closed) must not create the target.
        t = self.create_target()
        def context():
            f = t.open('w')
            f.write('stuff')
            return getattr(f, 'tmp_path', '')
        tp = context()
        import gc
        gc.collect()  # force garbage collection of f variable
        self.assertCleanUp(tp)
        self.assertFalse(t.exists())
    def test_text(self):
        # Non-ASCII text survives a UTF8-formatted round trip.
        t = self.create_target(luigi.format.UTF8)
        a = u'我éçф'
        with t.open('w') as f:
            f.write(a)
        with t.open('r') as f:
            b = f.read()
        self.assertEqual(a, b)
    def test_del_with_Text(self):
        # Same as test_del, but with an explicit UTF8 format.
        t = self.create_target(luigi.format.UTF8)
        p = t.open('w')
        print(u'test', file=p)
        tp = getattr(p, 'tmp_path', '')
        del p
        self.assertCleanUp(tp)
        self.assertFalse(t.exists())
    def test_format_injection(self):
        # A custom Format's pipe_reader/pipe_writer must wrap the pipes
        # returned by open().
        class CustomFormat(luigi.format.Format):
            def pipe_reader(self, input_pipe):
                input_pipe.foo = "custom read property"
                return input_pipe
            def pipe_writer(self, output_pipe):
                output_pipe.foo = "custom write property"
                return output_pipe
        t = self.create_target(CustomFormat())
        with t.open("w") as f:
            self.assertEqual(f.foo, "custom write property")
        with t.open("r") as f:
            self.assertEqual(f.foo, "custom read property")
    def test_binary_write(self):
        # Raw bytes (including CR/LF) round-trip with the Nop format.
        t = self.create_target(luigi.format.Nop)
        with t.open('w') as f:
            f.write(b'a\xf2\xf3\r\nfd')
        with t.open('r') as f:
            c = f.read()
        self.assertEqual(c, b'a\xf2\xf3\r\nfd')
    def test_writelines(self):
        # writelines() concatenates the given lines verbatim.
        t = self.create_target()
        with t.open('w') as f:
            f.writelines([
                'a\n',
                'b\n',
                'c\n',
            ])
        with t.open('r') as f:
            c = f.read()
        self.assertEqual(c, 'a\nb\nc\n')
    def test_read_iterator(self):
        # Iterating an open reader yields the file line by line.
        t = self.create_target()
        with t.open('w') as f:
            f.write('a\nb\nc\n')
        c = []
        with t.open('r') as f:
            for x in f:
                c.append(x)
        self.assertEqual(c, ['a\n', 'b\n', 'c\n'])
    def test_gzip(self):
        # Gzip-formatted targets keep the same atomic-commit semantics.
        t = self.create_target(luigi.format.Gzip)
        p = t.open('w')
        test_data = b'test'
        p.write(test_data)
        tp = getattr(p, 'tmp_path', '')
        self.assertFalse(t.exists())
        p.close()
        self.assertCleanUp(tp)
        self.assertTrue(t.exists())
    def test_gzip_works_and_cleans_up(self):
        # Gzip data written through the target reads back unchanged.
        t = self.create_target(luigi.format.Gzip)
        test_data = b'123testing'
        with t.open('w') as f:
            tp = getattr(f, 'tmp_path', '')
            f.write(test_data)
        self.assertCleanUp(tp)
        with t.open() as f:
            result = f.read()
        self.assertEqual(test_data, result)
gqwest-erp/server | openerp/addons/procurement/wizard/mrp_procurement.py | 56 | 2022 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import threading
from openerp.osv import fields, osv
class procurement_compute(osv.osv_memory):
    """Transient wizard that launches the procurement scheduler in a
    background thread so the client call returns immediately.
    """
    _name = 'procurement.order.compute'
    _description = 'Compute Procurement'

    def _procure_calculation_procure(self, cr, uid, ids, context=None):
        """Run the procurement confirmation pass.

        Executed in a worker thread started by procure_calculation().
        Returns an empty dict (standard wizard return value).
        """
        # The original code wrapped this call in a ``try: ... finally: pass``
        # block, which is a no-op and has been removed.
        # NOTE(review): the request cursor ``cr`` is handed to a background
        # thread; _procure_confirm is asked to open its own cursor via
        # use_new_cursor=cr.dbname -- confirm this matches the server's
        # cursor-threading expectations.
        proc_obj = self.pool.get('procurement.order')
        proc_obj._procure_confirm(cr, uid, use_new_cursor=cr.dbname, context=context)
        return {}

    def procure_calculation(self, cr, uid, ids, context=None):
        """Kick off the procurement computation in a background thread.

        @param cr: database cursor
        @param uid: ID of the user currently logged in
        @param ids: list of record IDs selected in the wizard
        @param context: standard context dictionary
        @return: empty dict so the client closes the wizard immediately
        """
        threaded_calculation = threading.Thread(
            target=self._procure_calculation_procure,
            args=(cr, uid, ids, context))
        threaded_calculation.start()
        return {}

procurement_compute()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
harmy/kbengine | kbe/src/lib/python/Lib/encodings/euc_kr.py | 816 | 1027 | #
# euc_kr.py: Python Unicode Codec for EUC_KR
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_kr, codecs
import _multibytecodec as mbc

# Shared low-level EUC-KR codec object provided by the C extension module;
# every class below delegates to it.
codec = _codecs_kr.getcodec('euc_kr')
class Codec(codecs.Codec):
    # Stateless one-shot encode/decode, delegated directly to the C codec.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # Chunk-by-chunk encoder; the multibyte base class keeps the state.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # Chunk-by-chunk decoder; the multibyte base class keeps the state.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # File-like reading interface over the EUC-KR codec.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # File-like writing interface over the EUC-KR codec.
    codec = codec
def getregentry():
    """Return the CodecInfo record used to register the 'euc_kr' codec."""
    # encode/decode are class attributes bound to the shared C codec, so a
    # single instance serves both entries.
    stateless = Codec()
    return codecs.CodecInfo(
        name='euc_kr',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| lgpl-3.0 |
miketheman/opencomparison | package/migrations/0016_auto__del_field_package_pypi_home_page.py | 3 | 9798 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    """South schema migration: drop ``Package.pypi_home_page`` and the
    ``Package.related_packages`` many-to-many join table.
    """

    def forwards(self, orm):
        """Apply the migration."""
        # Deleting field 'Package.pypi_home_page'
        db.delete_column('package_package', 'pypi_home_page')

        # Removing M2M table for field related_packages on 'Package'
        db.delete_table('package_package_related_packages')

    def backwards(self, orm):
        """Revert the migration: restore the NULL-able column and the M2M
        join table with its uniqueness constraint (data is not restored).
        """
        # Adding field 'Package.pypi_home_page'
        db.add_column('package_package', 'pypi_home_page', self.gf('django.db.models.fields.URLField')(max_length=200, null=True, blank=True), keep_default=False)

        # Adding M2M table for field related_packages on 'Package'
        db.create_table('package_package_related_packages', (
            ('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
            ('from_package', models.ForeignKey(orm['package.package'], null=False)),
            ('to_package', models.ForeignKey(orm['package.package'], null=False))
        ))
        db.create_unique('package_package_related_packages', ['from_package_id', 'to_package_id'])

    # Frozen ORM state at the time of this migration (auto-generated by
    # South; do not edit by hand).
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'package.category': {
            'Meta': {'ordering': "['title']", 'object_name': 'Category'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'show_pypi': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': "'50'"}),
            'title_plural': ('django.db.models.fields.CharField', [], {'max_length': "'50'", 'blank': 'True'})
        },
        'package.commit': {
            'Meta': {'ordering': "['-commit_date']", 'object_name': 'Commit'},
            'commit_date': ('django.db.models.fields.DateTimeField', [], {}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Package']"})
        },
        'package.package': {
            'Meta': {'ordering': "['title']", 'object_name': 'Package'},
            'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Category']"}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'creator'", 'null': 'True', 'to': "orm['auth.User']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_modified_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'modifier'", 'null': 'True', 'to': "orm['auth.User']"}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'participants': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'pypi_downloads': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'pypi_url': ('django.db.models.fields.URLField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
            'repo_commits': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'repo_description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
            'repo_forks': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'repo_url': ('django.db.models.fields.URLField', [], {'unique': 'True', 'max_length': '200', 'blank': 'True'}),
            'repo_watchers': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': "'100'"}),
            'usage': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.User']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'package.packageexample': {
            'Meta': {'ordering': "['title']", 'object_name': 'PackageExample'},
            'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Package']"}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': "'100'"}),
            'url': ('django.db.models.fields.URLField', [], {'max_length': '200'})
        },
        'package.version': {
            'Meta': {'ordering': "['-created']", 'object_name': 'Version'},
            'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            'downloads': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
            'hidden': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'license': ('django.db.models.fields.CharField', [], {'max_length': "'100'"}),
            'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
            # NOTE(review): 'blank': "''" looks like a South freeze artifact
            # (a truthy string rather than 'True'/'False') -- left verbatim
            # since this dict is a frozen snapshot.
            'number': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': "'100'", 'blank': "''"}),
            'package': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['package.Package']", 'null': 'True', 'blank': 'True'})
        }
    }

    complete_apps = ['package']
| mit |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.