repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
gaomingnudt/gm-flask2.0 | tests/test_instance_config.py | 157 | 4365 | # -*- coding: utf-8 -*-
"""
tests.test_instance
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2015 by the Flask Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pytest
import flask
from flask._compat import PY2
def test_explicit_instance_paths(modules_tmpdir):
    """A relative instance_path is rejected; an absolute one is kept as-is."""
    # Relative paths must be refused up front with a helpful message.
    with pytest.raises(ValueError) as excinfo:
        flask.Flask(__name__, instance_path='instance')
    assert 'must be absolute' in str(excinfo.value)
    # An absolute path is stored on the application unmodified.
    absolute = str(modules_tmpdir)
    app = flask.Flask(__name__, instance_path=absolute)
    assert app.instance_path == absolute
def test_main_module_paths(modules_tmpdir, purge_module):
    """An app created in ``__main__`` gets ``./instance`` under the cwd."""
    script = modules_tmpdir.join('main_app.py')
    script.write('import flask\n\napp = flask.Flask("__main__")')
    purge_module('main_app')
    from main_app import app
    expected = os.path.join(os.path.abspath(os.getcwd()), 'instance')
    assert app.instance_path == expected
def test_uninstalled_module_paths(modules_tmpdir, purge_module):
    """A module imported from a plain directory gets ``<dir>/instance``."""
    # Fix: py.path.local.write() returns None, so the original's
    # ``app = ...write(...)`` bound None to ``app`` and was misleading;
    # the real ``app`` comes from the import below.
    modules_tmpdir.join('config_module_app.py').write(
        'import os\n'
        'import flask\n'
        'here = os.path.abspath(os.path.dirname(__file__))\n'
        'app = flask.Flask(__name__)\n'
    )
    purge_module('config_module_app')
    from config_module_app import app
    assert app.instance_path == str(modules_tmpdir.join('instance'))
def test_uninstalled_package_paths(modules_tmpdir, purge_module):
    """A package imported from a plain directory gets ``<dir>/instance``."""
    package_dir = modules_tmpdir.mkdir('config_package_app')
    package_dir.join('__init__.py').write(
        'import os\n'
        'import flask\n'
        'here = os.path.abspath(os.path.dirname(__file__))\n'
        'app = flask.Flask(__name__)\n'
    )
    purge_module('config_package_app')
    from config_package_app import app
    assert app.instance_path == str(modules_tmpdir.join('instance'))
def test_installed_module_paths(modules_tmpdir, modules_tmpdir_prefix,
                                purge_module, site_packages, limit_loader):
    """An installed module gets its instance path under ``sys.prefix/var``."""
    site_packages.join('site_app.py').write(
        'import flask\n'
        'app = flask.Flask(__name__)\n'
    )
    purge_module('site_app')
    from site_app import app
    # Consistency fix: compare against str() like the other tests in this
    # module do — instance_path is a plain string, not a py.path object.
    assert app.instance_path == \
        str(modules_tmpdir.join('var').join('site_app-instance'))
def test_installed_package_paths(limit_loader, modules_tmpdir,
                                 modules_tmpdir_prefix, purge_module,
                                 monkeypatch):
    """An installed package gets its instance path under ``sys.prefix/var``."""
    installed_path = modules_tmpdir.mkdir('path')
    monkeypatch.syspath_prepend(installed_path)
    app = installed_path.mkdir('installed_package')
    init = app.join('__init__.py')
    init.write('import flask\napp = flask.Flask(__name__)')
    purge_module('installed_package')
    from installed_package import app
    # Consistency fix: compare against str() like the other tests in this
    # module do — instance_path is a plain string, not a py.path object.
    assert app.instance_path == \
        str(modules_tmpdir.join('var').join('installed_package-instance'))
def test_prefix_package_paths(limit_loader, modules_tmpdir,
                              modules_tmpdir_prefix, purge_module,
                              site_packages):
    """A site-packages package gets its instance path under ``sys.prefix/var``."""
    app = site_packages.mkdir('site_package')
    init = app.join('__init__.py')
    init.write('import flask\napp = flask.Flask(__name__)')
    purge_module('site_package')
    import site_package
    # Consistency fix: compare against str() like the other tests in this
    # module do — instance_path is a plain string, not a py.path object.
    assert site_package.app.instance_path == \
        str(modules_tmpdir.join('var').join('site_package-instance'))
def test_egg_installed_paths(install_egg, modules_tmpdir,
                             modules_tmpdir_prefix):
    """An app inside an installed egg uses ``var/<name>-instance``."""
    egg_pkg = modules_tmpdir.mkdir('site_egg')
    egg_pkg.join('__init__.py').write(
        'import flask\n\napp = flask.Flask(__name__)'
    )
    install_egg('site_egg')
    try:
        import site_egg
        expected = str(modules_tmpdir.join('var/').join('site_egg-instance'))
        assert site_egg.app.instance_path == expected
    finally:
        # Imported eggs linger in sys.modules; drop ours so later tests
        # start from a clean state.
        sys.modules.pop('site_egg', None)
@pytest.mark.skipif(not PY2, reason='This only works under Python 2.')
def test_meta_path_loader_without_is_package(request, modules_tmpdir):
    """A meta_path loader lacking is_package() makes Flask's root-path
    detection fail with AttributeError instead of crashing elsewhere."""
    app = modules_tmpdir.join('unimportable.py')
    app.write('import flask\napp = flask.Flask(__name__)')
    # Minimal PEP 302 loader that claims every module but implements
    # nothing beyond find_module() — notably no is_package().
    class Loader(object):
        def find_module(self, name, path=None):
            return self
    sys.meta_path.append(Loader())
    # Restore sys.meta_path after the test regardless of outcome.
    request.addfinalizer(sys.meta_path.pop)
    with pytest.raises(AttributeError):
        import unimportable
| bsd-3-clause |
bosondata/badwolf | badwolf/builder.py | 1 | 14899 | # -*- coding: utf-8 -*-
import os
import io
import time
import base64
import logging
import shlex
from html import escape as html_escape
import deansi
import requests
from flask import current_app, render_template, url_for
from requests.exceptions import ReadTimeout
from docker import DockerClient
from docker.errors import APIError, DockerException, ImageNotFound, NotFound
from markupsafe import Markup
from badwolf.utils import to_text, to_binary, sanitize_sensitive_data
from badwolf.extensions import bitbucket, sentry
from badwolf.bitbucket import BuildStatus, BitbucketAPIError
from badwolf.notification import send_mail
logger = logging.getLogger(__name__)
class Builder(object):
    """Badwolf build runner"""
    def __init__(self, context, spec, build_status=None, docker_version='auto'):
        """Prepare a runner for one build.

        :param context: build context carrying ``repository``, ``source``,
            ``clone_path``, ``task_id`` etc.
        :param spec: parsed badwolf spec (scripts, image, notification, ...).
        :param build_status: optional ``BuildStatus``; a default one posting
            to Bitbucket under the key ``badwolf/test`` is created otherwise.
        :param docker_version: Docker API version passed to ``DockerClient``.
        """
        self.context = context
        self.spec = spec
        self.repo_name = context.repository.split('/')[-1]
        self.commit_hash = context.source['commit']['hash']
        self.build_status = build_status or BuildStatus(
            bitbucket,
            context.source['repository']['full_name'],
            self.commit_hash,
            'badwolf/test',
            url_for('log.build_log', sha=self.commit_hash, _external=True)
        )
        self.docker = DockerClient(
            base_url=current_app.config['DOCKER_HOST'],
            timeout=current_app.config['DOCKER_API_TIMEOUT'],
            version=docker_version,
        )
    def run(self):
        """Build the image, run the tests, report status and notify.

        Returns the container's exit code, or None when building/getting
        the Docker image failed (the early ``return`` below).
        """
        start_time = time.time()
        branch = self.context.source['branch']
        # Template context shared by mail/Slack notifications.
        context = {
            'context': self.context,
            'build_log_url': self.build_status.url,
            'branch': branch['name'],
            'scripts': self.spec.scripts,
            'ansi_termcolor_style': deansi.styleSheet(),
        }
        self.update_build_status('INPROGRESS', 'Test in progress')
        docker_image_name, build_output = self.get_docker_image()
        context.update({
            'build_logs': Markup(build_output),
            'elapsed_time': int(time.time() - start_time),
        })
        if not docker_image_name:
            self.update_build_status('FAILED', 'Build or get Docker image failed')
            context['exit_code'] = -1
            self.send_notifications(context)
            return
        exit_code, output = self.run_in_container(docker_image_name)
        logger.debug('Docker run output: %s', output)
        if exit_code == 0:
            # Success
            logger.info('Test succeed for repo: %s', self.context.repository)
            self.update_build_status('SUCCESSFUL', '1 of 1 test succeed')
        else:
            # Failed
            logger.info(
                'Test failed for repo: %s, exit code: %s',
                self.context.repository,
                exit_code
            )
            # 137 = 128 + SIGKILL: the container was killed, which this
            # runner reports as a cancelled build.
            if exit_code == 137:
                self.update_build_status('FAILED', 'build cancelled')
            else:
                self.update_build_status('FAILED', '1 of 1 test failed')
        context.update({
            'logs': Markup(deansi.deansi(output)),
            'exit_code': exit_code,
            'elapsed_time': int(time.time() - start_time),
        })
        self.send_notifications(context)
        return exit_code
    def get_docker_image(self):
        """Return ``(image_name, build_output)``; image_name is None on failure.

        Reuses a cached image tagged after the repository unless a rebuild
        was requested; otherwise builds from the spec's base image, the
        repo's Dockerfile, or the default runner image, in that order.
        """
        docker_image_name = self.context.repository.replace('/', '-')
        output = []
        try:
            docker_image = self.docker.images.get(docker_image_name)
        except ImageNotFound:
            docker_image = None
        if not docker_image or self.context.rebuild:
            build_options = {
                'tag': docker_image_name,
                'rm': True,
                'forcerm': True,
                'decode': True,
                'nocache': self.context.nocache
            }
            build_args = self.context.environment.copy()
            if self.spec.environments:
                # TODO: Support run in multiple environments
                build_args.update(self.spec.environments[0])
            if build_args:
                build_options['buildargs'] = build_args
            if self.spec.image:
                # NOTE(review): split(':', 2) allows up to three parts but
                # only two are unpacked — an image with no tag ('ubuntu') or
                # with a registry port ('host:5000/repo:tag') would raise
                # ValueError here; presumably rsplit(':', 1) was intended.
                # TODO confirm.
                from_image_name, from_image_tag = self.spec.image.split(':', 2)
                logger.info('Pulling Docker image %s', self.spec.image)
                self.docker.images.pull(from_image_name, tag=from_image_tag)
                logger.info('Pulled Docker image %s', self.spec.image)
                dockerfile_content = 'FROM {}\n'.format(self.spec.image)
                fileobj = io.BytesIO(dockerfile_content.encode('utf-8'))
                build_options['fileobj'] = fileobj
            else:
                dockerfile = os.path.join(self.context.clone_path, self.spec.dockerfile)
                if os.path.exists(dockerfile):
                    build_options['dockerfile'] = self.spec.dockerfile
                else:
                    logger.warning(
                        'No Dockerfile: %s found for repo: %s, using simple runner image',
                        dockerfile,
                        self.context.repository
                    )
                    dockerfile_content = 'FROM messense/badwolf-test-runner:python\n'
                    fileobj = io.BytesIO(dockerfile_content.encode('utf-8'))
                    build_options['fileobj'] = fileobj
            build_success = False
            logger.info('Building Docker image %s', docker_image_name)
            self.update_build_status('INPROGRESS', 'Building Docker image')
            # Use low level API instead of high level API to get raw output
            res = self.docker.api.build(self.context.clone_path, **build_options)
            for log in res:
                if 'errorDetail' in log:
                    msg = log['errorDetail']['message']
                elif 'error' in log:
                    # Deprecated
                    # https://github.com/docker/docker/blob/master/pkg/jsonmessage/jsonmessage.go
                    msg = log['error']
                elif 'status' in log:
                    msg = log['status']
                else:
                    msg = log.get('stream')
                if not msg:
                    continue
                # Success is detected by scraping the daemon's log stream.
                if 'Successfully tagged' in msg:
                    build_success = True
                output.append(deansi.deansi(msg))
                logger.info('`docker build` : %s', msg.strip())
            if not build_success:
                return None, ''.join(output)
        return docker_image_name, ''.join(output)
    def run_in_container(self, docker_image_name):
        """Run the spec's shell script in a container of the given image.

        Returns ``(exit_code, output)`` where output combines container
        logs and any Docker error text; exit_code is -1 on Docker errors.
        """
        environment = self.context.environment.copy()
        if self.spec.environments:
            # TODO: Support run in multiple environments
            environment.update(self.spec.environments[0])
        # TODO: Add more test context related env vars
        # The script is shipped base64-encoded via an env var and decoded
        # inside the container (see the command below).
        script = shlex.quote(to_text(base64.b64encode(to_binary(self.spec.shell_script))))
        environment.update({
            'HOME': '/root',
            'SHELL': '/bin/{}'.format(self.spec.shell),
            'BADWOLF_SCRIPT': script,
        })
        environment.setdefault('TERM', 'xterm-256color')
        branch = self.context.source['branch']
        # Labels make badwolf containers identifiable (e.g. for cancelling).
        labels = {
            'repo': self.context.repository,
            'commit': self.commit_hash,
            'task_id': self.context.task_id,
        }
        if self.context.type == 'tag':
            labels['tag'] = branch['name']
        else:
            labels['branch'] = branch['name']
        if self.context.pr_id:
            labels['pull_request'] = str(self.context.pr_id)
        # Bind-mount the clone at the same path inside the container.
        volumes = {
            self.context.clone_path: {
                'bind': self.context.clone_path,
                'mode': 'rw',
            },
        }
        if self.spec.docker:
            # Expose the host Docker daemon (docker-in-docker via socket).
            volumes['/var/run/docker.sock'] = {
                'bind': '/var/run/docker.sock',
                'mode': 'ro',
            }
            environment.setdefault('DOCKER_HOST', 'unix:///var/run/docker.sock')
        logger.debug('Docker container environment: \n %r', environment)
        container = self.docker.containers.create(
            docker_image_name,
            entrypoint=['/bin/{}'.format(self.spec.shell), '-c'],
            command=['echo $BADWOLF_SCRIPT | base64 --decode | /bin/{}'.format(self.spec.shell)],
            environment=environment,
            working_dir=self.context.clone_path,
            volumes=volumes,
            privileged=self.spec.privileged,
            stdin_open=False,
            tty=True,
            labels=labels,
        )
        container_id = container.id
        logger.info('Created container %s from image %s', container_id, docker_image_name)
        output = []
        try:
            container.start()
            self.update_build_status('INPROGRESS', 'Running tests in Docker container')
            exit_code = container.wait(timeout=current_app.config['DOCKER_RUN_TIMEOUT'])
            # Newer docker-py returns a dict like {'StatusCode': N}.
            if isinstance(exit_code, dict):
                exit_code = exit_code['StatusCode']
        except (APIError, DockerException, ReadTimeout) as e:
            exit_code = -1
            output.append(str(e) + '\n')
            logger.exception('Docker error')
        finally:
            # Always collect logs and remove the container, tolerating the
            # races that occur when a build was cancelled mid-flight.
            try:
                output.append(to_text(container.logs()))
                container.remove(force=True)
            except NotFound:
                pass
            except APIError as api_e:
                if 'can not get logs from container which is dead or marked for removal' in str(api_e):
                    output.append('Build cancelled')
                else:
                    logger.exception('Error removing docker container')
            except (DockerException, ReadTimeout):
                logger.exception('Error removing docker container')
        return exit_code, ''.join(output)
    def update_build_status(self, state, description=None):
        """Push *state* to Bitbucket, logging (not raising) API failures."""
        try:
            self.build_status.update(state, description=description)
        except BitbucketAPIError:
            logger.exception('Error calling Bitbucket API')
            sentry.captureException()
    def send_notifications(self, context):
        """Render the result page, persist it, and send mail/Slack alerts.

        Always writes ``build.html`` to the log directory; skips the
        notifications themselves for cancelled builds (exit code 137).
        """
        exit_code = context['exit_code']
        template = 'test_success' if exit_code == 0 else 'test_failure'
        html = render_template('mail/' + template + '.html', **context)
        # Scrub tokens/credentials before the HTML leaves the box.
        html = sanitize_sensitive_data(html)
        # Save log html
        log_dir = os.path.join(current_app.config['BADWOLF_LOG_DIR'], self.commit_hash, self.context.task_id)
        os.makedirs(log_dir, exist_ok=True)
        log_file = os.path.join(log_dir, 'build.html')
        with open(log_file, 'wb') as f:
            f.write(to_binary(html))
        if exit_code == 137:
            logger.info('Build cancelled, will not sending notification')
            return
        test_status = 'succeed' if exit_code == 0 else 'failed'
        subject = 'Test {} for repository {}'.format(test_status, self.context.repository)
        notification = self.spec.notification
        email = notification.email
        should_send_mail = email and email.recipients and (
            (exit_code == 0 and email.on_success == 'always') or
            (exit_code != 0 and email.on_failure == 'always')
        )
        if should_send_mail:
            send_mail(email.recipients, subject, html)
        slack_webhook = notification.slack_webhook
        if slack_webhook and slack_webhook.webhooks:
            if exit_code == 0 and slack_webhook.on_success == 'always':
                trigger_slack_webhook(slack_webhook.webhooks, context)
            if exit_code != 0 and slack_webhook.on_failure == 'always':
                trigger_slack_webhook(slack_webhook.webhooks, context)
def trigger_slack_webhook(webhooks, context):
    """Post a build-result attachment to every configured Slack webhook.

    :param webhooks: iterable of Slack incoming-webhook URLs.
    :param context: dict assembled by :meth:`Builder.run` with at least
        ``exit_code``, ``build_log_url``, ``branch`` and ``context``
        (the build context object) keys.
    """
    if context['exit_code'] == 0:
        color = 'good'
        title = ':sparkles: <{}|Test succeed for repository {}>'.format(
            context['build_log_url'],
            context['context'].repository,
        )
    else:
        color = 'warning'
        title = ':broken_heart: <{}|Test failed for repository {}>'.format(
            context['build_log_url'],
            context['context'].repository,
        )
    # Build the attachment fields: repository always, then branch/tag and
    # commit or pull-request info depending on the build trigger type.
    fields = []
    fields.append({
        'title': 'Repository',
        'value': '<https://bitbucket.org/{repo}|{repo}>'.format(repo=context['context'].repository),
        'short': True,
    })
    if context['context'].type == 'tag':
        fields.append({
            'title': 'Tag',
            'value': '<https://bitbucket.org/{repo}/commits/tag/{tag}|{tag}>'.format(
                repo=context['context'].repository,
                tag=context['branch']
            ),
            'short': True,
        })
    elif context['context'].type != 'commit':
        fields.append({
            'title': 'Branch',
            'value': '<https://bitbucket.org/{repo}/src?at={branch}|{branch}>'.format(
                repo=context['context'].repository,
                branch=context['branch'],
            ),
            'short': True,
        })
    if context['context'].type in {'branch', 'tag', 'commit'}:
        fields.append({
            'title': 'Commit',
            'value': '<https://bitbucket.org/{repo}/commits/{sha}|{sha}>'.format(
                repo=context['context'].repository,
                sha=context['context'].source['commit']['hash'],
            ),
            'short': False
        })
    elif context['context'].type == 'pullrequest':
        fields.append({
            'title': 'Pull Request',
            'value': '<https://bitbucket.org/{repo}/pull-requests/{pr_id}|{title}>'.format(
                repo=context['context'].repository,
                pr_id=context['context'].pr_id,
                title=html_escape(context['context'].message, quote=False),
            ),
            'short': False
        })
    actor = context['context'].actor
    attachment = {
        'fallback': title,
        'title': title,
        'color': color,
        'title_link': context['build_log_url'],
        'fields': fields,
        'footer': context['context'].repo_name,
        'ts': int(time.time()),
        'author_name': actor['display_name'],
        'author_link': actor['links']['html']['href'],
        'author_icon': actor['links']['avatar']['href'],
    }
    if context['context'].type in {'branch', 'tag'}:
        attachment['text'] = context['context'].message
    payload = {'attachments': [attachment]}
    session = requests.Session()
    for webhook in webhooks:
        logger.info('Triggering Slack webhook %s', webhook)
        # NOTE(review): session.post can itself raise (e.g. ConnectionError)
        # outside the try block below — presumably it was meant to be inside;
        # confirm before relying on one bad webhook not aborting the rest.
        res = session.post(webhook, json=payload, timeout=10)
        try:
            res.raise_for_status()
        except requests.RequestException:
            logger.exception('Error triggering Slack webhook %s', webhook)
            sentry.captureException()
| mit |
AmericanComputerNerd/spots2 | share/qt/extract_strings_qt.py | 2945 | 1844 | #!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
# Generated C++ file that will embed the translatable strings.
OUT_CPP="src/qt/bitcoinstrings.cpp"
# An empty msgid marks the po-file header entry; such entries are skipped.
EMPTY=['""']
def parse_po(text):
    """
    Parse 'po' format produced by xgettext.
    Return a list of (msgid,msgstr) tuples.
    """
    messages = []
    current_id, current_str = [], []
    reading_id = reading_str = False
    for raw_line in text.split('\n'):
        line = raw_line.rstrip('\r')
        if line.startswith('msgid '):
            # A new entry begins; flush the previous one first.
            if reading_str:
                messages.append((current_id, current_str))
                reading_str = False
            reading_id = True
            current_id = [line[6:]]
        elif line.startswith('msgstr '):
            reading_id, reading_str = False, True
            current_str = [line[7:]]
        elif line.startswith('"'):
            # Bare quoted lines continue whichever field is being read.
            if reading_id:
                current_id.append(line)
            if reading_str:
                current_str.append(line)
    # Flush the final entry, which has no following 'msgid' to trigger it.
    if reading_str:
        messages.append((current_id, current_str))
    return messages
# Collect all C++ sources/headers, extract _( ) strings with xgettext and
# rewrite them as QT_TRANSLATE_NOOP entries for Qt linguist.
# NOTE(review): this script targets Python 2 (see shebang); on Python 3
# Popen output would be bytes and parse_po(out) would need decoding.
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
# Sort for a deterministic output order across runs.
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
    # Skip the po header entry (empty msgid).
    if msgid != EMPTY:
        f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
| mit |
k-j-m/Pyxon | pyxon/decode2.py | 1 | 8070 | # Dict of the form:
# {cls: {name:(fn, inv_fn)}}
# cls: class that has been written with @cprop annotations
# name: class attribute name
# fn: function to turn json data into the corresponding attribute type
# inv_fn: inverse of fn
class_props = {}
# Dict of the form:
# {AbstractClass:specifier_property}
# AbstractClass: the class that we're trying to (de)serialize
# specifier_property: the name of the json property that
# will indicate the concrete class name
specifier_properties = {}
# Dict of the form {AbstractClass: {label: ConcreteClass}}
# Used to retrieve the concrete implementation of an
# abstract class based on a string label.
class_specifiers = {}
# Reverse lookup of class_specifiers:
# {ConcreteClass: (AbstractClass, concrete_label)}
conc_to_abstract = {}
def _add_type_property(specifier_properties, conc_to_abstract):
    """
    Returns a function that adds type specifier properties to JSON data.

    I have done it like this to let me test the returned function
    independently of any module-level variables.

    Example:

        @subtyped(using='$type')
        class AbstractClass(object):
            def __init__(self, a,b,c, **kwargs):
                self.a = a
                self.b = b
                self.c = c

        @extending(AbstractClass, named='concrete_label')
        class ConcreteClass(AbstractClass):
            def __init__(self, x,y,z,**kwargs):
                super(ConcreteClass,self).__init__(**kwargs)
                self.x = x
                self.y = y
                self.z = z

        data = {'a':1, 'b':2, 'c':3, 'x':101, 'y':102, 'z':103, '$type':'concrete_label'}
        obj = utils.objectify(data, AbstractClass)
    """
    def fun(data,cls):
        """
        Given some JSON data and the class from which it was produced,
        this function returns the JSON data with any required type
        annotations added to it.
        """
        # Classes that don't extend a registered abstract class need
        # no annotation.
        if not cls in conc_to_abstract:
            return data
        abstract_cls, concrete_label = conc_to_abstract[cls]
        prop_name = specifier_properties[abstract_cls]
        # Mutates and returns the same dict.
        data[prop_name] = concrete_label
        return data
    return fun
add_type_property = _add_type_property(specifier_properties, conc_to_abstract)
def _metaprop(class_props):
    """
    Wrapped MetaProp as a closure so that class_props can be passed
    in during testing, rather than using the module-level variable.
    """
    class MetaProp(type):
        """
        Metaclass for the prop calculated property decorator.
        This class contains all of the decorator logic. The reason
        for using a metaclass rather than something simpler is
        to allow us to use dot notation when adding calculated
        properties.
        """
        # Attribute access (e.g. ``prop.y``) yields a decorator factory:
        # ``@prop.y((fn, inv_fn))`` registers (fn, inv_fn) under key 'y'
        # for the decorated class in class_props.
        # NOTE: the tuple parameter ``(f1, f2)`` is Python-2-only syntax
        # (removed by PEP 3113); this module cannot run on Python 3 as-is.
        def __getattr__(prop_cls,key):
            def prop2((f1, f2)):
                def prop3(cls):
                    props = class_props.get(cls,{})
                    props[key]=(f1,f2)
                    class_props[cls]=props
                    return cls
                return prop3
            return prop2
    return MetaProp
MetaProp = _metaprop(class_props)
class prop:
    """
    Decorator for adding calculated properties to a class.
    A calculated property is needed when the json data can't
    be added to the class directly, for example when creating
    some other user classes from the data before adding as
    properties.
    The decorator needs to be given 2 functions as arguments:
    fun1: a function that takes JSON data and converts to some
    other data type
    fun2: the inverse of fun1, which takes some data type and
    converts it into JSON data
    Note: ideally the following will hold for any value of x
    >>> fun2(fun1(x)) == x
    Example:
    class Foo(object): pass
    @prop.y(obj(Foo))
    class Bar(object):
        def __init__(self, y):
            self.y = y
    """
    # NOTE: ``__metaclass__`` is the Python 2 metaclass hook; on Python 3
    # this assignment would be an ordinary (ignored) class attribute.
    __metaclass__ = MetaProp
# Decorator annotations
def _subtyped(specifier_properties):
"""
Coded like to so to allow the specifier_properties param to be passed
in during testing, rather than having a hard-wired reference to a
module-level variable.
"""
def subtyped(using):
"""
Decorator used to indicate that a class will be subtyped.
The using= parameter is used to indicate which JSON
property will contain the name of the subclass. A sensible
value for thsi will be @type, but this wil all depend on
how you have set up the rest of the system.
Example:
@subtyped(using='@type')
class Foo(object): pass
"""
# Because this is a parameterised decorator that we call, we
# now need to create and return the decorator proper.
def subtyped2(cls):
specifier_properties[cls]=using
return cls
return subtyped2
return subtyped
# Public @subtyped decorator bound to the module-level registry.
subtyped = _subtyped(specifier_properties)
def _extending(conc_to_abstract, class_specifiers):
def extending(super_cls, named):
"""
This decorator is used to indicate which superclass a class
extends. This could potentially be interpreted from the classes
mro, but that starts to get tricky and we would still need to
add extra info to say what the class will be named in the data.
This label is needed because we can't necessarily rely on the
class name and the class label in the data being the same.
Example:
@extending(Foo, named='Bar')
class Baz(Foo): pass
"""
def extending2(cls):
conc_to_abstract[cls]=super_cls,named
clsmap = class_specifiers.get(super_cls,{})
clsmap[named]=cls
class_specifiers[super_cls]=clsmap
return cls
return extending2
return extending
# Public @extending decorator bound to the module-level registries.
extending = _extending(conc_to_abstract, class_specifiers)
def _conc2(class_specifiers, specifier_properties):
def conc2(data, cls):
"""
Returns the appropriate concrete class of a subtyped class
based on the content of some JSON data.
If the class is not subtyped then it gets returned.
"""
s1 = set(specifier_properties.keys())
s2 = set(class_specifiers.keys())
assert s1==s2, "You need to use @subtyped and @extending as a pair!:\n%s\n%s" % (str(s1), str(s2))
if not cls in specifier_properties:
return cls
prop_name = specifier_properties[cls]
cls_label = data[prop_name]
concrete_cls = class_specifiers[cls][cls_label]
return concrete_cls
return conc2
# Public concrete-class resolver bound to the module-level registries.
conc2 = _conc2(class_specifiers, specifier_properties)
def _get_class_props(class_props):
def get_class_props(cls):
"""
Returns a dict containing the deserialise/serialise functions for
all class properties that need transformations applied to them.
{propname: (deserialiser, serialiser)}
We give this a protective copy because it would be a BAD thing for
this to get changed in the field.
"""
return class_props.get(cls,{}).copy()
return get_class_props
# Public accessor bound to the module-level registry.
get_class_props = _get_class_props(class_props)
def _get_parent_class(conc_to_abstract):
def get_parent_class(cls):
"""
Returns the parent inherited class if possible, otherwise
returns None.
"""
if cls in conc_to_abstract:
return conc_to_abstract[cls][0]
else:
return None
return get_parent_class
# Public accessor bound to the module-level registry.
get_parent_class = _get_parent_class(conc_to_abstract)
def _get_type_annotation(conc_to_abstract, specifier_properties):
def get_type_annotation(cls):
if not cls in conc_to_abstract:
return {}
else:
parent_class, type_label = conc_to_abstract[cls]
type_propname = specifier_properties[parent_class]
return {type_propname: type_label}
return get_type_annotation
# Public accessor bound to the module-level registries.
get_type_annotation = _get_type_annotation(conc_to_abstract, specifier_properties)
| mit |
MycChiu/tensorflow | tensorflow/contrib/slim/__init__.py | 63 | 2301 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Slim is an interface to contrib functions, examples and models.
TODO(nsilberman): flesh out documentation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,g-importing-member,wildcard-import
# TODO(jart): Delete non-slim imports
from tensorflow.contrib import losses
from tensorflow.contrib import metrics
from tensorflow.contrib.framework.python.ops.arg_scope import *
from tensorflow.contrib.framework.python.ops.variables import *
from tensorflow.contrib.layers.python.layers import *
from tensorflow.contrib.layers.python.layers.initializers import *
from tensorflow.contrib.layers.python.layers.regularizers import *
from tensorflow.contrib.slim.python.slim import evaluation
from tensorflow.contrib.slim.python.slim import learning
from tensorflow.contrib.slim.python.slim import model_analyzer
from tensorflow.contrib.slim.python.slim import queues
from tensorflow.contrib.slim.python.slim.data import data_decoder
from tensorflow.contrib.slim.python.slim.data import data_provider
from tensorflow.contrib.slim.python.slim.data import dataset
from tensorflow.contrib.slim.python.slim.data import dataset_data_provider
from tensorflow.contrib.slim.python.slim.data import parallel_reader
from tensorflow.contrib.slim.python.slim.data import prefetch_queue
from tensorflow.contrib.slim.python.slim.data import tfexample_decoder
from tensorflow.python.util.all_util import make_all
# pylint: enable=unused-import,line-too-long,g-importing-member,wildcard-import
__all__ = make_all(__name__)
| apache-2.0 |
MoritzS/django | tests/m2m_through_regress/tests.py | 71 | 11667 | from io import StringIO
from django.contrib.auth.models import User
from django.core import management
from django.test import TestCase
from .models import (
Car, CarDriver, Driver, Group, Membership, Person, UserMembership,
)
class M2MThroughTestCase(TestCase):
    """Regression tests for M2M relations that use an explicit
    ``through`` model, covering retrieval, blocked mutation APIs,
    custom-id intermediaries and join trimming."""
    @classmethod
    def setUpTestData(cls):
        cls.bob = Person.objects.create(name="Bob")
        cls.jim = Person.objects.create(name="Jim")
        cls.rock = Group.objects.create(name="Rock")
        cls.roll = Group.objects.create(name="Roll")
        cls.frank = User.objects.create_user("frank", "frank@example.com", "password")
        cls.jane = User.objects.create_user("jane", "jane@example.com", "password")
        # normal intermediate model
        cls.bob_rock = Membership.objects.create(person=cls.bob, group=cls.rock)
        cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll, price=50)
        cls.jim_rock = Membership.objects.create(person=cls.jim, group=cls.rock, price=50)
        # intermediate model with custom id column
        cls.frank_rock = UserMembership.objects.create(user=cls.frank, group=cls.rock)
        cls.frank_roll = UserMembership.objects.create(user=cls.frank, group=cls.roll)
        cls.jane_rock = UserMembership.objects.create(user=cls.jane, group=cls.rock)
    def test_retrieve_reverse_m2m_items(self):
        self.assertQuerysetEqual(
            self.bob.group_set.all(), [
                "<Group: Rock>",
                "<Group: Roll>",
            ],
            ordered=False
        )
    def test_retrieve_forward_m2m_items(self):
        self.assertQuerysetEqual(
            self.roll.members.all(), [
                "<Person: Bob>",
            ]
        )
    def test_cannot_use_setattr_on_reverse_m2m_with_intermediary_model(self):
        # set() is disallowed when a through model exists; rows must be
        # created via the through model's manager.
        msg = (
            "Cannot set values on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's Manager "
            "instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.set([])
    def test_cannot_use_setattr_on_forward_m2m_with_intermediary_model(self):
        msg = (
            "Cannot set values on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's Manager "
            "instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.roll.members.set([])
    def test_cannot_use_create_on_m2m_with_intermediary_model(self):
        msg = (
            "Cannot use create() on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's "
            "Manager instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.rock.members.create(name="Anne")
    def test_cannot_use_create_on_reverse_m2m_with_intermediary_model(self):
        msg = (
            "Cannot use create() on a ManyToManyField which specifies an "
            "intermediary model. Use m2m_through_regress.Membership's "
            "Manager instead."
        )
        with self.assertRaisesMessage(AttributeError, msg):
            self.bob.group_set.create(name="Funk")
    def test_retrieve_reverse_m2m_items_via_custom_id_intermediary(self):
        self.assertQuerysetEqual(
            self.frank.group_set.all(), [
                "<Group: Rock>",
                "<Group: Roll>",
            ],
            ordered=False
        )
    def test_retrieve_forward_m2m_items_via_custom_id_intermediary(self):
        self.assertQuerysetEqual(
            self.roll.user_members.all(), [
                "<User: frank>",
            ]
        )
    def test_join_trimming_forwards(self):
        """
        Too many copies of the intermediate table aren't involved when doing a
        join (#8046, #8254).
        """
        self.assertQuerysetEqual(
            self.rock.members.filter(membership__price=50), [
                "<Person: Jim>",
            ]
        )
    def test_join_trimming_reverse(self):
        self.assertQuerysetEqual(
            self.bob.group_set.filter(membership__price=50), [
                "<Group: Roll>",
            ]
        )
class M2MThroughSerializationTestCase(TestCase):
    """dumpdata must emit through-model rows, not fake m2m fields."""
    @classmethod
    def setUpTestData(cls):
        cls.bob = Person.objects.create(name="Bob")
        cls.roll = Group.objects.create(name="Roll")
        cls.bob_roll = Membership.objects.create(person=cls.bob, group=cls.roll)
    def test_serialization(self):
        "m2m-through models aren't serialized as m2m fields. Refs #8134"
        # pks are interpolated because primary keys are assigned by the
        # database and can't be hard-coded in the expected output.
        pks = {"p_pk": self.bob.pk, "g_pk": self.roll.pk, "m_pk": self.bob_roll.pk}
        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
        self.assertJSONEqual(
            out.getvalue().strip(),
            '[{"pk": %(m_pk)s, "model": "m2m_through_regress.membership", "fields": {"person": %(p_pk)s, "price": '
            '100, "group": %(g_pk)s}}, {"pk": %(p_pk)s, "model": "m2m_through_regress.person", "fields": {"name": '
            '"Bob"}}, {"pk": %(g_pk)s, "model": "m2m_through_regress.group", "fields": {"name": "Roll"}}]'
            % pks
        )
        out = StringIO()
        management.call_command("dumpdata", "m2m_through_regress", format="xml", indent=2, stdout=out)
        self.assertXMLEqual(out.getvalue().strip(), """
<?xml version="1.0" encoding="utf-8"?>
<django-objects version="1.0">
<object pk="%(m_pk)s" model="m2m_through_regress.membership">
<field to="m2m_through_regress.person" name="person" rel="ManyToOneRel">%(p_pk)s</field>
<field to="m2m_through_regress.group" name="group" rel="ManyToOneRel">%(g_pk)s</field>
<field type="IntegerField" name="price">100</field>
</object>
<object pk="%(p_pk)s" model="m2m_through_regress.person">
<field type="CharField" name="name">Bob</field>
</object>
<object pk="%(g_pk)s" model="m2m_through_regress.group">
<field type="CharField" name="name">Roll</field>
</object>
</django-objects>
""".strip() % pks)
class ToFieldThroughTests(TestCase):
    """Tests for m2m relations whose intermediary ("through") model joins on
    a non-pk column (``to_field``) -- here ``Car.make`` and ``Driver.name``,
    as the error messages below show.
    """
    def setUp(self):
        # One linked car/driver pair used by most tests.
        self.car = Car.objects.create(make="Toyota")
        self.driver = Driver.objects.create(name="Ryan Briscoe")
        CarDriver.objects.create(car=self.car, driver=self.driver)
        # We are testing if wrong objects get deleted due to using wrong
        # field value in m2m queries. So, it is essential that the pk
        # numberings do not match.
        # Create one intentionally unused driver to mix up the autonumbering
        self.unused_driver = Driver.objects.create(name="Barney Gumble")
        # And two intentionally unused cars.
        self.unused_car1 = Car.objects.create(make="Trabant")
        self.unused_car2 = Car.objects.create(make="Wartburg")

    def test_to_field(self):
        # Forward traversal returns only the linked driver, despite the
        # mismatched pk numbering set up above.
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Ryan Briscoe>"]
        )

    def test_to_field_reverse(self):
        # Reverse traversal (driver -> cars) likewise matches on to_field.
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>"]
        )

    def test_to_field_clear_reverse(self):
        # clear() on the reverse side must remove only the relation rows.
        self.driver.car_set.clear()
        self.assertQuerysetEqual(
            self.driver.car_set.all(), [])

    def test_to_field_clear(self):
        # clear() on the forward side must remove only the relation rows.
        self.car.drivers.clear()
        self.assertQuerysetEqual(
            self.car.drivers.all(), [])

    # Low level tests for _add_items and _remove_items. We test these methods
    # because .add/.remove aren't available for m2m fields with through, but
    # through is the only way to set to_field currently. We do want to make
    # sure these methods are ready if the ability to use .add or .remove with
    # to_field relations is added some day.
    def test_add(self):
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Ryan Briscoe>"]
        )
        # Yikes - barney is going to drive...
        self.car.drivers._add_items('car', 'driver', self.unused_driver)
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Barney Gumble>", "<Driver: Ryan Briscoe>"]
        )

    def test_m2m_relations_unusable_on_null_to_field(self):
        # An unsaved object with a NULL to_field value cannot participate
        # in the relation at all.
        nullcar = Car(make=None)
        msg = (
            '"<Car: None>" needs to have a value for field "make" before this '
            'many-to-many relationship can be used.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            nullcar.drivers.all()

    def test_m2m_relations_unusable_on_null_pk_obj(self):
        # Unsaved instance (no pk yet) -> relation access raises.
        msg = (
            "'Car' instance needs to have a primary key value before a "
            "many-to-many relationship can be used."
        )
        with self.assertRaisesMessage(ValueError, msg):
            Car(make='Ford').drivers.all()

    def test_add_related_null(self):
        # The related object's to_field value may not be None either.
        nulldriver = Driver.objects.create(name=None)
        msg = 'Cannot add "<Driver: None>": the value for field "driver" is None'
        with self.assertRaisesMessage(ValueError, msg):
            self.car.drivers._add_items('car', 'driver', nulldriver)

    def test_add_reverse(self):
        car2 = Car.objects.create(make="Honda")
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>"]
        )
        self.driver.car_set._add_items('driver', 'car', car2)
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>", "<Car: Honda>"],
            ordered=False
        )

    def test_add_null_reverse(self):
        nullcar = Car.objects.create(make=None)
        msg = 'Cannot add "<Car: None>": the value for field "car" is None'
        with self.assertRaisesMessage(ValueError, msg):
            self.driver.car_set._add_items('driver', 'car', nullcar)

    def test_add_null_reverse_related(self):
        nulldriver = Driver.objects.create(name=None)
        msg = (
            '"<Driver: None>" needs to have a value for field "name" before '
            'this many-to-many relationship can be used.'
        )
        with self.assertRaisesMessage(ValueError, msg):
            nulldriver.car_set._add_items('driver', 'car', self.car)

    def test_remove(self):
        self.assertQuerysetEqual(
            self.car.drivers.all(),
            ["<Driver: Ryan Briscoe>"]
        )
        self.car.drivers._remove_items('car', 'driver', self.driver)
        self.assertQuerysetEqual(
            self.car.drivers.all(), [])

    def test_remove_reverse(self):
        self.assertQuerysetEqual(
            self.driver.car_set.all(),
            ["<Car: Toyota>"]
        )
        self.driver.car_set._remove_items('driver', 'car', self.car)
        self.assertQuerysetEqual(
            self.driver.car_set.all(), [])
class ThroughLoadDataTestCase(TestCase):
    # Fixture contains usermembership/person/group rows, per the expected
    # JSON below.
    fixtures = ["m2m_through"]

    def test_sequence_creation(self):
        """
        Sequences on an m2m_through are created for the through model, not a
        phantom auto-generated m2m table (#11107).
        """
        out = StringIO()
        # Dump the whole app and compare against the exact serialization;
        # a phantom auto m2m table would change the output.
        management.call_command("dumpdata", "m2m_through_regress", format="json", stdout=out)
        self.assertJSONEqual(
            out.getvalue().strip(),
            '[{"pk": 1, "model": "m2m_through_regress.usermembership", "fields": {"price": 100, "group": 1, "user"'
            ': 1}}, {"pk": 1, "model": "m2m_through_regress.person", "fields": {"name": "Guido"}}, {"pk": 1, '
            '"model": "m2m_through_regress.group", "fields": {"name": "Python Core Group"}}]'
        )
| bsd-3-clause |
diofeher/django-nfa | django/contrib/sites/models.py | 1 | 2612 | from django.db import models
from django.utils.translation import ugettext_lazy as _
SITE_CACHE = {}
class SiteManager(models.Manager):
    def get_current(self):
        """Return the ``Site`` whose pk equals ``settings.SITE_ID``.

        The object is fetched from the database only once per process;
        subsequent calls are served from the module-level ``SITE_CACHE``.
        """
        from django.conf import settings
        try:
            sid = settings.SITE_ID
        except AttributeError:
            from django.core.exceptions import ImproperlyConfigured
            raise ImproperlyConfigured("You're using the Django \"sites framework\" without having set the SITE_ID setting. Create a site in your database and set the SITE_ID setting to fix this error.")
        # Populate the cache on first use, then always answer from it.
        if sid not in SITE_CACHE:
            SITE_CACHE[sid] = self.get(pk=sid)
        return SITE_CACHE[sid]

    def clear_cache(self):
        """Empty the module-level ``Site`` object cache."""
        global SITE_CACHE
        SITE_CACHE = {}
class Site(models.Model):
    # A site is identified by its fully qualified domain plus a
    # human-readable display name.
    domain = models.CharField(_('domain name'), max_length=100)
    name = models.CharField(_('display name'), max_length=50)
    objects = SiteManager()

    class Meta:
        db_table = 'django_site'
        verbose_name = _('site')
        verbose_name_plural = _('sites')
        ordering = ('domain',)

    def __unicode__(self):
        return self.domain

    def delete(self):
        # Remember the pk before the row disappears so the corresponding
        # SITE_CACHE entry can be evicted afterwards.
        pk = self.pk
        super(Site, self).delete()
        try:
            del(SITE_CACHE[pk])
        except KeyError:
            # Site was never cached -- nothing to evict.
            pass
# Register the admin options for these models.
# TODO: Maybe this should live in a separate module admin.py, but how would we
# ensure that module was loaded?
from django.contrib import admin


class SiteAdmin(admin.ModelAdmin):
    # Show and search both identifying columns in the changelist.
    list_display = ('domain', 'name')
    search_fields = ('domain', 'name')

admin.site.register(Site, SiteAdmin)
class RequestSite(object):
    """Duck-typed stand-in for ``Site`` backed by an HTTP request.

    Exposes the same primary interface as ``Site`` (``domain`` and ``name``
    attributes), but derives both from ``request.get_host()`` instead of a
    database row.  Persistence is unsupported: ``save()`` and ``delete()``
    raise ``NotImplementedError``.
    """
    def __init__(self, request):
        host = request.get_host()
        self.domain = host
        self.name = host

    def __unicode__(self):
        return self.domain

    def save(self):
        raise NotImplementedError('RequestSite cannot be saved.')

    def delete(self):
        raise NotImplementedError('RequestSite cannot be deleted.')
| bsd-3-clause |
saikrishnar/Forced-Alignment | Post_1/scripts/run_kaldi/utils/nnet/make_nnet_proto.py | 5 | 10968 | #!/usr/bin/python
# Copyright 2014 Brno University of Technology (author: Karel Vesely)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS CODE IS PROVIDED *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY IMPLIED
# WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
# MERCHANTABLITY OR NON-INFRINGEMENT.
# See the Apache 2 License for the specific language governing permissions and
# limitations under the License.
# Generated Nnet prototype, to be initialized by 'nnet-initialize'.
import math, random, sys
###
### Parse options
###
from optparse import OptionParser
# Command-line interface: four required positional arguments
# (<feat-dim> <num-leaves> <num-hid-layers> <num-hid-neurons>);
# the generated prototype is written to stdout.
usage="%prog [options] <feat-dim> <num-leaves> <num-hid-layers> <num-hid-neurons> >nnet-proto-file"
parser = OptionParser(usage)
parser.add_option('--no-proto-head', dest='with_proto_head',
                  help='Do not put <NnetProto> head-tag in the prototype [default: %default]',
                  default=True, action='store_false');
parser.add_option('--no-softmax', dest='with_softmax',
                  help='Do not put <SoftMax> in the prototype [default: %default]',
                  default=True, action='store_false');
parser.add_option('--block-softmax-dims', dest='block_softmax_dims',
                  help='Generate <BlockSoftmax> with dims D1:D2:D3 [default: %default]',
                  default="", type='string');
parser.add_option('--activation-type', dest='activation_type',
                  help='Select type of activation function : (<Sigmoid>|<Tanh>) [default: %default]',
                  default='<Sigmoid>', type='string');
parser.add_option('--hid-bias-mean', dest='hid_bias_mean',
                  help='Set bias for hidden activations [default: %default]',
                  default=-2.0, type='float');
parser.add_option('--hid-bias-range', dest='hid_bias_range',
                  help='Set bias range for hidden activations (+/- 1/2 range around mean) [default: %default]',
                  default=4.0, type='float');
parser.add_option('--param-stddev-factor', dest='param_stddev_factor',
                  help='Factor to rescale Normal distriburtion for initalizing weight matrices [default: %default]',
                  default=0.1, type='float');
parser.add_option('--bottleneck-dim', dest='bottleneck_dim',
                  help='Make bottleneck network with desired bn-dim (0 = no bottleneck) [default: %default]',
                  default=0, type='int');
parser.add_option('--no-glorot-scaled-stddev', dest='with_glorot',
                  help='Generate normalized weights according to X.Glorot paper, but mapping U->N with same variance (factor sqrt(x/(dim_in+dim_out)))',
                  action='store_false', default=True);
parser.add_option('--no-smaller-input-weights', dest='smaller_input_weights',
                  help='Disable 1/12 reduction of stddef in input layer [default: %default]',
                  action='store_false', default=True);
parser.add_option('--no-bottleneck-trick', dest='bottleneck_trick',
                  help='Disable smaller initial weights and learning rate around bottleneck',
                  action='store_false', default=True);
parser.add_option('--max-norm', dest='max_norm',
                  help='Max radius of neuron-weights in L2 space (if longer weights get shrinked, not applied to last layer, 0.0 = disable) [default: %default]',
                  default=0.0, type='float');

(o,args) = parser.parse_args()
if len(args) != 4 :
  parser.print_help()
  sys.exit(1)
# All four positional arguments are integer dimensions.
(feat_dim, num_leaves, num_hid_layers, num_hid_neurons) = map(int,args);
### End parse options

# Check
# Sanity-check dimensions; <BlockSoftmax> block dims must sum to num_leaves.
assert(feat_dim > 0)
assert(num_leaves > 0)
assert(num_hid_layers >= 0)
assert(num_hid_neurons > 0)

if o.block_softmax_dims:
  assert(sum(map(int, o.block_softmax_dims.split(':'))) == num_leaves)
# Optionaly scale
def Glorot(dim1, dim2):
  """Return the Glorot-style std-dev scaling factor for a weight matrix
  of shape (dim1 x dim2), or 1.0 when --no-glorot-scaled-stddev is set.
  """
  if not o.with_glorot:
    return 1.0
  # 35.0 = magic number, gives ~1.0 in inner layers for hid-dim 1024dim,
  return 35.0 * math.sqrt(2.0 / (dim1 + dim2))
###
### Print prototype of the network
###
# The script handles three mutually exclusive cases, each ending with
# sys.exit(0): (1) no hidden layers but a bottleneck requested,
# (2) no hidden layers at all (plain logistic regression),
# (3) the usual DNN with >0 hidden layers.

# NO HIDDEN LAYER, ADDING BOTTLENECK!
# No hidden layer while adding bottleneck means:
# - add bottleneck layer + hidden layer + output layer
if num_hid_layers == 0 and o.bottleneck_dim != 0:
  assert(o.bottleneck_dim > 0)
  assert(num_hid_layers == 0)
  if o.with_proto_head : print "<NnetProto>"
  if o.bottleneck_trick:
    # 25% smaller stddev -> small bottleneck range, 10x smaller learning rate
    print "<LinearTransform> <InputDim> %d <OutputDim> %d <ParamStddev> %f <LearnRateCoef> %f" % \
     (feat_dim, o.bottleneck_dim, \
      (o.param_stddev_factor * Glorot(feat_dim, o.bottleneck_dim) * 0.75 ), 0.1)
    # 25% smaller stddev -> smaller gradient in prev. layer, 10x smaller learning rate for weigts & biases
    print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <LearnRateCoef> %f <BiasLearnRateCoef> %f <MaxNorm> %f" % \
     (o.bottleneck_dim, num_hid_neurons, o.hid_bias_mean, o.hid_bias_range, \
      (o.param_stddev_factor * Glorot(o.bottleneck_dim, num_hid_neurons) * 0.75 ), 0.1, 0.1, o.max_norm)
  else:
    print "<LinearTransform> <InputDim> %d <OutputDim> %d <ParamStddev> %f" % \
     (feat_dim, o.bottleneck_dim, \
      (o.param_stddev_factor * Glorot(feat_dim, o.bottleneck_dim)))
    print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <MaxNorm> %f" % \
     (o.bottleneck_dim, num_hid_neurons, o.hid_bias_mean, o.hid_bias_range, \
      (o.param_stddev_factor * Glorot(o.bottleneck_dim, num_hid_neurons)), o.max_norm)
  print "%s <InputDim> %d <OutputDim> %d" % (o.activation_type, num_hid_neurons, num_hid_neurons) # Non-linearity
  # Last AffineTransform (10x smaller learning rate on bias)
  print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <LearnRateCoef> %f <BiasLearnRateCoef> %f" % \
   (num_hid_neurons, num_leaves, 0.0, 0.0, \
    (o.param_stddev_factor * Glorot(num_hid_neurons, num_leaves)), 1.0, 0.1)
  # Optionaly append softmax
  if o.with_softmax:
    if o.block_softmax_dims == "":
      print "<Softmax> <InputDim> %d <OutputDim> %d" % (num_leaves, num_leaves)
    else:
      print "<BlockSoftmax> <InputDim> %d <OutputDim> %d <BlockDims> %s" % (num_leaves, num_leaves, o.block_softmax_dims)
  print "</NnetProto>"
  # We are done!
  sys.exit(0)

# NO HIDDEN LAYERS!
# Add only last layer (logistic regression)
if num_hid_layers == 0:
  if o.with_proto_head : print "<NnetProto>"
  print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f" % \
   (feat_dim, num_leaves, 0.0, 0.0, (o.param_stddev_factor * Glorot(feat_dim, num_leaves)))
  if o.with_softmax:
    if o.block_softmax_dims == "":
      print "<Softmax> <InputDim> %d <OutputDim> %d" % (num_leaves, num_leaves)
    else:
      print "<BlockSoftmax> <InputDim> %d <OutputDim> %d <BlockDims> %s" % (num_leaves, num_leaves, o.block_softmax_dims)
  print "</NnetProto>"
  # We are done!
  sys.exit(0)

# THE USUAL DNN PROTOTYPE STARTS HERE!
# Assuming we have >0 hidden layers,
assert(num_hid_layers > 0)

# Begin the prototype,
if o.with_proto_head : print "<NnetProto>"

# First AffineTranform,
print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <MaxNorm> %f" % \
 (feat_dim, num_hid_neurons, o.hid_bias_mean, o.hid_bias_range, \
  (o.param_stddev_factor * Glorot(feat_dim, num_hid_neurons) * \
   (math.sqrt(1.0/12.0) if o.smaller_input_weights else 1.0)), o.max_norm)
# Note.: compensating dynamic range mismatch between input features and Sigmoid-hidden layers,
# i.e. mapping the std-dev of N(0,1) (input features) to std-dev of U[0,1] (sigmoid-outputs).
# This is done by multiplying with stddev(U[0,1]) = sqrt(1/12).
# The stddev of weights is consequently reduced by 0.29x.
print "%s <InputDim> %d <OutputDim> %d" % (o.activation_type, num_hid_neurons, num_hid_neurons)

# Internal AffineTransforms,
# (each hidden layer after the first: affine + non-linearity)
for i in range(num_hid_layers-1):
  print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <MaxNorm> %f" % \
   (num_hid_neurons, num_hid_neurons, o.hid_bias_mean, o.hid_bias_range, \
    (o.param_stddev_factor * Glorot(num_hid_neurons, num_hid_neurons)), o.max_norm)
  print "%s <InputDim> %d <OutputDim> %d" % (o.activation_type, num_hid_neurons, num_hid_neurons)

# Optionaly add bottleneck,
if o.bottleneck_dim != 0:
  assert(o.bottleneck_dim > 0)
  if o.bottleneck_trick:
    # 25% smaller stddev -> small bottleneck range, 10x smaller learning rate
    print "<LinearTransform> <InputDim> %d <OutputDim> %d <ParamStddev> %f <LearnRateCoef> %f" % \
     (num_hid_neurons, o.bottleneck_dim, \
      (o.param_stddev_factor * Glorot(num_hid_neurons, o.bottleneck_dim) * 0.75 ), 0.1)
    # 25% smaller stddev -> smaller gradient in prev. layer, 10x smaller learning rate for weigts & biases
    print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <LearnRateCoef> %f <BiasLearnRateCoef> %f <MaxNorm> %f" % \
     (o.bottleneck_dim, num_hid_neurons, o.hid_bias_mean, o.hid_bias_range, \
      (o.param_stddev_factor * Glorot(o.bottleneck_dim, num_hid_neurons) * 0.75 ), 0.1, 0.1, o.max_norm)
  else:
    # Same learninig-rate and stddev-formula everywhere,
    print "<LinearTransform> <InputDim> %d <OutputDim> %d <ParamStddev> %f" % \
     (num_hid_neurons, o.bottleneck_dim, \
      (o.param_stddev_factor * Glorot(num_hid_neurons, o.bottleneck_dim)))
    print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <MaxNorm> %f" % \
     (o.bottleneck_dim, num_hid_neurons, o.hid_bias_mean, o.hid_bias_range, \
      (o.param_stddev_factor * Glorot(o.bottleneck_dim, num_hid_neurons)), o.max_norm)
  print "%s <InputDim> %d <OutputDim> %d" % (o.activation_type, num_hid_neurons, num_hid_neurons)

# Last AffineTransform (10x smaller learning rate on bias)
print "<AffineTransform> <InputDim> %d <OutputDim> %d <BiasMean> %f <BiasRange> %f <ParamStddev> %f <LearnRateCoef> %f <BiasLearnRateCoef> %f" % \
 (num_hid_neurons, num_leaves, 0.0, 0.0, \
  (o.param_stddev_factor * Glorot(num_hid_neurons, num_leaves)), 1.0, 0.1)

# Optionaly append softmax
if o.with_softmax:
  if o.block_softmax_dims == "":
    print "<Softmax> <InputDim> %d <OutputDim> %d" % (num_leaves, num_leaves)
  else:
    print "<BlockSoftmax> <InputDim> %d <OutputDim> %d <BlockDims> %s" % (num_leaves, num_leaves, o.block_softmax_dims)

# End the prototype
print "</NnetProto>"

# We are done!
sys.exit(0)
| gpl-2.0 |
barnsnake351/nova | nova/tests/unit/api/openstack/compute/test_api.py | 25 | 6295 | # Copyright 2010 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils
import six
import webob
import webob.dec
import webob.exc
from nova.api import openstack as openstack_api
from nova.api.openstack import wsgi
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
class APITest(test.NoDBTestCase):
    """Black-box tests of the compute API WSGI stack: malformed request
    bodies, content negotiation, and translation of exceptions into faults.
    """
    def setUp(self):
        super(APITest, self).setUp()
        self.wsgi_app = fakes.wsgi_app()

    def _wsgi_app(self, inner_app):
        # simpler version of the app than fakes.wsgi_app
        return openstack_api.FaultWrapper(inner_app)

    def test_malformed_json(self):
        # A syntactically invalid JSON body must yield 400, not a 500.
        req = webob.Request.blank('/')
        req.method = 'POST'
        req.body = '{'
        req.headers["content-type"] = "application/json"

        res = req.get_response(self.wsgi_app)
        self.assertEqual(res.status_int, 400)

    def test_malformed_xml(self):
        # Likewise for a body that is not well-formed XML.
        req = webob.Request.blank('/')
        req.method = 'POST'
        req.body = '<hi im not xml>'
        req.headers["content-type"] = "application/xml"

        res = req.get_response(self.wsgi_app)
        self.assertEqual(res.status_int, 400)

    def test_vendor_content_type_json(self):
        # The vendor JSON content type is honored and the body parses as JSON.
        ctype = 'application/vnd.openstack.compute+json'

        req = webob.Request.blank('/')
        req.headers['Accept'] = ctype

        res = req.get_response(self.wsgi_app)
        self.assertEqual(res.status_int, 200)
        self.assertEqual(res.content_type, ctype)

        jsonutils.loads(res.body)

    def test_exceptions_are_converted_to_faults_webob_exc(self):
        @webob.dec.wsgify
        def raise_webob_exc(req):
            raise webob.exc.HTTPNotFound(explanation='Raised a webob.exc')

        # api.application = raise_webob_exc
        api = self._wsgi_app(raise_webob_exc)
        resp = webob.Request.blank('/').get_response(api)
        self.assertEqual(resp.status_int, 404, resp.body)

    def test_exceptions_are_converted_to_faults_api_fault(self):
        @webob.dec.wsgify
        def raise_api_fault(req):
            exc = webob.exc.HTTPNotFound(explanation='Raised a webob.exc')
            return wsgi.Fault(exc)

        # api.application = raise_api_fault
        api = self._wsgi_app(raise_api_fault)
        resp = webob.Request.blank('/').get_response(api)
        self.assertIn('itemNotFound', resp.body)
        self.assertEqual(resp.status_int, 404, resp.body)

    def test_exceptions_are_converted_to_faults_exception(self):
        # An arbitrary uncaught exception becomes a 500 computeFault.
        @webob.dec.wsgify
        def fail(req):
            raise Exception("Threw an exception")

        # api.application = fail
        api = self._wsgi_app(fail)
        resp = webob.Request.blank('/').get_response(api)
        self.assertIn('{"computeFault', resp.body)
        self.assertEqual(resp.status_int, 500, resp.body)

    def _do_test_exception_safety_reflected_in_faults(self, expose):
        # When `safe` is True the exception text may be shown to the user;
        # otherwise a generic message is substituted.
        class ExceptionWithSafety(exception.NovaException):
            safe = expose

        @webob.dec.wsgify
        def fail(req):
            raise ExceptionWithSafety('some explanation')

        api = self._wsgi_app(fail)
        resp = webob.Request.blank('/').get_response(api)
        self.assertIn('{"computeFault', resp.body)
        expected = ('ExceptionWithSafety: some explanation' if expose else
                    'The server has either erred or is incapable '
                    'of performing the requested operation.')
        self.assertIn(expected, resp.body)
        self.assertEqual(resp.status_int, 500, resp.body)

    def test_safe_exceptions_are_described_in_faults(self):
        self._do_test_exception_safety_reflected_in_faults(True)

    def test_unsafe_exceptions_are_not_described_in_faults(self):
        self._do_test_exception_safety_reflected_in_faults(False)

    def _do_test_exception_mapping(self, exception_type, msg):
        # The fault status code must follow the exception's `code`, and any
        # `headers` attribute must be copied onto the response.
        @webob.dec.wsgify
        def fail(req):
            raise exception_type(msg)

        api = self._wsgi_app(fail)
        resp = webob.Request.blank('/').get_response(api)
        self.assertIn(msg, resp.body)
        self.assertEqual(resp.status_int, exception_type.code, resp.body)

        if hasattr(exception_type, 'headers'):
            for (key, value) in six.iteritems(exception_type.headers):
                self.assertIn(key, resp.headers)
                self.assertEqual(resp.headers[key], str(value))

    def test_quota_error_mapping(self):
        self._do_test_exception_mapping(exception.QuotaError, 'too many used')

    def test_non_nova_notfound_exception_mapping(self):
        # Exceptions outside the Nova hierarchy still map via their `code`.
        class ExceptionWithCode(Exception):
            code = 404

        self._do_test_exception_mapping(ExceptionWithCode,
                                        'NotFound')

    def test_non_nova_exception_mapping(self):
        class ExceptionWithCode(Exception):
            code = 417

        self._do_test_exception_mapping(ExceptionWithCode,
                                        'Expectation failed')

    def test_exception_with_none_code_throws_500(self):
        # A `code` of None cannot be used as an HTTP status -> generic 500.
        class ExceptionWithNoneCode(Exception):
            code = None

        @webob.dec.wsgify
        def fail(req):
            raise ExceptionWithNoneCode()

        api = self._wsgi_app(fail)
        resp = webob.Request.blank('/').get_response(api)
        self.assertEqual(500, resp.status_int)
class APITestV21(APITest):
    """Re-run the whole APITest suite against the v2.1 WSGI application."""
    def setUp(self):
        super(APITestV21, self).setUp()
        self.wsgi_app = fakes.wsgi_app_v21()
# TODO(alex_xu): Get rid of the code path that translates NovaException to
# HTTPException once the V2 API code is removed. The V2.1 API is required to
# raise HTTPException explicitly, so it does not need such translation.
| apache-2.0 |
cainiaocome/scikit-learn | sklearn/utils/testing.py | 47 | 23587 | """Testing utilities."""
# Copyright (c) 2011, 2012
# Authors: Pietro Berkes,
# Andreas Muller
# Mathieu Blondel
# Olivier Grisel
# Arnaud Joly
# Denis Engemann
# License: BSD 3 clause
import os
import inspect
import pkgutil
import warnings
import sys
import re
import platform
import scipy as sp
import scipy.io
from functools import wraps
try:
# Python 2
from urllib2 import urlopen
from urllib2 import HTTPError
except ImportError:
# Python 3+
from urllib.request import urlopen
from urllib.error import HTTPError
import sklearn
from sklearn.base import BaseEstimator
# Conveniently import all assertions in one place.
from nose.tools import assert_equal
from nose.tools import assert_not_equal
from nose.tools import assert_true
from nose.tools import assert_false
from nose.tools import assert_raises
from nose.tools import raises
from nose import SkipTest
from nose import with_setup
from numpy.testing import assert_almost_equal
from numpy.testing import assert_array_equal
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_less
import numpy as np
from sklearn.base import (ClassifierMixin, RegressorMixin, TransformerMixin,
ClusterMixin)
# Public API of this module: the assertion helpers guaranteed to exist
# regardless of which nose/numpy versions are installed.
__all__ = ["assert_equal", "assert_not_equal", "assert_raises",
           "assert_raises_regexp", "raises", "with_setup", "assert_true",
           "assert_false", "assert_almost_equal", "assert_array_equal",
           "assert_array_almost_equal", "assert_array_less",
           "assert_less", "assert_less_equal",
           "assert_greater", "assert_greater_equal"]
try:
    from nose.tools import assert_in, assert_not_in
except ImportError:
    # Nose < 1.0.0
    # Minimal fallbacks built on assert_true/assert_false. Note both use
    # the same "%r in %r" failure text: for assert_not_in it reports the
    # offending containment rather than the expectation.
    def assert_in(x, container):
        assert_true(x in container, msg="%r in %r" % (x, container))

    def assert_not_in(x, container):
        assert_false(x in container, msg="%r in %r" % (x, container))
try:
    from nose.tools import assert_raises_regex
except ImportError:
    # for Python 2
    def assert_raises_regex(expected_exception, expected_regexp,
                            callable_obj=None, *args, **kwargs):
        """Check that calling ``callable_obj(*args, **kwargs)`` raises
        ``expected_exception`` with a message matching ``expected_regexp``.
        """
        try:
            callable_obj(*args, **kwargs)
        except expected_exception as e:
            error_message = str(e)
            if re.compile(expected_regexp).search(error_message) is None:
                raise AssertionError("Error message should match pattern "
                                     "%r. %r does not." %
                                     (expected_regexp, error_message))
        else:
            # No exception at all -> the check fails outright.
            raise AssertionError("%s not raised by %s" %
                                 (expected_exception.__name__,
                                  callable_obj.__name__))

# assert_raises_regexp is deprecated in Python 3.4 in favor of
# assert_raises_regex but let's keep the backward compat in scikit-learn
# with the old name for now
assert_raises_regexp = assert_raises_regex
def _assert_less(a, b, msg=None):
message = "%r is not lower than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a < b, message
def _assert_greater(a, b, msg=None):
message = "%r is not greater than %r" % (a, b)
if msg is not None:
message += ": " + msg
assert a > b, message
def assert_less_equal(a, b, msg=None):
    """Fail unless ``a <= b``; append ``msg`` to the failure text if given."""
    suffix = "" if msg is None else ": " + msg
    assert a <= b, "%r is not lower than or equal to %r" % (a, b) + suffix
def assert_greater_equal(a, b, msg=None):
    """Fail unless ``a >= b``; append ``msg`` to the failure text if given."""
    suffix = "" if msg is None else ": " + msg
    assert a >= b, "%r is not greater than or equal to %r" % (a, b) + suffix
def assert_warns(warning_class, func, *args, **kw):
    """Test that a certain warning occurs.

    Parameters
    ----------
    warning_class : the warning class
        The class to test for, e.g. UserWarning.
    func : callable
        Calable object to trigger warnings.
    *args : the positional arguments to `func`.
    **kw : the keyword arguments to `func`

    Returns
    -------
    result : the return value of `func`

    Raises
    ------
    AssertionError
        If no warning is raised, or none of the raised warnings has exactly
        the category ``warning_class``.
    """
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        # Trigger a warning.
        result = func(*args, **kw)
        if hasattr(np, 'VisibleDeprecationWarning'):
            # Filter out numpy-specific warnings in numpy >= 1.9
            w = [e for e in w
                 if e.category is not np.VisibleDeprecationWarning]

        # Verify some things
        if not len(w) > 0:
            raise AssertionError("No warning raised when calling %s"
                                 % func.__name__)

        # NOTE: identity comparison (`is`) -- subclasses of warning_class
        # do NOT count as a match here (unlike assert_warns_message below).
        found = any(warning.category is warning_class for warning in w)
        if not found:
            raise AssertionError("%s did not give warning: %s( is %s)"
                                 % (func.__name__, warning_class, w))
    return result
def assert_warns_message(warning_class, message, func, *args, **kw):
    """Test that a certain warning occurs and with a certain message.

    Parameters
    ----------
    warning_class : the warning class
        The class to test for, e.g. UserWarning.
    message : str | callable
        The entire message or a substring to  test for. If callable,
        it takes a string as argument and will trigger an assertion error
        if it returns `False`.
    func : callable
        Calable object to trigger warnings.
    *args : the positional arguments to `func`.
    **kw : the keyword arguments to `func`.

    Returns
    -------
    result : the return value of `func`
    """
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        # Cause all warnings to always be triggered.
        warnings.simplefilter("always")
        if hasattr(np, 'VisibleDeprecationWarning'):
            # Let's not catch the numpy internal DeprecationWarnings
            warnings.simplefilter('ignore', np.VisibleDeprecationWarning)
        # Trigger a warning.
        result = func(*args, **kw)
        # Verify some things
        if not len(w) > 0:
            raise AssertionError("No warning raised when calling %s"
                                 % func.__name__)

        # Unlike assert_warns above, subclasses of warning_class match here.
        found = [issubclass(warning.category, warning_class) for warning in w]
        if not any(found):
            raise AssertionError("No warning raised for %s with class "
                                 "%s"
                                 % (func.__name__, warning_class))

        message_found = False
        # Checks the message of all warnings belong to warning_class
        for index in [i for i, x in enumerate(found) if x]:
            # substring will match, the entire message with typo won't
            msg = w[index].message  # For Python 3 compatibility
            msg = str(msg.args[0] if hasattr(msg, 'args') else msg)
            if callable(message):  # add support for certain tests
                check_in_message = message
            else:
                check_in_message = lambda msg: message in msg

            if check_in_message(msg):
                message_found = True
                break

        if not message_found:
            # `msg` here is the message of the LAST matching warning checked.
            raise AssertionError("Did not receive the message you expected "
                                 "('%s') for <%s>, got: '%s'"
                                 % (message, func.__name__, msg))

    return result
# To remove when we support numpy 1.7
def assert_no_warnings(func, *args, **kw):
    """Call ``func(*args, **kw)`` and fail if any warning is raised.

    numpy's VisibleDeprecationWarning (numpy >= 1.9) is ignored.
    Returns the value returned by ``func``.
    """
    # XXX: once we may depend on python >= 2.6, this can be replaced by the
    # warnings module context manager.
    # very important to avoid uncontrolled state propagation
    clean_warning_registry()
    with warnings.catch_warnings(record=True) as w:
        warnings.simplefilter('always')

        result = func(*args, **kw)
        if hasattr(np, 'VisibleDeprecationWarning'):
            # Filter out numpy-specific warnings in numpy >= 1.9
            w = [e for e in w
                 if e.category is not np.VisibleDeprecationWarning]

        if len(w) > 0:
            raise AssertionError("Got warnings when calling %s: %s"
                                 % (func.__name__, w))
    return result
def ignore_warnings(obj=None):
    """Context manager and decorator to ignore warnings.

    Note. Using this (in both variants) will clear all warnings
    from all python modules loaded. In case you need to test
    cross-module-warning-logging this is not your tool of choice.

    Examples
    --------
    >>> with ignore_warnings():
    ...     warnings.warn('buhuhuhu')

    >>> def nasty_warn():
    ...    warnings.warn('buhuhuhu')
    ...    print(42)

    >>> ignore_warnings(nasty_warn)()
    42
    """
    # Called with a function -> act as a decorator; called bare or with
    # non-callable -> act as a context manager.
    return _ignore_warnings(obj) if callable(obj) else _IgnoreWarnings()
def _ignore_warnings(fn):
    """Decorator to catch and hide warnings without visual nesting.

    Wraps ``fn`` so that every warning raised during the call is recorded
    (and therefore suppressed) rather than shown; the wrapped function's
    return value is passed through unchanged.
    """
    @wraps(fn)
    def wrapper(*args, **kwargs):
        # very important to avoid uncontrolled state propagation
        clean_warning_registry()
        # record=True swallows the warnings; the recorded list itself is
        # discarded when the context exits. (A dead `w[:] = []` statement
        # that sat unreachably after the `return` has been removed.)
        with warnings.catch_warnings(record=True):
            warnings.simplefilter('always')
            return fn(*args, **kwargs)
    return wrapper
class _IgnoreWarnings(object):
    """Improved and simplified Python warnings context manager

    Copied from Python 2.7.5 and modified as required.
    """

    def __init__(self):
        """Set up a recording context manager over the ``warnings`` module."""
        # Always record (mirrors catch_warnings(record=True)); the original
        # stdlib class made this configurable.
        self._record = True
        self._module = sys.modules['warnings']
        self._entered = False
        self.log = []

    def __repr__(self):
        args = []
        if self._record:
            args.append("record=True")
        if self._module is not sys.modules['warnings']:
            args.append("module=%r" % self._module)
        name = type(self).__name__
        return "%s(%s)" % (name, ", ".join(args))

    def __enter__(self):
        clean_warning_registry()  # be safe and not propagate state + chaos
        warnings.simplefilter('always')
        if self._entered:
            raise RuntimeError("Cannot enter %r twice" % self)
        self._entered = True
        # Save the module-level filter list and showwarning hook so that
        # __exit__ can restore them exactly.
        self._filters = self._module.filters
        self._module.filters = self._filters[:]
        self._showwarning = self._module.showwarning
        if self._record:
            self.log = []

            def showwarning(*args, **kwargs):
                # Capture each warning instead of displaying it.
                self.log.append(warnings.WarningMessage(*args, **kwargs))
            self._module.showwarning = showwarning
            return self.log
        else:
            return None

    def __exit__(self, *exc_info):
        if not self._entered:
            raise RuntimeError("Cannot exit %r without entering first" % self)
        # Restore the saved filters/hook and drop the collected records.
        self._module.filters = self._filters
        self._module.showwarning = self._showwarning
        self.log[:] = []
        clean_warning_registry()  # be safe and not propagate state + chaos
# Use nose's implementations when available; very old nose versions fall
# back to the local definitions above.
try:
    from nose.tools import assert_less
except ImportError:
    assert_less = _assert_less

try:
    from nose.tools import assert_greater
except ImportError:
    assert_greater = _assert_greater
def _assert_allclose(actual, desired, rtol=1e-7, atol=0,
err_msg='', verbose=True):
actual, desired = np.asanyarray(actual), np.asanyarray(desired)
if np.allclose(actual, desired, rtol=rtol, atol=atol):
return
msg = ('Array not equal to tolerance rtol=%g, atol=%g: '
'actual %s, desired %s') % (rtol, atol, actual, desired)
raise AssertionError(msg)
# Prefer the real numpy implementation when this numpy version provides it;
# otherwise use the local fallback above.
if hasattr(np.testing, 'assert_allclose'):
    assert_allclose = np.testing.assert_allclose
else:
    assert_allclose = _assert_allclose
def assert_raise_message(exceptions, message, function, *args, **kwargs):
    """Check that ``function(*args, **kwargs)`` raises one of ``exceptions``
    and that the error's string contains ``message``.

    Parameters
    ----------
    exceptions : exception or tuple of exceptions
        Expected exception type(s).
    message : str
        Substring expected in the raised error's message.
    function : callable
        Callable expected to raise.
    *args : positional arguments passed through to ``function``.
    **kwargs : keyword arguments passed through to ``function``.
    """
    try:
        function(*args, **kwargs)
    except exceptions as e:
        observed = str(e)
        if message in observed:
            return
        raise AssertionError("Error message does not include the expected"
                             " string: %r. Observed error message: %r" %
                             (message, observed))
    # Reached only when no expected exception was raised at all.
    names = (" or ".join(exc.__name__ for exc in exceptions)
             if isinstance(exceptions, tuple) else exceptions.__name__)
    raise AssertionError("%s not raised by %s" %
                         (names, function.__name__))
def fake_mldata(columns_dict, dataname, matfile, ordering=None):
    """Create a fake mldata data set.

    Parameters
    ----------
    columns_dict : dict, keys=str, values=ndarray
        Contains data as columns_dict[column_name] = array of data.
    dataname : string
        Name of data set.
    matfile : string or file object
        The file name string or the file-like object of the output file.
    ordering : list, default None
        List of column_names, determines the ordering in the data set.

    Notes
    -----
    This function transposes all arrays, while fetch_mldata only transposes
    'data', keep that into account in the tests.
    """
    datasets = dict(columns_dict)
    # transpose all variables
    for name in datasets:
        datasets[name] = datasets[name].T
    if ordering is None:
        ordering = sorted(list(datasets.keys()))
    # NOTE: setting up this array is tricky, because of the way Matlab
    # re-packages 1D arrays
    # np.empty instead of sp.empty: the ``scipy.empty`` NumPy alias was
    # deprecated and removed from modern SciPy releases.
    datasets['mldata_descr_ordering'] = np.empty((1, len(ordering)),
                                                 dtype='object')
    for i, name in enumerate(ordering):
        datasets['mldata_descr_ordering'][0, i] = name
    scipy.io.savemat(matfile, datasets, oned_as='column')
class mock_mldata_urlopen(object):
    """Callable that stands in for ``urlopen`` when talking to mldata.

    ``mock_datasets`` maps ``dataset_name`` to either ``data_dict`` or the
    pair ``(data_dict, ordering)``.  ``data_dict`` maps column names to
    data arrays and ``ordering`` is a list of column names fixing the
    column order in the generated data set (see ``fake_mldata``).

    Requests for a name present in ``mock_datasets`` yield a fake data
    set serialized into an in-memory buffer; any other name raises an
    HTTPError, mimicking a missing remote data set.
    """

    def __init__(self, mock_datasets):
        """Remember which data sets this mock can serve."""
        self.mock_datasets = mock_datasets

    def __call__(self, urlname):
        """Serve the fake data set named by the last URL segment."""
        dataset_name = urlname.split('/')[-1]
        if dataset_name not in self.mock_datasets:
            raise HTTPError(urlname, 404, dataset_name + " is not available",
                            [], None)
        resource_name = '_' + dataset_name
        from io import BytesIO
        matfile = BytesIO()
        dataset = self.mock_datasets[dataset_name]
        ordering = None
        if isinstance(dataset, tuple):
            dataset, ordering = dataset
        fake_mldata(dataset, resource_name, matfile, ordering)
        matfile.seek(0)
        return matfile
def install_mldata_mock(mock_datasets):
    """Route sklearn's mldata downloads through a mock_mldata_urlopen."""
    # Imported lazily to avoid mutually recursive imports.
    from sklearn import datasets as sk_datasets
    sk_datasets.mldata.urlopen = mock_mldata_urlopen(mock_datasets)
def uninstall_mldata_mock():
    """Restore the genuine ``urlopen`` in ``sklearn.datasets.mldata``."""
    # Imported lazily to avoid mutually recursive imports.
    from sklearn import datasets as sk_datasets
    sk_datasets.mldata.urlopen = urlopen
# Meta estimators need another estimator to be instantiated.
META_ESTIMATORS = ["OneVsOneClassifier",
                   "OutputCodeClassifier", "OneVsRestClassifier", "RFE",
                   "RFECV", "BaseEnsemble"]
# estimators that there is no way to default-construct sensibly
OTHER = ["Pipeline", "FeatureUnion", "GridSearchCV",
         "RandomizedSearchCV"]
# some strange ones that the generic checks cannot handle out of the box
DONT_TEST = ['SparseCoder', 'EllipticEnvelope', 'DictVectorizer',
             'LabelBinarizer', 'LabelEncoder',
             'MultiLabelBinarizer', 'TfidfTransformer',
             'TfidfVectorizer', 'IsotonicRegression',
             'OneHotEncoder', 'RandomTreesEmbedding',
             'FeatureHasher', 'DummyClassifier', 'DummyRegressor',
             'TruncatedSVD', 'PolynomialFeatures',
             'GaussianRandomProjectionHash', 'HashingVectorizer',
             'CheckingClassifier', 'PatchExtractor', 'CountVectorizer',
             # GradientBoosting base estimators, maybe should
             # exclude them in another way
             'ZeroEstimator', 'ScaledLogOddsEstimator',
             'QuantileEstimator', 'MeanEstimator',
             'LogOddsEstimator', 'PriorProbabilityEstimator',
             '_SigmoidCalibration', 'VotingClassifier']
def all_estimators(include_meta_estimators=False,
                   include_other=False, type_filter=None,
                   include_dont_test=False):
    """Get a list of all estimators from sklearn.

    This function crawls the module and gets all classes that inherit
    from BaseEstimator. Classes that are defined in test-modules are not
    included.
    By default meta_estimators such as GridSearchCV are also not included.

    Parameters
    ----------
    include_meta_estimators : boolean, default=False
        Whether to include meta-estimators that can be constructed using
        an estimator as their first argument. These are currently
        BaseEnsemble, OneVsOneClassifier, OutputCodeClassifier,
        OneVsRestClassifier, RFE, RFECV.
    include_other : boolean, default=False
        Whether to include meta-estimators that are somehow special and can
        not be default-constructed sensibly. These are currently
        Pipeline, FeatureUnion and GridSearchCV.
    include_dont_test : boolean, default=False
        Whether to include "special" label estimator or test processors.
    type_filter : string, list of string, or None, default=None
        Which kind of estimators should be returned. If None, no filter is
        applied and all estimators are returned. Possible values are
        'classifier', 'regressor', 'cluster' and 'transformer' to get
        estimators only of these specific types, or a list of these to
        get the estimators that fit at least one of the types.

    Returns
    -------
    estimators : list of tuples
        List of (name, class), where ``name`` is the class name as string
        and ``class`` is the actual type of the class.
    """
    def is_abstract(c):
        # A class counts as abstract only when it still has unimplemented
        # abstract methods registered by abc.
        if not hasattr(c, '__abstractmethods__'):
            return False
        if not len(c.__abstractmethods__):
            return False
        return True

    all_classes = []
    # get parent folder
    path = sklearn.__path__
    for _, modname, _ in pkgutil.walk_packages(
            path=path, prefix='sklearn.', onerror=lambda x: None):
        if ".tests." in modname:
            continue
        module = __import__(modname, fromlist="dummy")
        classes = inspect.getmembers(module, inspect.isclass)
        all_classes.extend(classes)

    all_classes = set(all_classes)

    estimators = [c for c in all_classes
                  if (issubclass(c[1], BaseEstimator)
                      and c[0] != 'BaseEstimator')]
    # get rid of abstract base classes
    estimators = [c for c in estimators if not is_abstract(c[1])]

    if not include_dont_test:
        estimators = [c for c in estimators if c[0] not in DONT_TEST]
    if not include_other:
        estimators = [c for c in estimators if c[0] not in OTHER]
    # possibly get rid of meta estimators
    if not include_meta_estimators:
        estimators = [c for c in estimators if c[0] not in META_ESTIMATORS]

    if type_filter is not None:
        if not isinstance(type_filter, list):
            type_filter = [type_filter]
        else:
            type_filter = list(type_filter)  # copy
        filtered_estimators = []
        filters = {'classifier': ClassifierMixin,
                   'regressor': RegressorMixin,
                   'transformer': TransformerMixin,
                   'cluster': ClusterMixin}
        for name, mixin in filters.items():
            if name in type_filter:
                type_filter.remove(name)
                filtered_estimators.extend([est for est in estimators
                                            if issubclass(est[1], mixin)])
        estimators = filtered_estimators
        if type_filter:
            # anything left in type_filter was not a recognized kind
            raise ValueError("Parameter type_filter must be 'classifier', "
                             "'regressor', 'transformer', 'cluster' or None, got"
                             " %s." % repr(type_filter))

    # drop duplicates, sort for reproducibility
    return sorted(set(estimators))
def set_random_state(estimator, random_state=0):
    """Set ``estimator``'s ``random_state`` parameter when it has one.

    Parameters
    ----------
    estimator : estimator instance
        Object exposing scikit-learn's get_params/set_params protocol.
    random_state : int, default=0
        Value assigned to the estimator's ``random_state`` parameter.
    """
    # Membership test on the mapping itself; no need to build .keys().
    if "random_state" in estimator.get_params():
        estimator.set_params(random_state=random_state)
def if_matplotlib(func):
    """Decorator that runs *func* only when matplotlib is usable.

    When importing matplotlib (or opening a figure with the Agg backend)
    fails with ImportError, the wrapped test raises SkipTest instead.
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        try:
            import matplotlib
            matplotlib.use('Agg', warn=False)
            # this fails if no $DISPLAY specified
            import matplotlib.pyplot as plt
            plt.figure()
        except ImportError:
            raise SkipTest('Matplotlib not available.')
        return func(*args, **kwargs)
    return wrapper
def if_not_mac_os(versions=('10.7', '10.8', '10.9'),
                  message='Multi-process bug in Mac OS X >= 10.7 '
                          '(see issue #636)'):
    """Decorator factory skipping tests on the listed Mac OS X releases.

    ``versions`` holds the "major.minor" strings to skip on.  On other
    platforms ``platform.mac_ver()`` reports an empty version string, so
    the wrapped test runs unchanged.
    """
    release = platform.mac_ver()[0]
    major_minor = '.'.join(release.split('.')[:2])
    should_skip = major_minor in versions

    def decorator(func):
        if not should_skip:
            return func

        @wraps(func)
        def skipper(*args, **kwargs):
            raise SkipTest(message)
        return skipper
    return decorator
def clean_warning_registry():
    """Reset warning filters and clear every module's warning registry."""
    warnings.resetwarnings()
    registry_attr = "__warningregistry__"
    # Snapshot the module table: clearing registries must not race with
    # imports mutating sys.modules while we iterate.
    for module_name, module in list(sys.modules.items()):
        # six.moves entries are deliberately left untouched here.
        if 'six.moves' in module_name:
            continue
        if hasattr(module, registry_attr):
            getattr(module, registry_attr).clear()
def check_skip_network():
    """Raise SkipTest when SKLEARN_SKIP_NETWORK_TESTS is set truthy."""
    flag = int(os.environ.get('SKLEARN_SKIP_NETWORK_TESTS', 0))
    if flag:
        raise SkipTest("Text tutorial requires large dataset download")
def check_skip_travis():
    """Raise SkipTest when the TRAVIS environment variable is "true"."""
    on_travis = os.environ.get('TRAVIS') == "true"
    if on_travis:
        raise SkipTest("This test needs to be skipped on Travis")
# nose-style decorators: run the corresponding skip check as test setup.
with_network = with_setup(check_skip_network)
with_travis = with_setup(check_skip_travis)
| bsd-3-clause |
WxOutside/software | telemetry/sensors/weatherPiArduino/weatherPiArduino_agent.py | 1 | 4910 | #!/usr/bin/env python
import json
import os
import socket
import sys
import time
import RPi.GPIO as GPIO
sys.path.append(os.path.abspath('/home/pi/telemetry/sensors/weatherPiArduino/RTC_SDL_DS3231'))
sys.path.append(os.path.abspath('/home/pi/telemetry/sensors/weatherPiArduino/Adafruit_Python_GPIO'))
sys.path.append(os.path.abspath('/home/pi/telemetry/sensors/weatherPiArduino/SDL_Pi_Weather_80422'))
sys.path.append(os.path.abspath('/home/pi/telemetry/sensors/weatherPiArduino/SDL_Pi_Weather_80422/Adafruit_ADS1x15'))
import SDL_DS3231
import SDL_Pi_Weather_80422 as SDL_Pi_Weather_80422
sys.path.append(os.path.abspath('/home/pi/telemetry/'))
from config import couchdb_baseurl
from functions import run_proc, date_time, get_sensor_config_value, update_last_record
def create_connection():
    """Create the SDL_Pi_Weather_80422 weather-station interface.

    Returns the station object on success.  On failure the GPIO pins are
    released and False is returned; note that the module-level code below
    calls methods on the result without checking for False, so a failed
    connection currently still aborts the script at that later point.
    """
    try:
        result = SDL_Pi_Weather_80422.SDL_Pi_Weather_80422(
            anenometerPin, rainPin, 0, 0, SDL_MODE_I2C_ADS1015)
    except Exception:
        # A bare ``except:`` would also swallow KeyboardInterrupt and
        # SystemExit; catch only ordinary errors before freeing the pins.
        GPIO.cleanup()
        result = False
    return result
################
# GPIO (BCM) pins wired to the anemometer and rain-gauge inputs.
anenometerPin = 23
rainPin = 24
# constants
SDL_MODE_INTERNAL_AD = 0
SDL_MODE_I2C_ADS1015 = 1
# sample mode means return immediately. The wind speed is averaged at
# sampleTime or when you ask, whichever is longer
SDL_MODE_SAMPLE = 0
# Delay mode means to wait for sampleTime and the average after that time.
SDL_MODE_DELAY = 1
# NOTE(review): create_connection() returns False on failure, but the
# setWindMode call below assumes success — verify error handling.
weatherStation=create_connection()
#weatherStation.setWindMode(SDL_MODE_SAMPLE, 5.0)
weatherStation.setWindMode(SDL_MODE_DELAY, 5.0)
# Main Program
host_name=socket.gethostname()
# Accumulators reset after each hourly record is written.
wind_speeds=[]
max_wind_gust=0
total_rain = 0
first=True
continue_loop=True
last_updated=0
# Main sampling loop: poll the weather station roughly every 10 seconds,
# accumulate readings, and push one aggregated record to CouchDB per hour.
while continue_loop:
    # Factor 1.852 presumably converts knots to km/h — TODO confirm the
    # units reported by the SDL_Pi_Weather_80422 library.
    current_wind_speed = weatherStation.current_wind_speed() * 1.852
    current_wind_gust = weatherStation.get_wind_gust() * 1.852
    total_rain = total_rain + weatherStation.get_current_rain_total()
    current_wind_direction=weatherStation.current_wind_direction()
    # If the wind speed is zero, then the wind direction is false (ie, no direction)
    if current_wind_speed==0:
        current_wind_direction=False
    if first==False:
        wind_speeds.append(current_wind_speed)
        #print ('current wind gust: ', current_wind_gust)
        #print ('max wind gust: ', max_wind_gust)
        # Track the strongest gust (and its direction) seen this hour.
        if current_wind_gust>=max_wind_gust:
            #print ('setting new wind details')
            max_wind_gust=current_wind_gust
            max_wind_gust_direction=current_wind_direction
            #print ('max wind gust direction: ', max_wind_gust_direction)
        #print ("")
        avg_wind_speed=sum(wind_speeds) / float(len(wind_speeds))
        #print ('average wind speed: ', avg_wind_speed)
        #print ('max wind gust:', max_wind_gust)
        #print ('max wind gust direction:', str(max_wind_gust_direction))
        #print ('current wind direction:', str(current_wind_direction))
        #print ('total rain: ' , total_rain)
        current_minute=time.strftime("%M")
        current_time=time.strftime("%Y-%m-%d %H:%M")
        #print ('current minute', current_minute)
        # Write the hourly record at the top of the hour; the last_updated
        # guard prevents duplicate writes within the same minute.
        if current_minute=='00' and current_time!=last_updated:
            #print ('updating official record')
            date,hour,hour_readable,minutes=date_time()
            doc_name=host_name + '_' + date + ':' + hour
            output=run_proc('GET', couchdb_baseurl + '/telemetry/' + doc_name)
            json_items={}
            # NOTE(review): the bare except treats any failure to read
            # '_rev' as "document does not exist yet".
            try:
                if output['_rev']:
                    json_items=output
            except:
                # We need to add these values so that we can retreive them in views.
                # We only add these for new records because these values shouldn't change if the record is updated
                json_items['host_name']=host_name
                json_items['date']=date
                json_items['hour']=hour
            #Now load in our telemetry readings:
            json_items['anemometer_wind_speed']=avg_wind_speed
            json_items['anemometer_wind_gust']=max_wind_gust
            json_items['anemometer_wind_direction']=max_wind_gust_direction
            json_items['anemometer_precipitation']=total_rain
            # Now convert this data and save it
            json_string=json.dumps(json_items)
            replication_output=run_proc('PUT', couchdb_baseurl + '/telemetry/' + doc_name, json_string)
            ###################################################
            # update the last_record entry:
            json_items['anemometer_last_updated']=current_time
            json_items['ignore']=True
            update_last_record(couchdb_baseurl, host_name, json_items)
            print ('Time: ' + current_time)
            print ('Code: 100')
            print ('Rain: ', total_rain)
            print ('Average wind: ', avg_wind_speed)
            print ('Max wind gust: ', max_wind_gust)
            print ('Max wind gust direction: ', max_wind_gust_direction)
            print ('Message: ' + hour_readable + ' record - Rain: ' + str(total_rain) + ', average wind: ' + str(avg_wind_speed) + ', max wind gust: ' + str(max_wind_gust) + ', max wind gust direction: ' + str(max_wind_gust_direction))
            # Reset the hourly accumulators after a successful write.
            wind_speeds=[]
            max_wind_gust=0
            total_rain = 0
            last_updated=current_time
        time.sleep(10.0)
    else:
        # The very first reading is discarded: the station's counters may
        # contain stale values from before the script started.
        #print ('skipping first attempt')
        first=False
        time.sleep(10.0)
        max_wind_gust=0
        total_rain=0
| unlicense |
superdesk/superdesk-ntb | server/ntb/tests/publish/ntb_event_test.py | 2 | 20100 | import lxml
from unittest import mock
from superdesk.tests import TestCase
from planning.common import POST_STATE, WORKFLOW_STATE
from ntb.publish.ntb_event import NTBEventFormatter
class NTBEventTestCase(TestCase):
    """Tests for the XML produced by :class:`NTBEventFormatter`.

    ``setUp`` seeds the test database with one media contact and a family
    of related events: the base event, a duplicate created ten minutes
    later, a rescheduled copy, an ingested event that carries its own
    ``ntb_id``, and a rescheduled copy of the ingested event.
    """

    def setUp(self):
        """Insert the contact and event fixtures used by the tests."""
        super(NTBEventTestCase, self).setUp()
        contact = [{
            '_id': '5ab491271d41c88e98ad9336',
            'contact_email': [
                'jdoe@fubar.com'
            ],
            '_updated': '2018-03-23T05:31:19.000Z',
            'postcode': '2040',
            'is_active': True,
            'locality': 'Rhodes',
            'website': 'fubar.com',
            'public': True,
            'contact_state': 'NSW',
            'last_name': 'Doe',
            'notes': 'Exceptionally good looking',
            'mobile': [
                {
                    'public': False,
                    'number': '999',
                    'usage': 'Private Mobile'
                },
                {
                    'public': True,
                    'number': '666',
                    'usage': 'Office Mobile'
                }
            ],
            'organisation': 'FUBAR',
            'first_name': 'John',
            'country': 'Australia',
            'city': 'Sydney',
            'job_title': 'Media Contact',
            'honorific': 'Mr',
            'contact_phone': [
                {
                    'usage': 'Business',
                    'public': True,
                    'number': '99999999'
                }
            ],
            '_created': '2018-03-23T05:31:19.000Z'
        }]
        self.app.data.insert('contacts', contact)
        # Base event fixture; most tests copy and tweak this dict.
        self.item = {
            '_id': '6116453c-abf6-4a31-b6a9-a02f4207a7be',
            '_created': '2016-10-31T08:27:25+0000',
            'name': 'Kronprinsparet besøker bydelen Gamle Oslo',
            'firstcreated': '2016-10-31T08:27:25+0000',
            'versioncreated': '2016-10-31T09:33:40+0000',
            'dates': {
                'start': '2016-10-31T23:00:00+0000',
                'end': '2016-11-01T22:59:59+0000',
                'tz': 'Europe/Oslo',
            },
            'definition_short': 'Kronprinsparet besøker bydelen Gamle Oslo.',
            'anpa_category': [
                {'qcode': 'n', 'name': 'Nyhetstjenesten'},
            ],
            'subject': [
                {'qcode': '05001000', 'name': 'adult education', 'parent': '0500000'},
                {'name': 'Innenriks', 'qcode': 'Innenriks', 'scheme': 'category'},
                {'name': 'Forurensning', 'qcode': '06005000', 'scheme': 'subject_custom'}
            ],
            'location': [
                {
                    'location': {'lon': 14.4212535, 'lat': 50.0874654},
                    'name': 'Prague',
                    'address': {
                        'area': 'Old Town',
                        'country': 'Czechia',
                        'locality': 'Prague',
                        'postal_code': '11000',
                        'line': [
                            '1092/10 Salvatorska street'
                        ]
                    }
                },
            ],
            'links': [
                'http://example.com',
                'https://github.com',
            ]
        }
        # Duplicate of the base event, created ten minutes later.
        self.item_duplicated = self.item.copy()
        self.item_duplicated['_id'] = '152d0084-fcae-4ef3-abba-c4f8292d2fd7'
        self.item_duplicated['_created'] = '2016-10-31T08:37:25+0000'
        self.item_duplicated['duplicate_from'] = self.item['_id']
        # Rescheduled copy of the base event with new dates.
        self.item_rescheduled = self.item.copy()
        self.item_rescheduled['_id'] = '8f2541ad-2cda-4f49-9d4f-d8db3cb822e2'
        self.item_rescheduled['reschedule_from'] = self.item['_id']
        self.item_rescheduled['dates'] = {
            'start': '2018-10-25T23:00:00+0000',
            'end': '2018-11-01T22:59:59+0000',
            'tz': 'Europe/Oslo',
        }
        # Ingested event that already carries an external ntb_id.
        self.item_ingested = self.item.copy()
        self.item_ingested['_id'] = 'd31dde37-272e-4437-8216-d74ec871b586'
        self.item_ingested['guid'] = 'd31dde37-272e-4437-8216-d74ec871b586'
        self.item_ingested['state'] = 'ingested'
        self.item_ingested['ntb_id'] = 'NBRP123456_123456_na_00'
        # Rescheduled copy of the ingested event.
        self.item_ingested_rescheduled = self.item_ingested.copy()
        self.item_ingested_rescheduled['_id'] = 'ae5290f1-633a-4642-b7c9-6887d6f3e295'
        self.item_ingested_rescheduled['reschedule_from'] = self.item_ingested['_id']
        self.item_ingested_rescheduled['dates'] = {
            'start': '2018-10-25T23:00:00+0000',
            'end': '2018-11-01T22:59:59+0000',
            'tz': 'Europe/Oslo',
        }
        with self.app.app_context():
            self.app.data.insert('events', [
                self.item,
                self.item_duplicated,
                self.item_rescheduled,
                self.item_ingested
            ])

    def test_formatter(self):
        """Check every top-level field of the generated XML document."""
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        self.assertIsInstance(output['formatted_item'], str)
        self.assertIsInstance(output['encoded_item'], bytes)
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('document', root.tag)
        self.assertEqual('True', root.find('publiseres').text)
        self.assertEqual('newscalendar', root.find('service').text)
        self.assertEqual(self.item['name'], root.find('title').text)
        self.assertEqual('2016-10-31T10:33:40', root.find('time').text)  # utc + 1
        self.assertEqual('NBRP161031_092725_hh_00', root.find('ntbId').text)
        self.assertEqual('2016-11-01T00:00:00', root.find('timeStart').text)
        self.assertEqual('2016-11-01T23:59:59', root.find('timeEnd').text)
        self.assertEqual('5', root.find('priority').text)
        self.assertEqual(self.item['definition_short'], root.find('content').text)
        self.assertEqual(self.item['subject'][1]['name'], root.find('category').text)
        subjects = root.find('subjects')
        self.assertEqual(2, len(subjects))
        self.assertEqual(self.item['subject'][0]['name'], subjects[0].text)
        self.assertEqual(self.item['subject'][2]['name'], subjects[1].text)
        geo = root.find('geo')
        self.assertEqual(str(self.item['location'][0]['location']['lat']), geo.find('latitude').text)
        self.assertEqual(str(self.item['location'][0]['location']['lon']), geo.find('longitude').text)

    def test_unpost(self):
        """A cancelled pubstatus produces a DeleteRequest document."""
        item = self.item.copy()
        item['pubstatus'] = POST_STATE.CANCELLED
        formatter = NTBEventFormatter()
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('true', root.get('DeleteRequest'))

    def test_cancel(self):
        """Cancelled/postponed workflow states also produce DeleteRequest."""
        item = self.item.copy()
        for state in [WORKFLOW_STATE.CANCELLED, WORKFLOW_STATE.POSTPONED]:
            item['state'] = state
            formatter = NTBEventFormatter()
            output = formatter.format(item, {})[0]
            root = lxml.etree.fromstring(output['encoded_item'])
            self.assertEqual('true', root.get('DeleteRequest'))

    def test_alldayevent_included(self):
        """The alldayevent element is always present in the output."""
        # just in case main self.item['dates'] will be changed in setUp
        self.item['dates']['start'] = '2016-10-31T23:00:00+0000'
        self.item['dates']['end'] = '2016-11-01T22:59:59+0000'
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        alldayevent = root.find('alldayevent')
        self.assertIsNotNone(alldayevent)

    def test_alldayevent_is_true(self):
        """Full-day local dates are flagged as an all-day event."""
        # just in case main self.item['dates'] will be changed in setUp
        self.item['dates']['start'] = '2016-10-31T23:00:00+0000'
        self.item['dates']['end'] = '2016-11-01T22:59:59+0000'
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        alldayevent = root.find('alldayevent')
        self.assertEqual(alldayevent.text, str(True))

    def test_alldayevent_is_false(self):
        """Partial-day dates are not flagged as an all-day event."""
        self.item['dates']['start'] = '2016-10-31T14:00:00+0000'
        self.item['dates']['end'] = '2016-11-01T21:30:59+0000'
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        alldayevent = root.find('alldayevent')
        self.assertEqual(alldayevent.text, str(False))

    def test_location(self):
        """Location text degrades gracefully as address parts are removed."""
        formatter = NTBEventFormatter()
        # full address data
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        location = root.find('location')
        self.assertEqual(
            location.text,
            'Prague, 1092/10 Salvatorska street, Old Town, Prague, 11000, Czechia'
        )
        # partly address data
        del self.item['location'][0]['address']['line']
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        location = root.find('location')
        self.assertEqual(
            location.text,
            'Prague, Old Town, Prague, 11000, Czechia'
        )
        # no address data
        del self.item['location'][0]['address']
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        location = root.find('location')
        self.assertEqual(location.text, 'Prague')
        # empty location name and no address data
        del self.item['location'][0]['name']
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        location = root.find('location')
        self.assertIsNone(location.text)

    def test_contactweb_included(self):
        """Events with links get exactly one contactweb element."""
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactweb = root.find('contactweb')
        contactweb_count = root.xpath('count(contactweb)')
        self.assertIsNotNone(contactweb)
        self.assertEqual(contactweb_count, 1)

    def test_contactweb_not_included(self):
        """Events without links get no contactweb element."""
        del self.item['links']
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactweb = root.find('contactweb')
        self.assertIsNone(contactweb)

    def test_contactweb_text(self):
        """contactweb carries only the first external link."""
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactweb = root.find('contactweb')
        # include only 1st external link
        self.assertEqual(contactweb.text, self.item['links'][0])

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'first_name': 'John',
        'last_name': 'Smith',
        'organisation': 'NASA',
    })
    def test_contactname_included(self, magic_mock):
        """Events with contact info get exactly one contactname element."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        contactname_count = root.xpath('count(contactname)')
        self.assertIsNotNone(contactname)
        self.assertEqual(contactname_count, 1)

    def test_contactname_not_included(self):
        """Events without contact info get no contactname element."""
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        self.assertIsNone(contactname)

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'first_name': 'John',
        'last_name': 'Smith',
    })
    def test_contactname_format_no_organisation(self, magic_mock):
        """Name only, when the contact has no organisation."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        self.assertEqual(contactname.text, 'John Smith')

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'first_name': 'John',
        'last_name': 'Smith',
        'organisation': 'NASA',
    })
    def test_contactname_format_organisation(self, magic_mock):
        """Name and organisation are comma separated."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        self.assertEqual(contactname.text, 'John Smith, NASA')

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'last_name': 'Smith',
        'organisation': 'NASA',
    })
    def test_contactname_format_no_firstname(self, magic_mock):
        """Missing first name is simply omitted."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        self.assertEqual(contactname.text, 'Smith, NASA')

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'first_name': 'John',
        'organisation': 'NASA',
    })
    def test_contactname_format_no_lastname(self, magic_mock):
        """Missing last name is simply omitted."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        self.assertEqual(contactname.text, 'John, NASA')

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'organisation': 'NASA',
    })
    def test_contactname_format_no_names(self, magic_mock):
        """Organisation alone is used when no names are present."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactname = root.find('contactname')
        self.assertEqual(contactname.text, 'NASA')

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'contact_email': ['john.smith@nasa.org']
    })
    def test_contactmail_included(self, magic_mock):
        """A contact e-mail address yields exactly one contactmail element."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactmail = root.find('contactmail')
        contactmail_count = root.xpath('count(contactmail)')
        self.assertIsNotNone(contactmail)
        self.assertEqual(contactmail_count, 1)

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'contact_email': []
    })
    def test_contactmail_not_included(self, magic_mock):
        """An empty e-mail list yields no contactmail element."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactmail = root.find('contactmail')
        contactmail_count = root.xpath('count(contactmail)')
        self.assertIsNone(contactmail)
        self.assertEqual(contactmail_count, 0)

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'contact_phone': [{'number': '99999', 'public': True}]
    })
    def test_contactphone_included(self, magic_mock):
        """A public phone number yields exactly one contactphone element."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactphone = root.find('contactphone')
        contactphone_count = root.xpath('count(contactphone)')
        self.assertIsNotNone(contactphone)
        self.assertEqual(contactphone_count, 1)

    @mock.patch('apps.contacts.service.ContactsService.find_one', return_value={
        'contact_phone': [{'number': '99999', 'public': False}]
    })
    def test_contactphone_not_included(self, magic_mock):
        """A non-public phone number yields no contactphone element."""
        self.item['event_contact_info'] = [
            '5b7a8228f7ab23b336d7f84d',
            '7ab23b336d7f84d5b7a8228f',
        ]
        formatter = NTBEventFormatter()
        output = formatter.format(self.item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        contactphone = root.find('contactphone')
        contactphone_count = root.xpath('count(contactphone)')
        self.assertIsNone(contactphone)
        self.assertEqual(contactphone_count, 0)

    def test_content_missing_desc_short(self):
        """Missing definition_short leaves the content element empty."""
        formatter = NTBEventFormatter()
        item = self.item.copy()
        item['definition_short'] = None
        item['definition_long'] = 'Long desc'
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertIsNone(root.find('content').text)

    def test_ingested_item_dates(self):
        """Dates with an empty tz fall back to the default timezone."""
        formatter = NTBEventFormatter()
        item = self.item.copy()
        item['dates'] = {
            'start': '2018-07-01T16:00:00+0000',
            'end': '2018-07-01T18:00:00+0000',
            'tz': ''
        }
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('2018-07-01T18:00:00', root.find('timeStart').text)  # CEST + 2

    def test_ntbid(self):
        """ntbId is derived from the event's creation timestamp."""
        formatter = NTBEventFormatter()
        item = self.item.copy()
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('NBRP161031_092725_hh_00', root.find('ntbId').text)

    def test_ntbid_duplicated(self):
        """A duplicated event gets an ntbId from its own creation time."""
        formatter = NTBEventFormatter()
        item = self.item_duplicated.copy()
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('NBRP161031_093725_hh_00', root.find('ntbId').text)

    def test_ntb_id_ingested(self):
        """An ingested event keeps the ntb_id it arrived with."""
        formatter = NTBEventFormatter()
        item = self.item_ingested.copy()
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('NBRP123456_123456_na_00', root.find('ntbId').text)

    def test_ntb_id_rescheduled(self):
        """A rescheduled event reuses the original event's ntbId."""
        formatter = NTBEventFormatter()
        item = self.item_rescheduled.copy()
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('NBRP161031_092725_hh_00', root.find('ntbId').text)

    def test_ntb_id_ingested_rescheduled(self):
        """Rescheduling an ingested event keeps the ingested ntb_id."""
        formatter = NTBEventFormatter()
        item = self.item_ingested_rescheduled.copy()
        output = formatter.format(item, {})[0]
        root = lxml.etree.fromstring(output['encoded_item'])
        self.assertEqual('NBRP123456_123456_na_00', root.find('ntbId').text)
| agpl-3.0 |
woltage/ansible | v1/ansible/runner/lookup_plugins/first_found.py | 143 | 6490 | # (c) 2013, seth vidal <skvidal@fedoraproject.org> red hat, inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# take a list of files and (optionally) a list of paths
# return the first existing file found in the paths
# [file1, file2, file3], [path1, path2, path3]
# search order is:
# path1/file1
# path1/file2
# path1/file3
# path2/file1
# path2/file2
# path2/file3
# path3/file1
# path3/file2
# path3/file3
# first file found with os.path.exists() is returned
# no file matches raises ansibleerror
# EXAMPLES
# - name: copy first existing file found to /some/file
# action: copy src=$item dest=/some/file
# with_first_found:
# - files: foo ${inventory_hostname} bar
# paths: /tmp/production /tmp/staging
# that will look for files in this order:
# /tmp/production/foo
# ${inventory_hostname}
# bar
# /tmp/staging/foo
# ${inventory_hostname}
# bar
# - name: copy first existing file found to /some/file
# action: copy src=$item dest=/some/file
# with_first_found:
# - files: /some/place/foo ${inventory_hostname} /some/place/else
# that will look for files in this order:
# /some/place/foo
# $relative_path/${inventory_hostname}
# /some/place/else
# example - including tasks:
# tasks:
# - include: $item
# with_first_found:
# - files: generic
# paths: tasks/staging tasks/production
# this will include the tasks in the file generic where it is found first (staging or production)
# example simple file lists
#tasks:
#- name: first found file
# action: copy src=$item dest=/etc/file.cfg
# with_first_found:
# - files: foo.${inventory_hostname} foo
# example skipping if no matched files
# First_found also offers the ability to control whether or not failing
# to find a file returns an error or not
#
#- name: first found file - or skip
# action: copy src=$item dest=/etc/file.cfg
# with_first_found:
# - files: foo.${inventory_hostname}
# skip: true
# example a role with default configuration and configuration per host
# you can set multiple terms with their own files and paths to look through.
# consider a role that sets some configuration per host falling back on a default config.
#
#- name: some configuration template
# template: src={{ item }} dest=/etc/file.cfg mode=0444 owner=root group=root
# with_first_found:
# - files:
# - ${inventory_hostname}/etc/file.cfg
# paths:
# - ../../../templates.overwrites
# - ../../../templates
# - files:
# - etc/file.cfg
# paths:
# - templates
# the above will return an empty list if the files cannot be found at all
# if skip is unspecified or if it is set to false then not finding a file
# is an error, which can be caught by ignore_errors: true for that action.
# finally - if you want you can use it, in place to replace first_available_file:
# you simply cannot use the - files, path or skip options. simply replace
# first_available_file with with_first_found and leave the file listing in place
#
#
# - name: with_first_found like first_available_file
# action: copy src=$item dest=/tmp/faftest
# with_first_found:
# - ../files/foo
# - ../files/bar
# - ../files/baz
# ignore_errors: true
from ansible import utils, errors
import os
class LookupModule(object):
    """with_first_found lookup: return the first existing file from a
    list of candidate files, optionally combined with a list of paths."""

    def __init__(self, basedir=None, **kwargs):
        # basedir: directory relative candidates are resolved against.
        self.basedir = basedir

    def run(self, terms, inject=None, **kwargs):
        """Return a one-element list with the first path that exists,
        ``[]`` when nothing matched and ``skip`` was truthy, or
        ``[None]`` (treated as an error by callers) otherwise."""
        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
        result = None
        anydict = False
        skip = False
        # Terms may be plain filenames or dicts with 'files'/'paths'/'skip'
        # keys; detect whether any dict-style term is present.
        for term in terms:
            if isinstance(term, dict):
                anydict = True
        total_search = []
        if anydict:
            for term in terms:
                if isinstance(term, dict):
                    files = term.get('files', [])
                    paths = term.get('paths', [])
                    # NOTE: the 'skip' flag of the *last* dict term wins
                    # for the final not-found behaviour below.
                    skip = utils.boolean(term.get('skip', False))
                    filelist = files
                    if isinstance(files, basestring):
                        # Accept comma-, semicolon- or space-separated names.
                        files = files.replace(',', ' ')
                        files = files.replace(';', ' ')
                        filelist = files.split(' ')
                    pathlist = paths
                    if paths:
                        if isinstance(paths, basestring):
                            # Accept comma-, colon-, semicolon- or
                            # space-separated paths.
                            paths = paths.replace(',', ' ')
                            paths = paths.replace(':', ' ')
                            paths = paths.replace(';', ' ')
                            pathlist = paths.split(' ')
                    if not pathlist:
                        # No paths supplied: search the names as given.
                        total_search = filelist
                    else:
                        # Cartesian product, paths outermost, preserving the
                        # documented path1/file1, path1/file2, ... order.
                        for path in pathlist:
                            for fn in filelist:
                                f = os.path.join(path, fn)
                                total_search.append(f)
                else:
                    # Plain (non-dict) term mixed in: search it verbatim.
                    total_search.append(term)
        else:
            # Simple list of file names, no dict terms at all.
            total_search = terms
        for fn in total_search:
            if inject and '_original_file' in inject:
                # Running from a role/include: also check the role's
                # sibling templates/ and vars/ directories.
                for roledir in ('templates', 'vars'):
                    path = utils.path_dwim(os.path.join(self.basedir, '..', roledir), fn)
                    if os.path.exists(path):
                        return [path]
            # if none of the above were found, just check the
            # current filename against the basedir (this will already
            # have ../files from runner, if it's a role task
            path = utils.path_dwim(self.basedir, fn)
            if os.path.exists(path):
                return [path]
        else:
            # Loop exhausted without a hit: honour 'skip' (empty result)
            # or signal an error with [None].
            if skip:
                return []
            else:
                return [None]
| gpl-3.0 |
camradal/ansible | lib/ansible/modules/web_infrastructure/jenkins_script.py | 6 | 5223 | #!/usr/bin/python
# encoding: utf-8
# (c) 2016, James Hogarth <james.hogarth@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Fixed: the 'password' option used the invalid key 'require:' instead of
# 'required:', which ansible-doc/validation would not recognize.
DOCUMENTATION = '''
---
author: James Hogarth
module: jenkins_script
short_description: Executes a groovy script in the jenkins instance
version_added: '2.3'
description:
  - The M(jenkins_script) module takes a script plus a dict of values
    to use within the script and returns the result of the script being run.

options:
  script:
    description:
      - The groovy script to be executed.
        This gets passed as a string Template if args is defined.
    required: true
    default: null
  url:
    description:
      - The jenkins server to execute the script against. The default is a local
        jenkins instance that is not being proxied through a webserver.
    required: false
    default: http://localhost:8080
  validate_certs:
    description:
      - If set to C(no), the SSL certificates will not be validated.
        This should only be set to C(no) on personally controlled sites
        using self-signed certificates as it avoids verifying the source site.
    required: false
    default: True
  user:
    description:
      - The username to connect to the jenkins server with.
    required: false
    default: null
  password:
    description:
      - The password to connect to the jenkins server with.
    required: false
    default: null
  args:
    description:
      - A dict of key-value pairs used in formatting the script.
    required: false
    default: null

notes:
  - Since the script can do anything this does not report on changes.
    Knowing the script is being run it's important to set changed_when
    for the ansible output to be clear on any alterations made.
'''

EXAMPLES = '''
- name: Obtaining a list of plugins
  jenkins_script:
    script: 'println(Jenkins.instance.pluginManager.plugins)'
    user: admin
    password: admin

- name: Setting master using a variable to hold a more complicate script
  vars:
    setmaster_mode: |
        import jenkins.model.*
        instance = Jenkins.getInstance()
        instance.setMode(${jenkins_mode})
        instance.save()

- name: use the variable as the script
  jenkins_script:
    script: "{{ setmaster_mode }}"
    args:
      jenkins_mode: Node.Mode.EXCLUSIVE

- name: interacting with an untrusted HTTPS connection
  jenkins_script:
    script: "println(Jenkins.instance.pluginManager.plugins)"
    user: admin
    password: admin
    url: https://localhost
    validate_certs: no
'''

RETURN = '''
output:
    description: Result of script
    returned: success
    type: string
    sample: 'Result: true'
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
try:
# python2
from urllib import urlencode
except ImportError:
# python3
from urllib.parse import urlencode
def main():
    """Run a user-supplied Groovy script against a Jenkins master's
    /scriptText endpoint and exit with the script's textual output."""
    module = AnsibleModule(
        argument_spec = dict(
            script = dict(required=True, type="str"),
            url = dict(required=False, type="str", default="http://localhost:8080"),
            validate_certs = dict(required=False, type="bool", default=True),
            user = dict(required=False, no_log=True, type="str",default=None),
            password = dict(required=False, no_log=True, type="str",default=None),
            args = dict(required=False, type="dict", default=None)
        )
    )

    # Credentials must come as a pair; fetch_url() picks up the
    # url_username/url_password/force_basic_auth params set here.
    if module.params['user'] is not None:
        if module.params['password'] is None:
            module.fail_json(msg="password required when user provided")
        module.params['url_username'] = module.params['user']
        module.params['url_password'] = module.params['password']
        module.params['force_basic_auth'] = True

    # When 'args' is given, the script is treated as a string.Template
    # and the args dict is substituted into it (raises KeyError/ValueError
    # from substitute() on bad placeholders).
    if module.params['args'] is not None:
        from string import Template
        script_contents = Template(module.params['script']).substitute(module.params['args'])
    else:
        script_contents = module.params['script']

    resp, info = fetch_url(module,
                           module.params['url'] + "/scriptText",
                           data=urlencode({'script': script_contents}),
                           method="POST")
    if info["status"] != 200:
        module.fail_json(msg="HTTP error " + str(info["status"]) + " " + info["msg"])

    result = resp.read()

    # Jenkins returns HTTP 200 even when the script throws; detect a Java
    # stacktrace in the response body instead.
    # NOTE(review): assumes resp.read() yields str (Python 2); on Python 3
    # it would be bytes and the 'in' tests would raise -- confirm target
    # interpreter.
    if 'Exception:' in result and 'at java.lang.Thread' in result:
        module.fail_json(msg="script failed with stacktrace:\n " + result)

    module.exit_json(
        output = result,
    )

if __name__ == '__main__':
    main()
| gpl-3.0 |
mtnman38/Aggregate | Executables/Aggregate 0.8.7 for Macintosh.app/Contents/Resources/lib/python2.7/email/feedparser.py | 91 | 20606 | # Copyright (C) 2004-2006 Python Software Foundation
# Authors: Baxter, Wouters and Warsaw
# Contact: email-sig@python.org
"""FeedParser - An email feed parser.
The feed parser implements an interface for incrementally parsing an email
message, line by line. This has advantages for certain applications, such as
those reading email messages off a socket.
FeedParser.feed() is the primary interface for pushing new data into the
parser. It returns when there's nothing more it can do with the available
data. When you have no more data to push into the parser, call .close().
This completes the parsing and returns the root message object.
The other advantage of this parser is that it will never raise a parsing
exception. Instead, when it finds something unexpected, it adds a 'defect' to
the current message. Defects are just instances that live on the message
object's .defects attribute.
"""
__all__ = ['FeedParser']
import re
from email import errors
from email import message
# Line-break patterns.  Raw strings are used so that ``\Z`` reaches the
# regex engine intact: in a plain literal ``'\Z'`` is an invalid string
# escape (deprecated, and a SyntaxWarning/error in modern Python).  The
# regex engine interprets \r and \n itself, so matching is unchanged.
NLCRE = re.compile(r'\r\n|\r|\n')
NLCRE_bol = re.compile(r'(\r\n|\r|\n)')
NLCRE_eol = re.compile(r'(\r\n|\r|\n)\Z')
NLCRE_crack = re.compile(r'(\r\n|\r|\n)')
# RFC 2822 $3.6.8 Optional fields.  ftext is %d33-57 / %d59-126, Any character
# except controls, SP, and ":".
headerRE = re.compile(r'^(From |[\041-\071\073-\176]{1,}:|[\t ])')
EMPTYSTRING = ''
NL = '\n'

# Sentinel yielded by the parser generator when the buffer runs dry.
NeedMoreData = object()
class BufferedSubFile(object):
    """A file-ish object that can have new data loaded into it.

    You can also push and pop line-matching predicates onto a stack.  When the
    current predicate matches the current line, a false EOF response
    (i.e. empty string) is returned instead.  This lets the parser adhere to a
    simple abstraction -- it parses until EOF closes the current message.
    """
    def __init__(self):
        # The last partial line pushed into this object.
        self._partial = ''
        # The list of full, pushed lines, in reverse order
        self._lines = []
        # The stack of false-EOF checking predicates.
        self._eofstack = []
        # A flag indicating whether the file has been closed or not.
        self._closed = False

    def push_eof_matcher(self, pred):
        """Install *pred*; a line matching it reads as a false EOF ('')."""
        self._eofstack.append(pred)

    def pop_eof_matcher(self):
        """Remove and return the most recently pushed false-EOF predicate."""
        return self._eofstack.pop()

    def close(self):
        """Mark the stream closed; readline() then returns '' at true EOF."""
        # Don't forget any trailing partial line.
        self._lines.append(self._partial)
        self._partial = ''
        self._closed = True

    def readline(self):
        """Return the next buffered line, '' at (true or false) EOF, or
        the NeedMoreData sentinel when the buffer is empty but open."""
        if not self._lines:
            if self._closed:
                return ''
            return NeedMoreData
        # Pop the line off the stack and see if it matches the current
        # false-EOF predicate.
        line = self._lines.pop()
        # RFC 2046, section 5.1.2 requires us to recognize outer level
        # boundaries at any level of inner nesting.  Do this, but be sure it's
        # in the order of most to least nested.
        for ateof in self._eofstack[::-1]:
            if ateof(line):
                # We're at the false EOF.  But push the last line back first.
                self._lines.append(line)
                return ''
        return line

    def unreadline(self, line):
        """Push a previously read line back so the next readline sees it."""
        # Let the consumer push a line back into the buffer.
        assert line is not NeedMoreData
        self._lines.append(line)

    def push(self, data):
        """Push some new data into this object."""
        # Handle any previous leftovers
        data, self._partial = self._partial + data, ''
        # Crack into lines, but preserve the newlines on the end of each
        parts = NLCRE_crack.split(data)
        # The *ahem* interesting behaviour of re.split when supplied grouping
        # parentheses is that the last element of the resulting list is the
        # data after the final RE.  In the case of a NL/CR terminated string,
        # this is the empty string.
        self._partial = parts.pop()
        #GAN 29Mar09 bugs 1555570, 1721862  Confusion at 8K boundary ending with \r:
        # is there a \n to follow later?
        if not self._partial and parts and parts[-1].endswith('\r'):
            self._partial = parts.pop(-2)+parts.pop()
        # parts is a list of strings, alternating between the line contents
        # and the eol character(s).  Gather up a list of lines after
        # re-attaching the newlines.
        lines = []
        for i in range(len(parts) // 2):
            lines.append(parts[i*2] + parts[i*2+1])
        self.pushlines(lines)

    def pushlines(self, lines):
        """Prepend complete *lines* (oldest first) to the read queue."""
        # Reverse and insert at the front of the lines.
        self._lines[:0] = lines[::-1]

    def is_closed(self):
        """Return True once close() has been called."""
        return self._closed

    def __iter__(self):
        return self

    def next(self):
        # Python 2 iterator protocol; stops at real (or false) EOF.
        line = self.readline()
        if line == '':
            raise StopIteration
        return line
class FeedParser:
    """A feed-style parser of email."""

    def __init__(self, _factory=message.Message):
        """_factory is called with no arguments to create a new message obj"""
        self._factory = _factory
        self._input = BufferedSubFile()
        # Stack of Message objects under construction; multiparts push a
        # new entry per nested part.
        self._msgstack = []
        # The parser proper is a generator; .next pumps it one step.
        self._parse = self._parsegen().next
        self._cur = None
        self._last = None
        self._headersonly = False

    # Non-public interface for supporting Parser's headersonly flag
    def _set_headersonly(self):
        self._headersonly = True

    def feed(self, data):
        """Push more data into the parser."""
        self._input.push(data)
        self._call_parse()

    def _call_parse(self):
        # StopIteration here just means the generator is waiting for more
        # data, or is completely done; it is never an error.
        try:
            self._parse()
        except StopIteration:
            pass

    def close(self):
        """Parse all remaining data and return the root message object."""
        self._input.close()
        self._call_parse()
        root = self._pop_message()
        assert not self._msgstack
        # Look for final set of defects
        if root.get_content_maintype() == 'multipart' \
               and not root.is_multipart():
            root.defects.append(errors.MultipartInvariantViolationDefect())
        return root

    def _new_message(self):
        # Create a Message, attach it to the enclosing multipart (if any),
        # and make it both the current and the last-seen message.
        msg = self._factory()
        if self._cur and self._cur.get_content_type() == 'multipart/digest':
            # Parts of a digest default to message/rfc822 (RFC 2046 5.1.5).
            msg.set_default_type('message/rfc822')
        if self._msgstack:
            self._msgstack[-1].attach(msg)
        self._msgstack.append(msg)
        self._cur = msg
        self._last = msg

    def _pop_message(self):
        # Finish the current message and resume its parent (None at root).
        retval = self._msgstack.pop()
        if self._msgstack:
            self._cur = self._msgstack[-1]
        else:
            self._cur = None
        return retval

    def _parsegen(self):
        """Generator performing the actual parse.

        Yields NeedMoreData whenever the input buffer runs dry, and
        delegates to a recursive invocation of itself for nested
        message/* and multipart/* bodies.
        """
        # Create a new message and start by parsing headers.
        self._new_message()
        headers = []
        # Collect the headers, searching for a line that doesn't match the RFC
        # 2822 header or continuation pattern (including an empty line).
        for line in self._input:
            if line is NeedMoreData:
                yield NeedMoreData
                continue
            if not headerRE.match(line):
                # If we saw the RFC defined header/body separator
                # (i.e. newline), just throw it away. Otherwise the line is
                # part of the body so push it back.
                if not NLCRE.match(line):
                    self._input.unreadline(line)
                break
            headers.append(line)
        # Done with the headers, so parse them and figure out what we're
        # supposed to see in the body of the message.
        self._parse_headers(headers)
        # Headers-only parsing is a backwards compatibility hack, which was
        # necessary in the older parser, which could raise errors. All
        # remaining lines in the input are thrown into the message body.
        if self._headersonly:
            lines = []
            while True:
                line = self._input.readline()
                if line is NeedMoreData:
                    yield NeedMoreData
                    continue
                if line == '':
                    break
                lines.append(line)
            self._cur.set_payload(EMPTYSTRING.join(lines))
            return
        if self._cur.get_content_type() == 'message/delivery-status':
            # message/delivery-status contains blocks of headers separated by
            # a blank line. We'll represent each header block as a separate
            # nested message object, but the processing is a bit different
            # than standard message/* types because there is no body for the
            # nested messages. A blank line separates the subparts.
            while True:
                self._input.push_eof_matcher(NLCRE.match)
                for retval in self._parsegen():
                    if retval is NeedMoreData:
                        yield NeedMoreData
                        continue
                    break
                msg = self._pop_message()
                # We need to pop the EOF matcher in order to tell if we're at
                # the end of the current file, not the end of the last block
                # of message headers.
                self._input.pop_eof_matcher()
                # The input stream must be sitting at the newline or at the
                # EOF. We want to see if we're at the end of this subpart, so
                # first consume the blank line, then test the next line to see
                # if we're at this subpart's EOF.
                while True:
                    line = self._input.readline()
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                    break
                while True:
                    line = self._input.readline()
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                    break
                if line == '':
                    break
                # Not at EOF so this is a line we're going to need.
                self._input.unreadline(line)
            return
        if self._cur.get_content_maintype() == 'message':
            # The message claims to be a message/* type, then what follows is
            # another RFC 2822 message.
            for retval in self._parsegen():
                if retval is NeedMoreData:
                    yield NeedMoreData
                    continue
                break
            self._pop_message()
            return
        if self._cur.get_content_maintype() == 'multipart':
            boundary = self._cur.get_boundary()
            if boundary is None:
                # The message /claims/ to be a multipart but it has not
                # defined a boundary. That's a problem which we'll handle by
                # reading everything until the EOF and marking the message as
                # defective.
                self._cur.defects.append(errors.NoBoundaryInMultipartDefect())
                lines = []
                for line in self._input:
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                    lines.append(line)
                self._cur.set_payload(EMPTYSTRING.join(lines))
                return
            # Create a line match predicate which matches the inter-part
            # boundary as well as the end-of-multipart boundary. Don't push
            # this onto the input stream until we've scanned past the
            # preamble.
            separator = '--' + boundary
            boundaryre = re.compile(
                '(?P<sep>' + re.escape(separator) +
                r')(?P<end>--)?(?P<ws>[ \t]*)(?P<linesep>\r\n|\r|\n)?$')
            capturing_preamble = True
            preamble = []
            linesep = False
            while True:
                line = self._input.readline()
                if line is NeedMoreData:
                    yield NeedMoreData
                    continue
                if line == '':
                    break
                mo = boundaryre.match(line)
                if mo:
                    # If we're looking at the end boundary, we're done with
                    # this multipart. If there was a newline at the end of
                    # the closing boundary, then we need to initialize the
                    # epilogue with the empty string (see below).
                    if mo.group('end'):
                        linesep = mo.group('linesep')
                        break
                    # We saw an inter-part boundary. Were we in the preamble?
                    if capturing_preamble:
                        if preamble:
                            # According to RFC 2046, the last newline belongs
                            # to the boundary.
                            lastline = preamble[-1]
                            eolmo = NLCRE_eol.search(lastline)
                            if eolmo:
                                preamble[-1] = lastline[:-len(eolmo.group(0))]
                            self._cur.preamble = EMPTYSTRING.join(preamble)
                        capturing_preamble = False
                        self._input.unreadline(line)
                        continue
                    # We saw a boundary separating two parts. Consume any
                    # multiple boundary lines that may be following. Our
                    # interpretation of RFC 2046 BNF grammar does not produce
                    # body parts within such double boundaries.
                    while True:
                        line = self._input.readline()
                        if line is NeedMoreData:
                            yield NeedMoreData
                            continue
                        mo = boundaryre.match(line)
                        if not mo:
                            self._input.unreadline(line)
                            break
                    # Recurse to parse this subpart; the input stream points
                    # at the subpart's first line.
                    self._input.push_eof_matcher(boundaryre.match)
                    for retval in self._parsegen():
                        if retval is NeedMoreData:
                            yield NeedMoreData
                            continue
                        break
                    # Because of RFC 2046, the newline preceding the boundary
                    # separator actually belongs to the boundary, not the
                    # previous subpart's payload (or epilogue if the previous
                    # part is a multipart).
                    if self._last.get_content_maintype() == 'multipart':
                        epilogue = self._last.epilogue
                        if epilogue == '':
                            self._last.epilogue = None
                        elif epilogue is not None:
                            mo = NLCRE_eol.search(epilogue)
                            if mo:
                                end = len(mo.group(0))
                                self._last.epilogue = epilogue[:-end]
                    else:
                        payload = self._last.get_payload()
                        if isinstance(payload, basestring):
                            mo = NLCRE_eol.search(payload)
                            if mo:
                                payload = payload[:-len(mo.group(0))]
                                self._last.set_payload(payload)
                    self._input.pop_eof_matcher()
                    self._pop_message()
                    # Set the multipart up for newline cleansing, which will
                    # happen if we're in a nested multipart.
                    self._last = self._cur
                else:
                    # I think we must be in the preamble
                    assert capturing_preamble
                    preamble.append(line)
            # We've seen either the EOF or the end boundary. If we're still
            # capturing the preamble, we never saw the start boundary. Note
            # that as a defect and store the captured text as the payload.
            # Everything from here to the EOF is epilogue.
            if capturing_preamble:
                self._cur.defects.append(errors.StartBoundaryNotFoundDefect())
                self._cur.set_payload(EMPTYSTRING.join(preamble))
                epilogue = []
                for line in self._input:
                    if line is NeedMoreData:
                        yield NeedMoreData
                        continue
                # NOTE(review): remaining lines are consumed but never
                # appended, so the epilogue set here is always '' -- this
                # matches the historical upstream behaviour.
                self._cur.epilogue = EMPTYSTRING.join(epilogue)
                return
            # If the end boundary ended in a newline, we'll need to make sure
            # the epilogue isn't None
            if linesep:
                epilogue = ['']
            else:
                epilogue = []
            for line in self._input:
                if line is NeedMoreData:
                    yield NeedMoreData
                    continue
                epilogue.append(line)
            # Any CRLF at the front of the epilogue is not technically part of
            # the epilogue. Also, watch out for an empty string epilogue,
            # which means a single newline.
            if epilogue:
                firstline = epilogue[0]
                bolmo = NLCRE_bol.match(firstline)
                if bolmo:
                    epilogue[0] = firstline[len(bolmo.group(0)):]
            self._cur.epilogue = EMPTYSTRING.join(epilogue)
            return
        # Otherwise, it's some non-multipart type, so the entire rest of the
        # file contents becomes the payload.
        lines = []
        for line in self._input:
            if line is NeedMoreData:
                yield NeedMoreData
                continue
            lines.append(line)
        self._cur.set_payload(EMPTYSTRING.join(lines))

    def _parse_headers(self, lines):
        """Parse header *lines* into the current message, recording
        defects for continuations-first, misplaced unix-froms and
        colon-less lines."""
        # Passed a list of lines that make up the headers for the current msg
        lastheader = ''
        lastvalue = []
        for lineno, line in enumerate(lines):
            # Check for continuation
            if line[0] in ' \t':
                if not lastheader:
                    # The first line of the headers was a continuation. This
                    # is illegal, so let's note the defect, store the illegal
                    # line, and ignore it for purposes of headers.
                    defect = errors.FirstHeaderLineIsContinuationDefect(line)
                    self._cur.defects.append(defect)
                    continue
                lastvalue.append(line)
                continue
            if lastheader:
                # XXX reconsider the joining of folded lines
                lhdr = EMPTYSTRING.join(lastvalue)[:-1].rstrip('\r\n')
                self._cur[lastheader] = lhdr
                lastheader, lastvalue = '', []
            # Check for envelope header, i.e. unix-from
            if line.startswith('From '):
                if lineno == 0:
                    # Strip off the trailing newline
                    mo = NLCRE_eol.search(line)
                    if mo:
                        line = line[:-len(mo.group(0))]
                    self._cur.set_unixfrom(line)
                    continue
                elif lineno == len(lines) - 1:
                    # Something looking like a unix-from at the end - it's
                    # probably the first line of the body, so push back the
                    # line and stop.
                    self._input.unreadline(line)
                    return
                else:
                    # Weirdly placed unix-from line. Note this as a defect
                    # and ignore it.
                    defect = errors.MisplacedEnvelopeHeaderDefect(line)
                    self._cur.defects.append(defect)
                    continue
            # Split the line on the colon separating field name from value.
            i = line.find(':')
            if i < 0:
                defect = errors.MalformedHeaderDefect(line)
                self._cur.defects.append(defect)
                continue
            lastheader = line[:i]
            lastvalue = [line[i+1:].lstrip()]
        # Done with all the lines, so handle the last header.
        if lastheader:
            # XXX reconsider the joining of folded lines
            self._cur[lastheader] = EMPTYSTRING.join(lastvalue).rstrip('\r\n')
| gpl-2.0 |
vmax-feihu/hue | desktop/core/ext-py/boto-2.38.0/boto/dynamodb/layer2.py | 135 | 33814 | # Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.dynamodb.layer1 import Layer1
from boto.dynamodb.table import Table
from boto.dynamodb.schema import Schema
from boto.dynamodb.item import Item
from boto.dynamodb.batch import BatchList, BatchWriteList
from boto.dynamodb.types import get_dynamodb_type, Dynamizer, \
LossyFloatDynamizer, NonBooleanDynamizer
class TableGenerator(object):
    """
    This is an object that wraps up the table_generator function.
    The only real reason to have this is that we want to be able
    to accumulate and return the ConsumedCapacityUnits element that
    is part of each response.

    :ivar last_evaluated_key: A sequence representing the key(s)
        of the item last evaluated, or None if no additional
        results are available.

    :ivar remaining: The remaining quantity of results requested.

    :ivar table: The table to which the call was made.
    """
    def __init__(self, table, callable, remaining, item_class, kwargs):
        self.table = table
        self.callable = callable
        # -1 means "no limit requested"; compared against 0 in __iter__.
        self.remaining = -1 if remaining is None else remaining
        self.item_class = item_class
        self.kwargs = kwargs
        self._consumed_units = 0.0
        self.last_evaluated_key = None
        self._count = 0
        self._scanned_count = 0
        # Lazily populated by the first access to .response.
        self._response = None

    @property
    def count(self):
        """
        The total number of items retrieved thus far.  This value changes with
        iteration and even when issuing a call with count=True, it is necessary
        to complete the iteration to assert an accurate count value.
        """
        self.response
        return self._count

    @property
    def scanned_count(self):
        """
        As above, but representing the total number of items scanned by
        DynamoDB, without regard to any filters.
        """
        self.response
        return self._scanned_count

    @property
    def consumed_units(self):
        """
        Returns a float representing the ConsumedCapacityUnits accumulated.
        """
        self.response
        return self._consumed_units

    @property
    def response(self):
        """
        The current response to the call from DynamoDB.
        """
        # Issues the first call on demand.
        return self.next_response() if self._response is None else self._response

    def next_response(self):
        """
        Issue a call and return the result.  You can invoke this method
        while iterating over the TableGenerator in order to skip to the
        next "page" of results.
        """
        # preserve any existing limit in case the user alters self.remaining
        limit = self.kwargs.get('limit')
        if (self.remaining > 0 and (limit is None or limit > self.remaining)):
            self.kwargs['limit'] = self.remaining
        self._response = self.callable(**self.kwargs)
        self.kwargs['limit'] = limit
        # Accumulate bookkeeping counters across pages.
        self._consumed_units += self._response.get('ConsumedCapacityUnits', 0.0)
        self._count += self._response.get('Count', 0)
        self._scanned_count += self._response.get('ScannedCount', 0)
        # at the expense of a possibly gratuitous dynamize, ensure that
        # early generator termination won't result in bad LEK values
        if 'LastEvaluatedKey' in self._response:
            lek = self._response['LastEvaluatedKey']
            esk = self.table.layer2.dynamize_last_evaluated_key(lek)
            self.kwargs['exclusive_start_key'] = esk
            lektuple = (lek['HashKeyElement'],)
            if 'RangeKeyElement' in lek:
                lektuple += (lek['RangeKeyElement'],)
            self.last_evaluated_key = lektuple
        else:
            self.last_evaluated_key = None
        return self._response

    def __iter__(self):
        while self.remaining != 0:
            response = self.response
            for item in response.get('Items', []):
                self.remaining -= 1
                yield self.item_class(self.table, attrs=item)
                if self.remaining == 0:
                    break
                if response is not self._response:
                    # The caller invoked next_response() mid-iteration;
                    # abandon this (stale) page.
                    break
            else:
                # Page exhausted without break: fetch the next page if
                # the service reported more results.
                if self.last_evaluated_key is not None:
                    self.next_response()
                    continue
                break
            if response is not self._response:
                # Stale page was abandoned above; restart on the new one.
                continue
            break
class Layer2(object):
    def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
                 is_secure=True, port=None, proxy=None, proxy_port=None,
                 debug=0, security_token=None, region=None,
                 validate_certs=True, dynamizer=LossyFloatDynamizer,
                 profile_name=None):
        """Wrap a Layer1 connection.

        *dynamizer* is the class (not an instance) used to encode/decode
        Python values to/from the DynamoDB wire representation.
        """
        self.layer1 = Layer1(aws_access_key_id, aws_secret_access_key,
                             is_secure, port, proxy, proxy_port,
                             debug, security_token, region,
                             validate_certs=validate_certs,
                             profile_name=profile_name)
        # Instantiate the dynamizer once and share it for all conversions.
        self.dynamizer = dynamizer()
def use_decimals(self, use_boolean=False):
"""
Use the ``decimal.Decimal`` type for encoding/decoding numeric types.
By default, ints/floats are used to represent numeric types
('N', 'NS') received from DynamoDB. Using the ``Decimal``
type is recommended to prevent loss of precision.
"""
# Eventually this should be made the default dynamizer.
self.dynamizer = Dynamizer() if use_boolean else NonBooleanDynamizer()
def dynamize_attribute_updates(self, pending_updates):
"""
Convert a set of pending item updates into the structure
required by Layer1.
"""
d = {}
for attr_name in pending_updates:
action, value = pending_updates[attr_name]
if value is None:
# DELETE without an attribute value
d[attr_name] = {"Action": action}
else:
d[attr_name] = {"Action": action,
"Value": self.dynamizer.encode(value)}
return d
def dynamize_item(self, item):
d = {}
for attr_name in item:
d[attr_name] = self.dynamizer.encode(item[attr_name])
return d
    def dynamize_range_key_condition(self, range_key_condition):
        """
        Convert a layer2 range_key_condition parameter into the
        structure required by Layer1.
        """
        # Any object exposing a ``to_dict()`` method works here.
        return range_key_condition.to_dict()
def dynamize_scan_filter(self, scan_filter):
"""
Convert a layer2 scan_filter parameter into the
structure required by Layer1.
"""
d = None
if scan_filter:
d = {}
for attr_name in scan_filter:
condition = scan_filter[attr_name]
d[attr_name] = condition.to_dict()
return d
def dynamize_expected_value(self, expected_value):
"""
Convert an expected_value parameter into the data structure
required for Layer1.
"""
d = None
if expected_value:
d = {}
for attr_name in expected_value:
attr_value = expected_value[attr_name]
if attr_value is True:
attr_value = {'Exists': True}
elif attr_value is False:
attr_value = {'Exists': False}
else:
val = self.dynamizer.encode(expected_value[attr_name])
attr_value = {'Value': val}
d[attr_name] = attr_value
return d
def dynamize_last_evaluated_key(self, last_evaluated_key):
"""
Convert a last_evaluated_key parameter into the data structure
required for Layer1.
"""
d = None
if last_evaluated_key:
hash_key = last_evaluated_key['HashKeyElement']
d = {'HashKeyElement': self.dynamizer.encode(hash_key)}
if 'RangeKeyElement' in last_evaluated_key:
range_key = last_evaluated_key['RangeKeyElement']
d['RangeKeyElement'] = self.dynamizer.encode(range_key)
return d
def build_key_from_values(self, schema, hash_key, range_key=None):
"""
Build a Key structure to be used for accessing items
in Amazon DynamoDB. This method takes the supplied hash_key
and optional range_key and validates them against the
schema. If there is a mismatch, a TypeError is raised.
Otherwise, a Python dict version of a Amazon DynamoDB Key
data structure is returned.
:type hash_key: int|float|str|unicode|Binary
:param hash_key: The hash key of the item you are looking for.
The type of the hash key should match the type defined in
the schema.
:type range_key: int|float|str|unicode|Binary
:param range_key: The range key of the item your are looking for.
This should be supplied only if the schema requires a
range key. The type of the range key should match the
type defined in the schema.
"""
dynamodb_key = {}
dynamodb_value = self.dynamizer.encode(hash_key)
if list(dynamodb_value.keys())[0] != schema.hash_key_type:
msg = 'Hashkey must be of type: %s' % schema.hash_key_type
raise TypeError(msg)
dynamodb_key['HashKeyElement'] = dynamodb_value
if range_key is not None:
dynamodb_value = self.dynamizer.encode(range_key)
if list(dynamodb_value.keys())[0] != schema.range_key_type:
msg = 'RangeKey must be of type: %s' % schema.range_key_type
raise TypeError(msg)
dynamodb_key['RangeKeyElement'] = dynamodb_value
return dynamodb_key
    def new_batch_list(self):
        """
        Return a new, empty :class:`boto.dynamodb.batch.BatchList`
        object.

        The list is constructed with this Layer2 instance so subsequent
        batch operations go through this connection.
        """
        return BatchList(self)
    def new_batch_write_list(self):
        """
        Return a new, empty :class:`boto.dynamodb.batch.BatchWriteList`
        object.

        The list is constructed with this Layer2 instance so subsequent
        batch operations go through this connection.
        """
        return BatchWriteList(self)
def list_tables(self, limit=None):
"""
Return a list of the names of all tables associated with the
current account and region.
:type limit: int
:param limit: The maximum number of tables to return.
"""
tables = []
start_table = None
while not limit or len(tables) < limit:
this_round_limit = None
if limit:
this_round_limit = limit - len(tables)
this_round_limit = min(this_round_limit, 100)
result = self.layer1.list_tables(limit=this_round_limit, start_table=start_table)
tables.extend(result.get('TableNames', []))
start_table = result.get('LastEvaluatedTableName', None)
if not start_table:
break
return tables
def describe_table(self, name):
"""
Retrieve information about an existing table.
:type name: str
:param name: The name of the desired table.
"""
return self.layer1.describe_table(name)
def table_from_schema(self, name, schema):
"""
Create a Table object from a schema.
This method will create a Table object without
making any API calls. If you know the name and schema
of the table, you can use this method instead of
``get_table``.
Example usage::
table = layer2.table_from_schema(
'tablename',
Schema.create(hash_key=('foo', 'N')))
:type name: str
:param name: The name of the table.
:type schema: :class:`boto.dynamodb.schema.Schema`
:param schema: The schema associated with the table.
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the table.
"""
return Table.create_from_schema(self, name, schema)
def get_table(self, name):
"""
Retrieve the Table object for an existing table.
:type name: str
:param name: The name of the desired table.
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the table.
"""
response = self.layer1.describe_table(name)
return Table(self, response)
lookup = get_table
def create_table(self, name, schema, read_units, write_units):
"""
Create a new Amazon DynamoDB table.
:type name: str
:param name: The name of the desired table.
:type schema: :class:`boto.dynamodb.schema.Schema`
:param schema: The Schema object that defines the schema used
by this table.
:type read_units: int
:param read_units: The value for ReadCapacityUnits.
:type write_units: int
:param write_units: The value for WriteCapacityUnits.
:rtype: :class:`boto.dynamodb.table.Table`
:return: A Table object representing the new Amazon DynamoDB table.
"""
response = self.layer1.create_table(name, schema.dict,
{'ReadCapacityUnits': read_units,
'WriteCapacityUnits': write_units})
return Table(self, response)
def update_throughput(self, table, read_units, write_units):
"""
Update the ProvisionedThroughput for the Amazon DynamoDB Table.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object whose throughput is being updated.
:type read_units: int
:param read_units: The new value for ReadCapacityUnits.
:type write_units: int
:param write_units: The new value for WriteCapacityUnits.
"""
response = self.layer1.update_table(table.name,
{'ReadCapacityUnits': read_units,
'WriteCapacityUnits': write_units})
table.update_from_response(response)
def delete_table(self, table):
"""
Delete this table and all items in it. After calling this
the Table objects status attribute will be set to 'DELETING'.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object that is being deleted.
"""
response = self.layer1.delete_table(table.name)
table.update_from_response(response)
def create_schema(self, hash_key_name, hash_key_proto_value,
range_key_name=None, range_key_proto_value=None):
"""
Create a Schema object used when creating a Table.
:type hash_key_name: str
:param hash_key_name: The name of the HashKey for the schema.
:type hash_key_proto_value: int|long|float|str|unicode|Binary
:param hash_key_proto_value: A sample or prototype of the type
of value you want to use for the HashKey. Alternatively,
you can also just pass in the Python type (e.g. int, float, etc.).
:type range_key_name: str
:param range_key_name: The name of the RangeKey for the schema.
This parameter is optional.
:type range_key_proto_value: int|long|float|str|unicode|Binary
:param range_key_proto_value: A sample or prototype of the type
of value you want to use for the RangeKey. Alternatively,
you can also pass in the Python type (e.g. int, float, etc.)
This parameter is optional.
"""
hash_key = (hash_key_name, get_dynamodb_type(hash_key_proto_value))
if range_key_name and range_key_proto_value is not None:
range_key = (range_key_name,
get_dynamodb_type(range_key_proto_value))
else:
range_key = None
return Schema.create(hash_key, range_key)
def get_item(self, table, hash_key, range_key=None,
attributes_to_get=None, consistent_read=False,
item_class=Item):
"""
Retrieve an existing item from the table.
:type table: :class:`boto.dynamodb.table.Table`
:param table: The Table object from which the item is retrieved.
:type hash_key: int|long|float|str|unicode|Binary
:param hash_key: The HashKey of the requested item. The
type of the value must match the type defined in the
schema for the table.
:type range_key: int|long|float|str|unicode|Binary
:param range_key: The optional RangeKey of the requested item.
The type of the value must match the type defined in the
schema for the table.
:type attributes_to_get: list
:param attributes_to_get: A list of attribute names.
If supplied, only the specified attribute names will
be returned. Otherwise, all attributes will be returned.
:type consistent_read: bool
:param consistent_read: If True, a consistent read
request is issued. Otherwise, an eventually consistent
request is issued.
:type item_class: Class
:param item_class: Allows you to override the class used
to generate the items. This should be a subclass of
:class:`boto.dynamodb.item.Item`
"""
key = self.build_key_from_values(table.schema, hash_key, range_key)
response = self.layer1.get_item(table.name, key,
attributes_to_get, consistent_read,
object_hook=self.dynamizer.decode)
item = item_class(table, hash_key, range_key, response['Item'])
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return item
def batch_get_item(self, batch_list):
"""
Return a set of attributes for a multiple items in
multiple tables using their primary keys.
:type batch_list: :class:`boto.dynamodb.batch.BatchList`
:param batch_list: A BatchList object which consists of a
list of :class:`boto.dynamoddb.batch.Batch` objects.
Each Batch object contains the information about one
batch of objects that you wish to retrieve in this
request.
"""
request_items = batch_list.to_dict()
return self.layer1.batch_get_item(request_items,
object_hook=self.dynamizer.decode)
def batch_write_item(self, batch_list):
"""
Performs multiple Puts and Deletes in one batch.
:type batch_list: :class:`boto.dynamodb.batch.BatchWriteList`
:param batch_list: A BatchWriteList object which consists of a
list of :class:`boto.dynamoddb.batch.BatchWrite` objects.
Each Batch object contains the information about one
batch of objects that you wish to put or delete.
"""
request_items = batch_list.to_dict()
return self.layer1.batch_write_item(request_items,
object_hook=self.dynamizer.decode)
def put_item(self, item, expected_value=None, return_values=None):
"""
Store a new item or completely replace an existing item
in Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to write to Amazon DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
expected_value = self.dynamize_expected_value(expected_value)
response = self.layer1.put_item(item.table.name,
self.dynamize_item(item),
expected_value, return_values,
object_hook=self.dynamizer.decode)
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return response
def update_item(self, item, expected_value=None, return_values=None):
"""
Commit pending item updates to Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to update in Amazon DynamoDB. It is expected
that you would have called the add_attribute, put_attribute
and/or delete_attribute methods on this Item prior to calling
this method. Those queued changes are what will be updated.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you
expect. This dictionary should have name/value pairs where the
name is the name of the attribute and the value is either the
value you are expecting or False if you expect the attribute
not to exist.
:type return_values: str
:param return_values: Controls the return of attribute name/value pairs
before they were updated. Possible values are: None, 'ALL_OLD',
'UPDATED_OLD', 'ALL_NEW' or 'UPDATED_NEW'. If 'ALL_OLD' is
specified and the item is overwritten, the content of the old item
is returned. If 'ALL_NEW' is specified, then all the attributes of
the new version of the item are returned. If 'UPDATED_NEW' is
specified, the new versions of only the updated attributes are
returned.
"""
expected_value = self.dynamize_expected_value(expected_value)
key = self.build_key_from_values(item.table.schema,
item.hash_key, item.range_key)
attr_updates = self.dynamize_attribute_updates(item._updates)
response = self.layer1.update_item(item.table.name, key,
attr_updates,
expected_value, return_values,
object_hook=self.dynamizer.decode)
item._updates.clear()
if 'ConsumedCapacityUnits' in response:
item.consumed_units = response['ConsumedCapacityUnits']
return response
def delete_item(self, item, expected_value=None, return_values=None):
"""
Delete the item from Amazon DynamoDB.
:type item: :class:`boto.dynamodb.item.Item`
:param item: The Item to delete from Amazon DynamoDB.
:type expected_value: dict
:param expected_value: A dictionary of name/value pairs that you expect.
This dictionary should have name/value pairs where the name
is the name of the attribute and the value is either the value
you are expecting or False if you expect the attribute not to
exist.
:type return_values: str
:param return_values: Controls the return of attribute
name-value pairs before then were changed. Possible
values are: None or 'ALL_OLD'. If 'ALL_OLD' is
specified and the item is overwritten, the content
of the old item is returned.
"""
expected_value = self.dynamize_expected_value(expected_value)
key = self.build_key_from_values(item.table.schema,
item.hash_key, item.range_key)
return self.layer1.delete_item(item.table.name, key,
expected=expected_value,
return_values=return_values,
object_hook=self.dynamizer.decode)
    def query(self, table, hash_key, range_key_condition=None,
              attributes_to_get=None, request_limit=None,
              max_results=None, consistent_read=False,
              scan_index_forward=True, exclusive_start_key=None,
              item_class=Item, count=False):
        """
        Perform a query on the table.

        :type table: :class:`boto.dynamodb.table.Table`
        :param table: The Table object that is being queried.

        :type hash_key: int|long|float|str|unicode|Binary
        :param hash_key: The HashKey of the requested item.  The
            type of the value must match the type defined in the
            schema for the table.

        :type range_key_condition: :class:`boto.dynamodb.condition.Condition`
        :param range_key_condition: A Condition object.
            Condition object can be one of the following types:
            EQ|LE|LT|GE|GT|BEGINS_WITH|BETWEEN
            The only condition which expects or will accept two
            values is 'BETWEEN', otherwise a single value should
            be passed to the Condition constructor.

        :type attributes_to_get: list
        :param attributes_to_get: A list of attribute names.
            If supplied, only the specified attribute names will
            be returned.  Otherwise, all attributes will be returned.

        :type request_limit: int
        :param request_limit: The maximum number of items to retrieve
            from Amazon DynamoDB on each request.  You may want to set
            a specific request_limit based on the provisioned throughput
            of your table.  The default behavior is to retrieve as many
            results as possible per request.

        :type max_results: int
        :param max_results: The maximum number of results that will
            be retrieved from Amazon DynamoDB in total.  For example,
            if you only wanted to see the first 100 results from the
            query, regardless of how many were actually available, you
            could set max_results to 100 and the generator returned
            from the query method will only yield 100 results max.

        :type consistent_read: bool
        :param consistent_read: If True, a consistent read
            request is issued.  Otherwise, an eventually consistent
            request is issued.

        :type scan_index_forward: bool
        :param scan_index_forward: Specifies forward or backward
            traversal of the index.  Default is forward (True).

        :type count: bool
        :param count: If True, Amazon DynamoDB returns a total
            number of items for the Query operation, even if the
            operation has no matching items for the assigned filter.
            If count is True, the actual items are not returned and
            the count is accessible as the ``count`` attribute of
            the returned object.

        :type exclusive_start_key: list or tuple
        :param exclusive_start_key: Primary key of the item from
            which to continue an earlier query.  This would be
            provided as the LastEvaluatedKey in that query.

        :type item_class: Class
        :param item_class: Allows you to override the class used
            to generate the items.  This should be a subclass of
            :class:`boto.dynamodb.item.Item`

        :rtype: :class:`boto.dynamodb.layer2.TableGenerator`
        """
        # Encode the optional range condition and resume key into the
        # wire format expected by Layer1.
        if range_key_condition:
            rkc = self.dynamize_range_key_condition(range_key_condition)
        else:
            rkc = None
        if exclusive_start_key:
            esk = self.build_key_from_values(table.schema,
                                             *exclusive_start_key)
        else:
            esk = None
        # The TableGenerator re-issues self.layer1.query with these
        # kwargs to transparently page through results.
        kwargs = {'table_name': table.name,
                  'hash_key_value': self.dynamizer.encode(hash_key),
                  'range_key_conditions': rkc,
                  'attributes_to_get': attributes_to_get,
                  'limit': request_limit,
                  'count': count,
                  'consistent_read': consistent_read,
                  'scan_index_forward': scan_index_forward,
                  'exclusive_start_key': esk,
                  'object_hook': self.dynamizer.decode}
        return TableGenerator(table, self.layer1.query,
                              max_results, item_class, kwargs)
    def scan(self, table, scan_filter=None,
             attributes_to_get=None, request_limit=None, max_results=None,
             exclusive_start_key=None, item_class=Item, count=False):
        """
        Perform a scan of DynamoDB.

        :type table: :class:`boto.dynamodb.table.Table`
        :param table: The Table object that is being scanned.

        :type scan_filter: A dict
        :param scan_filter: A dictionary where the key is the
            attribute name and the value is a
            :class:`boto.dynamodb.condition.Condition` object.
            Valid Condition objects include:

             * EQ - equal (1)
             * NE - not equal (1)
             * LE - less than or equal (1)
             * LT - less than (1)
             * GE - greater than or equal (1)
             * GT - greater than (1)
             * NOT_NULL - attribute exists (0, use None)
             * NULL - attribute does not exist (0, use None)
             * CONTAINS - substring or value in list (1)
             * NOT_CONTAINS - absence of substring or value in list (1)
             * BEGINS_WITH - substring prefix (1)
             * IN - exact match in list (N)
             * BETWEEN - >= first value, <= second value (2)

        :type attributes_to_get: list
        :param attributes_to_get: A list of attribute names.
            If supplied, only the specified attribute names will
            be returned.  Otherwise, all attributes will be returned.

        :type request_limit: int
        :param request_limit: The maximum number of items to retrieve
            from Amazon DynamoDB on each request.  You may want to set
            a specific request_limit based on the provisioned throughput
            of your table.  The default behavior is to retrieve as many
            results as possible per request.

        :type max_results: int
        :param max_results: The maximum number of results that will
            be retrieved from Amazon DynamoDB in total.  For example,
            if you only wanted to see the first 100 results from the
            query, regardless of how many were actually available, you
            could set max_results to 100 and the generator returned
            from the query method will only yield 100 results max.

        :type count: bool
        :param count: If True, Amazon DynamoDB returns a total
            number of items for the Scan operation, even if the
            operation has no matching items for the assigned filter.
            If count is True, the actual items are not returned and
            the count is accessible as the ``count`` attribute of
            the returned object.

        :type exclusive_start_key: list or tuple
        :param exclusive_start_key: Primary key of the item from
            which to continue an earlier query.  This would be
            provided as the LastEvaluatedKey in that query.

        :type item_class: Class
        :param item_class: Allows you to override the class used
            to generate the items.  This should be a subclass of
            :class:`boto.dynamodb.item.Item`

        :rtype: :class:`boto.dynamodb.layer2.TableGenerator`
        """
        # Encode the optional resume key into the wire format.
        if exclusive_start_key:
            esk = self.build_key_from_values(table.schema,
                                             *exclusive_start_key)
        else:
            esk = None
        # The TableGenerator re-issues self.layer1.scan with these
        # kwargs to transparently page through results.
        kwargs = {'table_name': table.name,
                  'scan_filter': self.dynamize_scan_filter(scan_filter),
                  'attributes_to_get': attributes_to_get,
                  'limit': request_limit,
                  'count': count,
                  'exclusive_start_key': esk,
                  'object_hook': self.dynamizer.decode}
        return TableGenerator(table, self.layer1.scan,
                              max_results, item_class, kwargs)
| apache-2.0 |
liangazhou/django-rdp | packages/PyDev/plugins/org.python.pydev.jython_4.4.0.201510052309/Lib/mimetools.py | 334 | 7168 | """Various tools used by MIME-reading or MIME-writing programs."""
import os
import sys
import tempfile
from warnings import filterwarnings, catch_warnings
with catch_warnings():
if sys.py3kwarning:
filterwarnings("ignore", ".*rfc822 has been removed", DeprecationWarning)
import rfc822
from warnings import warnpy3k
warnpy3k("in 3.x, mimetools has been removed in favor of the email package",
stacklevel=2)
__all__ = ["Message","choose_boundary","encode","decode","copyliteral",
"copybinary"]
class Message(rfc822.Message):
    """A derived class of rfc822.Message that knows about MIME headers and
    contains some hooks for decoding encoded and multipart messages."""
    def __init__(self, fp, seekable = 1):
        rfc822.Message.__init__(self, fp, seekable)
        # Cache the two MIME-specific headers and eagerly parse the
        # content type and its parameter list.
        self.encodingheader = \
                self.getheader('content-transfer-encoding')
        self.typeheader = \
                self.getheader('content-type')
        self.parsetype()
        self.parseplist()
    def parsetype(self):
        """Split the Content-Type header into self.type, self.maintype
        and self.subtype; stash the raw parameter text in self.plisttext."""
        str = self.typeheader
        if str is None:
            # RFC 2045 default when no Content-Type header is present.
            str = 'text/plain'
        if ';' in str:
            # Everything from the first ';' on is the parameter list.
            i = str.index(';')
            self.plisttext = str[i:]
            str = str[:i]
        else:
            self.plisttext = ''
        fields = str.split('/')
        for i in range(len(fields)):
            fields[i] = fields[i].strip().lower()
        self.type = '/'.join(fields)
        self.maintype = fields[0]
        self.subtype = '/'.join(fields[1:])
    def parseplist(self):
        """Parse self.plisttext (";key=value;...") into self.plist, a
        list of normalized "key=value" strings."""
        str = self.plisttext
        self.plist = []
        while str[:1] == ';':
            str = str[1:]
            if ';' in str:
                # XXX Should parse quotes!
                end = str.index(';')
            else:
                end = len(str)
            f = str[:end]
            if '=' in f:
                # Normalize: lowercase the key, strip whitespace around
                # both key and value.
                i = f.index('=')
                f = f[:i].strip().lower() + \
                        '=' + f[i+1:].strip()
            self.plist.append(f.strip())
            str = str[end:]
    def getplist(self):
        """Return the parsed list of content-type parameters."""
        return self.plist
    def getparam(self, name):
        """Return the (unquoted) value of parameter *name*, or None."""
        name = name.lower() + '='
        n = len(name)
        for p in self.plist:
            if p[:n] == name:
                return rfc822.unquote(p[n:])
        return None
    def getparamnames(self):
        """Return the list of parameter names (lowercased)."""
        result = []
        for p in self.plist:
            i = p.find('=')
            if i >= 0:
                result.append(p[:i].lower())
        return result
    def getencoding(self):
        """Return the content-transfer-encoding, lowercased; '7bit'
        when the header is absent."""
        if self.encodingheader is None:
            return '7bit'
        return self.encodingheader.lower()
    def gettype(self):
        """Return the full 'maintype/subtype' string."""
        return self.type
    def getmaintype(self):
        """Return the main content type (e.g. 'text')."""
        return self.maintype
    def getsubtype(self):
        """Return the content subtype (e.g. 'plain')."""
        return self.subtype
# Utility functions
# -----------------
# Grab a lock primitive for the boundary counter below; fall back to the
# no-op dummy implementation when the interpreter lacks thread support.
try:
    import thread
except ImportError:
    import dummy_thread as thread
_counter_lock = thread.allocate_lock()
del thread
# Monotonically increasing counter used to make boundaries unique.
_counter = 0
def _get_next_counter():
    """Atomically increment and return the module-wide boundary counter."""
    global _counter
    with _counter_lock:
        _counter += 1
        return _counter
# Cached "hostid.uid.pid" prefix for choose_boundary(); built lazily.
_prefix = None
def choose_boundary():
    """Return a string usable as a multipart boundary.

    The string chosen is unique within a single program run, and
    incorporates the user id (if available), process id (if available),
    and current time.  So it's very unlikely the returned string appears
    in message text, but there's no guarantee.

    The boundary contains dots so you have to quote it in the header."""
    global _prefix
    import time
    if _prefix is None:
        # Build the "hostid.uid.pid" prefix once and cache it for the
        # rest of the program run.
        import socket
        try:
            hostid = socket.gethostbyname(socket.gethostname())
        except socket.gaierror:
            hostid = '127.0.0.1'
        # os.getuid/os.getpid are missing on some platforms; fall back
        # to '1' just as the original try/except AttributeError did.
        if hasattr(os, 'getuid'):
            uid = repr(os.getuid())
        else:
            uid = '1'
        if hasattr(os, 'getpid'):
            pid = repr(os.getpid())
        else:
            pid = '1'
        _prefix = '%s.%s.%s' % (hostid, uid, pid)
    return "%s.%.3f.%d" % (_prefix, time.time(), _get_next_counter())
# Subroutines for decoding some common content-transfer-types
def decode(input, output, encoding):
"""Decode common content-transfer-encodings (base64, quopri, uuencode)."""
if encoding == 'base64':
import base64
return base64.decode(input, output)
if encoding == 'quoted-printable':
import quopri
return quopri.decode(input, output)
if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
import uu
return uu.decode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if encoding in decodetab:
pipethrough(input, decodetab[encoding], output)
else:
raise ValueError, \
'unknown Content-Transfer-Encoding: %s' % encoding
def encode(input, output, encoding):
"""Encode common content-transfer-encodings (base64, quopri, uuencode)."""
if encoding == 'base64':
import base64
return base64.encode(input, output)
if encoding == 'quoted-printable':
import quopri
return quopri.encode(input, output, 0)
if encoding in ('uuencode', 'x-uuencode', 'uue', 'x-uue'):
import uu
return uu.encode(input, output)
if encoding in ('7bit', '8bit'):
return output.write(input.read())
if encoding in encodetab:
pipethrough(input, encodetab[encoding], output)
else:
raise ValueError, \
'unknown Content-Transfer-Encoding: %s' % encoding
# The following is no longer used for standard encodings
# XXX This requires that uudecode and mmencode are in $PATH
# Shell pipeline that decodes uuencoded data via the external uudecode
# program, spooling the result through a temp file.
uudecode_pipe = '''(
TEMP=/tmp/@uu.$$
sed "s%^begin [0-7][0-7]* .*%begin 600 $TEMP%" | uudecode
cat $TEMP
rm $TEMP
)'''
# Maps a content-transfer-encoding to the external command used to
# decode it; only consulted when the builtin codec path does not apply.
decodetab = {
    'uuencode': uudecode_pipe,
    'x-uuencode': uudecode_pipe,
    'uue': uudecode_pipe,
    'x-uue': uudecode_pipe,
    'quoted-printable': 'mmencode -u -q',
    'base64': 'mmencode -u -b',
    }
# Same mapping for the encoding direction.
encodetab = {
    'x-uuencode': 'uuencode tempfile',
    'uuencode': 'uuencode tempfile',
    'x-uue': 'uuencode tempfile',
    'uue': 'uuencode tempfile',
    'quoted-printable': 'mmencode -q',
    'base64': 'mmencode -b',
    }
def pipeto(input, command):
    """Stream *input* into the standard input of a shell *command*."""
    sink = os.popen(command, 'w')
    copyliteral(input, sink)
    sink.close()
def pipethrough(input, command, output):
    """Spool *input* to a temp file, run *command* reading from it, and
    copy the command's stdout to *output*."""
    (fd, tempname) = tempfile.mkstemp()
    spool = os.fdopen(fd, 'w')
    copyliteral(input, spool)
    spool.close()
    pipe = os.popen(command + ' <' + tempname, 'r')
    copybinary(pipe, output)
    pipe.close()
    os.unlink(tempname)
def copyliteral(input, output):
    """Copy data line by line from *input* to *output* until EOF."""
    while True:
        line = input.readline()
        if not line:
            break
        output.write(line)
def copybinary(input, output):
    """Copy data in 8 KiB chunks from *input* to *output* until EOF."""
    chunksize = 8192
    while True:
        chunk = input.read(chunksize)
        if not chunk:
            break
        output.write(chunk)
| apache-2.0 |
drzoidberg33/plexpy | lib/urllib3/packages/six.py | 2715 | 30098 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
    string_types = str,
    integer_types = int,
    class_types = type,
    text_type = str
    binary_type = bytes
    MAXSIZE = sys.maxsize
else:
    string_types = basestring,
    integer_types = (int, long)
    class_types = (type, types.ClassType)
    text_type = unicode
    binary_type = str
    if sys.platform.startswith("java"):
        # Jython always uses 32 bits.
        MAXSIZE = int((1 << 31) - 1)
    else:
        # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
        class X(object):
            def __len__(self):
                return 1 << 31
        try:
            # len() coerces through Py_ssize_t, so this overflows on
            # 32-bit builds and succeeds on 64-bit ones.
            len(X())
        except OverflowError:
            # 32-bit
            MAXSIZE = int((1 << 31) - 1)
        else:
            # 64-bit
            MAXSIZE = int((1 << 63) - 1)
        del X
def _add_doc(func, doc):
"""Add documentation to a function."""
func.__doc__ = doc
def _import_module(name):
"""Import module, returning the module after the last dot."""
__import__(name)
return sys.modules[name]
class _LazyDescr(object):
    """Descriptor that resolves its value lazily on first access and then
    replaces itself with the resolved object."""
    def __init__(self, name):
        self.name = name
    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result) # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
class MovedModule(_LazyDescr):
    """Lazy placeholder for a module that was renamed between py2 and py3.

    ``old`` is the Python 2 name; ``new`` is the Python 3 name (defaults
    to the six.moves name itself)."""
    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old
    def _resolve(self):
        return _import_module(self.mod)
    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache the attribute so subsequent lookups skip __getattr__.
        setattr(self, attr, value)
        return value
class _LazyModule(types.ModuleType):
    """Module whose attributes are _LazyDescr instances resolved on demand."""
    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__
    def __dir__(self):
        # Advertise the lazily-provided names alongside the basics.
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs
    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):
    """Lazy placeholder for an attribute that moved between py2 and py3
    modules (and possibly changed its name)."""
    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Default the py3 attribute name to the py2 one, and that in
            # turn to the moved name itself.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr
    def _resolve(self):
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter(object):
    """
    A meta path importer to import six.moves and its submodules.

    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """
    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully-qualified module names to the module object (or lazy
        # MovedModule placeholder) that should satisfy the import.
        self.known_modules = {}
    def _add_module(self, mod, *fullnames):
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod
    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]
    def find_module(self, fullname, path=None):
        # PEP 302 finder hook: claim only the modules we registered.
        if fullname in self.known_modules:
            return self
        return None
    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)
    def load_module(self, fullname):
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            # Resolve lazy placeholders to the real module now.
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod
    def is_package(self, fullname):
        """
        Return true, if the named module is a package.

        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")
    def get_code(self, fullname):
        """Return None

        Required, if is_package is implemented"""
        self.__get_module(fullname)  # eventually raises ImportError
        return None
    get_source = get_code  # same as get_code
# Single importer instance registered for this six module.
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
    """Lazy loading of moved objects"""
    __path__ = []  # mark as package
# Attributes and modules that moved between Python 2 and 3; each entry
# maps a six.moves name to its py2 and py3 locations.
_moved_attributes = [
    MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
    MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
    MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
    MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
    MovedAttribute("intern", "__builtin__", "sys"),
    MovedAttribute("map", "itertools", "builtins", "imap", "map"),
    MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
    MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
    MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
    MovedAttribute("reduce", "__builtin__", "functools"),
    MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
    MovedAttribute("StringIO", "StringIO", "io"),
    MovedAttribute("UserDict", "UserDict", "collections"),
    MovedAttribute("UserList", "UserList", "collections"),
    MovedAttribute("UserString", "UserString", "collections"),
    MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
    MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
    MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
    MovedModule("builtins", "__builtin__"),
    MovedModule("configparser", "ConfigParser"),
    MovedModule("copyreg", "copy_reg"),
    MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
    MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
    MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
    MovedModule("http_cookies", "Cookie", "http.cookies"),
    MovedModule("html_entities", "htmlentitydefs", "html.entities"),
    MovedModule("html_parser", "HTMLParser", "html.parser"),
    MovedModule("http_client", "httplib", "http.client"),
    MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
    MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
    MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
    MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
    MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
    MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
    MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
    MovedModule("cPickle", "cPickle", "pickle"),
    MovedModule("queue", "Queue"),
    MovedModule("reprlib", "repr"),
    MovedModule("socketserver", "SocketServer"),
    MovedModule("_thread", "thread", "_thread"),
    MovedModule("tkinter", "Tkinter"),
    MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
    MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
    MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
    MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
    MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
    MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
    MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
    MovedModule("tkinter_colorchooser", "tkColorChooser",
                "tkinter.colorchooser"),
    MovedModule("tkinter_commondialog", "tkCommonDialog",
                "tkinter.commondialog"),
    MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
    MovedModule("tkinter_font", "tkFont", "tkinter.font"),
    MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
    MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
                "tkinter.simpledialog"),
    MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
    MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
    MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
    MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
    MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
    MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
    _moved_attributes += [
        MovedModule("winreg", "_winreg"),
    ]
for attr in _moved_attributes:
    setattr(_MovedItems, attr.name, attr)
    if isinstance(attr, MovedModule):
        # Register moved modules with the meta-path importer so that
        # "import six.moves.<name>" works.
        _importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
# Instantiate the lazy six.moves module and register it for import.
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_parse"""


# Table of (exported name, Py2 module, Py3 module) for names that live in
# urllib.parse on Python 3 (split between urlparse and urllib on Python 2).
_urllib_parse_moved_attributes = [
    MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
    MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
    MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
    MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
    MovedAttribute("urljoin", "urlparse", "urllib.parse"),
    MovedAttribute("urlparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
    MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
    MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
    MovedAttribute("quote", "urllib", "urllib.parse"),
    MovedAttribute("quote_plus", "urllib", "urllib.parse"),
    MovedAttribute("unquote", "urllib", "urllib.parse"),
    MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
    MovedAttribute("urlencode", "urllib", "urllib.parse"),
    MovedAttribute("splitquery", "urllib", "urllib.parse"),
    MovedAttribute("splittag", "urllib", "urllib.parse"),
    MovedAttribute("splituser", "urllib", "urllib.parse"),
    MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
    MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
    MovedAttribute("uses_params", "urlparse", "urllib.parse"),
    MovedAttribute("uses_query", "urlparse", "urllib.parse"),
    MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
    setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr

Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes

# Register under both spellings: six.moves.urllib_parse and six.moves.urllib.parse.
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
                      "moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_error"""


# Exceptions that moved to urllib.error on Python 3.
_urllib_error_moved_attributes = [
    MovedAttribute("URLError", "urllib2", "urllib.error"),
    MovedAttribute("HTTPError", "urllib2", "urllib.error"),
    MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
    setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr

Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes

# Register under both spellings: six.moves.urllib_error and six.moves.urllib.error.
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
                      "moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_request"""


# Names that live in urllib.request on Python 3 (urllib2/urllib on Python 2).
_urllib_request_moved_attributes = [
    MovedAttribute("urlopen", "urllib2", "urllib.request"),
    MovedAttribute("install_opener", "urllib2", "urllib.request"),
    MovedAttribute("build_opener", "urllib2", "urllib.request"),
    MovedAttribute("pathname2url", "urllib", "urllib.request"),
    MovedAttribute("url2pathname", "urllib", "urllib.request"),
    MovedAttribute("getproxies", "urllib", "urllib.request"),
    MovedAttribute("Request", "urllib2", "urllib.request"),
    MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
    MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
    MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
    MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
    MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
    MovedAttribute("FileHandler", "urllib2", "urllib.request"),
    MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
    MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
    MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
    MovedAttribute("urlretrieve", "urllib", "urllib.request"),
    MovedAttribute("urlcleanup", "urllib", "urllib.request"),
    MovedAttribute("URLopener", "urllib", "urllib.request"),
    MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
    MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
    setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr

Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes

# Register under both spellings: six.moves.urllib_request and six.moves.urllib.request.
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
                      "moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_response"""


# Response wrapper classes that moved to urllib.response on Python 3.
_urllib_response_moved_attributes = [
    MovedAttribute("addbase", "urllib", "urllib.response"),
    MovedAttribute("addclosehook", "urllib", "urllib.response"),
    MovedAttribute("addinfo", "urllib", "urllib.response"),
    MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
    setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr

Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes

# Register under both spellings: six.moves.urllib_response and six.moves.urllib.response.
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
                      "moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):

    """Lazy loading of moved objects in six.moves.urllib_robotparser"""


# robotparser became urllib.robotparser on Python 3.
_urllib_robotparser_moved_attributes = [
    MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
    setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr

Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes

# Register under both spellings: six.moves.urllib_robotparser and six.moves.urllib.robotparser.
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
                      "moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):

    """Create a six.moves.urllib namespace that resembles the Python 3 namespace"""

    __path__ = []  # mark as package
    # Each submodule is the lazily-loading module object registered above.
    parse = _importer._get_module("moves.urllib_parse")
    error = _importer._get_module("moves.urllib_error")
    request = _importer._get_module("moves.urllib_request")
    response = _importer._get_module("moves.urllib_response")
    robotparser = _importer._get_module("moves.urllib_robotparser")

    def __dir__(self):
        # Advertise only the five urllib submodules.
        return ['parse', 'error', 'request', 'response', 'robotparser']

_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
                      "moves.urllib")
def add_move(move):
    """Add an item to six.moves.

    *move* is a MovedAttribute or MovedModule; it becomes available as
    ``six.moves.<move.name>``.
    """
    setattr(_MovedItems, move.name, move)
def remove_move(name):
    """Remove item from six.moves.

    Raises AttributeError if *name* is neither a registered move on
    _MovedItems nor an already-materialized attribute of the moves module.
    """
    try:
        delattr(_MovedItems, name)
    except AttributeError:
        # Fall back to the instance dict of the live moves module (covers
        # names that were resolved and cached there).
        try:
            del moves.__dict__[name]
        except KeyError:
            raise AttributeError("no such move, %r" % (name,))
# Names of the function/method introspection attributes, which were renamed
# between Python 2 (im_func, func_code, ...) and Python 3 (__func__,
# __code__, ...). Consumed by the attrgetters defined below.
if PY3:
    _meth_func = "__func__"
    _meth_self = "__self__"

    _func_closure = "__closure__"
    _func_code = "__code__"
    _func_defaults = "__defaults__"
    _func_globals = "__globals__"
else:
    _meth_func = "im_func"
    _meth_self = "im_self"

    _func_closure = "func_closure"
    _func_code = "func_code"
    _func_defaults = "func_defaults"
    _func_globals = "func_globals"
try:
    advance_iterator = next
except NameError:
    # Python < 2.6 has no builtin next(); call the iterator's next() method.
    def advance_iterator(it):
        return it.next()
next = advance_iterator


try:
    callable = callable
except NameError:
    # Python 3.0/3.1 removed callable(); emulate it by checking the MRO
    # for a __call__ slot.
    def callable(obj):
        return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
    # Python 3 has no unbound methods: functions are returned as-is and
    # types.MethodType takes only (func, obj).
    def get_unbound_function(unbound):
        return unbound

    create_bound_method = types.MethodType

    def create_unbound_method(func, cls):
        return func

    Iterator = object
else:
    def get_unbound_function(unbound):
        return unbound.im_func

    def create_bound_method(func, obj):
        return types.MethodType(func, obj, obj.__class__)

    def create_unbound_method(func, cls):
        # Python 2 three-argument MethodType yields an unbound method.
        return types.MethodType(func, None, cls)

    class Iterator(object):
        # Base class that bridges Py2's next() protocol to __next__.

        def next(self):
            return type(self).__next__(self)

    callable = callable
_add_doc(get_unbound_function,
         """Get the function out of a possibly unbound function""")
# Version-neutral accessors built on the attribute names chosen above.
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
    # On Python 3, keys()/values()/items() already return views; wrap in
    # iter() so callers always get an iterator.
    def iterkeys(d, **kw):
        return iter(d.keys(**kw))

    def itervalues(d, **kw):
        return iter(d.values(**kw))

    def iteritems(d, **kw):
        return iter(d.items(**kw))

    def iterlists(d, **kw):
        # For multidict-style objects that expose lists() (e.g. Werkzeug).
        return iter(d.lists(**kw))

    viewkeys = operator.methodcaller("keys")

    viewvalues = operator.methodcaller("values")

    viewitems = operator.methodcaller("items")
else:
    # On Python 2, delegate to the iter*/view* methods directly.
    def iterkeys(d, **kw):
        return d.iterkeys(**kw)

    def itervalues(d, **kw):
        return d.itervalues(**kw)

    def iteritems(d, **kw):
        return d.iteritems(**kw)

    def iterlists(d, **kw):
        return d.iterlists(**kw)

    viewkeys = operator.methodcaller("viewkeys")

    viewvalues = operator.methodcaller("viewvalues")

    viewitems = operator.methodcaller("viewitems")

_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
         "Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
         "Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
    def b(s):
        return s.encode("latin-1")

    def u(s):
        return s
    unichr = chr
    import struct
    int2byte = struct.Struct(">B").pack
    del struct
    # Indexing a bytes object yields ints on Python 3.
    byte2int = operator.itemgetter(0)
    indexbytes = operator.getitem
    iterbytes = iter
    import io
    StringIO = io.StringIO
    BytesIO = io.BytesIO
    # unittest method names (renamed across versions); used by the
    # assert* wrappers below.
    _assertCountEqual = "assertCountEqual"
    if sys.version_info[1] <= 1:
        # Python 3.0/3.1 still used the old Regexp spellings.
        _assertRaisesRegex = "assertRaisesRegexp"
        _assertRegex = "assertRegexpMatches"
    else:
        _assertRaisesRegex = "assertRaisesRegex"
        _assertRegex = "assertRegex"
else:
    def b(s):
        return s
    # Workaround for standalone backslash

    def u(s):
        return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
    unichr = unichr
    int2byte = chr

    def byte2int(bs):
        return ord(bs[0])

    def indexbytes(buf, i):
        return ord(buf[i])
    iterbytes = functools.partial(itertools.imap, ord)
    import StringIO
    StringIO = BytesIO = StringIO.StringIO
    _assertCountEqual = "assertItemsEqual"
    _assertRaisesRegex = "assertRaisesRegexp"
    _assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
    """Version-neutral unittest assertCountEqual / assertItemsEqual."""
    return getattr(self, _assertCountEqual)(*args, **kwargs)


def assertRaisesRegex(self, *args, **kwargs):
    """Version-neutral unittest assertRaisesRegex(p)."""
    return getattr(self, _assertRaisesRegex)(*args, **kwargs)


def assertRegex(self, *args, **kwargs):
    """Version-neutral unittest assertRegex / assertRegexpMatches."""
    return getattr(self, _assertRegex)(*args, **kwargs)
if PY3:
    # exec is a real function on Python 3; fetch it to avoid Py2 syntax errors.
    exec_ = getattr(moves.builtins, "exec")

    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")

    # Py2-only 3-argument raise; defined via exec_ because the syntax does
    # not parse on Python 3.
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
# raise ... from ... is Python 3 syntax, so it must be defined via exec_.
if sys.version_info[:2] == (3, 2):
    # 3.2 raises TypeError on "raise x from None", hence the extra branch.
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        raise value
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # Promote sep/end/args to unicode together so the pieces concatenate
        # without implicit-decode surprises.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # print() grew a flush= keyword in 3.3; emulate it by wrapping.
    _print = print_

    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()
_add_doc(reraise, """Reraise an exception.""")

if sys.version_info[0:2] < (3, 4):
    # Backport: functools.wraps only sets __wrapped__ from Python 3.4 on.
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # The trick: return a throwaway class whose metaclass intercepts the
    # one class statement that inherits from it, discards the temporary
    # base, and builds the real class with *meta* and the requested *bases*.
    class _shim(meta):

        def __new__(cls, name, this_bases, d):
            # this_bases is (temporary_class,); substitute the real bases.
            return meta(name, bases, d)

    return type.__new__(_shim, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        # Rebuild the class through *metaclass* using a copy of its body.
        body = cls.__dict__.copy()
        slots = body.get('__slots__')
        if slots is not None:
            # Slot descriptors belong to the old class object; drop them so
            # the new class creates fresh ones.
            names = [slots] if isinstance(slots, str) else slots
            for slot_name in names:
                body.pop(slot_name)
        # These are auto-created per class and must not be carried over.
        body.pop('__dict__', None)
        body.pop('__weakref__', None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.

    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if PY2:
        if '__str__' not in klass.__dict__:
            raise ValueError("@python_2_unicode_compatible cannot be applied "
                             "to %s because it doesn't define __str__()." %
                             klass.__name__)
        # The text-returning __str__ becomes __unicode__, and __str__ is
        # replaced with a UTF-8-encoding wrapper.
        klass.__unicode__ = klass.__str__
        klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable

# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| gpl-3.0 |
pombredanne/erpnext | erpnext/selling/doctype/sales_order/sales_order.py | 7 | 17041 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
import json
import frappe.utils
from frappe.utils import cstr, flt, getdate, comma_and, cint
from frappe import _
from frappe.model.mapper import get_mapped_doc
from erpnext.stock.stock_balance import update_bin_qty, get_reserved_qty
from frappe.desk.notifications import clear_doctype_notifications
from erpnext.controllers.selling_controller import SellingController
# Custom grid template used by the desk UI for the items child table.
form_grid_templates = {
    "items": "templates/form_grid/item_grid.html"
}
# Raised when a stock item row has no delivery warehouse set.
class WarehouseRequired(frappe.ValidationError): pass
class SalesOrder(SellingController):
    """Controller for the Sales Order doctype.

    Handles validation (dates, items, warehouses, linked quotations),
    the submit/cancel/stop lifecycle, and reserved-quantity bookkeeping
    in the Bin table.
    """

    def validate_mandatory(self):
        """Ensure the expected delivery date is not before the order date."""
        # validate transaction date v/s delivery date
        if self.delivery_date:
            if getdate(self.transaction_date) > getdate(self.delivery_date):
                frappe.throw(_("Expected Delivery Date cannot be before Sales Order Date"))

    def validate_po(self):
        """Validate customer PO date vs delivery date; warn on duplicate PO no."""
        # validate p.o date v/s delivery date
        if self.po_date and self.delivery_date and getdate(self.po_date) > getdate(self.delivery_date):
            frappe.throw(_("Expected Delivery Date cannot be before Purchase Order Date"))

        if self.po_no and self.customer:
            so = frappe.db.sql("select name from `tabSales Order` \
                where ifnull(po_no, '') = %s and name != %s and docstatus < 2\
                and customer = %s", (self.po_no, self.name, self.customer))
            if so and so[0][0] and not \
                cint(frappe.db.get_single_value("Selling Settings", "allow_against_multiple_purchase_orders")):
                frappe.msgprint(_("Warning: Sales Order {0} already exists against Customer's Purchase Order {1}").format(so[0][0], self.po_no))

    def validate_for_items(self):
        """Per-row item checks: warehouse for stock items, projected qty, duplicates."""
        check_list = []
        for d in self.get('items'):
            check_list.append(cstr(d.item_code))

            # A delivery warehouse is mandatory for stock items (or bundles
            # that contain stock items).
            if (frappe.db.get_value("Item", d.item_code, "is_stock_item")==1 or
                (self.has_product_bundle(d.item_code) and self.product_bundle_has_stock_item(d.item_code))) \
                and not d.warehouse:
                frappe.throw(_("Delivery warehouse required for stock item {0}").format(d.item_code),
                    WarehouseRequired)

            # used for production plan
            d.transaction_date = self.transaction_date

            tot_avail_qty = frappe.db.sql("select projected_qty from `tabBin` \
                where item_code = %s and warehouse = %s", (d.item_code, d.warehouse))
            d.projected_qty = tot_avail_qty and flt(tot_avail_qty[0][0]) or 0

        # check for same entry multiple times
        unique_chk_list = set(check_list)
        if len(unique_chk_list) != len(check_list) and \
            not cint(frappe.db.get_single_value("Selling Settings", "allow_multiple_items")):
            frappe.msgprint(_("Warning: Same item has been entered multiple times."))

    def product_bundle_has_stock_item(self, product_bundle):
        """Returns true if product bundle has stock item"""
        ret = len(frappe.db.sql("""select i.name from tabItem i, `tabProduct Bundle Item` pbi
            where pbi.parent = %s and pbi.item_code = i.name and i.is_stock_item = 1""", product_bundle))
        return ret

    def validate_sales_mntc_quotation(self):
        """Warn if a linked quotation's order type does not match this order's."""
        for d in self.get('items'):
            if d.prevdoc_docname:
                res = frappe.db.sql("select name from `tabQuotation` where name=%s and order_type = %s",
                    (d.prevdoc_docname, self.order_type))
                if not res:
                    frappe.msgprint(_("Quotation {0} not of type {1}").format(d.prevdoc_docname, self.order_type))

    def validate_order_type(self):
        super(SalesOrder, self).validate_order_type()

    def validate_delivery_date(self):
        """Require a delivery date for Sales-type orders; cross-check quotations."""
        if self.order_type == 'Sales' and not self.delivery_date:
            frappe.throw(_("Please enter 'Expected Delivery Date'"))

        self.validate_sales_mntc_quotation()

    def validate_proj_cust(self):
        """Ensure the selected project belongs to this customer (or to no one)."""
        if self.project_name and self.customer_name:
            res = frappe.db.sql("""select name from `tabProject` where name = %s
                and (customer = %s or ifnull(customer,'')='')""",
                (self.project_name, self.customer))
            if not res:
                frappe.throw(_("Customer {0} does not belong to project {1}").format(self.customer, self.project_name))

    def validate(self):
        """Run all Sales Order validations and set default statuses."""
        super(SalesOrder, self).validate()

        self.validate_order_type()
        self.validate_delivery_date()
        self.validate_mandatory()
        self.validate_proj_cust()
        self.validate_po()
        self.validate_uom_is_integer("stock_uom", "qty")
        self.validate_for_items()
        self.validate_warehouse()

        from erpnext.stock.doctype.packed_item.packed_item import make_packing_list
        make_packing_list(self, 'items')

        self.validate_with_previous_doc()

        if not self.status:
            self.status = "Draft"

        from erpnext.controllers.status_updater import validate_status
        validate_status(self.status, ["Draft", "Submitted", "Stopped",
            "Cancelled"])

        if not self.billing_status: self.billing_status = 'Not Billed'
        if not self.delivery_status: self.delivery_status = 'Not Delivered'

    def validate_warehouse(self):
        """Each referenced warehouse must belong to this order's company."""
        from erpnext.stock.utils import validate_warehouse_company

        warehouses = list(set([d.warehouse for d in
            self.get("items") if d.warehouse]))

        for w in warehouses:
            validate_warehouse_company(w, self.company)

    def validate_with_previous_doc(self):
        """Company and currency must match the source Quotation."""
        super(SalesOrder, self).validate_with_previous_doc({
            "Quotation": {
                "ref_dn_field": "prevdoc_docname",
                "compare_fields": [["company", "="], ["currency", "="]]
            }
        })

    def update_enquiry_status(self, prevdoc, flag):
        """Set the status of the Opportunity behind quotation *prevdoc* to *flag*."""
        enq = frappe.db.sql("select t2.prevdoc_docname from `tabQuotation` t1, `tabQuotation Item` t2 where t2.parent = t1.name and t1.name=%s", prevdoc)
        if enq:
            frappe.db.sql("update `tabOpportunity` set status = %s where name=%s", (flag, enq[0][0]))

    def update_prevdoc_status(self, flag):
        """Refresh status of all linked (non-cancelled) Quotations and their Opportunities."""
        for quotation in list(set([d.prevdoc_docname for d in self.get("items")])):
            if quotation:
                doc = frappe.get_doc("Quotation", quotation)
                if doc.docstatus == 2:
                    frappe.throw(_("Quotation {0} is cancelled").format(quotation))

                doc.set_status(update=True)
                doc.update_opportunity()

    def on_submit(self):
        """Submit hook: credit check, reserve stock, authorization, status update."""
        super(SalesOrder, self).on_submit()

        self.check_credit_limit()

        self.update_reserved_qty()

        frappe.get_doc('Authorization Control').validate_approving_authority(self.doctype, self.base_grand_total, self)

        self.update_prevdoc_status('submit')
        frappe.db.set(self, 'status', 'Submitted')

    def on_cancel(self):
        """Cancel hook: block stopped orders, verify no live next-docs, release stock."""
        # Cannot cancel stopped SO
        if self.status == 'Stopped':
            frappe.throw(_("Stopped order cannot be cancelled. Unstop to cancel."))

        self.check_nextdoc_docstatus()
        self.update_reserved_qty()

        self.update_prevdoc_status('cancel')
        frappe.db.set(self, 'status', 'Cancelled')

    def check_credit_limit(self):
        from erpnext.selling.doctype.customer.customer import check_credit_limit
        check_credit_limit(self.customer, self.company)

    def check_nextdoc_docstatus(self):
        """Block cancellation while submitted downstream documents exist."""
        # Checks Delivery Note
        submit_dn = frappe.db.sql_list("""select t1.name from `tabDelivery Note` t1,`tabDelivery Note Item` t2
            where t1.name = t2.parent and t2.against_sales_order = %s and t1.docstatus = 1""", self.name)
        if submit_dn:
            frappe.throw(_("Delivery Notes {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_dn)))

        # Checks Sales Invoice
        submit_rv = frappe.db.sql_list("""select t1.name
            from `tabSales Invoice` t1,`tabSales Invoice Item` t2
            where t1.name = t2.parent and t2.sales_order = %s and t1.docstatus = 1""",
            self.name)
        if submit_rv:
            frappe.throw(_("Sales Invoice {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_rv)))

        # check maintenance schedule
        submit_ms = frappe.db.sql_list("""select t1.name from `tabMaintenance Schedule` t1,
            `tabMaintenance Schedule Item` t2
            where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""", self.name)
        if submit_ms:
            frappe.throw(_("Maintenance Schedule {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_ms)))

        # check maintenance visit
        submit_mv = frappe.db.sql_list("""select t1.name from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
            where t2.parent=t1.name and t2.prevdoc_docname = %s and t1.docstatus = 1""", self.name)
        if submit_mv:
            frappe.throw(_("Maintenance Visit {0} must be cancelled before cancelling this Sales Order").format(comma_and(submit_mv)))

        # check production order
        pro_order = frappe.db.sql_list("""select name from `tabProduction Order`
            where sales_order = %s and docstatus = 1""", self.name)
        if pro_order:
            frappe.throw(_("Production Order {0} must be cancelled before cancelling this Sales Order").format(comma_and(pro_order)))

    def check_modified_date(self):
        """Raise if the stored document was modified after this copy was loaded."""
        mod_db = frappe.db.get_value("Sales Order", self.name, "modified")
        # FIX: pass values as query parameters instead of %-interpolating
        # them into the SQL string (injection-shaped; broke on quotes).
        date_diff = frappe.db.sql("select TIMEDIFF(%s, %s)",
            (mod_db, cstr(self.modified)))

        if date_diff and date_diff[0][0]:
            frappe.throw(_("{0} {1} has been modified. Please refresh.").format(self.doctype, self.name))

    def stop_sales_order(self):
        """Mark the order Stopped and release reserved stock."""
        self.check_modified_date()
        frappe.db.set(self, 'status', 'Stopped')
        self.update_reserved_qty()
        frappe.msgprint(_("{0} {1} status is Stopped").format(self.doctype, self.name))
        self.notify_update()
        clear_doctype_notifications(self)

    def unstop_sales_order(self):
        """Return a Stopped order to Submitted and re-reserve stock."""
        self.check_modified_date()
        frappe.db.set(self, 'status', 'Submitted')
        self.update_reserved_qty()
        frappe.msgprint(_("{0} {1} status is Unstopped").format(self.doctype, self.name))
        clear_doctype_notifications(self)

    def update_reserved_qty(self, so_item_rows=None):
        """update requested qty (before ordered_qty is updated)"""
        item_wh_list = []

        def _valid_for_reserve(item_code, warehouse):
            # Collect each (item, warehouse) pair once; only stock items
            # participate in reservation.
            if item_code and warehouse and [item_code, warehouse] not in item_wh_list \
                and frappe.db.get_value("Item", item_code, "is_stock_item"):
                item_wh_list.append([item_code, warehouse])

        for d in self.get("items"):
            if (not so_item_rows or d.name in so_item_rows):
                _valid_for_reserve(d.item_code, d.warehouse)

                # Bundles reserve via their packed items.
                if self.has_product_bundle(d.item_code):
                    for p in self.get("packed_items"):
                        if p.parent_detail_docname == d.name and p.parent_item == d.item_code:
                            _valid_for_reserve(p.item_code, p.warehouse)

        for item_code, warehouse in item_wh_list:
            update_bin_qty(item_code, warehouse, {
                "reserved_qty": get_reserved_qty(item_code, warehouse)
            })

    def on_update(self):
        pass
def get_list_context(context=None):
    """Portal list-view context for Sales Orders ("My Orders" page)."""
    from erpnext.controllers.website_list_for_contact import get_list_context
    list_context = get_list_context(context)
    list_context["title"] = _("My Orders")
    return list_context
@frappe.whitelist()
def stop_or_unstop_sales_orders(names, status):
    """Bulk stop/unstop submitted Sales Orders.

    :param names: JSON-encoded list of Sales Order names.
    :param status: "Stop" to stop eligible orders; anything else unstops.
    """
    if not frappe.has_permission("Sales Order", "write"):
        frappe.throw(_("Not permitted"), frappe.PermissionError)

    names = json.loads(names)
    for name in names:
        so = frappe.get_doc("Sales Order", name)
        if so.docstatus == 1:
            if status == "Stop":
                # Only stop orders that are not already stopped/cancelled and
                # still have something left to deliver or bill.
                if so.status not in ("Stopped", "Cancelled") and (so.per_delivered < 100 or so.per_billed < 100):
                    so.stop_sales_order()
            else:
                if so.status == "Stopped":
                    so.unstop_sales_order()

    # Suppress per-document messages accumulated during the bulk action.
    frappe.local.message_log = []
# NOTE(review): this takes `self` and calls super(SalesOrder, self) — it
# reads as a SalesOrder *method* that ended up at module level; confirm
# intended placement against the upstream file.
def before_recurring(self):
    """Clear delivery/billing tracking fields before creating a recurring copy."""
    super(SalesOrder, self).before_recurring()

    for field in ("delivery_status", "per_delivered", "billing_status", "per_billed"):
        self.set(field, None)

    for d in self.get("items"):
        for field in ("delivered_qty", "billed_amt", "planned_qty", "prevdoc_docname"):
            d.set(field, None)
@frappe.whitelist()
def make_material_request(source_name, target_doc=None):
    """Map a submitted Sales Order to a Purchase-type Material Request.

    Uses packed items when the order has product bundles, else the item rows.
    """
    def postprocess(source, doc):
        doc.material_request_type = "Purchase"

    so = frappe.get_doc("Sales Order", source_name)
    # Bundled orders request the bundle contents, not the bundle rows.
    item_table = "Packed Item" if so.packed_items else "Sales Order Item"

    doc = get_mapped_doc("Sales Order", source_name, {
        "Sales Order": {
            "doctype": "Material Request",
            "validation": {
                "docstatus": ["=", 1]
            }
        },
        item_table: {
            "doctype": "Material Request Item",
            "field_map": {
                "parent": "sales_order_no",
                "stock_uom": "uom"
            }
        }
    }, target_doc, postprocess)

    return doc
@frappe.whitelist()
def make_delivery_note(source_name, target_doc=None):
    """Map a submitted Sales Order to a Delivery Note for undelivered qty."""
    def set_missing_values(source, target):
        # Accumulate distinct customer PO numbers when appending to an
        # existing draft Delivery Note.
        if source.po_no:
            if target.po_no:
                target_po_no = target.po_no.split(", ")
                target_po_no.append(source.po_no)
                target.po_no = ", ".join(list(set(target_po_no))) if len(target_po_no) > 1 else target_po_no[0]
            else:
                target.po_no = source.po_no

        target.ignore_pricing_rule = 1
        target.run_method("set_missing_values")
        target.run_method("calculate_taxes_and_totals")

    def update_item(source, target, source_parent):
        # Carry over only the still-undelivered quantity and amounts.
        target.base_amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.base_rate)
        target.amount = (flt(source.qty) - flt(source.delivered_qty)) * flt(source.rate)
        target.qty = flt(source.qty) - flt(source.delivered_qty)

    target_doc = get_mapped_doc("Sales Order", source_name, {
        "Sales Order": {
            "doctype": "Delivery Note",
            "validation": {
                "docstatus": ["=", 1]
            }
        },
        "Sales Order Item": {
            "doctype": "Delivery Note Item",
            "field_map": {
                "rate": "rate",
                "name": "so_detail",
                "parent": "against_sales_order",
            },
            "postprocess": update_item,
            # Skip fully delivered rows.
            "condition": lambda doc: doc.delivered_qty < doc.qty
        },
        "Sales Taxes and Charges": {
            "doctype": "Sales Taxes and Charges",
            "add_if_empty": True
        },
        "Sales Team": {
            "doctype": "Sales Team",
            "add_if_empty": True
        }
    }, target_doc, set_missing_values)

    return target_doc
@frappe.whitelist()
def make_sales_invoice(source_name, target_doc=None):
    """Map a submitted Sales Order to a Sales Invoice for unbilled amounts."""
    def postprocess(source, target):
        set_missing_values(source, target)
        # Get the advance paid Journal Entries in Sales Invoice Advance
        target.get_advances()

    def set_missing_values(source, target):
        target.is_pos = 0
        target.ignore_pricing_rule = 1
        target.run_method("set_missing_values")
        target.run_method("calculate_taxes_and_totals")

    def update_item(source, target, source_parent):
        # Invoice only the still-unbilled portion; derive qty from the
        # remaining amount when partially billed.
        target.amount = flt(source.amount) - flt(source.billed_amt)
        target.base_amount = target.amount * flt(source_parent.conversion_rate)
        target.qty = target.amount / flt(source.rate) if (source.rate and source.billed_amt) else source.qty

    doclist = get_mapped_doc("Sales Order", source_name, {
        "Sales Order": {
            "doctype": "Sales Invoice",
            "validation": {
                "docstatus": ["=", 1]
            }
        },
        "Sales Order Item": {
            "doctype": "Sales Invoice Item",
            "field_map": {
                "name": "so_detail",
                "parent": "sales_order",
            },
            "postprocess": update_item,
            # Keep zero-amount rows and rows with unbilled amount.
            "condition": lambda doc: doc.base_amount == 0 or doc.billed_amt < doc.amount
        },
        "Sales Taxes and Charges": {
            "doctype": "Sales Taxes and Charges",
            "add_if_empty": True
        },
        "Sales Team": {
            "doctype": "Sales Team",
            "add_if_empty": True
        }
    }, target_doc, postprocess)

    return doclist
@frappe.whitelist()
def make_maintenance_schedule(source_name, target_doc=None):
    """Map a Sales Order to a Maintenance Schedule.

    NOTE: implicitly returns None when a submitted schedule already exists
    for this order.
    """
    maint_schedule = frappe.db.sql("""select t1.name
        from `tabMaintenance Schedule` t1, `tabMaintenance Schedule Item` t2
        where t2.parent=t1.name and t2.prevdoc_docname=%s and t1.docstatus=1""", source_name)

    if not maint_schedule:
        doclist = get_mapped_doc("Sales Order", source_name, {
            "Sales Order": {
                "doctype": "Maintenance Schedule",
                "field_map": {
                    "name": "sales_order_no"
                },
                "validation": {
                    "docstatus": ["=", 1]
                }
            },
            "Sales Order Item": {
                "doctype": "Maintenance Schedule Item",
                "field_map": {
                    "parent": "prevdoc_docname"
                },
                "add_if_empty": True
            }
        }, target_doc)

        return doclist
@frappe.whitelist()
def make_maintenance_visit(source_name, target_doc=None):
    """Map a Sales Order to a Maintenance Visit.

    NOTE: implicitly returns None when a fully-completed submitted visit
    already exists for this order.
    """
    visit = frappe.db.sql("""select t1.name
        from `tabMaintenance Visit` t1, `tabMaintenance Visit Purpose` t2
        where t2.parent=t1.name and t2.prevdoc_docname=%s
        and t1.docstatus=1 and t1.completion_status='Fully Completed'""", source_name)

    if not visit:
        doclist = get_mapped_doc("Sales Order", source_name, {
            "Sales Order": {
                "doctype": "Maintenance Visit",
                "field_map": {
                    "name": "sales_order_no"
                },
                "validation": {
                    "docstatus": ["=", 1]
                }
            },
            "Sales Order Item": {
                "doctype": "Maintenance Visit Purpose",
                "field_map": {
                    "parent": "prevdoc_docname",
                    "parenttype": "prevdoc_doctype"
                },
                "add_if_empty": True
            }
        }, target_doc)

        return doclist
@frappe.whitelist()
def get_events(start, end, filters=None):
    """Returns events for Gantt / Calendar view rendering.

    :param start: Start date-time.
    :param end: End date-time.
    :param filters: Filters (JSON).
    """
    from frappe.desk.calendar import get_event_conditions
    conditions = get_event_conditions("Sales Order", filters)

    # Only orders with a real delivery date inside the requested window.
    data = frappe.db.sql("""select name, customer_name, delivery_status, billing_status, delivery_date
        from `tabSales Order`
        where (ifnull(delivery_date, '0000-00-00') != '0000-00-00') \
        and (delivery_date between %(start)s and %(end)s) {conditions}
        """.format(conditions=conditions), {
            "start": start,
            "end": end
        }, as_dict=True, update={"allDay": 0})
    return data
| agpl-3.0 |
bluecoiner/bluecoin-new | qa/rpc-tests/test_framework/blockstore.py | 97 | 5416 | #!/usr/bin/env python3
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# BlockStore: a helper class that keeps a map of blocks and implements
# helper functions for responding to getheaders and getdata,
# and for constructing a getheaders message
#
from .mininode import *
from io import BytesIO
import dbm.dumb as dbmd
class BlockStore(object):
    """Store serialized blocks in an on-disk dbm file and answer
    getheaders/getdata requests.

    Keys are repr() of the block's sha256; an in-memory headers_map caches
    CBlockHeader objects so header lookups avoid full deserialization.
    """
    def __init__(self, datadir):
        # 'c': create the database file if it does not exist yet.
        self.blockDB = dbmd.open(datadir + "/blocks", 'c')
        # Hash of the current tip (0 until the first block is added).
        self.currentBlock = 0
        self.headers_map = dict()
    def close(self):
        self.blockDB.close()
    def erase(self, blockhash):
        del self.blockDB[repr(blockhash)]
    # lookup an entry and return the item as raw bytes
    def get(self, blockhash):
        value = None
        try:
            value = self.blockDB[repr(blockhash)]
        except KeyError:
            return None
        return value
    # lookup an entry and return it as a CBlock
    def get_block(self, blockhash):
        ret = None
        serialized_block = self.get(blockhash)
        if serialized_block is not None:
            f = BytesIO(serialized_block)
            ret = CBlock()
            ret.deserialize(f)
            ret.calc_sha256()
        return ret
    def get_header(self, blockhash):
        # Returns the cached CBlockHeader, or None if unknown.
        try:
            return self.headers_map[blockhash]
        except KeyError:
            return None
    # Note: this pulls full blocks out of the database just to retrieve
    # the headers -- perhaps we could keep a separate data structure
    # to avoid this overhead.
    def headers_for(self, locator, hash_stop, current_tip=None):
        # Build a msg_headers reply: walk back from the tip until a hash in
        # the locator is found (or the chain runs out), then return up to
        # 2000 headers, truncating after hash_stop if it is in the range.
        if current_tip is None:
            current_tip = self.currentBlock
        current_block_header = self.get_header(current_tip)
        if current_block_header is None:
            return None
        response = msg_headers()
        headersList = [ current_block_header ]
        maxheaders = 2000
        while (headersList[0].sha256 not in locator.vHave):
            prevBlockHash = headersList[0].hashPrevBlock
            prevBlockHeader = self.get_header(prevBlockHash)
            if prevBlockHeader is not None:
                headersList.insert(0, prevBlockHeader)
            else:
                break
        headersList = headersList[:maxheaders] # truncate if we have too many
        hashList = [x.sha256 for x in headersList]
        index = len(headersList)
        if (hash_stop in hashList):
            index = hashList.index(hash_stop)+1
        response.headers = headersList[:index]
        return response
    def add_block(self, block):
        # Persist the block, then make it the new tip and cache its header.
        block.calc_sha256()
        try:
            self.blockDB[repr(block.sha256)] = bytes(block.serialize())
        except TypeError as e:
            print("Unexpected error: ", sys.exc_info()[0], e.args)
        self.currentBlock = block.sha256
        self.headers_map[block.sha256] = CBlockHeader(block)
    def add_header(self, header):
        self.headers_map[header.sha256] = header
    # lookup the hashes in "inv", and return p2p messages for delivering
    # blocks found.
    def get_blocks(self, inv):
        responses = []
        for i in inv:
            if (i.type == 2): # MSG_BLOCK
                data = self.get(i.hash)
                if data is not None:
                    # Use msg_generic to avoid re-serialization
                    responses.append(msg_generic(b"block", data))
        return responses
    def get_locator(self, current_tip=None):
        # Build a CBlockLocator by walking back from the tip, doubling the
        # step size once more than 10 hashes have been collected.
        if current_tip is None:
            current_tip = self.currentBlock
        r = []
        counter = 0
        step = 1
        lastBlock = self.get_block(current_tip)
        while lastBlock is not None:
            r.append(lastBlock.hashPrevBlock)
            for i in range(step):
                lastBlock = self.get_block(lastBlock.hashPrevBlock)
                if lastBlock is None:
                    break
            counter += 1
            if counter > 10:
                step *= 2
        locator = CBlockLocator()
        locator.vHave = r
        return locator
class TxStore(object):
    """Map transaction hashes to serialized transactions, backed by a dbm
    file on disk. Keys are repr() of the transaction's sha256."""
    def __init__(self, datadir):
        # 'c': create the database file if it does not exist yet.
        self.txDB = dbmd.open(datadir + "/transactions", 'c')

    def close(self):
        self.txDB.close()

    # lookup an entry and return the item as raw bytes
    def get(self, txhash):
        try:
            return self.txDB[repr(txhash)]
        except KeyError:
            return None

    def get_transaction(self, txhash):
        """Return the stored transaction as a CTransaction, or None."""
        raw = self.get(txhash)
        if raw is None:
            return None
        stream = BytesIO(raw)
        tx = CTransaction()
        tx.deserialize(stream)
        tx.calc_sha256()
        return tx

    def add_transaction(self, tx):
        """Serialize and persist tx, keyed by its sha256."""
        tx.calc_sha256()
        try:
            self.txDB[repr(tx.sha256)] = bytes(tx.serialize())
        except TypeError as e:
            print("Unexpected error: ", sys.exc_info()[0], e.args)

    def get_transactions(self, inv):
        """Return msg_generic 'tx' messages for every MSG_TX inv entry found."""
        responses = []
        for item in inv:
            if item.type == 1:  # MSG_TX
                raw = self.get(item.hash)
                if raw is not None:
                    responses.append(msg_generic(b"tx", raw))
        return responses
| mit |
AsgerPetersen/QGIS | python/plugins/processing/algs/gdal/sieve.py | 3 | 3374 | # -*- coding: utf-8 -*-
"""
***************************************************************************
sieve.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from qgis.PyQt.QtGui import QIcon
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.core.parameters import ParameterRaster
from processing.core.parameters import ParameterSelection
from processing.core.parameters import ParameterNumber
from processing.core.outputs import OutputRaster
from processing.tools.system import isWindows
from processing.algs.gdal.GdalUtils import GdalUtils
pluginPath = os.path.split(os.path.split(os.path.dirname(__file__))[0])[0]
class sieve(GdalAlgorithm):
    """Processing algorithm wrapping GDAL's gdal_sieve raster filter."""
    INPUT = 'INPUT'
    THRESHOLD = 'THRESHOLD'
    CONNECTIONS = 'CONNECTIONS'
    OUTPUT = 'OUTPUT'
    # Choices for pixel connectedness, passed to gdal_sieve as -4 / -8.
    PIXEL_CONNECTIONS = ['4', '8']
    def getIcon(self):
        # Icon shown in the Processing toolbox.
        return QIcon(os.path.join(pluginPath, 'images', 'gdaltools', 'sieve.png'))
    def defineCharacteristics(self):
        # Declare algorithm name/group plus the input raster, size threshold,
        # connectedness selection and sieved output raster.
        self.name, self.i18n_name = self.trAlgorithm('Sieve')
        self.group, self.i18n_group = self.trAlgorithm('[GDAL] Analysis')
        self.addParameter(ParameterRaster(self.INPUT, self.tr('Input layer'), False))
        self.addParameter(ParameterNumber(self.THRESHOLD,
                                          self.tr('Threshold'), 0, 9999, 2))
        self.addParameter(ParameterSelection(self.CONNECTIONS,
                                             self.tr('Pixel connection'), self.PIXEL_CONNECTIONS, 0))
        self.addOutput(OutputRaster(self.OUTPUT, self.tr('Sieved')))
    def getConsoleCommands(self):
        # Assemble the gdal_sieve command line (run via cmd.exe on Windows).
        # NOTE: unicode() -- this module targets Python 2.
        output = self.getOutputValue(self.OUTPUT)
        arguments = []
        arguments.append('-st')
        arguments.append(unicode(self.getParameterValue(self.THRESHOLD)))
        arguments.append('-' +
                         self.PIXEL_CONNECTIONS[self.getParameterValue(
                             self.CONNECTIONS)])
        arguments.append('-of')
        # Derive the output driver from the output file's extension.
        arguments.append(GdalUtils.getFormatShortNameFromFilename(output))
        arguments.append(self.getParameterValue(self.INPUT))
        arguments.append(output)
        commands = []
        if isWindows():
            commands = ['cmd.exe', '/C ', 'gdal_sieve.bat',
                        GdalUtils.escapeAndJoin(arguments)]
        else:
            commands = ['gdal_sieve.py', GdalUtils.escapeAndJoin(arguments)]
        return commands
segment-routing/openwrt | tools/perf/scripts/python/syscall-counts.py | 1996 | 1700 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
# Usage string printed (via sys.exit) when too many arguments are given.
usage = "perf script -s syscall-counts.py [comm]\n";
# Optional [comm] argument: restrict accounting to a single process name.
for_comm = None
if len(sys.argv) > 2:
	sys.exit(usage)
if len(sys.argv) > 1:
	for_comm = sys.argv[1]
# syscall id -> invocation count (autodict auto-creates missing entries).
syscalls = autodict()
def trace_begin():
	# Called once by perf when tracing starts (Python 2 print statement --
	# perf's embedded interpreter here is Python 2).
	print "Press control+C to stop and show the summary"
def trace_end():
	# Called once by perf when tracing stops; emit the final totals table.
	print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	common_callchain, id, args):
	# Count one syscall entry; skip events from other processes when a
	# [comm] filter was supplied on the command line.
	if for_comm is not None:
		if common_comm != for_comm:
			return
	try:
		syscalls[id] += 1
	except TypeError:
		# First hit for this id: autodict returned a fresh nested dict,
		# so += raised; initialize the counter instead.
		syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
	common_secs, common_nsecs, common_pid, common_comm,
	id, args):
	# Legacy per-syscall entry point; forward to the raw handler.
	# NOTE(review): locals() here lacks common_callchain, which the raw
	# handler requires -- this call looks like it would raise TypeError;
	# confirm against perf's event dispatch.
	raw_syscalls__sys_enter(**locals())
def print_syscall_totals():
	# Print a table of per-syscall counts, most frequent first.
	# Python 2 only: print statements, dict.iteritems() and the
	# tuple-unpacking lambda parameter below.
	if for_comm is not None:
		print "\nsyscall events for %s:\n\n" % (for_comm),
	else:
		print "\nsyscall events:\n\n",
	print "%-40s  %10s\n" % ("event", "count"),
	print "%-40s  %10s\n" % ("----------------------------------------", \
				 "-----------"),
	for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
				  reverse = True):
		print "%-40s  %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
heysion/deepin-auto-build | tests/daemon.py | 1 | 9092 | import fcntl
import atexit
import os
import sys
import time
import signal
import multiprocessing as mp
import time
import multiprocessing as mp
import random
#from signal import signal, SIGINT, SIGHUP, SIG_IGN, siginterrupt
from multiprocessing import Process,Queue,Event,Manager
import subprocess
import os
import sys
class Daemon(object):
    """Minimal double-fork Unix daemon with a flock-guarded pidfile.

    Subclasses override run(); start()/stop()/restart() manage the process
    lifecycle. NOTE(review): file() and the 022 octal literal below are
    Python 2 syntax -- this module targets Python 2.
    """
    def __init__(self):
        # foreground=True means daemonize() is skipped in start().
        self.foreground = True
        self.pidfile = "/tmp/pidfile-001"
        pass
    def get_pid(self):
        # Read the pid recorded in the pidfile; None if missing/unreadable.
        try:
            pf = file(self.pidfile, 'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = None
        except SystemExit:
            pid = None
        return pid
    def start(self):
        """
        Start the daemon
        """
        # Remember any pre-existing pid so it can be restored if locking
        # fails (another instance holds the lock).
        if os.path.isfile(self.pidfile):
            with open(self.pidfile, "r") as old_pidfile:
                old_pid = old_pidfile.read()
        try:
            lockfile = open(self.pidfile, "w")
        except IOError:
            print("Unable to create the pidfile.")
            sys.exit(1)
        try:
            # locked.
            fcntl.flock(lockfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            print("Unable to lock on the pidfile.")
            # We need to overwrite the pidfile if we got here.
            with open(self.pidfile, "w") as pidfile:
                pidfile.write(old_pid)
            sys.exit(1)
        # Start the daemon
        if not self.foreground :
            self.daemonize()
        # Flush pid in pidfile
        try:
            lockfile.write("%s" % (os.getpid()))
            lockfile.flush()
        except IOError:
            print("Unable to write pid to the pidfile.")
            sys.exit(1)
        # Start the logger
        self.run()
    def restart(self):
        """
        Restart the daemon
        """
        self.stop()
        self.start()
    def stop(self):
        """
        Stop the daemon
        """
        # Get the pid from the pidfile
        pid = self.get_pid()
        if not pid:
            if os.path.exists(self.pidfile):
                os.remove(self.pidfile)
            return  # Not an error in a restart
        # Try killing the daemon process
        try:
            i = 0
            while True:
                os.kill(pid, signal.SIGTERM)
                time.sleep(0.1)
                i = i + 1
                if i % 10 == 0:
                    # Escalate with SIGHUP every second until the kill fails
                    # (i.e. the process is gone and OSError is raised).
                    os.kill(pid, signal.SIGHUP)
        except OSError as err:
            err = str(err)
            if err.find("No such process") > 0:
                if os.path.exists(self.pidfile):
                    os.remove(self.pidfile)
            else:
                # NOTE(review): self.logger is never assigned in this class;
                # this branch would raise AttributeError -- confirm intent.
                self.logger.warn(err)
                sys.exit(1)
    def daemonize(self):
        #Do first Fork
        try:
            pid = os.fork()
            if pid != 0:
                sys.exit(0)
        except OSError as e:
            sys.stderr.write(
                "fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # Detach from the controlling terminal and any mounted start dir.
        os.chdir("/tmp")
        os.setsid()
        os.umask(022)
        # Do second fork
        try:
            pid = os.fork()
            if pid != 0:
                sys.exit(0)
        except OSError as e:
            sys.stderr.write(
                "fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
            sys.exit(1)
        # Redirect stdin/stdout/stderr to /dev/null.
        devnull = "/dev/null"
        if hasattr(os, "devnull"):
            devnull = os.devnull
        devnull_fd = os.open(devnull, os.O_RDWR)
        os.dup2(devnull_fd, 0)
        os.dup2(devnull_fd, 1)
        os.dup2(devnull_fd, 2)
        os.close(devnull_fd)
        def sigtermhandler(signum, frame):
            self.daemon_alive = False
            sys.exit()
        signal.signal(signal.SIGTERM, sigtermhandler)
        signal.signal(signal.SIGINT, sigtermhandler)
        # Write pidfile
        atexit.register(self.quit)
    def quit(self):
        """cleanup pid file at exit"""
        sys.exit(0)
        # NOTE(review): everything below is unreachable (dead code after
        # sys.exit) and references undefined names (lopckfile, args,
        # kwargs) -- it appears to be a stale copy of start(); confirm.
        if os.path.isfile(self.pidfile):
            with open(self.pidfile, "r") as old_pidfile:
                old_pid = old_pidfile.read()
        try:
            lockfile = open(self.pidfile, "w")
        except IOError:
            print("Unable to create the pidfile.")
            sys.exit(1)
        try:
            # locked.
            fcntl.flock(lopckfile, fcntl.LOCK_EX | fcntl.LOCK_NB)
        except IOError:
            print("Unable to lock on the pidfile.")
            # We need to overwrite the pidfile if we got here.
            with open(self.pidfile, "w") as pidfile:
                pidfile.write(old_pid)
            sys.exit(1)
        # Start the daemon
        if not self.foreground :
            self.daemonize()
        # Flush pid in pidfile
        try:
            lockfile.write("%s" % (os.getpid()))
            lockfile.flush()
        except IOError:
            print("Unable to write pid to the pidfile.")
            sys.exit(1)
        # Start the logger
        self.run(*args, **kwargs)
    def run(self):
        """
        You should override this method when you subclass Daemon.
        daemonized by start() or restart().
        """
        raise NotImplementedError
'''
multiprocess counter
'''
class Counter(object):
    """A counter that can be updated safely from multiple processes."""
    def __init__(self):
        # 'i': a signed C int in shared memory, starting at zero.
        self.val = mp.Value('i', 0)

    def increment(self, n=1):
        """Add *n* (default 1) while holding the value's lock."""
        self._apply(n)

    def reduce(self, n=1):
        """Subtract *n* (default 1) while holding the value's lock."""
        self._apply(-n)

    def _apply(self, delta):
        # All mutation funnels through here so locking lives in one place.
        with self.val.get_lock():
            self.val.value += delta

    @property
    def value(self):
        """Snapshot of the current count (read without taking the lock)."""
        return self.val.value
# Module-wide shared counter tracking the number of live worker processes.
proc_counter = Counter()
def update_counter():
    # Log the current count, then bump it (a worker was started).
    global proc_counter
    print("counter %d"%proc_counter.value)
    proc_counter.increment()
def reduce_counter():
    # Decrease the shared worker count (a worker exited).
    global proc_counter
    proc_counter.reduce()
class TestMain(Daemon):
    """Demo daemon: a proxy process enqueues tasks and the main loop spawns
    short-lived worker processes for them, reaping workers as they exit.
    (Python 2 print statements throughout.)"""
    def main(self):
        # Dispatch start/stop/restart based on the command-line arguments.
        #pdb.set_trace()
        if len(sys.argv) >= 2:
            if 'start' in sys.argv:
                self.start()
            elif 'stop' in sys.argv:
                self.stop()
            elif 'restart' in sys.argv:
                self.restart()
            else:
                print "Unknown command"
                sys.exit(2)
            sys.exit(0)
        else:
            print "usage: %s start|stop|restart other config" % sys.argv[0]
            sys.exit(2)
    def run(self):
        # Main daemon loop: start the proxy producer process, then react to
        # control-queue events ('newtask' -> spawn worker, 'exit' -> reap)
        # until the process pool drains.
        manager = Manager()
        proc_pool = {}
        task_cntl_queue = Queue()
        task_data_queue = Queue()
        exit_flag = mp.Event()
        # SIGHUP/SIGTERM just set the exit flag; the proxy notices it.
        signal.signal(signal.SIGHUP, lambda x, y: exit_flag.set())
        signal.signal(signal.SIGTERM, lambda x, y: exit_flag.set())
        signal.siginterrupt(signal.SIGHUP, False)
        signal.siginterrupt(signal.SIGTERM, False)
        print 'main {} started'.format(os.getpid())
        proc = mp.Process(target=self.proxy_task_process,
                args=(task_cntl_queue, task_data_queue, exit_flag))
        proc.start()
        proc_pid = proc.pid
        proc_pool[proc_pid] = proc
        update_counter()
        print 'proxy {} started'.format(proc.pid)
        while True:
            item = task_cntl_queue.get()
            if item['event'] == 'newtask':
                proc = mp.Process(target=self.task_worker, args=(task_cntl_queue, task_data_queue))
                proc.start()
                proc_pool[proc.pid] = proc
                update_counter()
                print 'worker {} started'.format(proc.pid)
            elif item['event'] == 'exit':
                proc = proc_pool.pop(item['pid'])
                reduce_counter()
                proc.join()
                print 'child {} stopped'.format(item['pid'])
            else:
                print 'It\'s impossible !'
            if not proc_pool:
                break
        print 'main {} stopped'.format(os.getpid())
    def task_worker(self,task_cntl_queue, task_data_queue):
        # Worker: consume one task, simulate work, then announce its exit
        # on the control queue so the main loop can reap it.
        taskinfo = task_data_queue.get()
        print taskinfo
        print "pid %d"%(os.getpid())
        time.sleep(random.choice([10,20,30]))
        task_cntl_queue.put({'event': 'exit', 'pid': os.getpid()})
        pass
    def fetch_task_api(self):
        # Placeholder for fetching real tasks from an external API.
        pass
    def failed_sleep(self):
        # Back off while the worker pool is saturated.
        time.sleep(5)
    def proxy_task_process(self,task_cntl_queue, task_data_queue, exit_flag):
        # Producer: keep queueing new tasks (one per second) until told to
        # exit; throttle while more than 4 workers are counted as running.
        while True:
            print("proc counter %d"%(proc_counter.value))
            if exit_flag.is_set():
                #if exit kill self
                task_cntl_queue.put({'event': 'exit', 'pid': os.getpid()})
                break
            print("proc counter %d"%(proc_counter.value))
            if proc_counter.value > 4:
                self.failed_sleep()
                print("====runing proc %d"%proc_counter.value)
                continue
            else:
                task_cntl_queue.put({'event': 'newtask'})
                taskinfo={"id":1}
                task_data_queue.put(taskinfo)
            time.sleep(1)
if __name__ == "__main__":
    # Script entry point: parse start/stop/restart from argv and run.
    test = TestMain()
    test.main()
    pass
| gpl-3.0 |
babyliynfg/cross | tools/project-creator/Python2.6.6/Lib/commands.py | 5 | 2633 | """Execute shell commands via os.popen() and return status, output.
Interface summary:
import commands
outtext = commands.getoutput(cmd)
(exitstatus, outtext) = commands.getstatusoutput(cmd)
outtext = commands.getstatus(file) # returns output of "ls -ld file"
A trailing newline is removed from the output string.
Encapsulates the basic operation:
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
[Note: it would be nice to add functions to interpret the exit status.]
"""
__all__ = ["getstatusoutput","getoutput","getstatus"]
# Module 'commands'
#
# Various tools for executing commands and looking at their output and status.
#
# NB This only works (and is only relevant) for UNIX.
# Get 'ls -l' status for an object into a string
#
def getstatus(file):
    """Return output of "ls -ld <file>" in a string."""
    import warnings
    warnings.warn("commands.getstatus() is deprecated", DeprecationWarning, 2)
    # Deprecated: shells out via getoutput(); mkarg() shell-quotes the name.
    return getoutput('ls -ld' + mkarg(file))
# Get the output from a shell command into a string.
# The exit status is ignored; a trailing newline is stripped.
# Assume the command will work with '{ ... ; } 2>&1' around it..
#
def getoutput(cmd):
    """Return output (stdout or stderr) of executing cmd in a shell."""
    # Delegate to getstatusoutput() and discard the exit status.
    status, text = getstatusoutput(cmd)
    return text
# Ditto but preserving the exit status.
# Returns a pair (sts, output)
#
def getstatusoutput(cmd):
    """Return (status, output) of executing cmd in a shell."""
    import os
    # Group the command so stderr is folded into stdout.
    pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
    output = pipe.read()
    status = pipe.close()
    # close() yields None on a zero exit status.
    if status is None:
        status = 0
    # Strip at most one trailing newline, mirroring shell backtick behaviour.
    if output.endswith('\n'):
        output = output[:-1]
    return status, output
# Make command argument from directory and pathname (prefix space, add quotes).
#
def mk2arg(head, x):
    # Join head and x into one path, then shell-quote it via mkarg().
    # warnpy3k exists only in Python 2's warnings module.
    from warnings import warnpy3k
    warnpy3k("In 3.x, mk2arg has been removed.")
    import os
    return mkarg(os.path.join(head, x))
# Make a shell command argument from a string.
# Return a string beginning with a space followed by a shell-quoted
# version of the argument.
# Two strategies: enclose in single quotes if it contains none;
# otherwise, enclose in double quotes and prefix quotable characters
# with backslash.
#
def mkarg(x):
    # warnpy3k exists only in Python 2's warnings module.
    from warnings import warnpy3k
    warnpy3k("in 3.x, mkarg has been removed.")
    # Fast path: no single quotes inside, so wrap the whole value in them.
    if '\'' not in x:
        return ' \'' + x + '\''
    # Otherwise double-quote and backslash-escape the characters that stay
    # special inside double quotes: \ $ " `
    s = ' "'
    for c in x:
        if c in '\\$"`':
            s = s + '\\'
        s = s + c
    s = s + '"'
    return s
| mit |
romain-dartigues/ansible | lib/ansible/parsing/dataloader.py | 9 | 18490 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import copy
import os
import os.path
import re
import tempfile
from ansible import constants as C
from ansible.errors import AnsibleFileNotFound, AnsibleParserError
from ansible.module_utils.basic import is_executable
from ansible.module_utils.six import binary_type, text_type
from ansible.module_utils._text import to_bytes, to_native, to_text
from ansible.parsing.quoting import unquote
from ansible.parsing.utils.yaml import from_yaml
from ansible.parsing.vault import VaultLib, b_HEADER, is_encrypted, is_encrypted_file, parse_vaulttext_envelope
from ansible.utils.path import unfrackpath
from ansible.utils.display import Display
display = Display()
# Tries to determine if a path is inside a role, last dir must be 'tasks'
# this is not perfect but people should really avoid 'tasks' dirs outside roles when using Ansible.
RE_TASKS = re.compile(u'(?:^|%s)+tasks%s?$' % (os.path.sep, os.path.sep))
class DataLoader:
'''
The DataLoader class is used to load and parse YAML or JSON content,
either from a given file name or from a string that was previously
read in through other means. A Vault password can be specified, and
any vault-encrypted files will be decrypted.
Data read from files will also be cached, so the file will never be
read from disk more than once.
Usage:
dl = DataLoader()
# optionally: dl.set_vault_password('foo')
ds = dl.load('...')
ds = dl.load_from_file('/path/to/file')
'''
def __init__(self):
self._basedir = '.'
self._FILE_CACHE = dict()
self._tempfiles = set()
# initialize the vault stuff with an empty password
# TODO: replace with a ref to something that can get the password
# a creds/auth provider
# self.set_vault_password(None)
self._vaults = {}
self._vault = VaultLib()
self.set_vault_secrets(None)
# TODO: since we can query vault_secrets late, we could provide this to DataLoader init
def set_vault_secrets(self, vault_secrets):
self._vault.secrets = vault_secrets
def load(self, data, file_name='<string>', show_content=True):
'''Backwards compat for now'''
return from_yaml(data, file_name, show_content, self._vault.secrets)
def load_from_file(self, file_name, cache=True, unsafe=False):
''' Loads data from a file, which can contain either JSON or YAML. '''
file_name = self.path_dwim(file_name)
display.debug("Loading data from %s" % file_name)
# if the file has already been read in and cached, we'll
# return those results to avoid more file/vault operations
if cache and file_name in self._FILE_CACHE:
parsed_data = self._FILE_CACHE[file_name]
else:
# read the file contents and load the data structure from them
(b_file_data, show_content) = self._get_file_contents(file_name)
file_data = to_text(b_file_data, errors='surrogate_or_strict')
parsed_data = self.load(data=file_data, file_name=file_name, show_content=show_content)
# cache the file contents for next time
self._FILE_CACHE[file_name] = parsed_data
if unsafe:
return parsed_data
else:
# return a deep copy here, so the cache is not affected
return copy.deepcopy(parsed_data)
def path_exists(self, path):
path = self.path_dwim(path)
return os.path.exists(to_bytes(path, errors='surrogate_or_strict'))
def is_file(self, path):
path = self.path_dwim(path)
return os.path.isfile(to_bytes(path, errors='surrogate_or_strict')) or path == os.devnull
def is_directory(self, path):
path = self.path_dwim(path)
return os.path.isdir(to_bytes(path, errors='surrogate_or_strict'))
def list_directory(self, path):
path = self.path_dwim(path)
return os.listdir(path)
def is_executable(self, path):
'''is the given path executable?'''
path = self.path_dwim(path)
return is_executable(path)
def _decrypt_if_vault_data(self, b_vault_data, b_file_name=None):
'''Decrypt b_vault_data if encrypted and return b_data and the show_content flag'''
if not is_encrypted(b_vault_data):
show_content = True
return b_vault_data, show_content
b_ciphertext, b_version, cipher_name, vault_id = parse_vaulttext_envelope(b_vault_data)
b_data = self._vault.decrypt(b_vault_data, filename=b_file_name)
show_content = False
return b_data, show_content
def _get_file_contents(self, file_name):
'''
Reads the file contents from the given file name
If the contents are vault-encrypted, it will decrypt them and return
the decrypted data
:arg file_name: The name of the file to read. If this is a relative
path, it will be expanded relative to the basedir
:raises AnsibleFileNotFOund: if the file_name does not refer to a file
:raises AnsibleParserError: if we were unable to read the file
:return: Returns a byte string of the file contents
'''
if not file_name or not isinstance(file_name, (binary_type, text_type)):
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_name))
b_file_name = to_bytes(self.path_dwim(file_name))
# This is what we really want but have to fix unittests to make it pass
# if not os.path.exists(b_file_name) or not os.path.isfile(b_file_name):
if not self.path_exists(b_file_name):
raise AnsibleFileNotFound("Unable to retrieve file contents", file_name=file_name)
try:
with open(b_file_name, 'rb') as f:
data = f.read()
return self._decrypt_if_vault_data(data, b_file_name)
except (IOError, OSError) as e:
raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (file_name, to_native(e)), orig_exc=e)
def get_basedir(self):
''' returns the current basedir '''
return self._basedir
def set_basedir(self, basedir):
''' sets the base directory, used to find files when a relative path is given '''
if basedir is not None:
self._basedir = to_text(basedir)
def path_dwim(self, given):
'''
make relative paths work like folks expect.
'''
given = unquote(given)
given = to_text(given, errors='surrogate_or_strict')
if given.startswith(to_text(os.path.sep)) or given.startswith(u'~'):
path = given
else:
basedir = to_text(self._basedir, errors='surrogate_or_strict')
path = os.path.join(basedir, given)
return unfrackpath(path, follow=False)
def _is_role(self, path):
''' imperfect role detection, roles are still valid w/o tasks|meta/main.yml|yaml|etc '''
b_path = to_bytes(path, errors='surrogate_or_strict')
b_upath = to_bytes(unfrackpath(path, follow=False), errors='surrogate_or_strict')
for b_finddir in (b'meta', b'tasks'):
for b_suffix in (b'.yml', b'.yaml', b''):
b_main = b'main%s' % (b_suffix)
b_tasked = os.path.join(b_finddir, b_main)
if (
RE_TASKS.search(path) and
os.path.exists(os.path.join(b_path, b_main)) or
os.path.exists(os.path.join(b_upath, b_tasked)) or
os.path.exists(os.path.join(os.path.dirname(b_path), b_tasked))
):
return True
return False
def path_dwim_relative(self, path, dirname, source, is_role=False):
'''
find one file in either a role or playbook dir with or without
explicitly named dirname subdirs
Used in action plugins and lookups to find supplemental files that
could be in either place.
'''
search = []
source = to_text(source, errors='surrogate_or_strict')
# I have full path, nothing else needs to be looked at
if source.startswith(to_text(os.path.sep)) or source.startswith(u'~'):
search.append(unfrackpath(source, follow=False))
else:
# base role/play path + templates/files/vars + relative filename
search.append(os.path.join(path, dirname, source))
basedir = unfrackpath(path, follow=False)
# not told if role, but detect if it is a role and if so make sure you get correct base path
if not is_role:
is_role = self._is_role(path)
if is_role and RE_TASKS.search(path):
basedir = unfrackpath(os.path.dirname(path), follow=False)
cur_basedir = self._basedir
self.set_basedir(basedir)
# resolved base role/play path + templates/files/vars + relative filename
search.append(unfrackpath(os.path.join(basedir, dirname, source), follow=False))
self.set_basedir(cur_basedir)
if is_role and not source.endswith(dirname):
# look in role's tasks dir w/o dirname
search.append(unfrackpath(os.path.join(basedir, 'tasks', source), follow=False))
# try to create absolute path for loader basedir + templates/files/vars + filename
search.append(unfrackpath(os.path.join(dirname, source), follow=False))
# try to create absolute path for loader basedir
search.append(unfrackpath(os.path.join(basedir, source), follow=False))
# try to create absolute path for dirname + filename
search.append(self.path_dwim(os.path.join(dirname, source)))
# try to create absolute path for filename
search.append(self.path_dwim(source))
for candidate in search:
if os.path.exists(to_bytes(candidate, errors='surrogate_or_strict')):
break
return candidate
def path_dwim_relative_stack(self, paths, dirname, source, is_role=False):
'''
find one file in first path in stack taking roles into account and adding play basedir as fallback
:arg paths: A list of text strings which are the paths to look for the filename in.
:arg dirname: A text string representing a directory. The directory
is prepended to the source to form the path to search for.
:arg source: A text string which is the filename to search for
:rtype: A text string
:returns: An absolute path to the filename ``source`` if found
:raises: An AnsibleFileNotFound Exception if the file is found to exist in the search paths
'''
b_dirname = to_bytes(dirname)
b_source = to_bytes(source)
result = None
search = []
if source is None:
display.warning('Invalid request to find a file that matches a "null" value')
elif source and (source.startswith('~') or source.startswith(os.path.sep)):
# path is absolute, no relative needed, check existence and return source
test_path = unfrackpath(b_source, follow=False)
if os.path.exists(to_bytes(test_path, errors='surrogate_or_strict')):
result = test_path
else:
display.debug(u'evaluation_path:\n\t%s' % '\n\t'.join(paths))
for path in paths:
upath = unfrackpath(path, follow=False)
b_upath = to_bytes(upath, errors='surrogate_or_strict')
b_mydir = os.path.dirname(b_upath)
# if path is in role and 'tasks' not there already, add it into the search
if (is_role or self._is_role(path)) and b_mydir.endswith(b'tasks'):
search.append(os.path.join(os.path.dirname(b_mydir), b_dirname, b_source))
search.append(os.path.join(b_mydir, b_source))
else:
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != dirname:
search.append(os.path.join(b_upath, b_dirname, b_source))
search.append(os.path.join(b_upath, b_source))
# always append basedir as last resort
# don't add dirname if user already is using it in source
if b_source.split(b'/')[0] != dirname:
search.append(os.path.join(to_bytes(self.get_basedir()), b_dirname, b_source))
search.append(os.path.join(to_bytes(self.get_basedir()), b_source))
display.debug(u'search_path:\n\t%s' % to_text(b'\n\t'.join(search)))
for b_candidate in search:
display.vvvvv(u'looking for "%s" at "%s"' % (source, to_text(b_candidate)))
if os.path.exists(b_candidate):
result = to_text(b_candidate)
break
if result is None:
raise AnsibleFileNotFound(file_name=source, paths=[to_text(p) for p in search])
return result
def _create_content_tempfile(self, content):
''' Create a tempfile containing defined content '''
fd, content_tempfile = tempfile.mkstemp()
f = os.fdopen(fd, 'wb')
content = to_bytes(content)
try:
f.write(content)
except Exception as err:
os.remove(content_tempfile)
raise Exception(err)
finally:
f.close()
return content_tempfile
def get_real_file(self, file_path, decrypt=True):
"""
If the file is vault encrypted return a path to a temporary decrypted file
If the file is not encrypted then the path is returned
Temporary files are cleanup in the destructor
"""
if not file_path or not isinstance(file_path, (binary_type, text_type)):
raise AnsibleParserError("Invalid filename: '%s'" % to_native(file_path))
b_file_path = to_bytes(file_path, errors='surrogate_or_strict')
if not self.path_exists(b_file_path) or not self.is_file(b_file_path):
raise AnsibleFileNotFound(file_name=file_path)
real_path = self.path_dwim(file_path)
try:
if decrypt:
with open(to_bytes(real_path), 'rb') as f:
# Limit how much of the file is read since we do not know
# whether this is a vault file and therefore it could be very
# large.
if is_encrypted_file(f, count=len(b_HEADER)):
# if the file is encrypted and no password was specified,
# the decrypt call would throw an error, but we check first
# since the decrypt function doesn't know the file name
data = f.read()
if not self._vault.secrets:
raise AnsibleParserError("A vault password or secret must be specified to decrypt %s" % to_native(file_path))
data = self._vault.decrypt(data, filename=real_path)
# Make a temp file
real_path = self._create_content_tempfile(data)
self._tempfiles.add(real_path)
return real_path
except (IOError, OSError) as e:
raise AnsibleParserError("an error occurred while trying to read the file '%s': %s" % (to_native(real_path), to_native(e)), orig_exc=e)
def cleanup_tmp_file(self, file_path):
"""
Removes any temporary files created from a previous call to
get_real_file. file_path must be the path returned from a
previous call to get_real_file.
"""
if file_path in self._tempfiles:
os.unlink(file_path)
self._tempfiles.remove(file_path)
def cleanup_all_tmp_files(self):
for f in self._tempfiles:
try:
self.cleanup_tmp_file(f)
except Exception as e:
display.warning("Unable to cleanup temp files: %s" % to_native(e))
def find_vars_files(self, path, name, extensions=None, allow_dir=True):
"""
Find vars files in a given path with specified name. This will find
files in a dir named <name>/ or a file called <name> ending in known
extensions.
"""
b_path = to_bytes(os.path.join(path, name))
found = []
if extensions is None:
# Look for file with no extension first to find dir before file
extensions = [''] + C.YAML_FILENAME_EXTENSIONS
# add valid extensions to name
for ext in extensions:
if '.' in ext:
full_path = b_path + to_bytes(ext)
elif ext:
full_path = b'.'.join([b_path, to_bytes(ext)])
else:
full_path = b_path
if self.path_exists(full_path):
if self.is_directory(full_path):
if allow_dir:
found.extend(self._get_dir_vars_files(to_text(full_path), extensions))
else:
continue
else:
found.append(full_path)
break
return found
    def _get_dir_vars_files(self, path, extensions):
        """
        Recursively collect vars files under ``path``.

        Hidden entries (leading '.') and editor backups (trailing '~') are
        skipped; extension-less subdirectories are descended into; files are
        kept when they have no extension or one listed in ``extensions``.
        """
        found = []
        for spath in sorted(self.list_directory(path)):
            if not spath.startswith(u'.') and not spath.endswith(u'~'):  # skip hidden and backups
                ext = os.path.splitext(spath)[-1]
                full_spath = os.path.join(path, spath)
                if self.is_directory(full_spath) and not ext:  # recursive search if dir
                    found.extend(self._get_dir_vars_files(full_spath, extensions))
                elif self.is_file(full_spath) and (not ext or to_text(ext) in extensions):
                    # only consider files with valid extensions or no extension
                    found.append(full_spath)
        return found
| gpl-3.0 |
karmacoin-team/karmacoin | contrib/spendfrom/spendfrom.py | 1 | 10053 | #!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a bitcoind or Bitcoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
    """Make sure json library being used does not lose precision converting BTC values"""
    # Round-trip a known amount through float -> JSON -> float and verify
    # that the satoshi count survives intact.
    reference = Decimal("20000000.00000003")
    round_tripped = json.loads(json.dumps(float(reference)))
    if int(round_tripped * 1.0e8) != 2000000000000003:
        raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
    """Return the default location of the bitcoin data directory"""
    system = platform.system()
    if system == "Darwin":
        return os.path.expanduser("~/Library/Application Support/Bitcoin/")
    if system == "Windows":
        return os.path.join(os.environ['APPDATA'], "Bitcoin")
    # Everything else (Linux, BSD, ...) uses the dot-dir convention.
    return os.path.expanduser("~/.bitcoin")
def read_bitcoin_config(dbdir):
    """Read the bitcoin.conf file from dbdir, returns dictionary of settings"""
    # NOTE(review): 'ConfigParser' (capitalized) is the Python 2 module name;
    # this script is Python 2 only as written.
    from ConfigParser import SafeConfigParser

    class FakeSecHead(object):
        # bitcoin.conf has no [section] headers, which ConfigParser requires.
        # This wrapper injects a synthetic '[all]' header as the first line
        # and strips '#' comments from subsequent lines.
        def __init__(self, fp):
            self.fp = fp
            self.sechead = '[all]\n'
        def readline(self):
            if self.sechead:
                # return the synthetic header exactly once
                try: return self.sechead
                finally: self.sechead = None
            else:
                s = self.fp.readline()
                if s.find('#') != -1:
                    # drop the inline comment, keep the trailing newline
                    s = s[0:s.find('#')].strip() +"\n"
                return s
    config_parser = SafeConfigParser()
    config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "bitcoin.conf"))))
    return dict(config_parser.items("all"))
def connect_JSON(config):
    """Connect to a bitcoin JSON-RPC server.

    Exits the process (status 1) on connection failure or when the server's
    testnet setting does not match the requested one.
    """
    testnet = config.get('testnet', '0')
    testnet = (int(testnet) > 0)  # 0/1 in config file, convert to True/False
    if not 'rpcport' in config:
        config['rpcport'] = 19431 if testnet else 9431
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
    try:
        result = ServiceProxy(connect)
        # ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
        # but also make sure the bitcoind we're talking to is/isn't testnet:
        if result.getmininginfo()['testnet'] != testnet:
            sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
            sys.exit(1)
        return result
    except Exception:
        # BUG FIX: this was a bare "except:", which also caught the SystemExit
        # raised just above on a testnet mismatch and misreported it as a
        # connection error.  Exception excludes SystemExit/KeyboardInterrupt.
        sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
        sys.exit(1)
def unlock_wallet(bitcoind):
    """Ensure the wallet is unlocked, prompting for the passphrase if needed.

    Returns True when the wallet is unencrypted or successfully unlocked,
    False when the supplied passphrase did not work.
    """
    info = bitcoind.getinfo()
    if 'unlocked_until' not in info:
        return True # wallet is not encrypted
    t = int(info['unlocked_until'])
    if t <= time.time():
        try:
            passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
            bitcoind.walletpassphrase(passphrase, 5)
        except Exception:
            # BUG FIX: was a bare "except:", which also swallowed Ctrl-C
            # (KeyboardInterrupt) at the prompt and reported it as a wrong
            # passphrase instead of aborting.
            sys.stderr.write("Wrong passphrase\n")
    info = bitcoind.getinfo()
    return int(info['unlocked_until']) > time.time()
def list_available(bitcoind):
    """Summarize spendable funds per address.

    Returns a dict mapping address -> {"total": Decimal, "outputs": [unspent
    outputs], "account": account name (may be "")}.
    """
    address_summary = dict()
    address_to_account = dict()
    # Build an address -> account lookup (minconf=0 to include everything).
    for info in bitcoind.listreceivedbyaddress(0):
        address_to_account[info["address"]] = info["account"]
    unspent = bitcoind.listunspent(0)
    for output in unspent:
        # listunspent doesn't give addresses, so:
        rawtx = bitcoind.getrawtransaction(output['txid'], 1)
        vout = rawtx["vout"][output['vout']]
        pk = vout["scriptPubKey"]
        # This code only deals with ordinary pay-to-bitcoin-address
        # or pay-to-script-hash outputs right now; anything exotic is ignored.
        if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
            continue
        address = pk["addresses"][0]
        if address in address_summary:
            address_summary[address]["total"] += vout["value"]
            address_summary[address]["outputs"].append(output)
        else:
            address_summary[address] = {
                "total" : vout["value"],
                "outputs" : [output],
                "account" : address_to_account.get(address, "")
                }
    return address_summary
def select_coins(needed, inputs):
    """Greedily pick unspent outputs until ``needed`` is covered.

    Returns ``(selected_outpoints, change)``; ``change`` is negative when the
    available inputs could not cover the requested amount.
    """
    # Feel free to improve this, this is good enough for my simple needs:
    selected = []
    gathered = Decimal("0.0")
    for coin in inputs:
        if gathered >= needed:
            break
        selected.append({"txid": coin["txid"], "vout": coin["vout"]})
        gathered += coin["amount"]
    return (selected, gathered - needed)
def create_tx(bitcoind, fromaddresses, toaddress, amount, fee):
    """Build and sign a raw transaction spending from ``fromaddresses``.

    Sends ``amount`` to ``toaddress``; change (if above BASE_FEE) goes back
    to the last from-address.  Exits the process when funds are insufficient
    or signing fails.  Returns the signed transaction as a hex string.
    """
    all_coins = list_available(bitcoind)
    total_available = Decimal("0.0")
    needed = amount+fee
    potential_inputs = []
    for addr in fromaddresses:
        if addr not in all_coins:
            continue
        potential_inputs.extend(all_coins[addr]["outputs"])
        total_available += all_coins[addr]["total"]
    if total_available < needed:
        sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
        sys.exit(1)
    #
    # Note:
    # Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
    # Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
    # Decimals, I'm casting amounts to float before sending them to bitcoind.
    #
    outputs = { toaddress : float(amount) }
    (inputs, change_amount) = select_coins(needed, potential_inputs)
    if change_amount > BASE_FEE: # don't bother with zero or tiny change
        change_address = fromaddresses[-1]
        if change_address in outputs:
            outputs[change_address] += float(change_amount)
        else:
            outputs[change_address] = float(change_amount)
    rawtx = bitcoind.createrawtransaction(inputs, outputs)
    signed_rawtx = bitcoind.signrawtransaction(rawtx)
    if not signed_rawtx["complete"]:
        sys.stderr.write("signrawtransaction failed\n")
        sys.exit(1)
    txdata = signed_rawtx["hex"]
    return txdata
def compute_amount_in(bitcoind, txinfo):
    """Sum (in BTC) of the previous outputs spent by ``txinfo``'s inputs."""
    total = Decimal("0.0")
    for txin in txinfo['vin']:
        # Look up the transaction each input spends to learn its value.
        prev_tx = bitcoind.getrawtransaction(txin['txid'], 1)
        total += prev_tx['vout'][txin['vout']]['value']
    return total
def compute_amount_out(txinfo):
    """Sum (in BTC) of all output values in ``txinfo``."""
    return sum((vout['value'] for vout in txinfo['vout']), Decimal("0.0"))
def sanity_test_fee(bitcoind, txdata_hex, max_fee):
    """Refuse to broadcast a transaction whose implied fee looks wrong.

    Prints the reason and exits (status 1) when the fee exceeds ``max_fee``,
    or when a large (>1 KB) or tiny-amount transaction carries less than
    BASE_FEE.
    """
    class FeeError(RuntimeError):
        pass
    try:
        txinfo = bitcoind.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(bitcoind, txinfo)
        total_out = compute_amount_out(txinfo)
        # BUG FIX: the implied fee was previously referenced via an undefined
        # name "fee" below, raising NameError instead of performing the
        # no-fee sanity checks.  Compute it once here.
        fee = total_in - total_out
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))

        tx_size = len(txdata_hex)/2
        kb = tx_size/1000  # integer division rounds down
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
        # Exercise for the reader: compute transaction priority, and
        # warn if this is a very-low-priority transaction
    except FeeError as err:
        sys.stderr.write((str(err)+"\n"))
        sys.exit(1)
def main():
    """Command-line entry point: list available funds, or build (and
    optionally broadcast) a spend-from transaction."""
    import optparse
    parser = optparse.OptionParser(usage="%prog [options]")
    parser.add_option("--from", dest="fromaddresses", default=None,
                      help="addresses to get bitcoins from")
    parser.add_option("--to", dest="to", default=None,
                      help="address to get send bitcoins to")
    parser.add_option("--amount", dest="amount", default=None,
                      help="amount to send")
    parser.add_option("--fee", dest="fee", default="0.0",
                      help="fee to include")
    parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
                      help="location of bitcoin.conf file with RPC username/password (default: %default)")
    parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
                      help="Use the test network")
    parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
                      help="Don't broadcast the transaction, just create and print the transaction data")
    (options, args) = parser.parse_args()
    check_json_precision()
    config = read_bitcoin_config(options.datadir)
    if options.testnet: config['testnet'] = True
    bitcoind = connect_JSON(config)
    # No --amount given: just list funds per address.
    if options.amount is None:
        address_summary = list_available(bitcoind)
        # NOTE(review): iteritems() is Python 2 only -- consistent with the
        # ConfigParser import above; this script predates Python 3.
        for address,info in address_summary.iteritems():
            n_transactions = len(info['outputs'])
            if n_transactions > 1:
                print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
            else:
                print("%s %.8f %s"%(address, info['total'], info['account']))
    else:
        fee = Decimal(options.fee)
        amount = Decimal(options.amount)
        while unlock_wallet(bitcoind) == False:
            pass # Keep asking for passphrase until they get it right
        txdata = create_tx(bitcoind, options.fromaddresses.split(","), options.to, amount, fee)
        # Refuse fees above 1% of the amount being sent.
        sanity_test_fee(bitcoind, txdata, amount*Decimal("0.01"))
        if options.dry_run:
            print(txdata)
        else:
            txid = bitcoind.sendrawtransaction(txdata)
            print(txid)
if __name__ == '__main__':
    main()
| mit |
kantlove/flask-simple-page | Lib/site-packages/wheel/test/test_tagopt.py | 236 | 3550 | """
Tests for the bdist_wheel tag options (--python-tag and --universal)
"""
import sys
import shutil
import pytest
import py.path
import tempfile
import subprocess
SETUP_PY = """\
from setuptools import setup
setup(
name="Test",
version="1.0",
author_email="author@example.com",
py_modules=["test"],
)
"""
@pytest.fixture
def temp_pkg(request):
    """Create a throwaway package dir containing test.py and setup.py.

    The directory is removed again when the requesting test finishes.
    """
    workdir = tempfile.mkdtemp()
    request.addfinalizer(lambda: shutil.rmtree(workdir))
    pkg = py.path.local(workdir)
    pkg.join('test.py').write('print("Hello, world")')
    pkg.join('setup.py').write(SETUP_PY)
    return pkg
def test_default_tag(temp_pkg):
    """With no options, the wheel is tagged py2-/py3- per the running Python."""
    subprocess.check_call([sys.executable, 'setup.py', 'bdist_wheel'],
                          cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py%s-' % (sys.version[0],))
    assert wheels[0].ext == '.whl'
def test_explicit_tag(temp_pkg):
    """--python-tag overrides the default tag."""
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--python-tag=py32'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py32-')
    assert wheels[0].ext == '.whl'
def test_universal_tag(temp_pkg):
    """--universal produces the combined py2.py3 tag."""
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--universal'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'
def test_universal_beats_explicit_tag(temp_pkg):
    """--universal wins when combined with an explicit --python-tag."""
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel', '--universal', '--python-tag=py32'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'
def test_universal_in_setup_cfg(temp_pkg):
    """universal=1 can be set in the [bdist_wheel] section of setup.cfg."""
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\nuniversal=1')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'
def test_pythontag_in_setup_cfg(temp_pkg):
    """python_tag can be set in the [bdist_wheel] section of setup.cfg."""
    temp_pkg.join('setup.cfg').write('[bdist_wheel]\npython_tag=py32')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py32-')
    assert wheels[0].ext == '.whl'
def test_legacy_wheel_section_in_setup_cfg(temp_pkg):
    """The legacy [wheel] section name is still honoured for universal=1."""
    temp_pkg.join('setup.cfg').write('[wheel]\nuniversal=1')
    subprocess.check_call(
        [sys.executable, 'setup.py', 'bdist_wheel'],
        cwd=str(temp_pkg))
    dist_dir = temp_pkg.join('dist')
    assert dist_dir.check(dir=1)
    wheels = dist_dir.listdir()
    assert len(wheels) == 1
    assert wheels[0].basename.startswith('Test-1.0-py2.py3-')
    assert wheels[0].ext == '.whl'
| mit |
krafczyk/spack | var/spack/repos/builtin/packages/py-memory-profiler/package.py | 5 | 1681 | ##############################################################################
# Copyright (c) 2013-2018, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyMemoryProfiler(PythonPackage):
    """A module for monitoring memory usage of a python program"""
    homepage = "https://github.com/fabianp/memory_profiler"
    url = "https://pypi.io/packages/source/m/memory_profiler/memory_profiler-0.47.tar.gz"
    # second argument is the md5 checksum of the release tarball
    version('0.47', 'ed340aaaa0c7118f2a4c5b4edec6da1e')
    depends_on('py-setuptools', type='build')
    # psutil is needed both to build and at runtime for the measurements
    depends_on('py-psutil', type=('build', 'run'))
| lgpl-2.1 |
pandeyadarsh/sympy | sympy/utilities/mathml/__init__.py | 117 | 2030 | """Module with some functions for MathML, like transforming MathML
content in MathML presentation.
To use this module, you will need lxml.
"""
from sympy.utilities.pkgdata import get_resource
from sympy.utilities.decorator import doctest_depends_on
import xml.dom.minidom
def add_mathml_headers(s):
    # Wrap raw MathML content in a <math> root element carrying the
    # namespace and schema-location headers the XSL transforms expect.
    return """<math xmlns:mml="http://www.w3.org/1998/Math/MathML"
      xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
      xsi:schemaLocation="http://www.w3.org/1998/Math/MathML
        http://www.w3.org/Math/XMLSchema/mathml2/mathml2.xsd">""" + s + "</math>"
@doctest_depends_on(modules=('lxml',))
def apply_xsl(mml, xsl):
    """Apply a xsl to a MathML string
    @param mml: a string with MathML code
    @param xsl: a string representing a path to a xsl (xml stylesheet)
        file. This file name is relative to the PYTHONPATH

    >>> from sympy.utilities.mathml import apply_xsl
    >>> xsl = 'mathml/data/simple_mmlctop.xsl'
    >>> mml = '<apply> <plus/> <ci>a</ci> <ci>b</ci> </apply>'
    >>> res = apply_xsl(mml,xsl)
    >>> ''.join(res.splitlines())
    '<?xml version="1.0"?><mrow xmlns="http://www.w3.org/1998/Math/MathML"> <mi>a</mi> <mo> + </mo> <mi>b</mi></mrow>'

    """
    from lxml import etree
    # Compile the stylesheet, then run the MathML document through it.
    stylesheet = etree.XSLT(etree.XML(get_resource(xsl).read()))
    transformed = stylesheet(etree.XML(mml))
    return str(transformed)
@doctest_depends_on(modules=('lxml',))
def c2p(mml, simple=False):
    """Transforms a document in MathML content (like the one that sympy produces)
    in one document in MathML presentation, more suitable for printing, and more
    widely accepted

    >>> from sympy.utilities.mathml import c2p
    >>> mml = '<apply> <exp/> <cn>2</cn> </apply>'
    >>> c2p(mml,simple=True) != c2p(mml,simple=False)
    True

    """
    if not mml.startswith('<math'):
        mml = add_mathml_headers(mml)
    # The "simple" stylesheet produces plainer presentation markup.
    stylesheet = 'mathml/data/simple_mmlctop.xsl' if simple else 'mathml/data/mmlctop.xsl'
    return apply_xsl(mml, stylesheet)
| bsd-3-clause |
ademariag/kapitan | kapitan/initialiser.py | 1 | 1119 | #!/usr/bin/env python3
# Copyright 2019 The Kapitan Authors
# SPDX-FileCopyrightText: 2020 The Kapitan Authors <kapitan-admins@googlegroups.com>
#
# SPDX-License-Identifier: Apache-2.0
"initialiser module"
import logging
import os
from distutils.dir_util import copy_tree
logger = logging.getLogger(__name__)
def initialise_skeleton(args):
    """Initialises a directory with a recommended skeleton structure
    Args:
        args.directory (string): path which to initialise, directory is assumed to exist
    """
    here = os.path.dirname(__file__)
    templates = os.path.join(here, "inputs", "templates")
    copy_tree(templates, args.directory)
    logger.info("Populated {} with:".format(args.directory))
    for dirpath, dirnames, filenames in os.walk(args.directory):
        logger.info("{}".format(dirpath))
        for filename in filenames:
            logger.info("\t {}".format(filename))
        # Drop the first sub-directory (if any) from the remaining walk;
        # os.walk honours in-place edits to its dirnames list.
        if dirnames:
            del dirnames[0]
| apache-2.0 |
aduric/crossfit | nonrel/django/db/models/query_utils.py | 240 | 5799 | """
Various data structures used in query construction.
Factored out from django.db.models.query to avoid making the main module very
large and/or so that they can be used by other modules without getting into
circular import difficulties.
"""
import weakref
from django.utils.copycompat import deepcopy
from django.db.backends import util
from django.utils import tree
from django.utils.datastructures import SortedDict
class InvalidQuery(Exception):
    """Raised when a query handed to raw() is not safe to run as raw SQL."""
    pass
class QueryWrapper(object):
    """
    Wraps an opaque SQL fragment together with its parameters so the pair can
    be passed straight into query construction (e.g. into a where-clause).
    """
    def __init__(self, sql, params):
        self.data = (sql, params)

    def as_sql(self, qn=None, connection=None):
        # The fragment is already SQL; quoting/connection are irrelevant.
        return self.data
class Q(tree.Node):
    """
    Encapsulates filters as objects that can then be combined logically (using
    & and |).
    """
    # Connection types
    AND = 'AND'
    OR = 'OR'
    default = AND
    def __init__(self, *args, **kwargs):
        # Positional args are child Q objects; kwargs become (lookup, value)
        # leaf pairs.  NOTE(review): list + kwargs.items() concatenation is
        # Python 2 only (items() returns a view on Python 3).
        super(Q, self).__init__(children=list(args) + kwargs.items())
    def _combine(self, other, conn):
        # Build a fresh node joining self and other under the given connector.
        if not isinstance(other, Q):
            raise TypeError(other)
        obj = type(self)()
        obj.add(self, conn)
        obj.add(other, conn)
        return obj
    def __or__(self, other):
        return self._combine(other, self.OR)
    def __and__(self, other):
        return self._combine(other, self.AND)
    def __invert__(self):
        # Wrap self in a new node and mark that node as negated (NOT ...).
        obj = type(self)()
        obj.add(self, self.AND)
        obj.negate()
        return obj
class DeferredAttribute(object):
    """
    A wrapper for a deferred-loading field. When the value is read from this
    object the first time, the query is executed.
    """
    def __init__(self, field_name, model):
        self.field_name = field_name
        # Hold the model class via a weak reference only.
        self.model_ref = weakref.ref(model)
        self.loaded = False
    def __get__(self, instance, owner):
        """
        Retrieves and caches the value from the datastore on the first lookup.
        Returns the cached value.
        """
        from django.db.models.fields import FieldDoesNotExist
        assert instance is not None
        cls = self.model_ref()
        data = instance.__dict__
        # Sentinel trick: the descriptor itself marks "not yet fetched".
        if data.get(self.field_name, self) is self:
            # self.field_name is the attname of the field, but only() takes the
            # actual name, so we need to translate it here.
            try:
                cls._meta.get_field_by_name(self.field_name)
                name = self.field_name
            except FieldDoesNotExist:
                name = [f.name for f in cls._meta.fields
                    if f.attname == self.field_name][0]
            # We use only() instead of values() here because we want the
            # various data coersion methods (to_python(), etc.) to be called
            # here.
            val = getattr(
                cls._base_manager.filter(pk=instance.pk).only(name).using(
                    instance._state.db).get(),
                self.field_name
            )
            data[self.field_name] = val
        return data[self.field_name]
    def __set__(self, instance, value):
        """
        Deferred loading attributes can be set normally (which means there will
        never be a database lookup involved.
        """
        instance.__dict__[self.field_name] = value
def select_related_descend(field, restricted, requested, reverse=False):
    """
    Decide whether select_related() should follow this field deeper.

    Used by both the SQL construction code
    (sql.query.fill_related_selections()) and the model instance creation
    code (query.get_cached_row()).

    Arguments:
     * field - the field to be checked
     * restricted - boolean: has the field list been manually restricted via
       a requested clause?
     * requested - the select_related() dictionary
     * reverse - boolean: True when checking a reverse relation
    """
    if not field.rel:
        # Not a relation at all -- nothing to descend into.
        return False
    if field.rel.parent_link and not reverse:
        return False
    if restricted:
        # Only follow relations the select_related() dict explicitly asked for.
        wanted = field.related_query_name() if reverse else field.name
        if wanted not in requested:
            return False
    elif field.null:
        # Unrestricted traversal never follows nullable relations.
        return False
    return True
# This function is needed because data descriptors must be defined on a class
# object, not an instance, to have any effect.
def deferred_class_factory(model, attrs):
    """
    Returns a class object that is a copy of "model" with the specified "attrs"
    being replaced with DeferredAttribute objects. The "pk_value" ties the
    deferred attributes to a particular instance of the model.
    """
    class Meta:
        # Proxy model: shares the concrete model's table.
        proxy = True
        app_label = model._meta.app_label
    # The app_cache wants a unique name for each model, otherwise the new class
    # won't be created (we get an old one back). Therefore, we generate the
    # name using the passed in attrs. It's OK to reuse an existing class
    # object if the attrs are identical.
    name = "%s_Deferred_%s" % (model.__name__, '_'.join(sorted(list(attrs))))
    # Keep the generated name within database identifier limits.
    name = util.truncate_name(name, 80, 32)
    overrides = dict([(attr, DeferredAttribute(attr, model))
        for attr in attrs])
    overrides["Meta"] = Meta
    overrides["__module__"] = model.__module__
    overrides["_deferred"] = True
    return type(name, (model,), overrides)
# The above function is also used to unpickle model instances with deferred
# fields.
deferred_class_factory.__safe_for_unpickling__ = True
| bsd-3-clause |
luisgg/iteexe | exe/webui/clozefpdblock.py | 2 | 8021 | # ===========================================================================
# eXe
# Copyright 2004-2006, University of Auckland
# Copyright 2004-2008 eXe Project, http://eXeLearning.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
# ===========================================================================
"""
Renders a paragraph where the content creator can choose which words the student
must fill in.
"""
import logging
from exe.webui.block import Block
from exe.webui import common
from exe.webui.element import ClozeElement, TextAreaElement
log = logging.getLogger(__name__)
# ===========================================================================
class ClozefpdBlock(Block):
    """
    Renders a paragraph where the content creator can choose which words the
    student must fill in.
    """
    def __init__(self, parent, idevice):
        """
        Pre-create our field ids
        """
        Block.__init__(self, parent, idevice)
        # to compensate for the strange unpickling timing when objects are
        # loaded from an elp, ensure that proper idevices are set:
        if idevice.instructionsForLearners.idevice is None:
            idevice.instructionsForLearners.idevice = idevice
        if idevice.content.idevice is None:
            idevice.content.idevice = idevice
        if idevice.feedback.idevice is None:
            idevice.feedback.idevice = idevice
        self.instructionElement = \
                TextAreaElement(idevice.instructionsForLearners)
        self.clozeElement = ClozeElement(idevice.content)
        self.feedbackElement = \
                TextAreaElement(idevice.feedback)
        self.previewing        = False # In view or preview render
        if not hasattr(self.idevice,'undo'):
            self.idevice.undo = True

    def process(self, request):
        """
        Handles changes in the paragraph text from editing
        """
        is_cancel = common.requestHasCancel(request)
        if "title"+self.id in request.args \
        and not is_cancel:
            self.idevice.title = request.args["title"+self.id][0]
        # (cleanup: previously read unused 'object'/'action' request args here,
        # shadowing the 'object' builtin)
        self.instructionElement.process(request)
        self.clozeElement.process(request)
        self.feedbackElement.process(request)
        Block.process(self, request)

    def renderEdit(self, style):
        """
        Renders a screen that allows the user to enter paragraph text and choose
        which words are hidden.
        """
        # (cleanup: removed a dead triple-quoted copy of an older
        # implementation and a dead initial html assignment that was
        # immediately overwritten)
        html  = "<div class=\"block\">"
        # JR: strip the legacy "FPD - " title prefix
        if self.idevice.title.find("FPD - ") == 0:
            self.idevice.title = x_(u"Autoevaluacion")
        html += common.textInput("title"+self.id, self.idevice.title)
        html += "</div>"
        html += self.instructionElement.renderEdit()
        html += self.clozeElement.renderEdit()
        html += self.feedbackElement.renderEdit()
        html += self.renderEditButtons()
        html += "</div>"
        return html

    def renderPreview(self, style):
        """
        Remembers if we're previewing or not,
        then implicitly calls self.renderViewContent (via Block.renderPreview)
        """
        self.previewing = True
        return Block.renderPreview(self, style)

    def renderView(self, style):
        """
        Remembers if we're previewing or not,
        then implicitly calls self.renderViewContent (via Block.renderPreview)
        """
        self.previewing = False
        return Block.renderView(self, style)

    def renderViewContent(self):
        """
        Returns an XHTML string for this block
        """
        # Only show feedback button if feedback is present
        if self.feedbackElement.field.content.strip():
            # Cloze Idevice needs id of div for feedback content
            feedbackID = self.feedbackElement.id
            if self.previewing:
                clozeContent = self.clozeElement.renderPreview(feedbackID)
            else:
                clozeContent = self.clozeElement.renderView(feedbackID)
        else:
            if self.previewing:
                clozeContent = self.clozeElement.renderPreview()
            else:
                clozeContent = self.clozeElement.renderView()
        instruction_html = ""
        if self.previewing:
            instruction_html = self.instructionElement.renderPreview()
        else:
            instruction_html = self.instructionElement.renderView()
        html  = u'<script type="text/javascript" src="common.js"></script>\n'
        html += u'<div class="iDevice_inner">\n'
        html += instruction_html
        html += clozeContent
        if self.feedbackElement.field.content:
            if self.previewing:
                html += self.feedbackElement.renderPreview(False, class_="feedback")
            else:
                html += self.feedbackElement.renderView(False, class_="feedback")
        html += u'</div>\n'
        # JR: append a <noscript> fallback showing solution and feedback
        if self.previewing:
            cloze = self.clozeElement.field.content_w_resourcePaths
            feedback = self.feedbackElement.field.content_w_resourcePaths
        else:
            # NOTE(review): 'cloze' uses content_w_resourcePaths in both
            # branches; the view branch possibly intended the _wo_ variant.
            # Preserved as-is to avoid changing exported output.
            cloze = self.clozeElement.field.content_w_resourcePaths
            feedback = self.feedbackElement.field.content_wo_resourcePaths
        html += u'<noscript><div class="feedback">\n'
        html += u"<strong>" + _("Solucion") + u": </strong><br/>\n"
        html += cloze
        if self.feedbackElement.field.content:
            html += u"<br/><br/><strong>" + _("Retroalimentacion") + ": </strong><br/>\n"
            html += feedback
        html += u"</div></noscript>"
        return html

    def renderText(self):
        """
        Returns an XHTML string for text file export.
        """
        if self.previewing:
            html = '<p>' + self.instructionElement.renderPreview() +'</p>'
        else:
            html = '<p>' + self.instructionElement.renderView() +'</p>'
        html += '<p>' + self.clozeElement.renderText() + '</p>'
        if self.feedbackElement.field.content:
            html += '<p>%s:</P>' % _(u"Feedback")
            if self.previewing:
                html += '<p>' +self.feedbackElement.renderPreview(False,
                                                     class_="feedback")
                html += '</p>'
            else:
                html += '<p>' +self.feedbackElement.renderView(False,
                                                     class_="feedback")
                html += '</p>'
        html += self.clozeElement.renderAnswers()
        return html
from exe.engine.clozefpdidevice import ClozefpdIdevice
from exe.webui.blockfactory import g_blockFactory
# Register the block so the factory can render ClozefpdIdevice instances.
g_blockFactory.registerBlockType(ClozefpdBlock, ClozefpdIdevice)
# ===========================================================================
| gpl-2.0 |
python-wink/python-wink | src/pywink/devices/water_heater.py | 3 | 2036 | from ..devices.base import WinkDevice
# pylint: disable=too-many-public-methods
class WinkWaterHeater(WinkDevice):
    """
    Represents a Wink water heater.
    """
    def state(self):
        """The device's state is simply its current operating mode."""
        return self.current_mode()

    def modes(self):
        """Operating modes this heater accepts."""
        return self._last_reading.get('modes_allowed')

    def current_mode(self):
        """Currently active operating mode."""
        return self._last_reading.get('mode')

    def current_set_point(self):
        """Target temperature currently configured."""
        return self._last_reading.get('set_point')

    def max_set_point(self):
        """Highest set point the device allows."""
        return self._last_reading.get('max_set_point_allowed')

    def min_set_point(self):
        """Lowest set point the device allows."""
        return self._last_reading.get('min_set_point_allowed')

    def is_on(self):
        """Whether the heater is powered (False when unknown)."""
        return self._last_reading.get('powered', False)

    def vacation_mode_enabled(self):
        """Whether vacation mode is active (False when unknown)."""
        return self._last_reading.get('vacation_mode', False)

    def rheem_type(self):
        """Rheem device type string reported by the API."""
        return self._last_reading.get('rheem_type')

    def set_operation_mode(self, mode):
        """
        :param mode: a string one of self.modes()
        :return: nothing
        """
        # "off" simply cuts power; any other mode powers on and selects it.
        if mode == "off":
            desired = {"powered": False}
        else:
            desired = {"powered": True, "mode": mode}
        response = self.api_interface.set_device_state(
            self, {"desired_state": desired})
        self._update_state_from_response(response)

    def set_temperature(self, set_point):
        """
        :param set_point: a float for the set point value in celsius
        :return: nothing
        """
        payload = {"desired_state": {'set_point': set_point}}
        response = self.api_interface.set_device_state(self, payload)
        self._update_state_from_response(response)

    def set_vacation_mode(self, state):
        """
        :param state: a boolean of true (on) or false ('off')
        :return: nothing
        """
        # NB: vacation mode goes through the *local* state endpoint.
        values = {"desired_state": {"vacation_mode": state}}
        response = self.api_interface.local_set_state(self, values)
        self._update_state_from_response(response)
| mit |
twoolie/ProjectNarwhal | narwhal/core/profile/views.py | 1 | 1212 |
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.contrib.auth import login
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from annoying.decorators import render_to
from django.contrib.auth.models import User
from .forms import LoginForm
@login_required
@render_to('profile/profile.html')
def profile(request, username=None):
    """Render a user's profile page; defaults to the logged-in user."""
    if not username or username==request.user.username:
        user = request.user
    elif username:
        user = get_object_or_404(User, username=username)
    else:
        # NOTE(review): unreachable -- any falsy username is already handled
        # by the first branch.  Kept as a defensive fallback.
        raise Http404("Could not find the requested user.")
    return {'user':user }
@login_required
@render_to('profile/_user-card.html')
def user_card(request, username=None):
    """Render the small user-card fragment for *username*.

    Defaults to the requester's own card when no username is given or it
    matches the requester; ``is_me`` tells the template whether the card
    belongs to the viewer.
    """
    # NOTE(review): @login_required guarantees an authenticated user, so
    # the is_authenticated() check is redundant here; kept for safety.
    if username == request.user.username or (not username and request.user.is_authenticated()):
        return {'user': request.user, 'is_me':True }
    elif username:
        # select_related pulls the related profile in the same query;
        # `depth=` only exists in Django < 1.8, so this targets old Django.
        return {'user': get_object_or_404(User.objects.select_related('profile', depth=4),
                                          username=username), 'is_me':False}
    else:
        # Only reached for an anonymous request with no username (cannot
        # happen under @login_required, but kept as a guard).
        raise Http404("Could not find the requested user.")
vlk239/pystlink | lib/stm32devices.py | 2 | 44886 | # : by PART_NO/CORE
# : then by DEV_ID
# : then by flash_size and/or device type
# MCU identification tables, nested three levels deep:
#   1. by Cortex-M PART_NO (CPUID part number) / core name,
#   2. by DBGMCU DEV_ID read via 'idcode_reg',
#   3. by flash size and/or concrete device type.
# 'erase_sizes' are flash page/sector sizes in bytes; flash_size,
# sram_size and eeprom_size appear to be in KiB (TODO confirm against
# the flash drivers) and 'freq' the maximum core clock in MHz.
DEVICES = [
    {  # Cortex-M0: STM32F0 series
        'part_no': 0xc20,
        'core': 'CortexM0',
        'idcode_reg': 0x40015800,
        'devices': [
            {
                'dev_id': 0x440,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (1024, ),
                'devices': [
                    {'type': 'STM32F030x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F051x4', 'flash_size': 16, 'sram_size': 8, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F051x6', 'flash_size': 32, 'sram_size': 8, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F051x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F058x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 0, 'freq': 48},
                ],
            },
            {
                'dev_id': 0x442,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F030xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F091xB', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F091xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F098xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 0, 'freq': 48},
                ],
            },
            {
                'dev_id': 0x444,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (1024, ),
                'devices': [
                    {'type': 'STM32F030x4', 'flash_size': 16, 'sram_size': 4, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F030x6', 'flash_size': 32, 'sram_size': 4, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F031x4', 'flash_size': 16, 'sram_size': 4, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F031x6', 'flash_size': 32, 'sram_size': 4, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F038x6', 'flash_size': 32, 'sram_size': 4, 'eeprom_size': 0, 'freq': 48},
                ],
            },
            {
                'dev_id': 0x445,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (1024, ),
                'devices': [
                    {'type': 'STM32F042x4', 'flash_size': 16, 'sram_size': 6, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F042x6', 'flash_size': 32, 'sram_size': 6, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F048x6', 'flash_size': 32, 'sram_size': 6, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F070x6', 'flash_size': 32, 'sram_size': 6, 'eeprom_size': 0, 'freq': 48},
                ],
            },
            {
                'dev_id': 0x448,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F070xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F071x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F071xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F072x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F072xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F078xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                ],
            },
        ],
    },
    {  # Cortex-M0+: STM32L0 and STM32G0 series
        'part_no': 0xc60,
        'core': 'CortexM0+',
        'idcode_reg': 0x40015800,
        'devices': [
            {
                'dev_id': 0x457,  # category 1
                'flash_size_reg': 0x1ff8007c,
                'flash_driver': 'STM32L0',
                'erase_sizes': (128, ),
                'devices': [
                    {'type': 'STM32L011x3', 'flash_size': 8, 'sram_size': 2, 'eeprom_size': 0.5, 'freq': 32},
                    {'type': 'STM32L011x4', 'flash_size': 16, 'sram_size': 2, 'eeprom_size': 0.5, 'freq': 32},
                    {'type': 'STM32L021x4', 'flash_size': 16, 'sram_size': 2, 'eeprom_size': 0.5, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x425,  # category 2
                'flash_size_reg': 0x1ff8007c,
                'flash_driver': 'STM32L0',
                'erase_sizes': (128, ),
                'devices': [
                    {'type': 'STM32L031x4', 'flash_size': 16, 'sram_size': 8, 'eeprom_size': 1, 'freq': 32},
                    {'type': 'STM32L031x6', 'flash_size': 32, 'sram_size': 8, 'eeprom_size': 1, 'freq': 32},
                    {'type': 'STM32L041x6', 'flash_size': 32, 'sram_size': 8, 'eeprom_size': 1, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x417,  # category 3
                'flash_size_reg': 0x1ff8007c,
                'flash_driver': 'STM32L0',
                'erase_sizes': (128, ),
                'devices': [
                    {'type': 'STM32L051x6', 'flash_size': 32, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L051x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L052x6', 'flash_size': 32, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L052x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L053x6', 'flash_size': 32, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L053x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L062x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L063x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x447,  # category 5
                'flash_size_reg': 0x1ff8007c,
                'flash_driver': 'STM32L0',
                'erase_sizes': (128, ),
                'devices': [
                    {'type': 'STM32L071x8', 'flash_size': 64, 'sram_size': 20, 'eeprom_size': 3, 'freq': 32},
                    {'type': 'STM32L071xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L071xZ', 'flash_size': 192, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L072x8', 'flash_size': 64, 'sram_size': 20, 'eeprom_size': 3, 'freq': 32},
                    {'type': 'STM32L072xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L072xZ', 'flash_size': 192, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L073x8', 'flash_size': 64, 'sram_size': 20, 'eeprom_size': 3, 'freq': 32},
                    {'type': 'STM32L073xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L073xZ', 'flash_size': 192, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L081xZ', 'flash_size': 192, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L081xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L082xZ', 'flash_size': 192, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L082xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L083x8', 'flash_size': 64, 'sram_size': 20, 'eeprom_size': 3, 'freq': 32},
                    {'type': 'STM32L083xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                    {'type': 'STM32L083xZ', 'flash_size': 192, 'sram_size': 20, 'eeprom_size': 6, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x460,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024, ),
                'devices': [
                    {'type': 'STM32G070x8', 'flash_size': 64, 'sram_size': 32, 'eeprom_size': 0, 'freq': 64},
                    {'type': 'STM32G070xB', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 0, 'freq': 64},
                    {'type': 'STM32G071x8', 'flash_size': 64, 'sram_size': 32, 'eeprom_size': 0, 'freq': 64},
                    {'type': 'STM32G071xB', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 0, 'freq': 64},
                ],
            },
        ],
    },
    {  # Cortex-M3: STM32F1, STM32F2 and STM32L1 series
        'part_no': 0xc23,
        'core': 'CortexM3',
        'idcode_reg': 0xE0042000,
        'devices': [
            {
                'dev_id': 0x410,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FP',
                'erase_sizes': (1024, ),
                'devices': [
                    {'type': 'STM32F101x8', 'flash_size': 64, 'sram_size': 10, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F101xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F102x8', 'flash_size': 64, 'sram_size': 10, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F102xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F103x8', 'flash_size': 64, 'sram_size': 20, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F103xB', 'flash_size': 128, 'sram_size': 20, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x411,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F205xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F205xC', 'flash_size': 256, 'sram_size': 96, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F205xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F205xF', 'flash_size': 768, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F205xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F207xC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F207xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F207xF', 'flash_size': 768, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F207xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F215xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F215xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F217xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32F217xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 120},
                ],
            },
            {
                'dev_id': 0x412,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FP',
                'erase_sizes': (1024, ),
                'devices': [
                    {'type': 'STM32F101x4', 'flash_size': 16, 'sram_size': 4, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F101x6', 'flash_size': 32, 'sram_size': 6, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F102x4', 'flash_size': 16, 'sram_size': 4, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F102x6', 'flash_size': 32, 'sram_size': 6, 'eeprom_size': 0, 'freq': 48},
                    {'type': 'STM32F103x4', 'flash_size': 16, 'sram_size': 6, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F103x6', 'flash_size': 32, 'sram_size': 10, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x414,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F101xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F101xD', 'flash_size': 384, 'sram_size': 48, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F101xE', 'flash_size': 512, 'sram_size': 48, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F103xC', 'flash_size': 256, 'sram_size': 48, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F103xD', 'flash_size': 384, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F103xE', 'flash_size': 512, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x416,
                'flash_size_reg': 0x1ff8004c,
                'flash_driver': 'STM32L0',
                'erase_sizes': (256, ),
                'devices': [
                    {'type': 'STM32L100x6', 'flash_size': 32, 'sram_size': 4, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L100x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L100xB', 'flash_size': 128, 'sram_size': 10, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L151x6', 'flash_size': 32, 'sram_size': 10, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L151x8', 'flash_size': 64, 'sram_size': 10, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L151xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L152x6', 'flash_size': 32, 'sram_size': 10, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L152x8', 'flash_size': 64, 'sram_size': 10, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L152xB', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 4, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x418,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F105x8', 'flash_size': 64, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F105xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F105xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F107xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F107xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x420,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FP',
                'erase_sizes': (1024, ),
                'devices': [
                    {'type': 'STM32F100x4', 'flash_size': 16, 'sram_size': 4, 'eeprom_size': 0, 'freq': 24},
                    {'type': 'STM32F100x6', 'flash_size': 32, 'sram_size': 4, 'eeprom_size': 0, 'freq': 24},
                    {'type': 'STM32F100x8', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 0, 'freq': 24},
                    {'type': 'STM32F100xB', 'flash_size': 128, 'sram_size': 8, 'eeprom_size': 0, 'freq': 24},
                ],
            },
            {
                'dev_id': 0x427,
                'flash_size_reg': 0x1ff800cc,
                'flash_driver': 'STM32L0',
                'erase_sizes': (256, ),
                'devices': [
                    {'type': 'STM32L100xC', 'flash_size': 256, 'sram_size': 16, 'eeprom_size': 4, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x428,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F100xC', 'flash_size': 256, 'sram_size': 24, 'eeprom_size': 0, 'freq': 24},
                    {'type': 'STM32F100xD', 'flash_size': 384, 'sram_size': 32, 'eeprom_size': 0, 'freq': 24},
                    {'type': 'STM32F100xE', 'flash_size': 512, 'sram_size': 32, 'eeprom_size': 0, 'freq': 24},
                ],
            },
            {
                'dev_id': 0x429,
                'flash_size_reg': 0x1ff8004c,
                'flash_driver': 'STM32L0',
                'erase_sizes': (256, ),
                'devices': [
                    {'type': 'STM32L100x6-A', 'flash_size': 32, 'sram_size': 4, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L100x8-A', 'flash_size': 64, 'sram_size': 8, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L100xB-A', 'flash_size': 128, 'sram_size': 16, 'eeprom_size': 2, 'freq': 32},
                    {'type': 'STM32L151x6-A', 'flash_size': 32, 'sram_size': 16, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L151x8-A', 'flash_size': 64, 'sram_size': 32, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L151xB-A', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L152x6-A', 'flash_size': 32, 'sram_size': 16, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L152x8-A', 'flash_size': 64, 'sram_size': 32, 'eeprom_size': 4, 'freq': 32},
                    {'type': 'STM32L152xB-A', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 4, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x430,
                'flash_size_reg': 0x1ffff7e0,
                'flash_driver': 'STM32FPXL',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F101xF', 'flash_size': 768, 'sram_size': 80, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F101xG', 'flash_size': 1024, 'sram_size': 80, 'eeprom_size': 0, 'freq': 36},
                    {'type': 'STM32F103xF', 'flash_size': 768, 'sram_size': 96, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F103xG', 'flash_size': 1024, 'sram_size': 96, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x436,
                'flash_size_reg': 0x1ff800cc,
                'flash_driver': 'STM32L0',
                'erase_sizes': (256, ),
                'devices': [
                    {'type': 'STM32L151xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 8, 'freq': 32},
                    {'type': 'STM32L151xC-A', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 8, 'freq': 32},
                    {'type': 'STM32L151xD', 'flash_size': 384, 'sram_size': 48, 'eeprom_size': 12, 'freq': 32},
                    {'type': 'STM32L152xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 8, 'freq': 32},
                    {'type': 'STM32L152xC-A', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 8, 'freq': 32},
                    {'type': 'STM32L152xD', 'flash_size': 384, 'sram_size': 48, 'eeprom_size': 12, 'freq': 32},
                    {'type': 'STM32L162xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 8, 'freq': 32},
                    {'type': 'STM32L162xC-A', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 8, 'freq': 32},
                    {'type': 'STM32L162xD', 'flash_size': 384, 'sram_size': 48, 'eeprom_size': 12, 'freq': 32},
                ],
            },
            {
                'dev_id': 0x437,
                'flash_size_reg': 0x1ff800cc,
                'flash_driver': 'STM32L0',
                'erase_sizes': (256, ),
                'devices': [
                    {'type': 'STM32L151xD-X', 'flash_size': 384, 'sram_size': 80, 'eeprom_size': 16, 'freq': 32},
                    {'type': 'STM32L151xE', 'flash_size': 512, 'sram_size': 80, 'eeprom_size': 16, 'freq': 32},
                    {'type': 'STM32L152xD-X', 'flash_size': 384, 'sram_size': 80, 'eeprom_size': 16, 'freq': 32},
                    {'type': 'STM32L152xE', 'flash_size': 512, 'sram_size': 80, 'eeprom_size': 16, 'freq': 32},
                    {'type': 'STM32L162xD-X', 'flash_size': 384, 'sram_size': 80, 'eeprom_size': 16, 'freq': 32},
                    {'type': 'STM32L162xE', 'flash_size': 512, 'sram_size': 80, 'eeprom_size': 16, 'freq': 32},
                ],
            },
        ],
    },
    {  # Cortex-M4: STM32F3/F4, STM32L4, STM32G4 and STM32WB series
        'part_no': 0xc24,
        'core': 'CortexM4',
        'idcode_reg': 0xE0042000,
        'devices': [
            {
                'dev_id': 0x422,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F302xB', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F302xC', 'flash_size': 256, 'sram_size': 40, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F303xB', 'flash_size': 128, 'sram_size': 40, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F303xC', 'flash_size': 256, 'sram_size': 48, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F358xC', 'flash_size': 256, 'sram_size': 48, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x432,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F373x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F373xB', 'flash_size': 128, 'sram_size': 24, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F373xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F378xC', 'flash_size': 256, 'sram_size': 32, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x438,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F303x6', 'flash_size': 32, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F303x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F328x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F334x4', 'flash_size': 16, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F334x6', 'flash_size': 32, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F334x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x439,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F301x6', 'flash_size': 32, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F301x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F302x6', 'flash_size': 32, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F302x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F318x8', 'flash_size': 64, 'sram_size': 16, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x446,
                'flash_size_reg': 0x1ffff7cc,
                'flash_driver': 'STM32FP',
                'erase_sizes': (2048, ),
                'devices': [
                    {'type': 'STM32F302xD', 'flash_size': 384, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F302xE', 'flash_size': 512, 'sram_size': 64, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F303xD', 'flash_size': 384, 'sram_size': 80, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F303xE', 'flash_size': 512, 'sram_size': 80, 'eeprom_size': 0, 'freq': 72},
                    {'type': 'STM32F398xE', 'flash_size': 512, 'sram_size': 80, 'eeprom_size': 0, 'freq': 72},
                ],
            },
            {
                'dev_id': 0x413,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F405xE', 'flash_size': 512, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                    {'type': 'STM32F405xG', 'flash_size': 1024, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                    {'type': 'STM32F407xE', 'flash_size': 512, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                    {'type': 'STM32F407xG', 'flash_size': 1024, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                    {'type': 'STM32F415xG', 'flash_size': 1024, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                    {'type': 'STM32F417xE', 'flash_size': 512, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                    {'type': 'STM32F417xG', 'flash_size': 1024, 'sram_size': 192, 'eeprom_size': 0, 'freq': 168},
                ],
            },
            {
                'dev_id': 0x415,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024,),
                'devices': [
                    {'type': 'STM32L475xC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L475xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L475xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L476xC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L476xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L476xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L486xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L471xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L471xG', 'flash_size': 1024, 'sram_size': 128, 'eeprom_size': 0, 'freq': 80},
                ],
            },
            {
                'dev_id': 0x419,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F427xG', 'flash_size': 1024, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F427xI', 'flash_size': 2048, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F429xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F429xG', 'flash_size': 1024, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F429xI', 'flash_size': 2048, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F437xG', 'flash_size': 1024, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F437xI', 'flash_size': 2048, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F439xG', 'flash_size': 1024, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F439xI', 'flash_size': 2048, 'sram_size': 256, 'eeprom_size': 0, 'freq': 180},
                ],
            },
            {
                'dev_id': 0x421,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F446xC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F446xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 180},
                ],
            },
            {
                'dev_id': 0x423,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F401xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 84},
                    {'type': 'STM32F401xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 84},
                ],
            },
            {
                'dev_id': 0x431,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F411xC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 100},
                    {'type': 'STM32F411xE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 100},
                ],
            },
            {
                'dev_id': 0x433,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F401xD', 'flash_size': 384, 'sram_size': 96, 'eeprom_size': 0, 'freq': 84},
                    {'type': 'STM32F401xE', 'flash_size': 512, 'sram_size': 96, 'eeprom_size': 0, 'freq': 84},
                ],
            },
            {
                'dev_id': 0x434,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F469xE', 'flash_size': 512, 'sram_size': 384, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F469xG', 'flash_size': 1024, 'sram_size': 384, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F469xI', 'flash_size': 2048, 'sram_size': 384, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F479xG', 'flash_size': 1024, 'sram_size': 384, 'eeprom_size': 0, 'freq': 180},
                    {'type': 'STM32F479xI', 'flash_size': 2048, 'sram_size': 384, 'eeprom_size': 0, 'freq': 180},
                ],
            },
            {
                'dev_id': 0x464,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024,),
                'devices': [
                    {'type': 'STM32L412x8', 'flash_size': 64, 'sram_size': 40, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L412xB', 'flash_size': 128, 'sram_size': 40, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L422xB', 'flash_size': 128, 'sram_size': 40, 'eeprom_size': 0, 'freq': 80},
                ],
            },
            {
                'dev_id': 0x435,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024,),
                'devices': [
                    {'type': 'STM32L431xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L431xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L432xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L432xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L433xB', 'flash_size': 128, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L433xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L442xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L443xC', 'flash_size': 256, 'sram_size': 64, 'eeprom_size': 0, 'freq': 80},
                ],
            },
            {
                'dev_id': 0x462,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024,),
                'devices': [
                    {'type': 'STM32L451xC', 'flash_size': 256, 'sram_size': 160, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L451xE', 'flash_size': 512, 'sram_size': 160, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L452xC', 'flash_size': 256, 'sram_size': 160, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L452xE', 'flash_size': 512, 'sram_size': 160, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L462xE', 'flash_size': 512, 'sram_size': 160, 'eeprom_size': 0, 'freq': 80},
                ],
            },
            {
                'dev_id': 0x441,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F412xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 100},
                    {'type': 'STM32F412xG', 'flash_size': 1024, 'sram_size': 256, 'eeprom_size': 0, 'freq': 100},
                ],
            },
            {
                'dev_id': 0x458,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F410x8', 'flash_size': 64, 'sram_size': 32, 'eeprom_size': 0, 'freq': 100},
                    {'type': 'STM32F410xB', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 0, 'freq': 100},
                ],
            },
            {
                'dev_id': 0x461,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024, ),
                'devices': [
                    {'type': 'STM32L496xE', 'flash_size': 512, 'sram_size': 320, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L496xG', 'flash_size': 1024, 'sram_size': 320, 'eeprom_size': 0, 'freq': 80},
                    {'type': 'STM32L4A6xG', 'flash_size': 1024, 'sram_size': 320, 'eeprom_size': 0, 'freq': 80},
                ],
            },
            {
                'dev_id': 0x463,
                'flash_size_reg': 0x1fff7a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F413xG', 'flash_size': 1024, 'sram_size': 320, 'eeprom_size': 0, 'freq': 100},
                    {'type': 'STM32F413xH', 'flash_size': 1536, 'sram_size': 320, 'eeprom_size': 0, 'freq': 100},
                    {'type': 'STM32F423xH', 'flash_size': 1536, 'sram_size': 320, 'eeprom_size': 0, 'freq': 100},
                ],
            },
            {
                'dev_id': 0x470,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (4*1024, ),
                'devices': [
                    {'type': 'STM32L4[R|S]xxG', 'flash_size': 1024, 'sram_size': 640, 'eeprom_size': 0, 'freq': 120},
                    {'type': 'STM32L4[R|S]xxI', 'flash_size': 2048, 'sram_size': 640, 'eeprom_size': 0, 'freq': 120},
                ],
            },
            {
                'dev_id': 0x495,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (4 * 1024, ),
                'devices': [
                    {'type': 'STM32WB55xC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 64},
                    {'type': 'STM32WB55xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 64},
                    {'type': 'STM32WB55xG', 'flash_size': 1024, 'sram_size': 256, 'eeprom_size': 0, 'freq': 64},
                ],
            },
            {
                'dev_id': 0x468,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024, ),
                'devices': [
                    {'type': 'STM32G43xx6', 'flash_size': 32, 'sram_size': 32, 'eeprom_size': 0, 'freq': 170},
                    {'type': 'STM32G43xx8', 'flash_size': 64, 'sram_size': 32, 'eeprom_size': 0, 'freq': 170},
                    {'type': 'STM32G43xxB', 'flash_size': 128, 'sram_size': 32, 'eeprom_size': 0, 'freq': 170},
                ],
            },
            {
                'dev_id': 0x469,
                'flash_size_reg': 0x1fff75e0,
                'flash_driver': 'STM32L4',
                'erase_sizes': (2 * 1024, ),
                'devices': [
                    {'type': 'STM32G47xxB', 'flash_size': 128, 'sram_size': 128, 'eeprom_size': 0, 'freq': 170},
                    {'type': 'STM32G47xxC', 'flash_size': 256, 'sram_size': 128, 'eeprom_size': 0, 'freq': 170},
                    {'type': 'STM32G47xxE', 'flash_size': 512, 'sram_size': 128, 'eeprom_size': 0, 'freq': 170},
                ],
            },
        ],
    },
    {  # Cortex-M7: STM32F7 and STM32H7 series (two candidate IDCODE registers)
        'part_no': 0xc27,
        'core': 'CortexM7',
        'idcode_reg': [0xe0042000, 0x5c001000],
        'devices': [
            {
                'dev_id': 0x449,
                'flash_size_reg': 0x1ff0f442,
                'flash_driver': 'STM32FS',
                'erase_sizes': (32*1024, 32*1024, 32*1024, 32*1024, 128*1024, 256*1024, 256*1024, 256*1024, ),
                'devices': [
                    {'type': 'STM32F745xE', 'flash_size': 512, 'sram_size': 320, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F745xG', 'flash_size': 1024, 'sram_size': 320, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F746xE', 'flash_size': 512, 'sram_size': 320, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F746xG', 'flash_size': 1024, 'sram_size': 320, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F756xG', 'flash_size': 1024, 'sram_size': 320, 'eeprom_size': 0, 'freq': 216},
                ]
            },
            {
                'dev_id': 0x451,
                'flash_size_reg': 0x1ff0f442,
                'flash_driver': 'STM32FS',
                'erase_sizes': (32*1024, 32*1024, 32*1024, 32*1024, 128*1024, 256*1024, 256*1024, 256*1024, ),
                'devices': [
                    {'type': 'STM32F765xG', 'flash_size': 1024, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F765xI', 'flash_size': 2048, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F767xG', 'flash_size': 1024, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F767xI', 'flash_size': 2048, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F769xG', 'flash_size': 1024, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F769xI', 'flash_size': 2048, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F777xI', 'flash_size': 2048, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F778xI', 'flash_size': 2048, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F779xI', 'flash_size': 2048, 'sram_size': 512, 'eeprom_size': 0, 'freq': 216},
                ]
            },
            {
                'dev_id': 0x452,
                'flash_size_reg': 0x1ff07a22,
                'flash_driver': 'STM32FS',
                'erase_sizes': (16*1024, 16*1024, 16*1024, 16*1024, 64*1024, 128*1024, 128*1024, 128*1024, ),
                'devices': [
                    {'type': 'STM32F722xC', 'flash_size': 256, 'sram_size': 256, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F722xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F723xC', 'flash_size': 256, 'sram_size': 256, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F723xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F732xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 216},
                    {'type': 'STM32F733xE', 'flash_size': 512, 'sram_size': 256, 'eeprom_size': 0, 'freq': 216},
                ]
            },
            {
                'dev_id': 0x450,
                'flash_size_reg': 0x1ff1e880,
                'flash_driver': 'STM32H7',
                'erase_sizes': (128*1024,),
                'devices': [
                    {'type': 'STM32H743xI', 'flash_size': 2048, 'sram_size': 1024, 'eeprom_size': 0, 'freq': 480},
                    {'type': 'STM32H753xI', 'flash_size': 2048, 'sram_size': 1024, 'eeprom_size': 0, 'freq': 480},
                    {'type': 'STM32H750xI', 'flash_size': 128, 'sram_size': 1024, 'eeprom_size': 0, 'freq': 480},
                    {'type': 'STM32H745xI', 'flash_size': 2048, 'sram_size': 1024, 'eeprom_size': 0, 'freq': 480},
                    {'type': 'STM32H755xI', 'flash_size': 2048, 'sram_size': 1024, 'eeprom_size': 0, 'freq': 480},
                ]
            },
        ]
    },
]
| gpl-2.0 |
pytroll/pyorbital | pyorbital/astronomy.py | 1 | 5962 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011, 2013
# Author(s):
# Martin Raspaud <martin.raspaud@smhi.se>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Astronomy module.
Parts taken from http://www.geoastro.de/elevaz/basics/index.htm
"""
import numpy as np
from pyorbital import dt2np
F = 1 / 298.257223563 # Earth flattening WGS-84
A = 6378.137 # WGS84 Equatorial radius
MFACTOR = 7.292115E-5
def jdays2000(utc_time):
    """Return the (floating point) number of days since 2000-01-01 12:00 UT."""
    epoch = np.datetime64('2000-01-01T12:00')
    return _days(dt2np(utc_time) - epoch)
def jdays(utc_time):
    """Return the Julian day of *utc_time*."""
    # 2451545 is the Julian day of the 2000-01-01 12:00 UT epoch.
    return 2451545 + jdays2000(utc_time)
def _days(dt):
"""Get the days (floating point) from *d_t*.
"""
return dt / np.timedelta64(1, 'D')
def gmst(utc_time):
    """Greenwich mean sidereal time at *utc_time*, in radians.

    As defined in the AIAA 2006 implementation:
    http://www.celestrak.com/publications/AIAA/2006-6753/
    """
    # Julian centuries since the 2000 epoch.
    ut1 = jdays2000(utc_time) / 36525.0
    # GMST polynomial in seconds of time (AIAA 2006-6753).  NOTE: the cubic
    # coefficient is -6.2e-6 s; the previous expression "6.2 * 10e-6"
    # evaluated to 6.2e-5, ten times the published value.
    theta = 67310.54841 + ut1 * (876600 * 3600 + 8640184.812866 + ut1 *
                                 (0.093104 - ut1 * 6.2e-6))
    # 240 seconds of time per degree; wrap the angle into [0, 2*pi).
    return np.deg2rad(theta / 240.0) % (2 * np.pi)
def _lmst(utc_time, longitude):
    """Local mean sidereal time for *utc_time* at *longitude* (radians).

    The result is in radians.
    """
    sidereal = gmst(utc_time)
    return sidereal + longitude
def sun_ecliptic_longitude(utc_time):
    """Return the ecliptic longitude of the sun at *utc_time*, in radians."""
    t__ = jdays2000(utc_time) / 36525.0
    # Mean anomaly of the sun, converted to radians.
    mean_anomaly = np.deg2rad(357.52910 +
                              35999.05030 * t__ -
                              0.0001559 * t__ * t__ -
                              0.00000048 * t__ * t__ * t__)
    # Mean longitude of the sun, in degrees.
    mean_longitude = 280.46645 + 36000.76983 * t__ + 0.0003032 * t__ * t__
    # Equation of centre (difference between true and mean anomaly), degrees.
    centre = ((1.914600 - 0.004817 * t__ - 0.000014 * t__ * t__) * np.sin(mean_anomaly) +
              (0.019993 - 0.000101 * t__) * np.sin(2 * mean_anomaly) +
              0.000290 * np.sin(3 * mean_anomaly))
    # True longitude = mean longitude + equation of centre.
    return np.deg2rad(mean_longitude + centre)
def sun_ra_dec(utc_time):
    """Return (right ascension, declination) of the sun at *utc_time*, radians."""
    t__ = jdays2000(utc_time) / 36525.0
    # Mean obliquity of the ecliptic, in radians.
    obliquity = np.deg2rad(23.0 + 26.0 / 60.0 + 21.448 / 3600.0 -
                           (46.8150 * t__ + 0.00059 * t__ * t__ -
                            0.001813 * t__ * t__ * t__) / 3600)
    ecliptic_lon = sun_ecliptic_longitude(utc_time)
    # Cartesian direction of the sun on the unit sphere.
    x__ = np.cos(ecliptic_lon)
    y__ = np.cos(obliquity) * np.sin(ecliptic_lon)
    z__ = np.sin(obliquity) * np.sin(ecliptic_lon)
    r__ = np.sqrt(1.0 - z__ * z__)
    # Declination from the z component, right ascension from x/y.
    declination = np.arctan2(z__, r__)
    right_ascension = 2 * np.arctan2(y__, (x__ + r__))
    return right_ascension, declination
def _local_hour_angle(utc_time, longitude, right_ascension):
    """Hour angle at *utc_time* for the given *longitude* (radians) and
    *right_ascension* (radians)."""
    lmst = _lmst(utc_time, longitude)
    return lmst - right_ascension
def get_alt_az(utc_time, lon, lat):
    """Return sun altitude and azimuth from *utc_time*, *lon*, and *lat*.

    lon and lat are in degrees; the returned angles are in radians.
    """
    rlon = np.deg2rad(lon)
    rlat = np.deg2rad(lat)
    ra_, dec = sun_ra_dec(utc_time)
    hour_angle = _local_hour_angle(utc_time, rlon, ra_)
    altitude = np.arcsin(np.sin(rlat) * np.sin(dec) +
                         np.cos(rlat) * np.cos(dec) * np.cos(hour_angle))
    azimuth = np.arctan2(-np.sin(hour_angle),
                         (np.cos(rlat) * np.tan(dec) -
                          np.sin(rlat) * np.cos(hour_angle)))
    return altitude, azimuth
def cos_zen(utc_time, lon, lat):
    """Cosine of the sun-zenith angle for *lon*, *lat* at *utc_time*.

    utc_time: datetime.datetime instance of the UTC time.
    lon and lat are in degrees.
    """
    rlon = np.deg2rad(lon)
    rlat = np.deg2rad(lat)
    r_a, dec = sun_ra_dec(utc_time)
    hour_angle = _local_hour_angle(utc_time, rlon, r_a)
    return (np.sin(rlat) * np.sin(dec) +
            np.cos(rlat) * np.cos(dec) * np.cos(hour_angle))
def sun_zenith_angle(utc_time, lon, lat):
    """Sun-zenith angle for *lon*, *lat* (degrees) at *utc_time*.

    The angle returned is in degrees.
    """
    cosine = cos_zen(utc_time, lon, lat)
    return np.rad2deg(np.arccos(cosine))
def sun_earth_distance_correction(utc_time):
    """Calculate the sun-earth distance correction, relative to 1 AU.

    Approximated with a cosine of the orbital phase measured from
    perihelion (around January 3rd); see
    http://curious.astro.cornell.edu/question.php?number=582 for the
    full elliptical-orbit derivation this approximates.
    """
    year = 365.256363004
    # Orbital phase, zero at the (approximate) perihelion on Jan 3rd.
    phase = 2 * np.pi * (jdays2000(utc_time) - 2) / year
    return 1 - 0.0334 * np.cos(phase)
def observer_position(time, lon, lat, alt):
    """Calculate observer ECI position and velocity.

    http://celestrak.com/columns/v02n03/

    lon, lat in degrees; alt in kilometers.  Returns
    ((x, y, z), (vx, vy, vz)) in kilometers and kilometers/second.
    """
    rlon = np.deg2rad(lon)
    rlat = np.deg2rad(lat)
    # Sidereal angle of the observer's meridian.
    theta = (gmst(time) + rlon) % (2 * np.pi)
    # WGS-84 oblateness correction terms.
    c__ = 1 / np.sqrt(1 + F * (F - 2) * np.sin(rlat)**2)
    sq__ = c__ * (1 - F)**2
    achcp = (A * c__ + alt) * np.cos(rlat)
    x__ = achcp * np.cos(theta)  # kilometers
    y__ = achcp * np.sin(theta)
    z__ = (A * sq__ + alt) * np.sin(rlat)
    # Velocity from the earth's rotation; no vertical component.
    vx__ = -MFACTOR * y__  # kilometers/second
    vy__ = MFACTOR * x__
    vz__ = 0
    return (x__, y__, z__), (vx__, vy__, vz__)
| gpl-3.0 |
rowanc1/simpegflow | docs/conf.py | 3 | 7871 | # -*- coding: utf-8 -*-
#
# SimPEG documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 30 18:42:44 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.append('../')
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.todo', 'sphinx.ext.mathjax', 'sphinx.ext.viewcode', 'sphinx.ext.autodoc', 'matplotlib.sphinxext.plot_directive']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'SimPEG'
copyright = u'2013, SimPEG Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'SimPEGdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'SimPEG.tex', u'SimPEG Documentation',
u'Rowan Cockett', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'simpeg', u'SimPEG Documentation',
[u'Rowan Cockett'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'SimPEG', u'SimPEG Documentation',
u'Rowan Cockett', 'SimPEG', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| mit |
gamesbrewer/kegger | kegger/myapp/libs/cherrypy/tutorial/bonus-sqlobject.py | 54 | 5721 | '''
Bonus Tutorial: Using SQLObject
This is a silly little contacts manager application intended to
demonstrate how to use SQLObject from within a CherryPy2 project. It
also shows how to use inline Cheetah templates.
SQLObject is an Object/Relational Mapper that allows you to access
data stored in an RDBMS in a pythonic fashion. You create data objects
as Python classes and let SQLObject take care of all the nasty details.
This code depends on the latest development version (0.6+) of SQLObject.
You can get it from the SQLObject Subversion server. You can find all
necessary information at <http://www.sqlobject.org>. This code will NOT
work with the 0.5.x version advertised on their website!
This code also depends on a recent version of Cheetah. You can find
Cheetah at <http://www.cheetahtemplate.org>.
After starting this application for the first time, you will need to
access the /reset URI in order to create the database table and some
sample data. Accessing /reset again will drop and re-create the table,
so you may want to be careful. :-)
This application isn't supposed to be fool-proof, it's not even supposed
to be very GOOD. Play around with it some, browse the source code, smile.
:)
-- Hendrik Mans <hendrik@mans.de>
'''
import cherrypy
from Cheetah.Template import Template
from sqlobject import *
# configure your database connection here
__connection__ = 'mysql://root:@localhost/test'
# this is our (only) data class.
class Contact(SQLObject):
    """A single address-book entry, persisted by SQLObject.

    Every column is declared NOT NULL (notNone=True); only the two name
    fields are required at creation time, the rest default to ''.
    """
    lastName = StringCol(length = 50, notNone = True)
    firstName = StringCol(length = 50, notNone = True)
    phone = StringCol(length = 30, notNone = True, default = '')
    email = StringCol(length = 30, notNone = True, default = '')
    url = StringCol(length = 100, notNone = True, default = '')
class ContactManager:
    """CherryPy application exposing CRUD pages for Contact records.

    Each handler marked with ``<name>.exposed = True`` is reachable as a
    URL; pages are rendered with inline Cheetah templates.
    """
    def index(self):
        """List all contacts with edit/delete links."""
        # Let's display a list of all stored contacts.
        contacts = Contact.select()
        template = Template('''
        <h2>All Contacts</h2>
        #for $contact in $contacts
        <a href="mailto:$contact.email">$contact.lastName, $contact.firstName</a>
        [<a href="./edit?id=$contact.id">Edit</a>]
        [<a href="./delete?id=$contact.id">Delete</a>]
        <br/>
        #end for
        <p>[<a href="./edit">Add new contact</a>]</p>
        ''', [locals(), globals()])
        return template.respond()
    index.exposed = True
    def edit(self, id = 0):
        """Show the edit form for contact *id*, or an empty form if id == 0."""
        # we really want id as an integer. Since GET/POST parameters
        # are always passed as strings, let's convert it.
        id = int(id)
        if id > 0:
            # if an id is specified, we're editing an existing contact.
            contact = Contact.get(id)
            title = "Edit Contact"
        else:
            # if no id is specified, we're entering a new contact.
            contact = None
            title = "New Contact"
        # In the following template code, please note that we use
        # Cheetah's $getVar() construct for the form values. We have
        # to do this because contact may be set to None (see above).
        template = Template('''
        <h2>$title</h2>
        <form action="./store" method="POST">
        <input type="hidden" name="id" value="$id" />
        Last Name: <input name="lastName" value="$getVar('contact.lastName', '')" /><br/>
        First Name: <input name="firstName" value="$getVar('contact.firstName', '')" /><br/>
        Phone: <input name="phone" value="$getVar('contact.phone', '')" /><br/>
        Email: <input name="email" value="$getVar('contact.email', '')" /><br/>
        URL: <input name="url" value="$getVar('contact.url', '')" /><br/>
        <input type="submit" value="Store" />
        </form>
        ''', [locals(), globals()])
        return template.respond()
    edit.exposed = True
    def delete(self, id):
        """Delete contact *id* and confirm."""
        # Delete the specified contact
        contact = Contact.get(int(id))
        contact.destroySelf()
        return 'Deleted. <a href="./">Return to Index</a>'
    delete.exposed = True
    def store(self, lastName, firstName, phone, email, url, id = None):
        """Persist the submitted form: update when *id* is given, insert otherwise."""
        if id and int(id) > 0:
            # If an id was specified, update an existing contact.
            contact = Contact.get(int(id))
            # We could set one field after another, but that would
            # cause multiple UPDATE clauses. So we'll just do it all
            # in a single pass through the set() method.
            contact.set(
                lastName = lastName,
                firstName = firstName,
                phone = phone,
                email = email,
                url = url)
        else:
            # Otherwise, add a new contact.
            contact = Contact(
                lastName = lastName,
                firstName = firstName,
                phone = phone,
                email = email,
                url = url)
        return 'Stored. <a href="./">Return to Index</a>'
    store.exposed = True
    def reset(self):
        """Drop and re-create the contact table, seeding one sample row.

        Destructive: any stored contacts are lost.
        """
        # Drop existing table
        Contact.dropTable(True)
        # Create new table
        Contact.createTable()
        # Create some sample data
        Contact(
            firstName = 'Hendrik',
            lastName = 'Mans',
            email = 'hendrik@mans.de',
            phone = '++49 89 12345678',
            url = 'http://www.mornography.de')
        return "reset completed!"
    reset.exposed = True
print("If you're running this application for the first time, please go to http://localhost:8080/reset once in order to create the database!")
cherrypy.quickstart(ContactManager())
| cc0-1.0 |
gnuhub/intellij-community | plugins/hg4idea/testData/bin/mercurial/error.py | 93 | 2749 | # error.py - Mercurial exceptions
#
# Copyright 2005-2008 Matt Mackall <mpm@selenic.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Mercurial exceptions.
This allows us to catch exceptions at higher levels without forcing
imports.
"""
# Do not import anything here, please
class RevlogError(Exception):
pass
class LookupError(RevlogError, KeyError):
    """Raised when a name/node cannot be found in a revlog.

    Also subclasses KeyError so callers can catch it either way.
    """
    def __init__(self, name, index, message):
        self.name = name
        # A 20-character string is a binary node id; display its short
        # hexadecimal form instead of the raw bytes.
        if isinstance(name, str) and len(name) == 20:
            from node import short
            name = short(name)
        RevlogError.__init__(self, '%s@%s: %s' % (index, name, message))
    def __str__(self):
        # Use RevlogError's plain formatting rather than KeyError's,
        # which would wrap the message in repr() quotes.
        return RevlogError.__str__(self)
class ManifestLookupError(LookupError):
pass
class CommandError(Exception):
"""Exception raised on errors in parsing the command line."""
class InterventionRequired(Exception):
"""Exception raised when a command requires human intervention."""
class Abort(Exception):
    """Raised if a command needs to print an error and exit.

    An optional ``hint`` keyword argument carries extra advice for the user.
    """
    def __init__(self, *args, **kw):
        super(Abort, self).__init__(*args)
        self.hint = kw.get('hint')
class ConfigError(Abort):
'Exception raised when parsing config files'
class OutOfBandError(Exception):
'Exception raised when a remote repo reports failure'
class ParseError(Exception):
'Exception raised when parsing config files (msg[, pos])'
class RepoError(Exception):
    """Base class for repository errors; carries an optional ``hint``."""
    def __init__(self, *args, **kw):
        super(RepoError, self).__init__(*args)
        self.hint = kw.get('hint')
class RepoLookupError(RepoError):
pass
class CapabilityError(RepoError):
pass
class RequirementError(RepoError):
"""Exception raised if .hg/requires has an unknown entry."""
pass
class LockError(IOError):
    """Base class for repository lock failures.

    ``desc`` is a human-readable description of the lock involved.
    """
    def __init__(self, errno, strerror, filename, desc):
        super(LockError, self).__init__(errno, strerror, filename)
        self.desc = desc
class LockHeld(LockError):
    """Raised when somebody else already holds the lock.

    ``locker`` identifies the current holder.
    """
    def __init__(self, errno, filename, desc, locker):
        super(LockHeld, self).__init__(errno, 'Lock held', filename, desc)
        self.locker = locker
class LockUnavailable(LockError):
pass
class ResponseError(Exception):
"""Raised to print an error with part of output and exit."""
class UnknownCommand(Exception):
"""Exception raised if command is not in the command table."""
class AmbiguousCommand(Exception):
"""Exception raised if command shortcut matches more than one command."""
# derived from KeyboardInterrupt to simplify some breakout code
class SignalInterrupt(KeyboardInterrupt):
"""Exception raised on SIGTERM and SIGHUP."""
class SignatureError(Exception):
pass
| apache-2.0 |
Teagan42/home-assistant | tests/testing_config/custom_components/test/alarm_control_panel.py | 3 | 2956 | """
Provide a mock alarm_control_panel platform.
Call init before using it in your tests to ensure clean test data.
"""
from homeassistant.components.alarm_control_panel import AlarmControlPanel
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
SUPPORT_ALARM_TRIGGER,
)
from homeassistant.const import (
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from tests.common import MockEntity
ENTITIES = {}
def init(empty=False):
    """Initialize the platform with entities.

    Resets the module-level ENTITIES registry; call before each test to
    ensure clean test data.  With ``empty=True`` the registry is cleared
    instead of repopulated.
    """
    global ENTITIES
    if empty:
        ENTITIES = {}
        return
    ENTITIES = {
        # Panel that requires a code for arm actions.
        # (Plain strings here: the originals were f-strings with no
        # placeholders, flake8 F541.)
        "arm_code": MockAlarm(
            name="Alarm arm code",
            code_arm_required=True,
            unique_id="unique_arm_code",
        ),
        # Panel that arms without a code.
        "no_arm_code": MockAlarm(
            name="Alarm no arm code",
            code_arm_required=False,
            unique_id="unique_no_arm_code",
        ),
    }
async def async_setup_platform(
    hass, config, async_add_entities_callback, discovery_info=None
):
    """Set up the mock platform by adding every initialized entity."""
    entities = list(ENTITIES.values())
    async_add_entities_callback(entities)
class MockAlarm(MockEntity, AlarmControlPanel):
    """Mock Alarm control panel class.

    State starts as None and is driven purely by the service methods
    below; each one writes the new state back to Home Assistant.
    """
    def __init__(self, **values):
        """Init the Mock Alarm Control Panel."""
        # No state until a service call sets one.
        self._state = None
        MockEntity.__init__(self, **values)
    @property
    def code_arm_required(self):
        """Whether the code is required for arm actions."""
        # Returns the ``code_arm_required`` value passed to the constructor.
        return self._handle("code_arm_required")
    @property
    def state(self):
        """Return the state of the device."""
        return self._state
    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return (
            SUPPORT_ALARM_ARM_HOME
            | SUPPORT_ALARM_ARM_AWAY
            | SUPPORT_ALARM_ARM_NIGHT
            | SUPPORT_ALARM_TRIGGER
        )
    def alarm_arm_away(self, code=None):
        """Send arm away command."""
        self._state = STATE_ALARM_ARMED_AWAY
        self.async_write_ha_state()
    def alarm_arm_home(self, code=None):
        """Send arm home command."""
        self._state = STATE_ALARM_ARMED_HOME
        self.async_write_ha_state()
    def alarm_arm_night(self, code=None):
        """Send arm night command."""
        self._state = STATE_ALARM_ARMED_NIGHT
        self.async_write_ha_state()
    def alarm_disarm(self, code=None):
        """Send disarm command."""
        # Only the hard-coded code "1234" disarms; anything else is ignored.
        if code == "1234":
            self._state = STATE_ALARM_DISARMED
            self.async_write_ha_state()
    def alarm_trigger(self, code=None):
        """Send alarm trigger command."""
        self._state = STATE_ALARM_TRIGGERED
        self.async_write_ha_state()
| apache-2.0 |
mmcdermo/helpinghand | server/venv/lib/python2.7/site-packages/django/conf/locale/hr/formats.py | 118 | 2068 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. E Y.'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = 'j. E Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
'%Y-%m-%d', # '2006-10-25'
'%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
'%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
'%d.%m.%Y. %H:%M:%S.%f', # '25.10.2006. 14:30:59.000200'
'%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
'%d.%m.%Y.', # '25.10.2006.'
'%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
'%d.%m.%y. %H:%M:%S.%f', # '25.10.06. 14:30:59.000200'
'%d.%m.%y. %H:%M', # '25.10.06. 14:30'
'%d.%m.%y.', # '25.10.06.'
'%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
'%d. %m. %Y. %H:%M:%S.%f',# '25. 10. 2006. 14:30:59.000200'
'%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
'%d. %m. %Y.', # '25. 10. 2006.'
'%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
'%d. %m. %y. %H:%M:%S.%f',# '25. 10. 06. 14:30:59.000200'
'%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
'%d. %m. %y.', # '25. 10. 06.'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| mit |
antepsis/anteplahmacun | sympy/physics/vector/fieldfunctions.py | 87 | 8504 | from sympy import diff, integrate, S
from sympy.physics.vector import Vector, express
from sympy.physics.vector.frame import _check_frame
from sympy.physics.vector.vector import _check_vector
__all__ = ['curl', 'divergence', 'gradient', 'is_conservative',
'is_solenoidal', 'scalar_potential',
'scalar_potential_difference']
def curl(vect, frame):
    """
    Returns the curl of a vector field computed wrt the coordinate
    symbols of the given frame.
    Parameters
    ==========
    vect : Vector
        The vector operand
    frame : ReferenceFrame
        The reference frame to calculate the curl in
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame
    >>> from sympy.physics.vector import curl
    >>> R = ReferenceFrame('R')
    >>> v1 = R[1]*R[2]*R.x + R[0]*R[2]*R.y + R[0]*R[1]*R.z
    >>> curl(v1, R)
    0
    >>> v2 = R[0]*R[1]*R[2]*R.x
    >>> curl(v2, R)
    R_x*R_y*R.y - R_x*R_z*R.z
    """
    _check_vector(vect)
    if vect == 0:
        return Vector(0)
    # Rewrite vect in this frame's base vectors and coordinate variables so
    # the partial derivatives below differentiate wrt the right symbols.
    vect = express(vect, frame, variables=True)
    #A mechanical approach to avoid looping overheads
    vectx = vect.dot(frame.x)
    vecty = vect.dot(frame.y)
    vectz = vect.dot(frame.z)
    outvec = Vector(0)
    # Determinant expansion of del x F, one component at a time.
    outvec += (diff(vectz, frame[1]) - diff(vecty, frame[2])) * frame.x
    outvec += (diff(vectx, frame[2]) - diff(vectz, frame[0])) * frame.y
    outvec += (diff(vecty, frame[0]) - diff(vectx, frame[1])) * frame.z
    return outvec
def divergence(vect, frame):
    """
    Returns the divergence of a vector field computed wrt the coordinate
    symbols of the given frame.
    Parameters
    ==========
    vect : Vector
        The vector operand
    frame : ReferenceFrame
        The reference frame to calculate the divergence in
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame
    >>> from sympy.physics.vector import divergence
    >>> R = ReferenceFrame('R')
    >>> v1 = R[0]*R[1]*R[2] * (R.x+R.y+R.z)
    >>> divergence(v1, R)
    R_x*R_y + R_x*R_z + R_y*R_z
    >>> v2 = 2*R[1]*R[2]*R.y
    >>> divergence(v2, R)
    2*R_z
    """
    _check_vector(vect)
    if vect == 0:
        return S(0)
    # Express vect wrt frame's coordinate variables before differentiating.
    vect = express(vect, frame, variables=True)
    vectx = vect.dot(frame.x)
    vecty = vect.dot(frame.y)
    vectz = vect.dot(frame.z)
    out = S(0)
    # div F = dFx/dx + dFy/dy + dFz/dz in the frame's coordinates.
    out += diff(vectx, frame[0])
    out += diff(vecty, frame[1])
    out += diff(vectz, frame[2])
    return out
def gradient(scalar, frame):
    """
    Returns the vector gradient of a scalar field computed wrt the
    coordinate symbols of the given frame.
    Parameters
    ==========
    scalar : sympifiable
        The scalar field to take the gradient of
    frame : ReferenceFrame
        The frame to calculate the gradient in
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame
    >>> from sympy.physics.vector import gradient
    >>> R = ReferenceFrame('R')
    >>> s1 = R[0]*R[1]*R[2]
    >>> gradient(s1, R)
    R_y*R_z*R.x + R_x*R_z*R.y + R_x*R_y*R.z
    >>> s2 = 5*R[0]**2*R[2]
    >>> gradient(s2, R)
    10*R_x*R_z*R.x + 5*R_x**2*R.z
    """
    _check_frame(frame)
    outvec = Vector(0)
    # Write the scalar in terms of frame's coordinate variables first.
    scalar = express(scalar, frame, variables=True)
    for i, x in enumerate(frame):
        # d(scalar)/d(coordinate i) along the i-th base vector.
        outvec += diff(scalar, frame[i]) * x
    return outvec
def is_conservative(field):
    """
    Checks if a field is conservative.
    Parameters
    ==========
    field : Vector
        The field to check for conservative property
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame
    >>> from sympy.physics.vector import is_conservative
    >>> R = ReferenceFrame('R')
    >>> is_conservative(R[1]*R[2]*R.x + R[0]*R[2]*R.y + R[0]*R[1]*R.z)
    True
    >>> is_conservative(R[2] * R.y)
    False
    """
    # Field is conservative irrespective of frame, so take the first
    # frame in the result of the separate method of Vector.
    if field == Vector(0):
        return True
    frame = list(field.separate())[0]
    # Conservative <=> curl vanishes everywhere.
    return curl(field, frame).simplify() == Vector(0)
def is_solenoidal(field):
    """
    Checks if a field is solenoidal.
    Parameters
    ==========
    field : Vector
        The field to check for solenoidal property
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame
    >>> from sympy.physics.vector import is_solenoidal
    >>> R = ReferenceFrame('R')
    >>> is_solenoidal(R[1]*R[2]*R.x + R[0]*R[2]*R.y + R[0]*R[1]*R.z)
    True
    >>> is_solenoidal(R[1] * R.y)
    False
    """
    # Field is solenoidal irrespective of frame, so take the first
    # frame in the result of the separate method in Vector.
    if field == Vector(0):
        return True
    frame = list(field.separate())[0]
    # Solenoidal <=> divergence vanishes everywhere.
    return divergence(field, frame).simplify() == S(0)
def scalar_potential(field, frame):
    """
    Returns the scalar potential function of a field in a given frame
    (without the added integration constant).
    Parameters
    ==========
    field : Vector
        The vector field whose scalar potential function is to be
        calculated
    frame : ReferenceFrame
        The frame to do the calculation in
    Raises
    ======
    ValueError
        If the field is not conservative (no potential exists).
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame
    >>> from sympy.physics.vector import scalar_potential, gradient
    >>> R = ReferenceFrame('R')
    >>> scalar_potential(R.z, R) == R[2]
    True
    >>> scalar_field = 2*R[0]**2*R[1]*R[2]
    >>> grad_field = gradient(scalar_field, R)
    >>> scalar_potential(grad_field, R)
    2*R_x**2*R_y*R_z
    """
    #Check whether field is conservative
    if not is_conservative(field):
        raise ValueError("Field is not conservative")
    if field == Vector(0):
        return S(0)
    #Express the field entirely in frame
    #Substitute coordinate variables also
    _check_frame(frame)
    field = express(field, frame, variables=True)
    #Make a list of dimensions of the frame
    dimensions = [x for x in frame]
    #Calculate scalar potential function
    temp_function = integrate(field.dot(dimensions[0]), frame[0])
    for i, dim in enumerate(dimensions[1:]):
        partial_diff = diff(temp_function, frame[i + 1])
        partial_diff = field.dot(dim) - partial_diff
        temp_function += integrate(partial_diff, frame[i + 1])
    return temp_function
def scalar_potential_difference(field, frame, point1, point2, origin):
    """
    Returns the scalar potential difference between two points in a
    certain frame, wrt a given field.
    If a scalar field is provided, its values at the two points are
    considered. If a conservative vector field is provided, the values
    of its scalar potential function at the two points are used.
    Returns (potential at point2) - (potential at point1)
    Parameters
    ==========
    field : Vector/sympifiable
        The field to calculate wrt
    frame : ReferenceFrame
        The frame to do the calculations in
    point1 : Point
        The initial Point in given frame
    point2 : Point
        The second Point in the given frame
    origin : Point
        The Point to use as reference point for position vector
        calculation
    Examples
    ========
    >>> from sympy.physics.vector import ReferenceFrame, Point
    >>> from sympy.physics.vector import scalar_potential_difference
    >>> R = ReferenceFrame('R')
    >>> O = Point('O')
    >>> P = O.locatenew('P', R[0]*R.x + R[1]*R.y + R[2]*R.z)
    >>> vectfield = 4*R[0]*R[1]*R.x + 2*R[0]**2*R.y
    >>> scalar_potential_difference(vectfield, R, O, P, O)
    2*R_x**2*R_y
    >>> Q = O.locatenew('O', 3*R.x + R.y + 2*R.z)
    >>> scalar_potential_difference(vectfield, R, P, Q, O)
    -2*R_x**2*R_y + 18
    """
    _check_frame(frame)
    if isinstance(field, Vector):
        #Get the scalar potential function
        scalar_fn = scalar_potential(field, frame)
    else:
        #Field is a scalar
        scalar_fn = field
    #Express positions in required frame
    position1 = express(point1.pos_from(origin), frame, variables=True)
    position2 = express(point2.pos_from(origin), frame, variables=True)
    #Get the two positions as substitution dicts for coordinate variables
    subs_dict1 = {}
    subs_dict2 = {}
    for i, x in enumerate(frame):
        subs_dict1[frame[i]] = x.dot(position1)
        subs_dict2[frame[i]] = x.dot(position2)
    return scalar_fn.subs(subs_dict2) - scalar_fn.subs(subs_dict1)
| bsd-3-clause |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_08_01/models/express_route_circuits_routes_table_list_result_py3.py | 1 | 1286 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ExpressRouteCircuitsRoutesTableListResult(Model):
    """Paged response for ListRoutesTable associated with the Express Route
    Circuits API.

    :param value: The list of routes table.
    :type value:
     list[~azure.mgmt.network.v2017_08_01.models.ExpressRouteCircuitRoutesTable]
    :param next_link: The URL to get the next set of results.
    :type next_link: str
    """

    # Maps constructor arguments onto the wire-format JSON keys used by
    # msrest (de)serialization.
    _attribute_map = {
        'value': {'key': 'value', 'type': '[ExpressRouteCircuitRoutesTable]'},
        'next_link': {'key': 'nextLink', 'type': 'str'},
    }

    def __init__(self, *, value=None, next_link: str=None, **kwargs) -> None:
        super().__init__(**kwargs)
        self.value = value
        self.next_link = next_link
| mit |
hamogu/astrospec | astrospec/io/IUE.py | 2 | 3732 | import os
import numpy as np
from astropy.io import fits
from astropy.nddata.nduncertainty import StdDevUncertainty
from astropy.io import registry
from astropy.extern import six
from ..spectrum1d import Spectrum1D
from ..wcs import WCSTable
class ApertureException(Exception):
    """Raised when a file holds several spectra (one per aperture) and the
    caller did not single one out via the ``aperture=`` keyword."""

    def __init__(self, apertures):
        # Keep the available aperture labels so __str__ can list the options.
        self.apertures = apertures

    def __str__(self):
        parts = ("There is more than one spectrum in the requested file. ",
                 "Each spectrum corresponds to one aperture. ",
                 "Please specify the aperture desired with the aperture ",
                 "= argument. The available apertures are:\n\n",
                 str(list(self.apertures)))
        return ''.join(parts)
def _find_col_index(ColDefs, colname):
'''find index for a column with certain name
This function can be removed or made easier, once astropy.io.fits returns a Table object'''
ind = (np.array(ColDefs.names) == colname)
return np.where(ind)[0][0]
def _get_unit(unitstring):
'''The only unit I have seen is ERG/CM2/S/A and that does not parse
with astropy. For now, just translate that.'''
if unitstring == 'ERG/CM2/S/A':
return 'erg / (cm2 s Angstrom)'
else:
return unitstring
def read_IUE_mxlo(filename, aperture=''):
    '''
    Read low resolution IUE spectra in fits format

    IUE spectra are available from MAST in two different formats:
    IUESIPS and NEWSIPS. While the former is a (now obsolete) intrinsic IUE
    format that requires special software to read, NEWSIPS is a fits file.
    This procedure implements a reader for NEWSIPS files.

    Parameters
    ----------
    filename : string
    aperture : string
        Some IUE files contain spectra taken with different apertures.
        This keyword allows the user to select one of them (an exception
        is raised if multiple apertures are present, but this keyword
        is not set appropriately).

    Returns
    -------
    spec : Spectrum1D

    Raises
    ------
    ApertureException
        If the file holds several apertures and ``aperture`` does not name
        one of them.

    Notes
    -----
    See http://archive.stsci.edu/iue/mdr_help.html?print=1
    Sometimes there are two or more apertures in one file e.g. swp02283.mxlo .
    '''
    hdus = fits.open(filename)
    try:
        tab = hdus[1].data
        meta = hdus[0].header
        # Reconstruct the wavelength grid from start, step and point count.
        wave = tab.WAVELENGTH[0] + np.arange(tab.NPOINTS[0]) * tab.DELTAW[0]
        if len(tab) > 1:
            # On Python 3 the FITS table stores aperture labels as bytes,
            # so the user-supplied string has to be encoded before use.
            if six.PY2:
                ap = aperture
            else:
                ap = bytes(aperture, encoding='latin-1')
            if ap in tab.APERTURE:
                # BUG FIX: compare with the converted value ``ap``.
                # Comparing with the raw ``aperture`` string never matched
                # on Python 3, which produced empty flux/sigma arrays.
                index = np.where(tab.APERTURE == ap)
                flux = tab.FLUX[index].flatten()
                sigma = tab.SIGMA[index].flatten()
                flags = tab.QUALITY[index].flatten()
            else:
                raise ApertureException(tab.APERTURE)
        else:
            flux = tab.FLUX.flatten()
            sigma = tab.SIGMA.flatten()
            flags = tab.QUALITY.flatten()
    finally:
        hdus.close()
    # NOTE(review): ``tab`` is still used after hdus.close(); this relies on
    # the table data having been loaded into memory above -- confirm the
    # behaviour with memory-mapped files.
    dispersion_unit = tab.columns[_find_col_index(tab.columns, 'WAVELENGTH')].unit.lower()
    flux_unit = _get_unit(tab.columns[_find_col_index(tab.columns, 'FLUX')].unit)
    wcs = WCSTable(wave, dispersion_unit)
    # Quality flags are non-zero for bad pixels, so they double as a mask.
    spec = Spectrum1D(data=flux,
                      uncertainty=StdDevUncertainty(sigma, flux_unit),
                      meta=meta,
                      unit=flux_unit, mask=(flags != 0),
                      wcs=wcs)
    return spec
registry.register_reader('IUE-mxlo', Spectrum1D, read_IUE_mxlo)
def identify_IUE_mxlo(origin, *args, **kwargs):
    '''Return True when the first argument is a filename ending in .mxlo.'''
    candidate = args[0]
    if not isinstance(candidate, six.string_types):
        return False
    # Case-insensitive extension check, matching how MAST names the files.
    extension = os.path.splitext(candidate.lower())[1]
    return extension == '.mxlo'
registry.register_identifier('IUE-mxlo', Spectrum1D, identify_IUE_mxlo)
| bsd-3-clause |
shastikk/youtube-dl | test/test_age_restriction.py | 171 | 1379 | #!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import try_rm
from youtube_dl import YoutubeDL
def _download_restricted(url, filename, age):
    """ Returns true if the file has been downloaded """
    ydl = YoutubeDL({
        'age_limit': age,
        'skip_download': True,
        'writeinfojson': True,
        'outtmpl': '%(id)s.%(ext)s',
    })
    ydl.add_default_info_extractors()
    # With skip_download set, only the .info.json sidecar is written, so its
    # presence tells us whether the age gate allowed the download.
    info_json = os.path.splitext(filename)[0] + '.info.json'
    try_rm(info_json)
    ydl.download([url])
    downloaded = os.path.exists(info_json)
    try_rm(info_json)
    return downloaded
class TestAgeRestriction(unittest.TestCase):
    """Verify that the ``age_limit`` option gates age-restricted videos."""

    def _assert_restricted(self, url, filename, age, old_age=None):
        # The download must succeed for a viewer above the restriction
        # (old_age, or None = no limit) and fail for one below it (age).
        self.assertTrue(_download_restricted(url, filename, old_age))
        self.assertFalse(_download_restricted(url, filename, age))

    def test_youtube(self):
        self._assert_restricted('07FYdnEawAQ', '07FYdnEawAQ.mp4', 10)

    def test_youporn(self):
        self._assert_restricted(
            'http://www.youporn.com/watch/505835/sex-ed-is-it-safe-to-masturbate-daily/',
            '505835.mp4', 2, old_age=25)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| unlicense |
JingJunYin/tensorflow | tensorflow/contrib/seq2seq/python/ops/beam_search_ops.py | 124 | 1149 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Beam Search helper ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.seq2seq.ops import gen_beam_search_ops
from tensorflow.contrib.util import loader
from tensorflow.python.platform import resource_loader
# Load the native library backing the beam search ops; resource_loader
# resolves the platform-specific shared object shipped with this package.
_beam_search_ops_so = loader.load_op_library(
    resource_loader.get_path_to_datafile("_beam_search_ops.so"))

# Re-export the generated op under its public name.
gather_tree = gen_beam_search_ops.gather_tree
| apache-2.0 |
genavarov/ladacoin | contrib/linearize/linearize-hashes.py | 2 | 3036 | #!/usr/bin/python
#
# linearize-hashes.py: List blocks in a linear, no-fork version of the chain.
#
# Copyright (c) 2013-2014 The Bitcoin developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
from __future__ import print_function
import json
import struct
import re
import base64
import httplib
import sys
settings = {}
class BitcoinRPC:
    """Minimal JSON-RPC 1.1 client for talking to a bitcoind node."""

    def __init__(self, host, port, username, password):
        # HTTP basic auth header, reused for every request on this connection.
        credentials = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(credentials))
        self.conn = httplib.HTTPConnection(host, port, False, 30)

    def execute(self, obj):
        """POST *obj* as JSON and return the decoded reply (None on failure)."""
        headers = {'Authorization': self.authhdr,
                   'Content-type': 'application/json'}
        self.conn.request('POST', '/', json.dumps(obj), headers)
        response = self.conn.getresponse()
        if response is None:
            print("JSON-RPC: no response", file=sys.stderr)
            return None
        return json.loads(response.read())

    @staticmethod
    def build_request(idx, method, params):
        # JSON-RPC 1.1 envelope; a None params collapses to an empty list.
        request = {'version': '1.1',
                   'method': method,
                   'id': idx}
        request['params'] = [] if params is None else params
        return request

    @staticmethod
    def response_is_error(resp_obj):
        # Successful replies carry an explicit null 'error' member.
        return resp_obj.get('error') is not None
def get_block_hashes(settings, max_blocks_per_call=10000):
    """Print the hash of every block from min_height through max_height."""
    rpc = BitcoinRPC(settings['host'], settings['port'],
                     settings['rpcuser'], settings['rpcpassword'])

    height = settings['min_height']
    end_height = settings['max_height'] + 1
    while height < end_height:
        # Batch up to max_blocks_per_call getblockhash calls per round trip.
        count = min(end_height - height, max_blocks_per_call)
        batch = [rpc.build_request(i, 'getblockhash', [height + i])
                 for i in range(count)]

        reply = rpc.execute(batch)
        for i, resp_obj in enumerate(reply):
            if rpc.response_is_error(resp_obj):
                print('JSON-RPC: error at height', height + i, ': ', resp_obj['error'], file=sys.stderr)
                exit(1)
            assert(resp_obj['id'] == i)  # assume replies are in-sequence
            print(resp_obj['result'])

        height += count
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print("Usage: linearize-hashes.py CONFIG-FILE")
        sys.exit(1)

    # Parse a simple key=value configuration file into `settings`.
    f = open(sys.argv[1])
    for line in f:
        # skip comment lines (raw strings avoid invalid-escape warnings)
        m = re.search(r'^\s*#', line)
        if m:
            continue

        # parse key=value lines
        m = re.search(r'^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    # Fall back to sensible defaults for anything the config omitted.
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 8832
    if 'min_height' not in settings:
        settings['min_height'] = 0
    if 'max_height' not in settings:
        settings['max_height'] = 313000
    if 'rpcuser' not in settings or 'rpcpassword' not in settings:
        # BUG FIX: 'stderr' was an undefined name; qualify it as sys.stderr.
        print("Missing username and/or password in cfg file", file=sys.stderr)
        sys.exit(1)

    # Config values arrive as strings; the RPC code expects integers.
    settings['port'] = int(settings['port'])
    settings['min_height'] = int(settings['min_height'])
    settings['max_height'] = int(settings['max_height'])

    get_block_hashes(settings)
| mit |
vaygr/ansible | lib/ansible/utils/module_docs_fragments/ironware.py | 22 | 3850 | #
# (c) 2017, Paul Baker <@paulquack>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
    # Standard files documentation fragment

    # DOCUMENTATION is YAML text that Ansible splices into the docs of every
    # module declaring this fragment; it is data, not executable code.
    # NOTE(review): 'authorize' appears both at the top level and under
    # provider.suboptions -- presumably intentional (both call styles are
    # supported); confirm against other network doc fragments.
    DOCUMENTATION = """
options:
  authorize:
    description:
      - Instructs the module to enter privileged mode on the remote device
        before sending any commands.  If not specified, the device will
        attempt to execute all commands in non-privileged mode. If the value
        is not specified in the task, the value of environment variable
        C(ANSIBLE_NET_AUTHORIZE) will be used instead.
    type: bool
    default: 'no'
  provider:
    description:
      - A dict object containing connection details.
    suboptions:
      host:
        description:
          - Specifies the DNS host name or address for connecting to the remote
            device over the specified transport.  The value of host is used as
            the destination address for the transport.
      port:
        description:
          - Specifies the port to use when building the connection to the remote
            device.
        default: 22
      username:
        description:
          - Configures the username to use to authenticate the connection to
            the remote device.  This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_USERNAME) will be used instead.
      password:
        description:
          - Specifies the password to use to authenticate the connection to
            the remote device.   This value is used to authenticate
            the SSH session. If the value is not specified in the task, the
            value of environment variable C(ANSIBLE_NET_PASSWORD) will be used instead.
      ssh_keyfile:
        description:
          - Specifies the SSH key to use to authenticate the connection to
            the remote device.   This value is the path to the
            key used to authenticate the SSH session. If the value is not specified
            in the task, the value of environment variable C(ANSIBLE_NET_SSH_KEYFILE)
            will be used instead.
      authorize:
        description:
          - Instructs the module to enter privileged mode on the remote device
            before sending any commands.  If not specified, the device will
            attempt to execute all commands in non-privileged mode. If the value
            is not specified in the task, the value of environment variable
            C(ANSIBLE_NET_AUTHORIZE) will be used instead.
        type: bool
        default: 'no'
      auth_pass:
        description:
          - Specifies the password to use if required to enter privileged mode
            on the remote device.  If I(authorize) is false, then this argument
            does nothing. If the value is not specified in the task, the value of
            environment variable C(ANSIBLE_NET_AUTH_PASS) will be used instead.
      timeout:
        description:
          - Specifies idle timeout in seconds for the connection, in seconds. Useful
            if the console freezes before continuing. For example when saving
            configurations.
        default: 10
"""
| gpl-3.0 |
rue89-tech/edx-analytics-pipeline | edx/analytics/tasks/tests/acceptance/test_studentmodule_dist.py | 3 | 5414 | """
Run end-to-end acceptance tests regarding the import of courseware_studentmodule, the computation of histgrams,
and the exporting of that data into the analytics database.
The goal of these tests is to emulate (as closely as possible) user actions and validate user visible outputs.
"""
import logging
import os
from edx.analytics.tasks.url import url_path_join
from edx.analytics.tasks.tests.acceptance import AcceptanceTestCase
log = logging.getLogger(__name__)
class CSMHistogramAcceptanceTest(AcceptanceTestCase):
    """Validate the courseware_studentmodule -> luigi -> analytics db data pipeline"""

    ENVIRONMENT = 'acceptance'
    SQL_FILE = 'courseware_studentmodule_for_histograms.sql'

    # NOTE: the previous setUp() override only called super() and has been
    # removed as dead code; the base class hook still runs.

    def load_data_from_sql_file(self):
        """
        External Effect: Drops courseware_studentmodule table and loads it with data from a static file.
        """
        self.import_db.execute_sql_file(
            os.path.join(self.data_dir, 'input', self.SQL_FILE)
        )

    def test_grade_dist_db_to_db(self):
        """
        Tests the grade_dist data flow starting from courseware_studentmodule table in mysql, including running sqoop
        """
        self.load_data_from_sql_file()
        self.run_grade_dist_sqoop_workflow()
        self.validate_grade_dist_in_analytics_db()

    def run_grade_dist_sqoop_workflow(self):
        """
        Preconditions: Populated courseware_studentmodule table in the MySQL database.
        External Effect: Generates a sqoop dump with the contents of courseware_studentmodule from the MySQL
            database for the test course and stores it in S3.  Then populates the analytics database table
            grade_distribution with histogram data.
        """
        self.task.launch([
            'GradeDistFromSqoopToMySQLWorkflow',
            '--import-credentials', self.import_db.credentials_file_url,
            '--credentials', self.export_db.credentials_file_url,
            '--name', self.ENVIRONMENT,
            '--dest', url_path_join(self.test_out, self.ENVIRONMENT),
            '--num-mappers', str(self.NUM_MAPPERS),
            '--n-reduce-tasks', str(self.NUM_REDUCERS),
            '--sqoop-overwrite',
        ])

    def validate_grade_dist_in_analytics_db(self):
        """
        Preconditions: Table grade_distribution has been populated by luigi workflows
        """
        expected_grade_dist_rows = (
            # (module_id, course_id, grade, max_grade, count)
            ("i4x://stanford/analytics/problem/test1", "stanford/analytics/tests", 0.0, 2.0, 3),
            ("i4x://stanford/analytics/problem/test1", "stanford/analytics/tests", 1.0, 2.0, 2),
            ("i4x://stanford/analytics/problem/test1", "stanford/analytics/tests", 2.0, 2.0, 1),
            ("i4x://stanford/analytics/problem/test2", "stanford/analytics/tests", 0.0, 1.0, 1),
            ("i4x://stanford/analytics/problem/test2", "stanford/analytics/tests", 1.0, 1.0, 1),
            ("i4x://stanford/analytics/problem/test2", "stanford/analytics/tests", 1.0, None, 1),  # max_grade = NULL
        )
        with self.export_db.cursor() as cursor:
            cursor.execute('SELECT `module_id`, `course_id`, `grade`, `max_grade`, `count` from grade_distribution')
            all_rows = cursor.fetchall()
        # Compare as sets: row order from the database is not deterministic.
        self.assertEqual(set(all_rows), set(expected_grade_dist_rows))

    def test_seq_open_dist_db_to_db(self):
        """
        Tests the seq_open_dist data flow starting from courseware_studentmodule table in mysql, including running sqoop
        """
        self.load_data_from_sql_file()
        self.run_seq_open_dist_sqoop_workflow()
        self.validate_seq_open_dist_in_analytics_db()

    def run_seq_open_dist_sqoop_workflow(self):
        """
        Preconditions: Populated courseware_studentmodule table in the MySQL database.
        External Effect: Generates a sqoop dump with the contents of courseware_studentmodule from the MySQL
            database for the test course and stores it in S3.  Then populates the analytics database table
            sequential_open_distribution with histogram data.
        """
        self.task.launch([
            'SeqOpenDistFromSqoopToMySQLWorkflow',
            '--import-credentials', self.import_db.credentials_file_url,
            '--credentials', self.export_db.credentials_file_url,
            '--name', self.ENVIRONMENT,
            '--dest', url_path_join(self.test_out, self.ENVIRONMENT),
            '--num-mappers', str(self.NUM_MAPPERS),
            '--n-reduce-tasks', str(self.NUM_REDUCERS),
            '--sqoop-overwrite',
        ])

    def validate_seq_open_dist_in_analytics_db(self):
        """
        Preconditions: Table sequential_open_distribution has been populated by luigi workflows
        """
        expected_seq_open_dist_rows = (
            # (module_id, course_id, count)
            ("i4x://stanford/analytics/sequential/test1", "stanford/analytics/tests", 1),
            ("i4x://stanford/analytics/sequential/test2", "stanford/analytics/tests", 2),
        )
        with self.export_db.cursor() as cursor:
            cursor.execute('SELECT `module_id`, `course_id`, `count` from sequential_open_distribution')
            all_rows = cursor.fetchall()
        # Compare as sets: row order from the database is not deterministic.
        self.assertEqual(set(all_rows), set(expected_seq_open_dist_rows))
| agpl-3.0 |
wfxiang08/django190 | tests/template_tests/syntax_tests/test_i18n.py | 16 | 21207 | # coding: utf-8
from __future__ import unicode_literals
from django.template import TemplateSyntaxError
from django.test import SimpleTestCase
from django.utils import translation
from django.utils.safestring import mark_safe
from ..utils import setup
class I18nTagTests(SimpleTestCase):
libraries = {
'custom': 'template_tests.templatetags.custom',
'i18n': 'django.templatetags.i18n',
}
@setup({'i18n01': '{% load i18n %}{% trans \'xxxyyyxxx\' %}'})
def test_i18n01(self):
"""
simple translation of a string delimited by '
"""
output = self.engine.render_to_string('i18n01')
self.assertEqual(output, 'xxxyyyxxx')
@setup({'i18n02': '{% load i18n %}{% trans "xxxyyyxxx" %}'})
def test_i18n02(self):
"""
simple translation of a string delimited by "
"""
output = self.engine.render_to_string('i18n02')
self.assertEqual(output, 'xxxyyyxxx')
@setup({'i18n03': '{% load i18n %}{% blocktrans %}{{ anton }}{% endblocktrans %}'})
def test_i18n03(self):
"""
simple translation of a variable
"""
output = self.engine.render_to_string('i18n03', {'anton': b'\xc3\x85'})
self.assertEqual(output, 'Å')
@setup({'i18n04': '{% load i18n %}{% blocktrans with berta=anton|lower %}{{ berta }}{% endblocktrans %}'})
def test_i18n04(self):
"""
simple translation of a variable and filter
"""
output = self.engine.render_to_string('i18n04', {'anton': b'\xc3\x85'})
self.assertEqual(output, 'å')
@setup({'legacyi18n04': '{% load i18n %}'
'{% blocktrans with anton|lower as berta %}{{ berta }}{% endblocktrans %}'})
def test_legacyi18n04(self):
"""
simple translation of a variable and filter
"""
output = self.engine.render_to_string('legacyi18n04', {'anton': b'\xc3\x85'})
self.assertEqual(output, 'å')
@setup({'i18n05': '{% load i18n %}{% blocktrans %}xxx{{ anton }}xxx{% endblocktrans %}'})
def test_i18n05(self):
"""
simple translation of a string with interpolation
"""
output = self.engine.render_to_string('i18n05', {'anton': 'yyy'})
self.assertEqual(output, 'xxxyyyxxx')
@setup({'i18n06': '{% load i18n %}{% trans "Page not found" %}'})
def test_i18n06(self):
"""
simple translation of a string to german
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n06')
self.assertEqual(output, 'Seite nicht gefunden')
@setup({'i18n07': '{% load i18n %}'
'{% blocktrans count counter=number %}singular{% plural %}'
'{{ counter }} plural{% endblocktrans %}'})
def test_i18n07(self):
"""
translation of singular form
"""
output = self.engine.render_to_string('i18n07', {'number': 1})
self.assertEqual(output, 'singular')
@setup({'legacyi18n07': '{% load i18n %}'
'{% blocktrans count number as counter %}singular{% plural %}'
'{{ counter }} plural{% endblocktrans %}'})
def test_legacyi18n07(self):
"""
translation of singular form
"""
output = self.engine.render_to_string('legacyi18n07', {'number': 1})
self.assertEqual(output, 'singular')
@setup({'i18n08': '{% load i18n %}'
'{% blocktrans count number as counter %}singular{% plural %}'
'{{ counter }} plural{% endblocktrans %}'})
def test_i18n08(self):
"""
translation of plural form
"""
output = self.engine.render_to_string('i18n08', {'number': 2})
self.assertEqual(output, '2 plural')
@setup({'legacyi18n08': '{% load i18n %}'
'{% blocktrans count counter=number %}singular{% plural %}'
'{{ counter }} plural{% endblocktrans %}'})
def test_legacyi18n08(self):
"""
translation of plural form
"""
output = self.engine.render_to_string('legacyi18n08', {'number': 2})
self.assertEqual(output, '2 plural')
@setup({'i18n09': '{% load i18n %}{% trans "Page not found" noop %}'})
def test_i18n09(self):
"""
simple non-translation (only marking) of a string to german
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n09')
self.assertEqual(output, 'Page not found')
@setup({'i18n10': '{{ bool|yesno:_("yes,no,maybe") }}'})
def test_i18n10(self):
"""
translation of a variable with a translated filter
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n10', {'bool': True})
self.assertEqual(output, 'Ja')
@setup({'i18n11': '{{ bool|yesno:"ja,nein" }}'})
def test_i18n11(self):
"""
translation of a variable with a non-translated filter
"""
output = self.engine.render_to_string('i18n11', {'bool': True})
self.assertEqual(output, 'ja')
@setup({'i18n12': '{% load i18n %}'
'{% get_available_languages as langs %}{% for lang in langs %}'
'{% ifequal lang.0 "de" %}{{ lang.0 }}{% endifequal %}{% endfor %}'})
def test_i18n12(self):
"""
usage of the get_available_languages tag
"""
output = self.engine.render_to_string('i18n12')
self.assertEqual(output, 'de')
@setup({'i18n13': '{{ _("Password") }}'})
def test_i18n13(self):
"""
translation of constant strings
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n13')
self.assertEqual(output, 'Passwort')
@setup({'i18n14': '{% cycle "foo" _("Password") _(\'Password\') as c %} {% cycle c %} {% cycle c %}'})
def test_i18n14(self):
"""
translation of constant strings
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n14')
self.assertEqual(output, 'foo Passwort Passwort')
@setup({'i18n15': '{{ absent|default:_("Password") }}'})
def test_i18n15(self):
"""
translation of constant strings
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n15', {'absent': ''})
self.assertEqual(output, 'Passwort')
@setup({'i18n16': '{{ _("<") }}'})
def test_i18n16(self):
"""
translation of constant strings
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n16')
self.assertEqual(output, '<')
@setup({'i18n17': '{% load i18n %}'
'{% blocktrans with berta=anton|escape %}{{ berta }}{% endblocktrans %}'})
def test_i18n17(self):
"""
Escaping inside blocktrans and trans works as if it was directly in the template.
"""
output = self.engine.render_to_string('i18n17', {'anton': 'α & β'})
self.assertEqual(output, 'α & β')
@setup({'i18n18': '{% load i18n %}'
'{% blocktrans with berta=anton|force_escape %}{{ berta }}{% endblocktrans %}'})
def test_i18n18(self):
output = self.engine.render_to_string('i18n18', {'anton': 'α & β'})
self.assertEqual(output, 'α & β')
@setup({'i18n19': '{% load i18n %}{% blocktrans %}{{ andrew }}{% endblocktrans %}'})
def test_i18n19(self):
output = self.engine.render_to_string('i18n19', {'andrew': 'a & b'})
self.assertEqual(output, 'a & b')
@setup({'i18n20': '{% load i18n %}{% trans andrew %}'})
def test_i18n20(self):
output = self.engine.render_to_string('i18n20', {'andrew': 'a & b'})
self.assertEqual(output, 'a & b')
@setup({'i18n21': '{% load i18n %}{% blocktrans %}{{ andrew }}{% endblocktrans %}'})
def test_i18n21(self):
output = self.engine.render_to_string('i18n21', {'andrew': mark_safe('a & b')})
self.assertEqual(output, 'a & b')
@setup({'i18n22': '{% load i18n %}{% trans andrew %}'})
def test_i18n22(self):
output = self.engine.render_to_string('i18n22', {'andrew': mark_safe('a & b')})
self.assertEqual(output, 'a & b')
@setup({'legacyi18n17': '{% load i18n %}'
'{% blocktrans with anton|escape as berta %}{{ berta }}{% endblocktrans %}'})
def test_legacyi18n17(self):
output = self.engine.render_to_string('legacyi18n17', {'anton': 'α & β'})
self.assertEqual(output, 'α & β')
@setup({'legacyi18n18': '{% load i18n %}'
'{% blocktrans with anton|force_escape as berta %}'
'{{ berta }}{% endblocktrans %}'})
def test_legacyi18n18(self):
output = self.engine.render_to_string('legacyi18n18', {'anton': 'α & β'})
self.assertEqual(output, 'α & β')
@setup({'i18n23': '{% load i18n %}{% trans "Page not found"|capfirst|slice:"6:" %}'})
def test_i18n23(self):
"""
#5972 - Use filters with the {% trans %} tag
"""
with translation.override('de'):
output = self.engine.render_to_string('i18n23')
self.assertEqual(output, 'nicht gefunden')
@setup({'i18n24': '{% load i18n %}{% trans \'Page not found\'|upper %}'})
def test_i18n24(self):
with translation.override('de'):
output = self.engine.render_to_string('i18n24')
self.assertEqual(output, 'SEITE NICHT GEFUNDEN')
@setup({'i18n25': '{% load i18n %}{% trans somevar|upper %}'})
def test_i18n25(self):
with translation.override('de'):
output = self.engine.render_to_string('i18n25', {'somevar': 'Page not found'})
self.assertEqual(output, 'SEITE NICHT GEFUNDEN')
@setup({'i18n26': '{% load i18n %}'
'{% blocktrans with extra_field=myextra_field count counter=number %}'
'singular {{ extra_field }}{% plural %}plural{% endblocktrans %}'})
def test_i18n26(self):
"""
translation of plural form with extra field in singular form (#13568)
"""
output = self.engine.render_to_string('i18n26', {'myextra_field': 'test', 'number': 1})
self.assertEqual(output, 'singular test')
@setup({'legacyi18n26': '{% load i18n %}'
'{% blocktrans with myextra_field as extra_field count number as counter %}'
'singular {{ extra_field }}{% plural %}plural{% endblocktrans %}'})
def test_legacyi18n26(self):
output = self.engine.render_to_string('legacyi18n26', {'myextra_field': 'test', 'number': 1})
self.assertEqual(output, 'singular test')
@setup({'i18n27': '{% load i18n %}{% blocktrans count counter=number %}'
'{{ counter }} result{% plural %}{{ counter }} results'
'{% endblocktrans %}'})
def test_i18n27(self):
"""
translation of singular form in russian (#14126)
"""
with translation.override('ru'):
output = self.engine.render_to_string('i18n27', {'number': 1})
self.assertEqual(output, '1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442')
@setup({'legacyi18n27': '{% load i18n %}'
'{% blocktrans count number as counter %}{{ counter }} result'
'{% plural %}{{ counter }} results{% endblocktrans %}'})
def test_legacyi18n27(self):
with translation.override('ru'):
output = self.engine.render_to_string('legacyi18n27', {'number': 1})
self.assertEqual(output, '1 \u0440\u0435\u0437\u0443\u043b\u044c\u0442\u0430\u0442')
@setup({'i18n28': '{% load i18n %}'
'{% blocktrans with a=anton b=berta %}{{ a }} + {{ b }}{% endblocktrans %}'})
def test_i18n28(self):
"""
simple translation of multiple variables
"""
output = self.engine.render_to_string('i18n28', {'anton': 'α', 'berta': 'β'})
self.assertEqual(output, 'α + β')
@setup({'legacyi18n28': '{% load i18n %}'
'{% blocktrans with anton as a and berta as b %}'
'{{ a }} + {{ b }}{% endblocktrans %}'})
def test_legacyi18n28(self):
output = self.engine.render_to_string('legacyi18n28', {'anton': 'α', 'berta': 'β'})
self.assertEqual(output, 'α + β')
# retrieving language information
@setup({'i18n28_2': '{% load i18n %}'
'{% get_language_info for "de" as l %}'
'{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}'})
def test_i18n28_2(self):
output = self.engine.render_to_string('i18n28_2')
self.assertEqual(output, 'de: German/Deutsch bidi=False')
@setup({'i18n29': '{% load i18n %}'
'{% get_language_info for LANGUAGE_CODE as l %}'
'{{ l.code }}: {{ l.name }}/{{ l.name_local }} bidi={{ l.bidi }}'})
def test_i18n29(self):
output = self.engine.render_to_string('i18n29', {'LANGUAGE_CODE': 'fi'})
self.assertEqual(output, 'fi: Finnish/suomi bidi=False')
@setup({'i18n30': '{% load i18n %}'
'{% get_language_info_list for langcodes as langs %}'
'{% for l in langs %}{{ l.code }}: {{ l.name }}/'
'{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}'})
def test_i18n30(self):
output = self.engine.render_to_string('i18n30', {'langcodes': ['it', 'no']})
self.assertEqual(output, 'it: Italian/italiano bidi=False; no: Norwegian/norsk bidi=False; ')
@setup({'i18n31': '{% load i18n %}'
'{% get_language_info_list for langcodes as langs %}'
'{% for l in langs %}{{ l.code }}: {{ l.name }}/'
'{{ l.name_local }} bidi={{ l.bidi }}; {% endfor %}'})
def test_i18n31(self):
output = self.engine.render_to_string('i18n31', {'langcodes': (('sl', 'Slovenian'), ('fa', 'Persian'))})
self.assertEqual(
output,
'sl: Slovenian/Sloven\u0161\u010dina bidi=False; '
'fa: Persian/\u0641\u0627\u0631\u0633\u06cc bidi=True; '
)
@setup({'i18n32': '{% load i18n %}{{ "hu"|language_name }} '
'{{ "hu"|language_name_local }} {{ "hu"|language_bidi }} '
'{{ "hu"|language_name_translated }}'})
def test_i18n32(self):
output = self.engine.render_to_string('i18n32')
self.assertEqual(output, 'Hungarian Magyar False Hungarian')
with translation.override('cs'):
output = self.engine.render_to_string('i18n32')
self.assertEqual(output, 'Hungarian Magyar False maďarsky')
@setup({'i18n33': '{% load i18n %}'
'{{ langcode|language_name }} {{ langcode|language_name_local }} '
'{{ langcode|language_bidi }} {{ langcode|language_name_translated }}'})
def test_i18n33(self):
output = self.engine.render_to_string('i18n33', {'langcode': 'nl'})
self.assertEqual(output, 'Dutch Nederlands False Dutch')
with translation.override('cs'):
output = self.engine.render_to_string('i18n33', {'langcode': 'nl'})
self.assertEqual(output, 'Dutch Nederlands False nizozemsky')
# blocktrans handling of variables which are not in the context.
# this should work as if blocktrans was not there (#19915)
@setup({'i18n34': '{% load i18n %}{% blocktrans %}{{ missing }}{% endblocktrans %}'})
def test_i18n34(self):
output = self.engine.render_to_string('i18n34')
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID')
else:
self.assertEqual(output, '')
@setup({'i18n34_2': '{% load i18n %}{% blocktrans with a=\'α\' %}{{ missing }}{% endblocktrans %}'})
def test_i18n34_2(self):
output = self.engine.render_to_string('i18n34_2')
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID')
else:
self.assertEqual(output, '')
@setup({'i18n34_3': '{% load i18n %}{% blocktrans with a=anton %}{{ missing }}{% endblocktrans %}'})
def test_i18n34_3(self):
output = self.engine.render_to_string('i18n34_3', {'anton': '\xce\xb1'})
if self.engine.string_if_invalid:
self.assertEqual(output, 'INVALID')
else:
self.assertEqual(output, '')
# trans tag with as var
@setup({'i18n35': '{% load i18n %}{% trans "Page not found" as page_not_found %}{{ page_not_found }}'})
def test_i18n35(self):
with translation.override('de'):
output = self.engine.render_to_string('i18n35')
self.assertEqual(output, 'Seite nicht gefunden')
@setup({'i18n36': '{% load i18n %}'
'{% trans "Page not found" noop as page_not_found %}{{ page_not_found }}'})
def test_i18n36(self):
with translation.override('de'):
output = self.engine.render_to_string('i18n36')
self.assertEqual(output, 'Page not found')
@setup({'i18n37': '{% load i18n %}'
'{% trans "Page not found" as page_not_found %}'
'{% blocktrans %}Error: {{ page_not_found }}{% endblocktrans %}'})
def test_i18n37(self):
with translation.override('de'):
output = self.engine.render_to_string('i18n37')
self.assertEqual(output, 'Error: Seite nicht gefunden')
# Test whitespace in filter arguments
@setup({'i18n38': '{% load i18n custom %}'
'{% get_language_info for "de"|noop:"x y" as l %}'
'{{ l.code }}: {{ l.name }}/{{ l.name_local }}/'
'{{ l.name_translated }} bidi={{ l.bidi }}'})
def test_i18n38(self):
with translation.override('cs'):
output = self.engine.render_to_string('i18n38')
self.assertEqual(output, 'de: German/Deutsch/německy bidi=False')
@setup({'i18n38_2': '{% load i18n custom %}'
'{% get_language_info_list for langcodes|noop:"x y" as langs %}'
'{% for l in langs %}{{ l.code }}: {{ l.name }}/'
'{{ l.name_local }}/{{ l.name_translated }} '
'bidi={{ l.bidi }}; {% endfor %}'})
def test_i18n38_2(self):
with translation.override('cs'):
output = self.engine.render_to_string('i18n38_2', {'langcodes': ['it', 'fr']})
self.assertEqual(
output,
'it: Italian/italiano/italsky bidi=False; '
'fr: French/français/francouzsky bidi=False; '
)
@setup({'template': '{% load i18n %}{% trans %}A}'})
def test_syntax_error_no_arguments(self):
msg = "'trans' takes at least one argument"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
@setup({'template': '{% load i18n %}{% trans "Yes" badoption %}'})
def test_syntax_error_bad_option(self):
msg = "Unknown argument for 'trans' tag: 'badoption'"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
@setup({'template': '{% load i18n %}{% trans "Yes" as %}'})
def test_syntax_error_missing_assignment(self):
msg = "No argument provided to the 'trans' tag for the as option."
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
@setup({'template': '{% load i18n %}{% trans "Yes" as var context %}'})
def test_syntax_error_missing_context(self):
msg = "No argument provided to the 'trans' tag for the context option."
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
@setup({'template': '{% load i18n %}{% trans "Yes" context as var %}'})
def test_syntax_error_context_as(self):
msg = "Invalid argument 'as' provided to the 'trans' tag for the context option"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
@setup({'template': '{% load i18n %}{% trans "Yes" context noop %}'})
def test_syntax_error_context_noop(self):
msg = "Invalid argument 'noop' provided to the 'trans' tag for the context option"
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
@setup({'template': '{% load i18n %}{% trans "Yes" noop noop %}'})
def test_syntax_error_duplicate_option(self):
msg = "The 'noop' option was specified more than once."
with self.assertRaisesMessage(TemplateSyntaxError, msg):
self.engine.render_to_string('template')
| bsd-3-clause |
ConnorGBrewster/servo | tests/wpt/web-platform-tests/tools/third_party/html5lib/html5lib/constants.py | 102 | 83518 | from __future__ import absolute_import, division, unicode_literals
import string
EOF = None
# Mapping of parse-error codes to human-readable message templates.
# Messages use %-style formatting with named placeholders (e.g.
# "%(name)s"); the keys of those placeholders are presumably supplied by
# the code that raises the error — confirm against the tokenizer/parser.
E = {
    "null-character":
        "Null character in input stream, replaced with U+FFFD.",
    "invalid-codepoint":
        "Invalid codepoint in stream.",
    "incorrectly-placed-solidus":
        "Solidus (/) incorrectly placed in tag.",
    "incorrect-cr-newline-entity":
        "Incorrect CR newline entity, replaced with LF.",
    "illegal-windows-1252-entity":
        "Entity used with illegal number (windows-1252 reference).",
    "cant-convert-numeric-entity":
        "Numeric entity couldn't be converted to character "
        "(codepoint U+%(charAsInt)08x).",
    "illegal-codepoint-for-numeric-entity":
        "Numeric entity represents an illegal codepoint: "
        "U+%(charAsInt)08x.",
    "numeric-entity-without-semicolon":
        "Numeric entity didn't end with ';'.",
    "expected-numeric-entity-but-got-eof":
        "Numeric entity expected. Got end of file instead.",
    "expected-numeric-entity":
        "Numeric entity expected but none found.",
    "named-entity-without-semicolon":
        "Named entity didn't end with ';'.",
    "expected-named-entity":
        "Named entity expected. Got none.",
    "attributes-in-end-tag":
        "End tag contains unexpected attributes.",
    'self-closing-flag-on-end-tag':
        "End tag contains unexpected self-closing flag.",
    "expected-tag-name-but-got-right-bracket":
        "Expected tag name. Got '>' instead.",
    "expected-tag-name-but-got-question-mark":
        "Expected tag name. Got '?' instead. (HTML doesn't "
        "support processing instructions.)",
    "expected-tag-name":
        "Expected tag name. Got something else instead",
    "expected-closing-tag-but-got-right-bracket":
        "Expected closing tag. Got '>' instead. Ignoring '</>'.",
    "expected-closing-tag-but-got-eof":
        "Expected closing tag. Unexpected end of file.",
    "expected-closing-tag-but-got-char":
        "Expected closing tag. Unexpected character '%(data)s' found.",
    "eof-in-tag-name":
        "Unexpected end of file in the tag name.",
    "expected-attribute-name-but-got-eof":
        "Unexpected end of file. Expected attribute name instead.",
    "eof-in-attribute-name":
        "Unexpected end of file in attribute name.",
    "invalid-character-in-attribute-name":
        "Invalid character in attribute name",
    "duplicate-attribute":
        "Dropped duplicate attribute on tag.",
    "expected-end-of-tag-name-but-got-eof":
        "Unexpected end of file. Expected = or end of tag.",
    "expected-attribute-value-but-got-eof":
        "Unexpected end of file. Expected attribute value.",
    "expected-attribute-value-but-got-right-bracket":
        "Expected attribute value. Got '>' instead.",
    'equals-in-unquoted-attribute-value':
        "Unexpected = in unquoted attribute",
    'unexpected-character-in-unquoted-attribute-value':
        "Unexpected character in unquoted attribute",
    "invalid-character-after-attribute-name":
        "Unexpected character after attribute name.",
    "unexpected-character-after-attribute-value":
        "Unexpected character after attribute value.",
    "eof-in-attribute-value-double-quote":
        "Unexpected end of file in attribute value (\").",
    "eof-in-attribute-value-single-quote":
        "Unexpected end of file in attribute value (').",
    "eof-in-attribute-value-no-quotes":
        "Unexpected end of file in attribute value.",
    "unexpected-EOF-after-solidus-in-tag":
        "Unexpected end of file in tag. Expected >",
    "unexpected-character-after-solidus-in-tag":
        "Unexpected character after / in tag. Expected >",
    "expected-dashes-or-doctype":
        "Expected '--' or 'DOCTYPE'. Not found.",
    "unexpected-bang-after-double-dash-in-comment":
        "Unexpected ! after -- in comment",
    "unexpected-space-after-double-dash-in-comment":
        "Unexpected space after -- in comment",
    "incorrect-comment":
        "Incorrect comment.",
    "eof-in-comment":
        "Unexpected end of file in comment.",
    "eof-in-comment-end-dash":
        "Unexpected end of file in comment (-)",
    "unexpected-dash-after-double-dash-in-comment":
        "Unexpected '-' after '--' found in comment.",
    "eof-in-comment-double-dash":
        "Unexpected end of file in comment (--).",
    "eof-in-comment-end-space-state":
        "Unexpected end of file in comment.",
    "eof-in-comment-end-bang-state":
        "Unexpected end of file in comment.",
    "unexpected-char-in-comment":
        "Unexpected character in comment found.",
    "need-space-after-doctype":
        "No space after literal string 'DOCTYPE'.",
    "expected-doctype-name-but-got-right-bracket":
        "Unexpected > character. Expected DOCTYPE name.",
    "expected-doctype-name-but-got-eof":
        "Unexpected end of file. Expected DOCTYPE name.",
    "eof-in-doctype-name":
        "Unexpected end of file in DOCTYPE name.",
    "eof-in-doctype":
        "Unexpected end of file in DOCTYPE.",
    "expected-space-or-right-bracket-in-doctype":
        "Expected space or '>'. Got '%(data)s'",
    "unexpected-end-of-doctype":
        "Unexpected end of DOCTYPE.",
    "unexpected-char-in-doctype":
        "Unexpected character in DOCTYPE.",
    "eof-in-innerhtml":
        "XXX innerHTML EOF",
    "unexpected-doctype":
        "Unexpected DOCTYPE. Ignored.",
    "non-html-root":
        "html needs to be the first start tag.",
    "expected-doctype-but-got-eof":
        "Unexpected End of file. Expected DOCTYPE.",
    "unknown-doctype":
        "Erroneous DOCTYPE.",
    "expected-doctype-but-got-chars":
        "Unexpected non-space characters. Expected DOCTYPE.",
    "expected-doctype-but-got-start-tag":
        "Unexpected start tag (%(name)s). Expected DOCTYPE.",
    "expected-doctype-but-got-end-tag":
        "Unexpected end tag (%(name)s). Expected DOCTYPE.",
    "end-tag-after-implied-root":
        "Unexpected end tag (%(name)s) after the (implied) root element.",
    "expected-named-closing-tag-but-got-eof":
        "Unexpected end of file. Expected end tag (%(name)s).",
    "two-heads-are-not-better-than-one":
        "Unexpected start tag head in existing head. Ignored.",
    "unexpected-end-tag":
        "Unexpected end tag (%(name)s). Ignored.",
    "unexpected-start-tag-out-of-my-head":
        "Unexpected start tag (%(name)s) that can be in head. Moved.",
    "unexpected-start-tag":
        "Unexpected start tag (%(name)s).",
    "missing-end-tag":
        "Missing end tag (%(name)s).",
    "missing-end-tags":
        "Missing end tags (%(name)s).",
    "unexpected-start-tag-implies-end-tag":
        "Unexpected start tag (%(startName)s) "
        "implies end tag (%(endName)s).",
    "unexpected-start-tag-treated-as":
        "Unexpected start tag (%(originalName)s). Treated as %(newName)s.",
    "deprecated-tag":
        "Unexpected start tag %(name)s. Don't use it!",
    "unexpected-start-tag-ignored":
        "Unexpected start tag %(name)s. Ignored.",
    "expected-one-end-tag-but-got-another":
        "Unexpected end tag (%(gotName)s). "
        "Missing end tag (%(expectedName)s).",
    "end-tag-too-early":
        "End tag (%(name)s) seen too early. Expected other end tag.",
    "end-tag-too-early-named":
        "Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s).",
    "end-tag-too-early-ignored":
        "End tag (%(name)s) seen too early. Ignored.",
    "adoption-agency-1.1":
        "End tag (%(name)s) violates step 1, "
        "paragraph 1 of the adoption agency algorithm.",
    "adoption-agency-1.2":
        "End tag (%(name)s) violates step 1, "
        "paragraph 2 of the adoption agency algorithm.",
    "adoption-agency-1.3":
        "End tag (%(name)s) violates step 1, "
        "paragraph 3 of the adoption agency algorithm.",
    "adoption-agency-4.4":
        "End tag (%(name)s) violates step 4, "
        "paragraph 4 of the adoption agency algorithm.",
    "unexpected-end-tag-treated-as":
        "Unexpected end tag (%(originalName)s). Treated as %(newName)s.",
    "no-end-tag":
        "This element (%(name)s) has no end tag.",
    "unexpected-implied-end-tag-in-table":
        "Unexpected implied end tag (%(name)s) in the table phase.",
    "unexpected-implied-end-tag-in-table-body":
        "Unexpected implied end tag (%(name)s) in the table body phase.",
    "unexpected-char-implies-table-voodoo":
        "Unexpected non-space characters in "
        "table context caused voodoo mode.",
    "unexpected-hidden-input-in-table":
        "Unexpected input with type hidden in table context.",
    "unexpected-form-in-table":
        "Unexpected form in table context.",
    "unexpected-start-tag-implies-table-voodoo":
        "Unexpected start tag (%(name)s) in "
        "table context caused voodoo mode.",
    "unexpected-end-tag-implies-table-voodoo":
        "Unexpected end tag (%(name)s) in "
        "table context caused voodoo mode.",
    "unexpected-cell-in-table-body":
        "Unexpected table cell start tag (%(name)s) "
        "in the table body phase.",
    "unexpected-cell-end-tag":
        "Got table cell end tag (%(name)s) "
        "while required end tags are missing.",
    "unexpected-end-tag-in-table-body":
        "Unexpected end tag (%(name)s) in the table body phase. Ignored.",
    "unexpected-implied-end-tag-in-table-row":
        "Unexpected implied end tag (%(name)s) in the table row phase.",
    "unexpected-end-tag-in-table-row":
        "Unexpected end tag (%(name)s) in the table row phase. Ignored.",
    "unexpected-select-in-select":
        "Unexpected select start tag in the select phase "
        "treated as select end tag.",
    "unexpected-input-in-select":
        "Unexpected input start tag in the select phase.",
    "unexpected-start-tag-in-select":
        "Unexpected start tag token (%(name)s in the select phase. "
        "Ignored.",
    "unexpected-end-tag-in-select":
        "Unexpected end tag (%(name)s) in the select phase. Ignored.",
    "unexpected-table-element-start-tag-in-select-in-table":
        "Unexpected table element start tag (%(name)s) in the select in table phase.",
    "unexpected-table-element-end-tag-in-select-in-table":
        "Unexpected table element end tag (%(name)s) in the select in table phase.",
    "unexpected-char-after-body":
        "Unexpected non-space characters in the after body phase.",
    "unexpected-start-tag-after-body":
        "Unexpected start tag token (%(name)s)"
        " in the after body phase.",
    "unexpected-end-tag-after-body":
        "Unexpected end tag token (%(name)s)"
        " in the after body phase.",
    "unexpected-char-in-frameset":
        "Unexpected characters in the frameset phase. Characters ignored.",
    "unexpected-start-tag-in-frameset":
        "Unexpected start tag token (%(name)s)"
        " in the frameset phase. Ignored.",
    "unexpected-frameset-in-frameset-innerhtml":
        "Unexpected end tag token (frameset) "
        "in the frameset phase (innerHTML).",
    "unexpected-end-tag-in-frameset":
        "Unexpected end tag token (%(name)s)"
        " in the frameset phase. Ignored.",
    "unexpected-char-after-frameset":
        "Unexpected non-space characters in the "
        "after frameset phase. Ignored.",
    "unexpected-start-tag-after-frameset":
        "Unexpected start tag (%(name)s)"
        " in the after frameset phase. Ignored.",
    "unexpected-end-tag-after-frameset":
        "Unexpected end tag (%(name)s)"
        " in the after frameset phase. Ignored.",
    "unexpected-end-tag-after-body-innerhtml":
        "Unexpected end tag after body(innerHtml)",
    "expected-eof-but-got-char":
        "Unexpected non-space characters. Expected end of file.",
    "expected-eof-but-got-start-tag":
        "Unexpected start tag (%(name)s)"
        ". Expected end of file.",
    "expected-eof-but-got-end-tag":
        "Unexpected end tag (%(name)s)"
        ". Expected end of file.",
    "eof-in-table":
        "Unexpected end of file. Expected table content.",
    "eof-in-select":
        "Unexpected end of file. Expected select content.",
    "eof-in-frameset":
        "Unexpected end of file. Expected frameset content.",
    "eof-in-script-in-script":
        "Unexpected end of file. Expected script content.",
    "eof-in-foreign-lands":
        "Unexpected end of file. Expected foreign content",
    "non-void-element-with-trailing-solidus":
        "Trailing solidus not allowed on element %(name)s",
    "unexpected-html-element-in-foreign-content":
        "Element %(name)s not allowed in a non-html context",
    "unexpected-end-tag-before-html":
        "Unexpected end tag (%(name)s) before html.",
    "unexpected-inhead-noscript-tag":
        "Element %(name)s not allowed in a inhead-noscript context",
    "eof-in-head-noscript":
        "Unexpected end of file. Expected inhead-noscript content",
    "char-in-head-noscript":
        "Unexpected non-space character. Expected inhead-noscript content",
    "XXX-undefined-error":
        "Undefined error (this sucks and should be fixed)",
}
# XML namespace URIs used to qualify element and attribute names below.
namespaces = {
    "html": "http://www.w3.org/1999/xhtml",
    "mathml": "http://www.w3.org/1998/Math/MathML",
    "svg": "http://www.w3.org/2000/svg",
    "xlink": "http://www.w3.org/1999/xlink",
    "xml": "http://www.w3.org/XML/1998/namespace",
    "xmlns": "http://www.w3.org/2000/xmlns/"
}
# (namespace, tag-name) pairs that act as scope boundaries for the HTML
# spec's "has an element in scope" checks during tree construction.
scopingElements = frozenset([
    (namespaces["html"], "applet"),
    (namespaces["html"], "caption"),
    (namespaces["html"], "html"),
    (namespaces["html"], "marquee"),
    (namespaces["html"], "object"),
    (namespaces["html"], "table"),
    (namespaces["html"], "td"),
    (namespaces["html"], "th"),
    (namespaces["mathml"], "mi"),
    (namespaces["mathml"], "mo"),
    (namespaces["mathml"], "mn"),
    (namespaces["mathml"], "ms"),
    (namespaces["mathml"], "mtext"),
    (namespaces["mathml"], "annotation-xml"),
    (namespaces["svg"], "foreignObject"),
    (namespaces["svg"], "desc"),
    (namespaces["svg"], "title"),
])
# HTML formatting elements: all live in the html namespace, so build the
# (namespace, tag-name) pairs from a plain list of tag names.
formattingElements = frozenset(
    (namespaces["html"], tag)
    for tag in (
        "a", "b", "big", "code", "em", "font", "i", "nobr",
        "s", "small", "strike", "strong", "tt", "u",
    )
)
# (namespace, tag-name) pairs in the HTML spec's "special" category of
# elements, which receive distinct handling during tree construction.
specialElements = frozenset([
    (namespaces["html"], "address"),
    (namespaces["html"], "applet"),
    (namespaces["html"], "area"),
    (namespaces["html"], "article"),
    (namespaces["html"], "aside"),
    (namespaces["html"], "base"),
    (namespaces["html"], "basefont"),
    (namespaces["html"], "bgsound"),
    (namespaces["html"], "blockquote"),
    (namespaces["html"], "body"),
    (namespaces["html"], "br"),
    (namespaces["html"], "button"),
    (namespaces["html"], "caption"),
    (namespaces["html"], "center"),
    (namespaces["html"], "col"),
    (namespaces["html"], "colgroup"),
    (namespaces["html"], "command"),
    (namespaces["html"], "dd"),
    (namespaces["html"], "details"),
    (namespaces["html"], "dir"),
    (namespaces["html"], "div"),
    (namespaces["html"], "dl"),
    (namespaces["html"], "dt"),
    (namespaces["html"], "embed"),
    (namespaces["html"], "fieldset"),
    (namespaces["html"], "figure"),
    (namespaces["html"], "footer"),
    (namespaces["html"], "form"),
    (namespaces["html"], "frame"),
    (namespaces["html"], "frameset"),
    (namespaces["html"], "h1"),
    (namespaces["html"], "h2"),
    (namespaces["html"], "h3"),
    (namespaces["html"], "h4"),
    (namespaces["html"], "h5"),
    (namespaces["html"], "h6"),
    (namespaces["html"], "head"),
    (namespaces["html"], "header"),
    (namespaces["html"], "hr"),
    (namespaces["html"], "html"),
    (namespaces["html"], "iframe"),
    # Note that image is commented out in the spec as "this isn't an
    # element that can end up on the stack, so it doesn't matter,"
    (namespaces["html"], "image"),
    (namespaces["html"], "img"),
    (namespaces["html"], "input"),
    (namespaces["html"], "isindex"),
    (namespaces["html"], "li"),
    (namespaces["html"], "link"),
    (namespaces["html"], "listing"),
    (namespaces["html"], "marquee"),
    (namespaces["html"], "menu"),
    (namespaces["html"], "meta"),
    (namespaces["html"], "nav"),
    (namespaces["html"], "noembed"),
    (namespaces["html"], "noframes"),
    (namespaces["html"], "noscript"),
    (namespaces["html"], "object"),
    (namespaces["html"], "ol"),
    (namespaces["html"], "p"),
    (namespaces["html"], "param"),
    (namespaces["html"], "plaintext"),
    (namespaces["html"], "pre"),
    (namespaces["html"], "script"),
    (namespaces["html"], "section"),
    (namespaces["html"], "select"),
    (namespaces["html"], "style"),
    (namespaces["html"], "table"),
    (namespaces["html"], "tbody"),
    (namespaces["html"], "td"),
    (namespaces["html"], "textarea"),
    (namespaces["html"], "tfoot"),
    (namespaces["html"], "th"),
    (namespaces["html"], "thead"),
    (namespaces["html"], "title"),
    (namespaces["html"], "tr"),
    (namespaces["html"], "ul"),
    (namespaces["html"], "wbr"),
    (namespaces["html"], "xmp"),
    (namespaces["svg"], "foreignObject")
])
# (namespace, tag-name) pairs that are HTML integration points: content
# inside them is parsed as HTML rather than as foreign content.
htmlIntegrationPointElements = frozenset([
    (namespaces["mathml"], "annotation-xml"),
    (namespaces["svg"], "foreignObject"),
    (namespaces["svg"], "desc"),
    (namespaces["svg"], "title")
])
# MathML text integration points; all live in the MathML namespace, so
# build the (namespace, tag-name) pairs from a plain list of names.
mathmlTextIntegrationPointElements = frozenset(
    (namespaces["mathml"], tag)
    for tag in ("mi", "mo", "mn", "ms", "mtext")
)
# SVG attribute names are case-sensitive, but HTML tokenization
# lowercases attribute names; this table maps each lowercased form back
# to its canonical mixed-case spelling. Every key is exactly the
# lowercased value, so the dict is derived from the canonical names.
adjustSVGAttributes = {
    attr.lower(): attr
    for attr in (
        "attributeName",
        "attributeType",
        "baseFrequency",
        "baseProfile",
        "calcMode",
        "clipPathUnits",
        "contentScriptType",
        "contentStyleType",
        "diffuseConstant",
        "edgeMode",
        "externalResourcesRequired",
        "filterRes",
        "filterUnits",
        "glyphRef",
        "gradientTransform",
        "gradientUnits",
        "kernelMatrix",
        "kernelUnitLength",
        "keyPoints",
        "keySplines",
        "keyTimes",
        "lengthAdjust",
        "limitingConeAngle",
        "markerHeight",
        "markerUnits",
        "markerWidth",
        "maskContentUnits",
        "maskUnits",
        "numOctaves",
        "pathLength",
        "patternContentUnits",
        "patternTransform",
        "patternUnits",
        "pointsAtX",
        "pointsAtY",
        "pointsAtZ",
        "preserveAlpha",
        "preserveAspectRatio",
        "primitiveUnits",
        "refX",
        "refY",
        "repeatCount",
        "repeatDur",
        "requiredExtensions",
        "requiredFeatures",
        "specularConstant",
        "specularExponent",
        "spreadMethod",
        "startOffset",
        "stdDeviation",
        "stitchTiles",
        "surfaceScale",
        "systemLanguage",
        "tableValues",
        "targetX",
        "targetY",
        "textLength",
        "viewBox",
        "viewTarget",
        "xChannelSelector",
        "yChannelSelector",
        "zoomAndPan",
    )
}
adjustMathMLAttributes = {"definitionurl": "definitionURL"}
# Attributes on foreign (SVG/MathML) elements that must be re-qualified:
# maps the literal attribute name to a (prefix, local name, namespace
# URI) triple.  Note xmlns itself gets no prefix.
adjustForeignAttributes = {
    "xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
    "xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
    "xlink:href": ("xlink", "href", namespaces["xlink"]),
    "xlink:role": ("xlink", "role", namespaces["xlink"]),
    "xlink:show": ("xlink", "show", namespaces["xlink"]),
    "xlink:title": ("xlink", "title", namespaces["xlink"]),
    "xlink:type": ("xlink", "type", namespaces["xlink"]),
    "xml:base": ("xml", "base", namespaces["xml"]),
    "xml:lang": ("xml", "lang", namespaces["xml"]),
    "xml:space": ("xml", "space", namespaces["xml"]),
    "xmlns": (None, "xmlns", namespaces["xmlns"]),
    "xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
}
# Inverse of adjustForeignAttributes: maps (namespace URI, local name)
# back to the qualified attribute name.  The prefix is intentionally not
# part of the key.  A dict comprehension replaces the redundant
# dict([...]) around a list comprehension (flake8-comprehensions C404);
# the resulting mapping is identical.
unadjustForeignAttributes = {
    (ns, local): qname
    for qname, (prefix, local, ns) in adjustForeignAttributes.items()
}
# The five characters the HTML spec treats as whitespace; a frozenset
# built directly from the string of those characters.
spaceCharacters = frozenset("\t\n\u000C \r")
# Table-related elements that trigger the table insertion modes.
tableInsertModeElements = frozenset((
    "table", "tbody", "tfoot", "thead", "tr",
))
# Character-class sets for O(1) membership tests during tokenization.
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = frozenset(string.ascii_letters)
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)

# Maps each uppercase ASCII codepoint to its lowercase counterpart
# (shape suitable for str.translate); unlike str.lower() it leaves
# non-ASCII codepoints untouched.  A dict comprehension replaces the
# redundant dict([...]) around a list comprehension
# (flake8-comprehensions C404); the resulting mapping is identical.
asciiUpper2Lower = {ord(c): ord(c.lower()) for c in string.ascii_uppercase}
# Heading elements need to be ordered (kept as a tuple, h1 first).
headingElements = tuple("h%d" % level for level in range(1, 7))
# Void elements: never have content and take no end tag.
voidElements = frozenset(
    "base command event-source link meta hr br img embed param area col "
    "input source track".split()
)
cdataElements = frozenset(['title', 'textarea'])
# Elements whose content is tokenized as raw text.
rcdataElements = frozenset(
    "style script xmp iframe noembed noframes noscript".split()
)
# Per-tag boolean attributes; the "" key applies to every tag.
# NOTE(review): presumably consumed by the serializer's
# minimize-boolean-attributes handling — confirm against the serializer.
# Several entries (scoped, seamless, irrelevant) are historical and no
# longer in the current HTML spec.
booleanAttributes = {
    "": frozenset(["irrelevant", "itemscope"]),
    "style": frozenset(["scoped"]),
    "img": frozenset(["ismap"]),
    "audio": frozenset(["autoplay", "controls"]),
    "video": frozenset(["autoplay", "controls"]),
    "script": frozenset(["defer", "async"]),
    "details": frozenset(["open"]),
    "datagrid": frozenset(["multiple", "disabled"]),
    "command": frozenset(["hidden", "disabled", "checked", "default"]),
    "hr": frozenset(["noshade"]),
    "menu": frozenset(["autosubmit"]),
    "fieldset": frozenset(["disabled", "readonly"]),
    "option": frozenset(["disabled", "readonly", "selected"]),
    "optgroup": frozenset(["disabled", "readonly"]),
    "button": frozenset(["disabled", "autofocus"]),
    "input": frozenset(["disabled", "readonly", "required", "autofocus", "checked", "ismap"]),
    "select": frozenset(["disabled", "readonly", "autofocus", "multiple"]),
    "output": frozenset(["disabled", "readonly"]),
    "iframe": frozenset(["seamless"]),
}
# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
# Index i corresponds to byte 0x80 + i; each entry is the Unicode
# codepoint Windows-1252 assigns that byte, with 65533 (U+FFFD
# REPLACEMENT CHARACTER) filling the slots Windows-1252 leaves undefined.
entitiesWindows1252 = (
    8364,   # 0x80  0x20AC  EURO SIGN
    65533,  # 0x81          UNDEFINED
    8218,   # 0x82  0x201A  SINGLE LOW-9 QUOTATION MARK
    402,    # 0x83  0x0192  LATIN SMALL LETTER F WITH HOOK
    8222,   # 0x84  0x201E  DOUBLE LOW-9 QUOTATION MARK
    8230,   # 0x85  0x2026  HORIZONTAL ELLIPSIS
    8224,   # 0x86  0x2020  DAGGER
    8225,   # 0x87  0x2021  DOUBLE DAGGER
    710,    # 0x88  0x02C6  MODIFIER LETTER CIRCUMFLEX ACCENT
    8240,   # 0x89  0x2030  PER MILLE SIGN
    352,    # 0x8A  0x0160  LATIN CAPITAL LETTER S WITH CARON
    8249,   # 0x8B  0x2039  SINGLE LEFT-POINTING ANGLE QUOTATION MARK
    338,    # 0x8C  0x0152  LATIN CAPITAL LIGATURE OE
    65533,  # 0x8D          UNDEFINED
    381,    # 0x8E  0x017D  LATIN CAPITAL LETTER Z WITH CARON
    65533,  # 0x8F          UNDEFINED
    65533,  # 0x90          UNDEFINED
    8216,   # 0x91  0x2018  LEFT SINGLE QUOTATION MARK
    8217,   # 0x92  0x2019  RIGHT SINGLE QUOTATION MARK
    8220,   # 0x93  0x201C  LEFT DOUBLE QUOTATION MARK
    8221,   # 0x94  0x201D  RIGHT DOUBLE QUOTATION MARK
    8226,   # 0x95  0x2022  BULLET
    8211,   # 0x96  0x2013  EN DASH
    8212,   # 0x97  0x2014  EM DASH
    732,    # 0x98  0x02DC  SMALL TILDE
    8482,   # 0x99  0x2122  TRADE MARK SIGN
    353,    # 0x9A  0x0161  LATIN SMALL LETTER S WITH CARON
    8250,   # 0x9B  0x203A  SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
    339,    # 0x9C  0x0153  LATIN SMALL LIGATURE OE
    65533,  # 0x9D          UNDEFINED
    382,    # 0x9E  0x017E  LATIN SMALL LETTER Z WITH CARON
    376     # 0x9F  0x0178  LATIN CAPITAL LETTER Y WITH DIAERESIS
)
xmlEntities = frozenset(['lt;', 'gt;', 'amp;', 'apos;', 'quot;'])
entities = {
"AElig": "\xc6",
"AElig;": "\xc6",
"AMP": "&",
"AMP;": "&",
"Aacute": "\xc1",
"Aacute;": "\xc1",
"Abreve;": "\u0102",
"Acirc": "\xc2",
"Acirc;": "\xc2",
"Acy;": "\u0410",
"Afr;": "\U0001d504",
"Agrave": "\xc0",
"Agrave;": "\xc0",
"Alpha;": "\u0391",
"Amacr;": "\u0100",
"And;": "\u2a53",
"Aogon;": "\u0104",
"Aopf;": "\U0001d538",
"ApplyFunction;": "\u2061",
"Aring": "\xc5",
"Aring;": "\xc5",
"Ascr;": "\U0001d49c",
"Assign;": "\u2254",
"Atilde": "\xc3",
"Atilde;": "\xc3",
"Auml": "\xc4",
"Auml;": "\xc4",
"Backslash;": "\u2216",
"Barv;": "\u2ae7",
"Barwed;": "\u2306",
"Bcy;": "\u0411",
"Because;": "\u2235",
"Bernoullis;": "\u212c",
"Beta;": "\u0392",
"Bfr;": "\U0001d505",
"Bopf;": "\U0001d539",
"Breve;": "\u02d8",
"Bscr;": "\u212c",
"Bumpeq;": "\u224e",
"CHcy;": "\u0427",
"COPY": "\xa9",
"COPY;": "\xa9",
"Cacute;": "\u0106",
"Cap;": "\u22d2",
"CapitalDifferentialD;": "\u2145",
"Cayleys;": "\u212d",
"Ccaron;": "\u010c",
"Ccedil": "\xc7",
"Ccedil;": "\xc7",
"Ccirc;": "\u0108",
"Cconint;": "\u2230",
"Cdot;": "\u010a",
"Cedilla;": "\xb8",
"CenterDot;": "\xb7",
"Cfr;": "\u212d",
"Chi;": "\u03a7",
"CircleDot;": "\u2299",
"CircleMinus;": "\u2296",
"CirclePlus;": "\u2295",
"CircleTimes;": "\u2297",
"ClockwiseContourIntegral;": "\u2232",
"CloseCurlyDoubleQuote;": "\u201d",
"CloseCurlyQuote;": "\u2019",
"Colon;": "\u2237",
"Colone;": "\u2a74",
"Congruent;": "\u2261",
"Conint;": "\u222f",
"ContourIntegral;": "\u222e",
"Copf;": "\u2102",
"Coproduct;": "\u2210",
"CounterClockwiseContourIntegral;": "\u2233",
"Cross;": "\u2a2f",
"Cscr;": "\U0001d49e",
"Cup;": "\u22d3",
"CupCap;": "\u224d",
"DD;": "\u2145",
"DDotrahd;": "\u2911",
"DJcy;": "\u0402",
"DScy;": "\u0405",
"DZcy;": "\u040f",
"Dagger;": "\u2021",
"Darr;": "\u21a1",
"Dashv;": "\u2ae4",
"Dcaron;": "\u010e",
"Dcy;": "\u0414",
"Del;": "\u2207",
"Delta;": "\u0394",
"Dfr;": "\U0001d507",
"DiacriticalAcute;": "\xb4",
"DiacriticalDot;": "\u02d9",
"DiacriticalDoubleAcute;": "\u02dd",
"DiacriticalGrave;": "`",
"DiacriticalTilde;": "\u02dc",
"Diamond;": "\u22c4",
"DifferentialD;": "\u2146",
"Dopf;": "\U0001d53b",
"Dot;": "\xa8",
"DotDot;": "\u20dc",
"DotEqual;": "\u2250",
"DoubleContourIntegral;": "\u222f",
"DoubleDot;": "\xa8",
"DoubleDownArrow;": "\u21d3",
"DoubleLeftArrow;": "\u21d0",
"DoubleLeftRightArrow;": "\u21d4",
"DoubleLeftTee;": "\u2ae4",
"DoubleLongLeftArrow;": "\u27f8",
"DoubleLongLeftRightArrow;": "\u27fa",
"DoubleLongRightArrow;": "\u27f9",
"DoubleRightArrow;": "\u21d2",
"DoubleRightTee;": "\u22a8",
"DoubleUpArrow;": "\u21d1",
"DoubleUpDownArrow;": "\u21d5",
"DoubleVerticalBar;": "\u2225",
"DownArrow;": "\u2193",
"DownArrowBar;": "\u2913",
"DownArrowUpArrow;": "\u21f5",
"DownBreve;": "\u0311",
"DownLeftRightVector;": "\u2950",
"DownLeftTeeVector;": "\u295e",
"DownLeftVector;": "\u21bd",
"DownLeftVectorBar;": "\u2956",
"DownRightTeeVector;": "\u295f",
"DownRightVector;": "\u21c1",
"DownRightVectorBar;": "\u2957",
"DownTee;": "\u22a4",
"DownTeeArrow;": "\u21a7",
"Downarrow;": "\u21d3",
"Dscr;": "\U0001d49f",
"Dstrok;": "\u0110",
"ENG;": "\u014a",
"ETH": "\xd0",
"ETH;": "\xd0",
"Eacute": "\xc9",
"Eacute;": "\xc9",
"Ecaron;": "\u011a",
"Ecirc": "\xca",
"Ecirc;": "\xca",
"Ecy;": "\u042d",
"Edot;": "\u0116",
"Efr;": "\U0001d508",
"Egrave": "\xc8",
"Egrave;": "\xc8",
"Element;": "\u2208",
"Emacr;": "\u0112",
"EmptySmallSquare;": "\u25fb",
"EmptyVerySmallSquare;": "\u25ab",
"Eogon;": "\u0118",
"Eopf;": "\U0001d53c",
"Epsilon;": "\u0395",
"Equal;": "\u2a75",
"EqualTilde;": "\u2242",
"Equilibrium;": "\u21cc",
"Escr;": "\u2130",
"Esim;": "\u2a73",
"Eta;": "\u0397",
"Euml": "\xcb",
"Euml;": "\xcb",
"Exists;": "\u2203",
"ExponentialE;": "\u2147",
"Fcy;": "\u0424",
"Ffr;": "\U0001d509",
"FilledSmallSquare;": "\u25fc",
"FilledVerySmallSquare;": "\u25aa",
"Fopf;": "\U0001d53d",
"ForAll;": "\u2200",
"Fouriertrf;": "\u2131",
"Fscr;": "\u2131",
"GJcy;": "\u0403",
"GT": ">",
"GT;": ">",
"Gamma;": "\u0393",
"Gammad;": "\u03dc",
"Gbreve;": "\u011e",
"Gcedil;": "\u0122",
"Gcirc;": "\u011c",
"Gcy;": "\u0413",
"Gdot;": "\u0120",
"Gfr;": "\U0001d50a",
"Gg;": "\u22d9",
"Gopf;": "\U0001d53e",
"GreaterEqual;": "\u2265",
"GreaterEqualLess;": "\u22db",
"GreaterFullEqual;": "\u2267",
"GreaterGreater;": "\u2aa2",
"GreaterLess;": "\u2277",
"GreaterSlantEqual;": "\u2a7e",
"GreaterTilde;": "\u2273",
"Gscr;": "\U0001d4a2",
"Gt;": "\u226b",
"HARDcy;": "\u042a",
"Hacek;": "\u02c7",
"Hat;": "^",
"Hcirc;": "\u0124",
"Hfr;": "\u210c",
"HilbertSpace;": "\u210b",
"Hopf;": "\u210d",
"HorizontalLine;": "\u2500",
"Hscr;": "\u210b",
"Hstrok;": "\u0126",
"HumpDownHump;": "\u224e",
"HumpEqual;": "\u224f",
"IEcy;": "\u0415",
"IJlig;": "\u0132",
"IOcy;": "\u0401",
"Iacute": "\xcd",
"Iacute;": "\xcd",
"Icirc": "\xce",
"Icirc;": "\xce",
"Icy;": "\u0418",
"Idot;": "\u0130",
"Ifr;": "\u2111",
"Igrave": "\xcc",
"Igrave;": "\xcc",
"Im;": "\u2111",
"Imacr;": "\u012a",
"ImaginaryI;": "\u2148",
"Implies;": "\u21d2",
"Int;": "\u222c",
"Integral;": "\u222b",
"Intersection;": "\u22c2",
"InvisibleComma;": "\u2063",
"InvisibleTimes;": "\u2062",
"Iogon;": "\u012e",
"Iopf;": "\U0001d540",
"Iota;": "\u0399",
"Iscr;": "\u2110",
"Itilde;": "\u0128",
"Iukcy;": "\u0406",
"Iuml": "\xcf",
"Iuml;": "\xcf",
"Jcirc;": "\u0134",
"Jcy;": "\u0419",
"Jfr;": "\U0001d50d",
"Jopf;": "\U0001d541",
"Jscr;": "\U0001d4a5",
"Jsercy;": "\u0408",
"Jukcy;": "\u0404",
"KHcy;": "\u0425",
"KJcy;": "\u040c",
"Kappa;": "\u039a",
"Kcedil;": "\u0136",
"Kcy;": "\u041a",
"Kfr;": "\U0001d50e",
"Kopf;": "\U0001d542",
"Kscr;": "\U0001d4a6",
"LJcy;": "\u0409",
"LT": "<",
"LT;": "<",
"Lacute;": "\u0139",
"Lambda;": "\u039b",
"Lang;": "\u27ea",
"Laplacetrf;": "\u2112",
"Larr;": "\u219e",
"Lcaron;": "\u013d",
"Lcedil;": "\u013b",
"Lcy;": "\u041b",
"LeftAngleBracket;": "\u27e8",
"LeftArrow;": "\u2190",
"LeftArrowBar;": "\u21e4",
"LeftArrowRightArrow;": "\u21c6",
"LeftCeiling;": "\u2308",
"LeftDoubleBracket;": "\u27e6",
"LeftDownTeeVector;": "\u2961",
"LeftDownVector;": "\u21c3",
"LeftDownVectorBar;": "\u2959",
"LeftFloor;": "\u230a",
"LeftRightArrow;": "\u2194",
"LeftRightVector;": "\u294e",
"LeftTee;": "\u22a3",
"LeftTeeArrow;": "\u21a4",
"LeftTeeVector;": "\u295a",
"LeftTriangle;": "\u22b2",
"LeftTriangleBar;": "\u29cf",
"LeftTriangleEqual;": "\u22b4",
"LeftUpDownVector;": "\u2951",
"LeftUpTeeVector;": "\u2960",
"LeftUpVector;": "\u21bf",
"LeftUpVectorBar;": "\u2958",
"LeftVector;": "\u21bc",
"LeftVectorBar;": "\u2952",
"Leftarrow;": "\u21d0",
"Leftrightarrow;": "\u21d4",
"LessEqualGreater;": "\u22da",
"LessFullEqual;": "\u2266",
"LessGreater;": "\u2276",
"LessLess;": "\u2aa1",
"LessSlantEqual;": "\u2a7d",
"LessTilde;": "\u2272",
"Lfr;": "\U0001d50f",
"Ll;": "\u22d8",
"Lleftarrow;": "\u21da",
"Lmidot;": "\u013f",
"LongLeftArrow;": "\u27f5",
"LongLeftRightArrow;": "\u27f7",
"LongRightArrow;": "\u27f6",
"Longleftarrow;": "\u27f8",
"Longleftrightarrow;": "\u27fa",
"Longrightarrow;": "\u27f9",
"Lopf;": "\U0001d543",
"LowerLeftArrow;": "\u2199",
"LowerRightArrow;": "\u2198",
"Lscr;": "\u2112",
"Lsh;": "\u21b0",
"Lstrok;": "\u0141",
"Lt;": "\u226a",
"Map;": "\u2905",
"Mcy;": "\u041c",
"MediumSpace;": "\u205f",
"Mellintrf;": "\u2133",
"Mfr;": "\U0001d510",
"MinusPlus;": "\u2213",
"Mopf;": "\U0001d544",
"Mscr;": "\u2133",
"Mu;": "\u039c",
"NJcy;": "\u040a",
"Nacute;": "\u0143",
"Ncaron;": "\u0147",
"Ncedil;": "\u0145",
"Ncy;": "\u041d",
"NegativeMediumSpace;": "\u200b",
"NegativeThickSpace;": "\u200b",
"NegativeThinSpace;": "\u200b",
"NegativeVeryThinSpace;": "\u200b",
"NestedGreaterGreater;": "\u226b",
"NestedLessLess;": "\u226a",
"NewLine;": "\n",
"Nfr;": "\U0001d511",
"NoBreak;": "\u2060",
"NonBreakingSpace;": "\xa0",
"Nopf;": "\u2115",
"Not;": "\u2aec",
"NotCongruent;": "\u2262",
"NotCupCap;": "\u226d",
"NotDoubleVerticalBar;": "\u2226",
"NotElement;": "\u2209",
"NotEqual;": "\u2260",
"NotEqualTilde;": "\u2242\u0338",
"NotExists;": "\u2204",
"NotGreater;": "\u226f",
"NotGreaterEqual;": "\u2271",
"NotGreaterFullEqual;": "\u2267\u0338",
"NotGreaterGreater;": "\u226b\u0338",
"NotGreaterLess;": "\u2279",
"NotGreaterSlantEqual;": "\u2a7e\u0338",
"NotGreaterTilde;": "\u2275",
"NotHumpDownHump;": "\u224e\u0338",
"NotHumpEqual;": "\u224f\u0338",
"NotLeftTriangle;": "\u22ea",
"NotLeftTriangleBar;": "\u29cf\u0338",
"NotLeftTriangleEqual;": "\u22ec",
"NotLess;": "\u226e",
"NotLessEqual;": "\u2270",
"NotLessGreater;": "\u2278",
"NotLessLess;": "\u226a\u0338",
"NotLessSlantEqual;": "\u2a7d\u0338",
"NotLessTilde;": "\u2274",
"NotNestedGreaterGreater;": "\u2aa2\u0338",
"NotNestedLessLess;": "\u2aa1\u0338",
"NotPrecedes;": "\u2280",
"NotPrecedesEqual;": "\u2aaf\u0338",
"NotPrecedesSlantEqual;": "\u22e0",
"NotReverseElement;": "\u220c",
"NotRightTriangle;": "\u22eb",
"NotRightTriangleBar;": "\u29d0\u0338",
"NotRightTriangleEqual;": "\u22ed",
"NotSquareSubset;": "\u228f\u0338",
"NotSquareSubsetEqual;": "\u22e2",
"NotSquareSuperset;": "\u2290\u0338",
"NotSquareSupersetEqual;": "\u22e3",
"NotSubset;": "\u2282\u20d2",
"NotSubsetEqual;": "\u2288",
"NotSucceeds;": "\u2281",
"NotSucceedsEqual;": "\u2ab0\u0338",
"NotSucceedsSlantEqual;": "\u22e1",
"NotSucceedsTilde;": "\u227f\u0338",
"NotSuperset;": "\u2283\u20d2",
"NotSupersetEqual;": "\u2289",
"NotTilde;": "\u2241",
"NotTildeEqual;": "\u2244",
"NotTildeFullEqual;": "\u2247",
"NotTildeTilde;": "\u2249",
"NotVerticalBar;": "\u2224",
"Nscr;": "\U0001d4a9",
"Ntilde": "\xd1",
"Ntilde;": "\xd1",
"Nu;": "\u039d",
"OElig;": "\u0152",
"Oacute": "\xd3",
"Oacute;": "\xd3",
"Ocirc": "\xd4",
"Ocirc;": "\xd4",
"Ocy;": "\u041e",
"Odblac;": "\u0150",
"Ofr;": "\U0001d512",
"Ograve": "\xd2",
"Ograve;": "\xd2",
"Omacr;": "\u014c",
"Omega;": "\u03a9",
"Omicron;": "\u039f",
"Oopf;": "\U0001d546",
"OpenCurlyDoubleQuote;": "\u201c",
"OpenCurlyQuote;": "\u2018",
"Or;": "\u2a54",
"Oscr;": "\U0001d4aa",
"Oslash": "\xd8",
"Oslash;": "\xd8",
"Otilde": "\xd5",
"Otilde;": "\xd5",
"Otimes;": "\u2a37",
"Ouml": "\xd6",
"Ouml;": "\xd6",
"OverBar;": "\u203e",
"OverBrace;": "\u23de",
"OverBracket;": "\u23b4",
"OverParenthesis;": "\u23dc",
"PartialD;": "\u2202",
"Pcy;": "\u041f",
"Pfr;": "\U0001d513",
"Phi;": "\u03a6",
"Pi;": "\u03a0",
"PlusMinus;": "\xb1",
"Poincareplane;": "\u210c",
"Popf;": "\u2119",
"Pr;": "\u2abb",
"Precedes;": "\u227a",
"PrecedesEqual;": "\u2aaf",
"PrecedesSlantEqual;": "\u227c",
"PrecedesTilde;": "\u227e",
"Prime;": "\u2033",
"Product;": "\u220f",
"Proportion;": "\u2237",
"Proportional;": "\u221d",
"Pscr;": "\U0001d4ab",
"Psi;": "\u03a8",
"QUOT": "\"",
"QUOT;": "\"",
"Qfr;": "\U0001d514",
"Qopf;": "\u211a",
"Qscr;": "\U0001d4ac",
"RBarr;": "\u2910",
"REG": "\xae",
"REG;": "\xae",
"Racute;": "\u0154",
"Rang;": "\u27eb",
"Rarr;": "\u21a0",
"Rarrtl;": "\u2916",
"Rcaron;": "\u0158",
"Rcedil;": "\u0156",
"Rcy;": "\u0420",
"Re;": "\u211c",
"ReverseElement;": "\u220b",
"ReverseEquilibrium;": "\u21cb",
"ReverseUpEquilibrium;": "\u296f",
"Rfr;": "\u211c",
"Rho;": "\u03a1",
"RightAngleBracket;": "\u27e9",
"RightArrow;": "\u2192",
"RightArrowBar;": "\u21e5",
"RightArrowLeftArrow;": "\u21c4",
"RightCeiling;": "\u2309",
"RightDoubleBracket;": "\u27e7",
"RightDownTeeVector;": "\u295d",
"RightDownVector;": "\u21c2",
"RightDownVectorBar;": "\u2955",
"RightFloor;": "\u230b",
"RightTee;": "\u22a2",
"RightTeeArrow;": "\u21a6",
"RightTeeVector;": "\u295b",
"RightTriangle;": "\u22b3",
"RightTriangleBar;": "\u29d0",
"RightTriangleEqual;": "\u22b5",
"RightUpDownVector;": "\u294f",
"RightUpTeeVector;": "\u295c",
"RightUpVector;": "\u21be",
"RightUpVectorBar;": "\u2954",
"RightVector;": "\u21c0",
"RightVectorBar;": "\u2953",
"Rightarrow;": "\u21d2",
"Ropf;": "\u211d",
"RoundImplies;": "\u2970",
"Rrightarrow;": "\u21db",
"Rscr;": "\u211b",
"Rsh;": "\u21b1",
"RuleDelayed;": "\u29f4",
"SHCHcy;": "\u0429",
"SHcy;": "\u0428",
"SOFTcy;": "\u042c",
"Sacute;": "\u015a",
"Sc;": "\u2abc",
"Scaron;": "\u0160",
"Scedil;": "\u015e",
"Scirc;": "\u015c",
"Scy;": "\u0421",
"Sfr;": "\U0001d516",
"ShortDownArrow;": "\u2193",
"ShortLeftArrow;": "\u2190",
"ShortRightArrow;": "\u2192",
"ShortUpArrow;": "\u2191",
"Sigma;": "\u03a3",
"SmallCircle;": "\u2218",
"Sopf;": "\U0001d54a",
"Sqrt;": "\u221a",
"Square;": "\u25a1",
"SquareIntersection;": "\u2293",
"SquareSubset;": "\u228f",
"SquareSubsetEqual;": "\u2291",
"SquareSuperset;": "\u2290",
"SquareSupersetEqual;": "\u2292",
"SquareUnion;": "\u2294",
"Sscr;": "\U0001d4ae",
"Star;": "\u22c6",
"Sub;": "\u22d0",
"Subset;": "\u22d0",
"SubsetEqual;": "\u2286",
"Succeeds;": "\u227b",
"SucceedsEqual;": "\u2ab0",
"SucceedsSlantEqual;": "\u227d",
"SucceedsTilde;": "\u227f",
"SuchThat;": "\u220b",
"Sum;": "\u2211",
"Sup;": "\u22d1",
"Superset;": "\u2283",
"SupersetEqual;": "\u2287",
"Supset;": "\u22d1",
"THORN": "\xde",
"THORN;": "\xde",
"TRADE;": "\u2122",
"TSHcy;": "\u040b",
"TScy;": "\u0426",
"Tab;": "\t",
"Tau;": "\u03a4",
"Tcaron;": "\u0164",
"Tcedil;": "\u0162",
"Tcy;": "\u0422",
"Tfr;": "\U0001d517",
"Therefore;": "\u2234",
"Theta;": "\u0398",
"ThickSpace;": "\u205f\u200a",
"ThinSpace;": "\u2009",
"Tilde;": "\u223c",
"TildeEqual;": "\u2243",
"TildeFullEqual;": "\u2245",
"TildeTilde;": "\u2248",
"Topf;": "\U0001d54b",
"TripleDot;": "\u20db",
"Tscr;": "\U0001d4af",
"Tstrok;": "\u0166",
"Uacute": "\xda",
"Uacute;": "\xda",
"Uarr;": "\u219f",
"Uarrocir;": "\u2949",
"Ubrcy;": "\u040e",
"Ubreve;": "\u016c",
"Ucirc": "\xdb",
"Ucirc;": "\xdb",
"Ucy;": "\u0423",
"Udblac;": "\u0170",
"Ufr;": "\U0001d518",
"Ugrave": "\xd9",
"Ugrave;": "\xd9",
"Umacr;": "\u016a",
"UnderBar;": "_",
"UnderBrace;": "\u23df",
"UnderBracket;": "\u23b5",
"UnderParenthesis;": "\u23dd",
"Union;": "\u22c3",
"UnionPlus;": "\u228e",
"Uogon;": "\u0172",
"Uopf;": "\U0001d54c",
"UpArrow;": "\u2191",
"UpArrowBar;": "\u2912",
"UpArrowDownArrow;": "\u21c5",
"UpDownArrow;": "\u2195",
"UpEquilibrium;": "\u296e",
"UpTee;": "\u22a5",
"UpTeeArrow;": "\u21a5",
"Uparrow;": "\u21d1",
"Updownarrow;": "\u21d5",
"UpperLeftArrow;": "\u2196",
"UpperRightArrow;": "\u2197",
"Upsi;": "\u03d2",
"Upsilon;": "\u03a5",
"Uring;": "\u016e",
"Uscr;": "\U0001d4b0",
"Utilde;": "\u0168",
"Uuml": "\xdc",
"Uuml;": "\xdc",
"VDash;": "\u22ab",
"Vbar;": "\u2aeb",
"Vcy;": "\u0412",
"Vdash;": "\u22a9",
"Vdashl;": "\u2ae6",
"Vee;": "\u22c1",
"Verbar;": "\u2016",
"Vert;": "\u2016",
"VerticalBar;": "\u2223",
"VerticalLine;": "|",
"VerticalSeparator;": "\u2758",
"VerticalTilde;": "\u2240",
"VeryThinSpace;": "\u200a",
"Vfr;": "\U0001d519",
"Vopf;": "\U0001d54d",
"Vscr;": "\U0001d4b1",
"Vvdash;": "\u22aa",
"Wcirc;": "\u0174",
"Wedge;": "\u22c0",
"Wfr;": "\U0001d51a",
"Wopf;": "\U0001d54e",
"Wscr;": "\U0001d4b2",
"Xfr;": "\U0001d51b",
"Xi;": "\u039e",
"Xopf;": "\U0001d54f",
"Xscr;": "\U0001d4b3",
"YAcy;": "\u042f",
"YIcy;": "\u0407",
"YUcy;": "\u042e",
"Yacute": "\xdd",
"Yacute;": "\xdd",
"Ycirc;": "\u0176",
"Ycy;": "\u042b",
"Yfr;": "\U0001d51c",
"Yopf;": "\U0001d550",
"Yscr;": "\U0001d4b4",
"Yuml;": "\u0178",
"ZHcy;": "\u0416",
"Zacute;": "\u0179",
"Zcaron;": "\u017d",
"Zcy;": "\u0417",
"Zdot;": "\u017b",
"ZeroWidthSpace;": "\u200b",
"Zeta;": "\u0396",
"Zfr;": "\u2128",
"Zopf;": "\u2124",
"Zscr;": "\U0001d4b5",
"aacute": "\xe1",
"aacute;": "\xe1",
"abreve;": "\u0103",
"ac;": "\u223e",
"acE;": "\u223e\u0333",
"acd;": "\u223f",
"acirc": "\xe2",
"acirc;": "\xe2",
"acute": "\xb4",
"acute;": "\xb4",
"acy;": "\u0430",
"aelig": "\xe6",
"aelig;": "\xe6",
"af;": "\u2061",
"afr;": "\U0001d51e",
"agrave": "\xe0",
"agrave;": "\xe0",
"alefsym;": "\u2135",
"aleph;": "\u2135",
"alpha;": "\u03b1",
"amacr;": "\u0101",
"amalg;": "\u2a3f",
"amp": "&",
"amp;": "&",
"and;": "\u2227",
"andand;": "\u2a55",
"andd;": "\u2a5c",
"andslope;": "\u2a58",
"andv;": "\u2a5a",
"ang;": "\u2220",
"ange;": "\u29a4",
"angle;": "\u2220",
"angmsd;": "\u2221",
"angmsdaa;": "\u29a8",
"angmsdab;": "\u29a9",
"angmsdac;": "\u29aa",
"angmsdad;": "\u29ab",
"angmsdae;": "\u29ac",
"angmsdaf;": "\u29ad",
"angmsdag;": "\u29ae",
"angmsdah;": "\u29af",
"angrt;": "\u221f",
"angrtvb;": "\u22be",
"angrtvbd;": "\u299d",
"angsph;": "\u2222",
"angst;": "\xc5",
"angzarr;": "\u237c",
"aogon;": "\u0105",
"aopf;": "\U0001d552",
"ap;": "\u2248",
"apE;": "\u2a70",
"apacir;": "\u2a6f",
"ape;": "\u224a",
"apid;": "\u224b",
"apos;": "'",
"approx;": "\u2248",
"approxeq;": "\u224a",
"aring": "\xe5",
"aring;": "\xe5",
"ascr;": "\U0001d4b6",
"ast;": "*",
"asymp;": "\u2248",
"asympeq;": "\u224d",
"atilde": "\xe3",
"atilde;": "\xe3",
"auml": "\xe4",
"auml;": "\xe4",
"awconint;": "\u2233",
"awint;": "\u2a11",
"bNot;": "\u2aed",
"backcong;": "\u224c",
"backepsilon;": "\u03f6",
"backprime;": "\u2035",
"backsim;": "\u223d",
"backsimeq;": "\u22cd",
"barvee;": "\u22bd",
"barwed;": "\u2305",
"barwedge;": "\u2305",
"bbrk;": "\u23b5",
"bbrktbrk;": "\u23b6",
"bcong;": "\u224c",
"bcy;": "\u0431",
"bdquo;": "\u201e",
"becaus;": "\u2235",
"because;": "\u2235",
"bemptyv;": "\u29b0",
"bepsi;": "\u03f6",
"bernou;": "\u212c",
"beta;": "\u03b2",
"beth;": "\u2136",
"between;": "\u226c",
"bfr;": "\U0001d51f",
"bigcap;": "\u22c2",
"bigcirc;": "\u25ef",
"bigcup;": "\u22c3",
"bigodot;": "\u2a00",
"bigoplus;": "\u2a01",
"bigotimes;": "\u2a02",
"bigsqcup;": "\u2a06",
"bigstar;": "\u2605",
"bigtriangledown;": "\u25bd",
"bigtriangleup;": "\u25b3",
"biguplus;": "\u2a04",
"bigvee;": "\u22c1",
"bigwedge;": "\u22c0",
"bkarow;": "\u290d",
"blacklozenge;": "\u29eb",
"blacksquare;": "\u25aa",
"blacktriangle;": "\u25b4",
"blacktriangledown;": "\u25be",
"blacktriangleleft;": "\u25c2",
"blacktriangleright;": "\u25b8",
"blank;": "\u2423",
"blk12;": "\u2592",
"blk14;": "\u2591",
"blk34;": "\u2593",
"block;": "\u2588",
"bne;": "=\u20e5",
"bnequiv;": "\u2261\u20e5",
"bnot;": "\u2310",
"bopf;": "\U0001d553",
"bot;": "\u22a5",
"bottom;": "\u22a5",
"bowtie;": "\u22c8",
"boxDL;": "\u2557",
"boxDR;": "\u2554",
"boxDl;": "\u2556",
"boxDr;": "\u2553",
"boxH;": "\u2550",
"boxHD;": "\u2566",
"boxHU;": "\u2569",
"boxHd;": "\u2564",
"boxHu;": "\u2567",
"boxUL;": "\u255d",
"boxUR;": "\u255a",
"boxUl;": "\u255c",
"boxUr;": "\u2559",
"boxV;": "\u2551",
"boxVH;": "\u256c",
"boxVL;": "\u2563",
"boxVR;": "\u2560",
"boxVh;": "\u256b",
"boxVl;": "\u2562",
"boxVr;": "\u255f",
"boxbox;": "\u29c9",
"boxdL;": "\u2555",
"boxdR;": "\u2552",
"boxdl;": "\u2510",
"boxdr;": "\u250c",
"boxh;": "\u2500",
"boxhD;": "\u2565",
"boxhU;": "\u2568",
"boxhd;": "\u252c",
"boxhu;": "\u2534",
"boxminus;": "\u229f",
"boxplus;": "\u229e",
"boxtimes;": "\u22a0",
"boxuL;": "\u255b",
"boxuR;": "\u2558",
"boxul;": "\u2518",
"boxur;": "\u2514",
"boxv;": "\u2502",
"boxvH;": "\u256a",
"boxvL;": "\u2561",
"boxvR;": "\u255e",
"boxvh;": "\u253c",
"boxvl;": "\u2524",
"boxvr;": "\u251c",
"bprime;": "\u2035",
"breve;": "\u02d8",
"brvbar": "\xa6",
"brvbar;": "\xa6",
"bscr;": "\U0001d4b7",
"bsemi;": "\u204f",
"bsim;": "\u223d",
"bsime;": "\u22cd",
"bsol;": "\\",
"bsolb;": "\u29c5",
"bsolhsub;": "\u27c8",
"bull;": "\u2022",
"bullet;": "\u2022",
"bump;": "\u224e",
"bumpE;": "\u2aae",
"bumpe;": "\u224f",
"bumpeq;": "\u224f",
"cacute;": "\u0107",
"cap;": "\u2229",
"capand;": "\u2a44",
"capbrcup;": "\u2a49",
"capcap;": "\u2a4b",
"capcup;": "\u2a47",
"capdot;": "\u2a40",
"caps;": "\u2229\ufe00",
"caret;": "\u2041",
"caron;": "\u02c7",
"ccaps;": "\u2a4d",
"ccaron;": "\u010d",
"ccedil": "\xe7",
"ccedil;": "\xe7",
"ccirc;": "\u0109",
"ccups;": "\u2a4c",
"ccupssm;": "\u2a50",
"cdot;": "\u010b",
"cedil": "\xb8",
"cedil;": "\xb8",
"cemptyv;": "\u29b2",
"cent": "\xa2",
"cent;": "\xa2",
"centerdot;": "\xb7",
"cfr;": "\U0001d520",
"chcy;": "\u0447",
"check;": "\u2713",
"checkmark;": "\u2713",
"chi;": "\u03c7",
"cir;": "\u25cb",
"cirE;": "\u29c3",
"circ;": "\u02c6",
"circeq;": "\u2257",
"circlearrowleft;": "\u21ba",
"circlearrowright;": "\u21bb",
"circledR;": "\xae",
"circledS;": "\u24c8",
"circledast;": "\u229b",
"circledcirc;": "\u229a",
"circleddash;": "\u229d",
"cire;": "\u2257",
"cirfnint;": "\u2a10",
"cirmid;": "\u2aef",
"cirscir;": "\u29c2",
"clubs;": "\u2663",
"clubsuit;": "\u2663",
"colon;": ":",
"colone;": "\u2254",
"coloneq;": "\u2254",
"comma;": ",",
"commat;": "@",
"comp;": "\u2201",
"compfn;": "\u2218",
"complement;": "\u2201",
"complexes;": "\u2102",
"cong;": "\u2245",
"congdot;": "\u2a6d",
"conint;": "\u222e",
"copf;": "\U0001d554",
"coprod;": "\u2210",
"copy": "\xa9",
"copy;": "\xa9",
"copysr;": "\u2117",
"crarr;": "\u21b5",
"cross;": "\u2717",
"cscr;": "\U0001d4b8",
"csub;": "\u2acf",
"csube;": "\u2ad1",
"csup;": "\u2ad0",
"csupe;": "\u2ad2",
"ctdot;": "\u22ef",
"cudarrl;": "\u2938",
"cudarrr;": "\u2935",
"cuepr;": "\u22de",
"cuesc;": "\u22df",
"cularr;": "\u21b6",
"cularrp;": "\u293d",
"cup;": "\u222a",
"cupbrcap;": "\u2a48",
"cupcap;": "\u2a46",
"cupcup;": "\u2a4a",
"cupdot;": "\u228d",
"cupor;": "\u2a45",
"cups;": "\u222a\ufe00",
"curarr;": "\u21b7",
"curarrm;": "\u293c",
"curlyeqprec;": "\u22de",
"curlyeqsucc;": "\u22df",
"curlyvee;": "\u22ce",
"curlywedge;": "\u22cf",
"curren": "\xa4",
"curren;": "\xa4",
"curvearrowleft;": "\u21b6",
"curvearrowright;": "\u21b7",
"cuvee;": "\u22ce",
"cuwed;": "\u22cf",
"cwconint;": "\u2232",
"cwint;": "\u2231",
"cylcty;": "\u232d",
"dArr;": "\u21d3",
"dHar;": "\u2965",
"dagger;": "\u2020",
"daleth;": "\u2138",
"darr;": "\u2193",
"dash;": "\u2010",
"dashv;": "\u22a3",
"dbkarow;": "\u290f",
"dblac;": "\u02dd",
"dcaron;": "\u010f",
"dcy;": "\u0434",
"dd;": "\u2146",
"ddagger;": "\u2021",
"ddarr;": "\u21ca",
"ddotseq;": "\u2a77",
"deg": "\xb0",
"deg;": "\xb0",
"delta;": "\u03b4",
"demptyv;": "\u29b1",
"dfisht;": "\u297f",
"dfr;": "\U0001d521",
"dharl;": "\u21c3",
"dharr;": "\u21c2",
"diam;": "\u22c4",
"diamond;": "\u22c4",
"diamondsuit;": "\u2666",
"diams;": "\u2666",
"die;": "\xa8",
"digamma;": "\u03dd",
"disin;": "\u22f2",
"div;": "\xf7",
"divide": "\xf7",
"divide;": "\xf7",
"divideontimes;": "\u22c7",
"divonx;": "\u22c7",
"djcy;": "\u0452",
"dlcorn;": "\u231e",
"dlcrop;": "\u230d",
"dollar;": "$",
"dopf;": "\U0001d555",
"dot;": "\u02d9",
"doteq;": "\u2250",
"doteqdot;": "\u2251",
"dotminus;": "\u2238",
"dotplus;": "\u2214",
"dotsquare;": "\u22a1",
"doublebarwedge;": "\u2306",
"downarrow;": "\u2193",
"downdownarrows;": "\u21ca",
"downharpoonleft;": "\u21c3",
"downharpoonright;": "\u21c2",
"drbkarow;": "\u2910",
"drcorn;": "\u231f",
"drcrop;": "\u230c",
"dscr;": "\U0001d4b9",
"dscy;": "\u0455",
"dsol;": "\u29f6",
"dstrok;": "\u0111",
"dtdot;": "\u22f1",
"dtri;": "\u25bf",
"dtrif;": "\u25be",
"duarr;": "\u21f5",
"duhar;": "\u296f",
"dwangle;": "\u29a6",
"dzcy;": "\u045f",
"dzigrarr;": "\u27ff",
"eDDot;": "\u2a77",
"eDot;": "\u2251",
"eacute": "\xe9",
"eacute;": "\xe9",
"easter;": "\u2a6e",
"ecaron;": "\u011b",
"ecir;": "\u2256",
"ecirc": "\xea",
"ecirc;": "\xea",
"ecolon;": "\u2255",
"ecy;": "\u044d",
"edot;": "\u0117",
"ee;": "\u2147",
"efDot;": "\u2252",
"efr;": "\U0001d522",
"eg;": "\u2a9a",
"egrave": "\xe8",
"egrave;": "\xe8",
"egs;": "\u2a96",
"egsdot;": "\u2a98",
"el;": "\u2a99",
"elinters;": "\u23e7",
"ell;": "\u2113",
"els;": "\u2a95",
"elsdot;": "\u2a97",
"emacr;": "\u0113",
"empty;": "\u2205",
"emptyset;": "\u2205",
"emptyv;": "\u2205",
"emsp13;": "\u2004",
"emsp14;": "\u2005",
"emsp;": "\u2003",
"eng;": "\u014b",
"ensp;": "\u2002",
"eogon;": "\u0119",
"eopf;": "\U0001d556",
"epar;": "\u22d5",
"eparsl;": "\u29e3",
"eplus;": "\u2a71",
"epsi;": "\u03b5",
"epsilon;": "\u03b5",
"epsiv;": "\u03f5",
"eqcirc;": "\u2256",
"eqcolon;": "\u2255",
"eqsim;": "\u2242",
"eqslantgtr;": "\u2a96",
"eqslantless;": "\u2a95",
"equals;": "=",
"equest;": "\u225f",
"equiv;": "\u2261",
"equivDD;": "\u2a78",
"eqvparsl;": "\u29e5",
"erDot;": "\u2253",
"erarr;": "\u2971",
"escr;": "\u212f",
"esdot;": "\u2250",
"esim;": "\u2242",
"eta;": "\u03b7",
"eth": "\xf0",
"eth;": "\xf0",
"euml": "\xeb",
"euml;": "\xeb",
"euro;": "\u20ac",
"excl;": "!",
"exist;": "\u2203",
"expectation;": "\u2130",
"exponentiale;": "\u2147",
"fallingdotseq;": "\u2252",
"fcy;": "\u0444",
"female;": "\u2640",
"ffilig;": "\ufb03",
"fflig;": "\ufb00",
"ffllig;": "\ufb04",
"ffr;": "\U0001d523",
"filig;": "\ufb01",
"fjlig;": "fj",
"flat;": "\u266d",
"fllig;": "\ufb02",
"fltns;": "\u25b1",
"fnof;": "\u0192",
"fopf;": "\U0001d557",
"forall;": "\u2200",
"fork;": "\u22d4",
"forkv;": "\u2ad9",
"fpartint;": "\u2a0d",
"frac12": "\xbd",
"frac12;": "\xbd",
"frac13;": "\u2153",
"frac14": "\xbc",
"frac14;": "\xbc",
"frac15;": "\u2155",
"frac16;": "\u2159",
"frac18;": "\u215b",
"frac23;": "\u2154",
"frac25;": "\u2156",
"frac34": "\xbe",
"frac34;": "\xbe",
"frac35;": "\u2157",
"frac38;": "\u215c",
"frac45;": "\u2158",
"frac56;": "\u215a",
"frac58;": "\u215d",
"frac78;": "\u215e",
"frasl;": "\u2044",
"frown;": "\u2322",
"fscr;": "\U0001d4bb",
"gE;": "\u2267",
"gEl;": "\u2a8c",
"gacute;": "\u01f5",
"gamma;": "\u03b3",
"gammad;": "\u03dd",
"gap;": "\u2a86",
"gbreve;": "\u011f",
"gcirc;": "\u011d",
"gcy;": "\u0433",
"gdot;": "\u0121",
"ge;": "\u2265",
"gel;": "\u22db",
"geq;": "\u2265",
"geqq;": "\u2267",
"geqslant;": "\u2a7e",
"ges;": "\u2a7e",
"gescc;": "\u2aa9",
"gesdot;": "\u2a80",
"gesdoto;": "\u2a82",
"gesdotol;": "\u2a84",
"gesl;": "\u22db\ufe00",
"gesles;": "\u2a94",
"gfr;": "\U0001d524",
"gg;": "\u226b",
"ggg;": "\u22d9",
"gimel;": "\u2137",
"gjcy;": "\u0453",
"gl;": "\u2277",
"glE;": "\u2a92",
"gla;": "\u2aa5",
"glj;": "\u2aa4",
"gnE;": "\u2269",
"gnap;": "\u2a8a",
"gnapprox;": "\u2a8a",
"gne;": "\u2a88",
"gneq;": "\u2a88",
"gneqq;": "\u2269",
"gnsim;": "\u22e7",
"gopf;": "\U0001d558",
"grave;": "`",
"gscr;": "\u210a",
"gsim;": "\u2273",
"gsime;": "\u2a8e",
"gsiml;": "\u2a90",
"gt": ">",
"gt;": ">",
"gtcc;": "\u2aa7",
"gtcir;": "\u2a7a",
"gtdot;": "\u22d7",
"gtlPar;": "\u2995",
"gtquest;": "\u2a7c",
"gtrapprox;": "\u2a86",
"gtrarr;": "\u2978",
"gtrdot;": "\u22d7",
"gtreqless;": "\u22db",
"gtreqqless;": "\u2a8c",
"gtrless;": "\u2277",
"gtrsim;": "\u2273",
"gvertneqq;": "\u2269\ufe00",
"gvnE;": "\u2269\ufe00",
"hArr;": "\u21d4",
"hairsp;": "\u200a",
"half;": "\xbd",
"hamilt;": "\u210b",
"hardcy;": "\u044a",
"harr;": "\u2194",
"harrcir;": "\u2948",
"harrw;": "\u21ad",
"hbar;": "\u210f",
"hcirc;": "\u0125",
"hearts;": "\u2665",
"heartsuit;": "\u2665",
"hellip;": "\u2026",
"hercon;": "\u22b9",
"hfr;": "\U0001d525",
"hksearow;": "\u2925",
"hkswarow;": "\u2926",
"hoarr;": "\u21ff",
"homtht;": "\u223b",
"hookleftarrow;": "\u21a9",
"hookrightarrow;": "\u21aa",
"hopf;": "\U0001d559",
"horbar;": "\u2015",
"hscr;": "\U0001d4bd",
"hslash;": "\u210f",
"hstrok;": "\u0127",
"hybull;": "\u2043",
"hyphen;": "\u2010",
"iacute": "\xed",
"iacute;": "\xed",
"ic;": "\u2063",
"icirc": "\xee",
"icirc;": "\xee",
"icy;": "\u0438",
"iecy;": "\u0435",
"iexcl": "\xa1",
"iexcl;": "\xa1",
"iff;": "\u21d4",
"ifr;": "\U0001d526",
"igrave": "\xec",
"igrave;": "\xec",
"ii;": "\u2148",
"iiiint;": "\u2a0c",
"iiint;": "\u222d",
"iinfin;": "\u29dc",
"iiota;": "\u2129",
"ijlig;": "\u0133",
"imacr;": "\u012b",
"image;": "\u2111",
"imagline;": "\u2110",
"imagpart;": "\u2111",
"imath;": "\u0131",
"imof;": "\u22b7",
"imped;": "\u01b5",
"in;": "\u2208",
"incare;": "\u2105",
"infin;": "\u221e",
"infintie;": "\u29dd",
"inodot;": "\u0131",
"int;": "\u222b",
"intcal;": "\u22ba",
"integers;": "\u2124",
"intercal;": "\u22ba",
"intlarhk;": "\u2a17",
"intprod;": "\u2a3c",
"iocy;": "\u0451",
"iogon;": "\u012f",
"iopf;": "\U0001d55a",
"iota;": "\u03b9",
"iprod;": "\u2a3c",
"iquest": "\xbf",
"iquest;": "\xbf",
"iscr;": "\U0001d4be",
"isin;": "\u2208",
"isinE;": "\u22f9",
"isindot;": "\u22f5",
"isins;": "\u22f4",
"isinsv;": "\u22f3",
"isinv;": "\u2208",
"it;": "\u2062",
"itilde;": "\u0129",
"iukcy;": "\u0456",
"iuml": "\xef",
"iuml;": "\xef",
"jcirc;": "\u0135",
"jcy;": "\u0439",
"jfr;": "\U0001d527",
"jmath;": "\u0237",
"jopf;": "\U0001d55b",
"jscr;": "\U0001d4bf",
"jsercy;": "\u0458",
"jukcy;": "\u0454",
"kappa;": "\u03ba",
"kappav;": "\u03f0",
"kcedil;": "\u0137",
"kcy;": "\u043a",
"kfr;": "\U0001d528",
"kgreen;": "\u0138",
"khcy;": "\u0445",
"kjcy;": "\u045c",
"kopf;": "\U0001d55c",
"kscr;": "\U0001d4c0",
"lAarr;": "\u21da",
"lArr;": "\u21d0",
"lAtail;": "\u291b",
"lBarr;": "\u290e",
"lE;": "\u2266",
"lEg;": "\u2a8b",
"lHar;": "\u2962",
"lacute;": "\u013a",
"laemptyv;": "\u29b4",
"lagran;": "\u2112",
"lambda;": "\u03bb",
"lang;": "\u27e8",
"langd;": "\u2991",
"langle;": "\u27e8",
"lap;": "\u2a85",
"laquo": "\xab",
"laquo;": "\xab",
"larr;": "\u2190",
"larrb;": "\u21e4",
"larrbfs;": "\u291f",
"larrfs;": "\u291d",
"larrhk;": "\u21a9",
"larrlp;": "\u21ab",
"larrpl;": "\u2939",
"larrsim;": "\u2973",
"larrtl;": "\u21a2",
"lat;": "\u2aab",
"latail;": "\u2919",
"late;": "\u2aad",
"lates;": "\u2aad\ufe00",
"lbarr;": "\u290c",
"lbbrk;": "\u2772",
"lbrace;": "{",
"lbrack;": "[",
"lbrke;": "\u298b",
"lbrksld;": "\u298f",
"lbrkslu;": "\u298d",
"lcaron;": "\u013e",
"lcedil;": "\u013c",
"lceil;": "\u2308",
"lcub;": "{",
"lcy;": "\u043b",
"ldca;": "\u2936",
"ldquo;": "\u201c",
"ldquor;": "\u201e",
"ldrdhar;": "\u2967",
"ldrushar;": "\u294b",
"ldsh;": "\u21b2",
"le;": "\u2264",
"leftarrow;": "\u2190",
"leftarrowtail;": "\u21a2",
"leftharpoondown;": "\u21bd",
"leftharpoonup;": "\u21bc",
"leftleftarrows;": "\u21c7",
"leftrightarrow;": "\u2194",
"leftrightarrows;": "\u21c6",
"leftrightharpoons;": "\u21cb",
"leftrightsquigarrow;": "\u21ad",
"leftthreetimes;": "\u22cb",
"leg;": "\u22da",
"leq;": "\u2264",
"leqq;": "\u2266",
"leqslant;": "\u2a7d",
"les;": "\u2a7d",
"lescc;": "\u2aa8",
"lesdot;": "\u2a7f",
"lesdoto;": "\u2a81",
"lesdotor;": "\u2a83",
"lesg;": "\u22da\ufe00",
"lesges;": "\u2a93",
"lessapprox;": "\u2a85",
"lessdot;": "\u22d6",
"lesseqgtr;": "\u22da",
"lesseqqgtr;": "\u2a8b",
"lessgtr;": "\u2276",
"lesssim;": "\u2272",
"lfisht;": "\u297c",
"lfloor;": "\u230a",
"lfr;": "\U0001d529",
"lg;": "\u2276",
"lgE;": "\u2a91",
"lhard;": "\u21bd",
"lharu;": "\u21bc",
"lharul;": "\u296a",
"lhblk;": "\u2584",
"ljcy;": "\u0459",
"ll;": "\u226a",
"llarr;": "\u21c7",
"llcorner;": "\u231e",
"llhard;": "\u296b",
"lltri;": "\u25fa",
"lmidot;": "\u0140",
"lmoust;": "\u23b0",
"lmoustache;": "\u23b0",
"lnE;": "\u2268",
"lnap;": "\u2a89",
"lnapprox;": "\u2a89",
"lne;": "\u2a87",
"lneq;": "\u2a87",
"lneqq;": "\u2268",
"lnsim;": "\u22e6",
"loang;": "\u27ec",
"loarr;": "\u21fd",
"lobrk;": "\u27e6",
"longleftarrow;": "\u27f5",
"longleftrightarrow;": "\u27f7",
"longmapsto;": "\u27fc",
"longrightarrow;": "\u27f6",
"looparrowleft;": "\u21ab",
"looparrowright;": "\u21ac",
"lopar;": "\u2985",
"lopf;": "\U0001d55d",
"loplus;": "\u2a2d",
"lotimes;": "\u2a34",
"lowast;": "\u2217",
"lowbar;": "_",
"loz;": "\u25ca",
"lozenge;": "\u25ca",
"lozf;": "\u29eb",
"lpar;": "(",
"lparlt;": "\u2993",
"lrarr;": "\u21c6",
"lrcorner;": "\u231f",
"lrhar;": "\u21cb",
"lrhard;": "\u296d",
"lrm;": "\u200e",
"lrtri;": "\u22bf",
"lsaquo;": "\u2039",
"lscr;": "\U0001d4c1",
"lsh;": "\u21b0",
"lsim;": "\u2272",
"lsime;": "\u2a8d",
"lsimg;": "\u2a8f",
"lsqb;": "[",
"lsquo;": "\u2018",
"lsquor;": "\u201a",
"lstrok;": "\u0142",
"lt": "<",
"lt;": "<",
"ltcc;": "\u2aa6",
"ltcir;": "\u2a79",
"ltdot;": "\u22d6",
"lthree;": "\u22cb",
"ltimes;": "\u22c9",
"ltlarr;": "\u2976",
"ltquest;": "\u2a7b",
"ltrPar;": "\u2996",
"ltri;": "\u25c3",
"ltrie;": "\u22b4",
"ltrif;": "\u25c2",
"lurdshar;": "\u294a",
"luruhar;": "\u2966",
"lvertneqq;": "\u2268\ufe00",
"lvnE;": "\u2268\ufe00",
"mDDot;": "\u223a",
"macr": "\xaf",
"macr;": "\xaf",
"male;": "\u2642",
"malt;": "\u2720",
"maltese;": "\u2720",
"map;": "\u21a6",
"mapsto;": "\u21a6",
"mapstodown;": "\u21a7",
"mapstoleft;": "\u21a4",
"mapstoup;": "\u21a5",
"marker;": "\u25ae",
"mcomma;": "\u2a29",
"mcy;": "\u043c",
"mdash;": "\u2014",
"measuredangle;": "\u2221",
"mfr;": "\U0001d52a",
"mho;": "\u2127",
"micro": "\xb5",
"micro;": "\xb5",
"mid;": "\u2223",
"midast;": "*",
"midcir;": "\u2af0",
"middot": "\xb7",
"middot;": "\xb7",
"minus;": "\u2212",
"minusb;": "\u229f",
"minusd;": "\u2238",
"minusdu;": "\u2a2a",
"mlcp;": "\u2adb",
"mldr;": "\u2026",
"mnplus;": "\u2213",
"models;": "\u22a7",
"mopf;": "\U0001d55e",
"mp;": "\u2213",
"mscr;": "\U0001d4c2",
"mstpos;": "\u223e",
"mu;": "\u03bc",
"multimap;": "\u22b8",
"mumap;": "\u22b8",
"nGg;": "\u22d9\u0338",
"nGt;": "\u226b\u20d2",
"nGtv;": "\u226b\u0338",
"nLeftarrow;": "\u21cd",
"nLeftrightarrow;": "\u21ce",
"nLl;": "\u22d8\u0338",
"nLt;": "\u226a\u20d2",
"nLtv;": "\u226a\u0338",
"nRightarrow;": "\u21cf",
"nVDash;": "\u22af",
"nVdash;": "\u22ae",
"nabla;": "\u2207",
"nacute;": "\u0144",
"nang;": "\u2220\u20d2",
"nap;": "\u2249",
"napE;": "\u2a70\u0338",
"napid;": "\u224b\u0338",
"napos;": "\u0149",
"napprox;": "\u2249",
"natur;": "\u266e",
"natural;": "\u266e",
"naturals;": "\u2115",
"nbsp": "\xa0",
"nbsp;": "\xa0",
"nbump;": "\u224e\u0338",
"nbumpe;": "\u224f\u0338",
"ncap;": "\u2a43",
"ncaron;": "\u0148",
"ncedil;": "\u0146",
"ncong;": "\u2247",
"ncongdot;": "\u2a6d\u0338",
"ncup;": "\u2a42",
"ncy;": "\u043d",
"ndash;": "\u2013",
"ne;": "\u2260",
"neArr;": "\u21d7",
"nearhk;": "\u2924",
"nearr;": "\u2197",
"nearrow;": "\u2197",
"nedot;": "\u2250\u0338",
"nequiv;": "\u2262",
"nesear;": "\u2928",
"nesim;": "\u2242\u0338",
"nexist;": "\u2204",
"nexists;": "\u2204",
"nfr;": "\U0001d52b",
"ngE;": "\u2267\u0338",
"nge;": "\u2271",
"ngeq;": "\u2271",
"ngeqq;": "\u2267\u0338",
"ngeqslant;": "\u2a7e\u0338",
"nges;": "\u2a7e\u0338",
"ngsim;": "\u2275",
"ngt;": "\u226f",
"ngtr;": "\u226f",
"nhArr;": "\u21ce",
"nharr;": "\u21ae",
"nhpar;": "\u2af2",
"ni;": "\u220b",
"nis;": "\u22fc",
"nisd;": "\u22fa",
"niv;": "\u220b",
"njcy;": "\u045a",
"nlArr;": "\u21cd",
"nlE;": "\u2266\u0338",
"nlarr;": "\u219a",
"nldr;": "\u2025",
"nle;": "\u2270",
"nleftarrow;": "\u219a",
"nleftrightarrow;": "\u21ae",
"nleq;": "\u2270",
"nleqq;": "\u2266\u0338",
"nleqslant;": "\u2a7d\u0338",
"nles;": "\u2a7d\u0338",
"nless;": "\u226e",
"nlsim;": "\u2274",
"nlt;": "\u226e",
"nltri;": "\u22ea",
"nltrie;": "\u22ec",
"nmid;": "\u2224",
"nopf;": "\U0001d55f",
"not": "\xac",
"not;": "\xac",
"notin;": "\u2209",
"notinE;": "\u22f9\u0338",
"notindot;": "\u22f5\u0338",
"notinva;": "\u2209",
"notinvb;": "\u22f7",
"notinvc;": "\u22f6",
"notni;": "\u220c",
"notniva;": "\u220c",
"notnivb;": "\u22fe",
"notnivc;": "\u22fd",
"npar;": "\u2226",
"nparallel;": "\u2226",
"nparsl;": "\u2afd\u20e5",
"npart;": "\u2202\u0338",
"npolint;": "\u2a14",
"npr;": "\u2280",
"nprcue;": "\u22e0",
"npre;": "\u2aaf\u0338",
"nprec;": "\u2280",
"npreceq;": "\u2aaf\u0338",
"nrArr;": "\u21cf",
"nrarr;": "\u219b",
"nrarrc;": "\u2933\u0338",
"nrarrw;": "\u219d\u0338",
"nrightarrow;": "\u219b",
"nrtri;": "\u22eb",
"nrtrie;": "\u22ed",
"nsc;": "\u2281",
"nsccue;": "\u22e1",
"nsce;": "\u2ab0\u0338",
"nscr;": "\U0001d4c3",
"nshortmid;": "\u2224",
"nshortparallel;": "\u2226",
"nsim;": "\u2241",
"nsime;": "\u2244",
"nsimeq;": "\u2244",
"nsmid;": "\u2224",
"nspar;": "\u2226",
"nsqsube;": "\u22e2",
"nsqsupe;": "\u22e3",
"nsub;": "\u2284",
"nsubE;": "\u2ac5\u0338",
"nsube;": "\u2288",
"nsubset;": "\u2282\u20d2",
"nsubseteq;": "\u2288",
"nsubseteqq;": "\u2ac5\u0338",
"nsucc;": "\u2281",
"nsucceq;": "\u2ab0\u0338",
"nsup;": "\u2285",
"nsupE;": "\u2ac6\u0338",
"nsupe;": "\u2289",
"nsupset;": "\u2283\u20d2",
"nsupseteq;": "\u2289",
"nsupseteqq;": "\u2ac6\u0338",
"ntgl;": "\u2279",
"ntilde": "\xf1",
"ntilde;": "\xf1",
"ntlg;": "\u2278",
"ntriangleleft;": "\u22ea",
"ntrianglelefteq;": "\u22ec",
"ntriangleright;": "\u22eb",
"ntrianglerighteq;": "\u22ed",
"nu;": "\u03bd",
"num;": "#",
"numero;": "\u2116",
"numsp;": "\u2007",
"nvDash;": "\u22ad",
"nvHarr;": "\u2904",
"nvap;": "\u224d\u20d2",
"nvdash;": "\u22ac",
"nvge;": "\u2265\u20d2",
"nvgt;": ">\u20d2",
"nvinfin;": "\u29de",
"nvlArr;": "\u2902",
"nvle;": "\u2264\u20d2",
"nvlt;": "<\u20d2",
"nvltrie;": "\u22b4\u20d2",
"nvrArr;": "\u2903",
"nvrtrie;": "\u22b5\u20d2",
"nvsim;": "\u223c\u20d2",
"nwArr;": "\u21d6",
"nwarhk;": "\u2923",
"nwarr;": "\u2196",
"nwarrow;": "\u2196",
"nwnear;": "\u2927",
"oS;": "\u24c8",
"oacute": "\xf3",
"oacute;": "\xf3",
"oast;": "\u229b",
"ocir;": "\u229a",
"ocirc": "\xf4",
"ocirc;": "\xf4",
"ocy;": "\u043e",
"odash;": "\u229d",
"odblac;": "\u0151",
"odiv;": "\u2a38",
"odot;": "\u2299",
"odsold;": "\u29bc",
"oelig;": "\u0153",
"ofcir;": "\u29bf",
"ofr;": "\U0001d52c",
"ogon;": "\u02db",
"ograve": "\xf2",
"ograve;": "\xf2",
"ogt;": "\u29c1",
"ohbar;": "\u29b5",
"ohm;": "\u03a9",
"oint;": "\u222e",
"olarr;": "\u21ba",
"olcir;": "\u29be",
"olcross;": "\u29bb",
"oline;": "\u203e",
"olt;": "\u29c0",
"omacr;": "\u014d",
"omega;": "\u03c9",
"omicron;": "\u03bf",
"omid;": "\u29b6",
"ominus;": "\u2296",
"oopf;": "\U0001d560",
"opar;": "\u29b7",
"operp;": "\u29b9",
"oplus;": "\u2295",
"or;": "\u2228",
"orarr;": "\u21bb",
"ord;": "\u2a5d",
"order;": "\u2134",
"orderof;": "\u2134",
"ordf": "\xaa",
"ordf;": "\xaa",
"ordm": "\xba",
"ordm;": "\xba",
"origof;": "\u22b6",
"oror;": "\u2a56",
"orslope;": "\u2a57",
"orv;": "\u2a5b",
"oscr;": "\u2134",
"oslash": "\xf8",
"oslash;": "\xf8",
"osol;": "\u2298",
"otilde": "\xf5",
"otilde;": "\xf5",
"otimes;": "\u2297",
"otimesas;": "\u2a36",
"ouml": "\xf6",
"ouml;": "\xf6",
"ovbar;": "\u233d",
"par;": "\u2225",
"para": "\xb6",
"para;": "\xb6",
"parallel;": "\u2225",
"parsim;": "\u2af3",
"parsl;": "\u2afd",
"part;": "\u2202",
"pcy;": "\u043f",
"percnt;": "%",
"period;": ".",
"permil;": "\u2030",
"perp;": "\u22a5",
"pertenk;": "\u2031",
"pfr;": "\U0001d52d",
"phi;": "\u03c6",
"phiv;": "\u03d5",
"phmmat;": "\u2133",
"phone;": "\u260e",
"pi;": "\u03c0",
"pitchfork;": "\u22d4",
"piv;": "\u03d6",
"planck;": "\u210f",
"planckh;": "\u210e",
"plankv;": "\u210f",
"plus;": "+",
"plusacir;": "\u2a23",
"plusb;": "\u229e",
"pluscir;": "\u2a22",
"plusdo;": "\u2214",
"plusdu;": "\u2a25",
"pluse;": "\u2a72",
"plusmn": "\xb1",
"plusmn;": "\xb1",
"plussim;": "\u2a26",
"plustwo;": "\u2a27",
"pm;": "\xb1",
"pointint;": "\u2a15",
"popf;": "\U0001d561",
"pound": "\xa3",
"pound;": "\xa3",
"pr;": "\u227a",
"prE;": "\u2ab3",
"prap;": "\u2ab7",
"prcue;": "\u227c",
"pre;": "\u2aaf",
"prec;": "\u227a",
"precapprox;": "\u2ab7",
"preccurlyeq;": "\u227c",
"preceq;": "\u2aaf",
"precnapprox;": "\u2ab9",
"precneqq;": "\u2ab5",
"precnsim;": "\u22e8",
"precsim;": "\u227e",
"prime;": "\u2032",
"primes;": "\u2119",
"prnE;": "\u2ab5",
"prnap;": "\u2ab9",
"prnsim;": "\u22e8",
"prod;": "\u220f",
"profalar;": "\u232e",
"profline;": "\u2312",
"profsurf;": "\u2313",
"prop;": "\u221d",
"propto;": "\u221d",
"prsim;": "\u227e",
"prurel;": "\u22b0",
"pscr;": "\U0001d4c5",
"psi;": "\u03c8",
"puncsp;": "\u2008",
"qfr;": "\U0001d52e",
"qint;": "\u2a0c",
"qopf;": "\U0001d562",
"qprime;": "\u2057",
"qscr;": "\U0001d4c6",
"quaternions;": "\u210d",
"quatint;": "\u2a16",
"quest;": "?",
"questeq;": "\u225f",
"quot": "\"",
"quot;": "\"",
"rAarr;": "\u21db",
"rArr;": "\u21d2",
"rAtail;": "\u291c",
"rBarr;": "\u290f",
"rHar;": "\u2964",
"race;": "\u223d\u0331",
"racute;": "\u0155",
"radic;": "\u221a",
"raemptyv;": "\u29b3",
"rang;": "\u27e9",
"rangd;": "\u2992",
"range;": "\u29a5",
"rangle;": "\u27e9",
"raquo": "\xbb",
"raquo;": "\xbb",
"rarr;": "\u2192",
"rarrap;": "\u2975",
"rarrb;": "\u21e5",
"rarrbfs;": "\u2920",
"rarrc;": "\u2933",
"rarrfs;": "\u291e",
"rarrhk;": "\u21aa",
"rarrlp;": "\u21ac",
"rarrpl;": "\u2945",
"rarrsim;": "\u2974",
"rarrtl;": "\u21a3",
"rarrw;": "\u219d",
"ratail;": "\u291a",
"ratio;": "\u2236",
"rationals;": "\u211a",
"rbarr;": "\u290d",
"rbbrk;": "\u2773",
"rbrace;": "}",
"rbrack;": "]",
"rbrke;": "\u298c",
"rbrksld;": "\u298e",
"rbrkslu;": "\u2990",
"rcaron;": "\u0159",
"rcedil;": "\u0157",
"rceil;": "\u2309",
"rcub;": "}",
"rcy;": "\u0440",
"rdca;": "\u2937",
"rdldhar;": "\u2969",
"rdquo;": "\u201d",
"rdquor;": "\u201d",
"rdsh;": "\u21b3",
"real;": "\u211c",
"realine;": "\u211b",
"realpart;": "\u211c",
"reals;": "\u211d",
"rect;": "\u25ad",
"reg": "\xae",
"reg;": "\xae",
"rfisht;": "\u297d",
"rfloor;": "\u230b",
"rfr;": "\U0001d52f",
"rhard;": "\u21c1",
"rharu;": "\u21c0",
"rharul;": "\u296c",
"rho;": "\u03c1",
"rhov;": "\u03f1",
"rightarrow;": "\u2192",
"rightarrowtail;": "\u21a3",
"rightharpoondown;": "\u21c1",
"rightharpoonup;": "\u21c0",
"rightleftarrows;": "\u21c4",
"rightleftharpoons;": "\u21cc",
"rightrightarrows;": "\u21c9",
"rightsquigarrow;": "\u219d",
"rightthreetimes;": "\u22cc",
"ring;": "\u02da",
"risingdotseq;": "\u2253",
"rlarr;": "\u21c4",
"rlhar;": "\u21cc",
"rlm;": "\u200f",
"rmoust;": "\u23b1",
"rmoustache;": "\u23b1",
"rnmid;": "\u2aee",
"roang;": "\u27ed",
"roarr;": "\u21fe",
"robrk;": "\u27e7",
"ropar;": "\u2986",
"ropf;": "\U0001d563",
"roplus;": "\u2a2e",
"rotimes;": "\u2a35",
"rpar;": ")",
"rpargt;": "\u2994",
"rppolint;": "\u2a12",
"rrarr;": "\u21c9",
"rsaquo;": "\u203a",
"rscr;": "\U0001d4c7",
"rsh;": "\u21b1",
"rsqb;": "]",
"rsquo;": "\u2019",
"rsquor;": "\u2019",
"rthree;": "\u22cc",
"rtimes;": "\u22ca",
"rtri;": "\u25b9",
"rtrie;": "\u22b5",
"rtrif;": "\u25b8",
"rtriltri;": "\u29ce",
"ruluhar;": "\u2968",
"rx;": "\u211e",
"sacute;": "\u015b",
"sbquo;": "\u201a",
"sc;": "\u227b",
"scE;": "\u2ab4",
"scap;": "\u2ab8",
"scaron;": "\u0161",
"sccue;": "\u227d",
"sce;": "\u2ab0",
"scedil;": "\u015f",
"scirc;": "\u015d",
"scnE;": "\u2ab6",
"scnap;": "\u2aba",
"scnsim;": "\u22e9",
"scpolint;": "\u2a13",
"scsim;": "\u227f",
"scy;": "\u0441",
"sdot;": "\u22c5",
"sdotb;": "\u22a1",
"sdote;": "\u2a66",
"seArr;": "\u21d8",
"searhk;": "\u2925",
"searr;": "\u2198",
"searrow;": "\u2198",
"sect": "\xa7",
"sect;": "\xa7",
"semi;": ";",
"seswar;": "\u2929",
"setminus;": "\u2216",
"setmn;": "\u2216",
"sext;": "\u2736",
"sfr;": "\U0001d530",
"sfrown;": "\u2322",
"sharp;": "\u266f",
"shchcy;": "\u0449",
"shcy;": "\u0448",
"shortmid;": "\u2223",
"shortparallel;": "\u2225",
"shy": "\xad",
"shy;": "\xad",
"sigma;": "\u03c3",
"sigmaf;": "\u03c2",
"sigmav;": "\u03c2",
"sim;": "\u223c",
"simdot;": "\u2a6a",
"sime;": "\u2243",
"simeq;": "\u2243",
"simg;": "\u2a9e",
"simgE;": "\u2aa0",
"siml;": "\u2a9d",
"simlE;": "\u2a9f",
"simne;": "\u2246",
"simplus;": "\u2a24",
"simrarr;": "\u2972",
"slarr;": "\u2190",
"smallsetminus;": "\u2216",
"smashp;": "\u2a33",
"smeparsl;": "\u29e4",
"smid;": "\u2223",
"smile;": "\u2323",
"smt;": "\u2aaa",
"smte;": "\u2aac",
"smtes;": "\u2aac\ufe00",
"softcy;": "\u044c",
"sol;": "/",
"solb;": "\u29c4",
"solbar;": "\u233f",
"sopf;": "\U0001d564",
"spades;": "\u2660",
"spadesuit;": "\u2660",
"spar;": "\u2225",
"sqcap;": "\u2293",
"sqcaps;": "\u2293\ufe00",
"sqcup;": "\u2294",
"sqcups;": "\u2294\ufe00",
"sqsub;": "\u228f",
"sqsube;": "\u2291",
"sqsubset;": "\u228f",
"sqsubseteq;": "\u2291",
"sqsup;": "\u2290",
"sqsupe;": "\u2292",
"sqsupset;": "\u2290",
"sqsupseteq;": "\u2292",
"squ;": "\u25a1",
"square;": "\u25a1",
"squarf;": "\u25aa",
"squf;": "\u25aa",
"srarr;": "\u2192",
"sscr;": "\U0001d4c8",
"ssetmn;": "\u2216",
"ssmile;": "\u2323",
"sstarf;": "\u22c6",
"star;": "\u2606",
"starf;": "\u2605",
"straightepsilon;": "\u03f5",
"straightphi;": "\u03d5",
"strns;": "\xaf",
"sub;": "\u2282",
"subE;": "\u2ac5",
"subdot;": "\u2abd",
"sube;": "\u2286",
"subedot;": "\u2ac3",
"submult;": "\u2ac1",
"subnE;": "\u2acb",
"subne;": "\u228a",
"subplus;": "\u2abf",
"subrarr;": "\u2979",
"subset;": "\u2282",
"subseteq;": "\u2286",
"subseteqq;": "\u2ac5",
"subsetneq;": "\u228a",
"subsetneqq;": "\u2acb",
"subsim;": "\u2ac7",
"subsub;": "\u2ad5",
"subsup;": "\u2ad3",
"succ;": "\u227b",
"succapprox;": "\u2ab8",
"succcurlyeq;": "\u227d",
"succeq;": "\u2ab0",
"succnapprox;": "\u2aba",
"succneqq;": "\u2ab6",
"succnsim;": "\u22e9",
"succsim;": "\u227f",
"sum;": "\u2211",
"sung;": "\u266a",
"sup1": "\xb9",
"sup1;": "\xb9",
"sup2": "\xb2",
"sup2;": "\xb2",
"sup3": "\xb3",
"sup3;": "\xb3",
"sup;": "\u2283",
"supE;": "\u2ac6",
"supdot;": "\u2abe",
"supdsub;": "\u2ad8",
"supe;": "\u2287",
"supedot;": "\u2ac4",
"suphsol;": "\u27c9",
"suphsub;": "\u2ad7",
"suplarr;": "\u297b",
"supmult;": "\u2ac2",
"supnE;": "\u2acc",
"supne;": "\u228b",
"supplus;": "\u2ac0",
"supset;": "\u2283",
"supseteq;": "\u2287",
"supseteqq;": "\u2ac6",
"supsetneq;": "\u228b",
"supsetneqq;": "\u2acc",
"supsim;": "\u2ac8",
"supsub;": "\u2ad4",
"supsup;": "\u2ad6",
"swArr;": "\u21d9",
"swarhk;": "\u2926",
"swarr;": "\u2199",
"swarrow;": "\u2199",
"swnwar;": "\u292a",
"szlig": "\xdf",
"szlig;": "\xdf",
"target;": "\u2316",
"tau;": "\u03c4",
"tbrk;": "\u23b4",
"tcaron;": "\u0165",
"tcedil;": "\u0163",
"tcy;": "\u0442",
"tdot;": "\u20db",
"telrec;": "\u2315",
"tfr;": "\U0001d531",
"there4;": "\u2234",
"therefore;": "\u2234",
"theta;": "\u03b8",
"thetasym;": "\u03d1",
"thetav;": "\u03d1",
"thickapprox;": "\u2248",
"thicksim;": "\u223c",
"thinsp;": "\u2009",
"thkap;": "\u2248",
"thksim;": "\u223c",
"thorn": "\xfe",
"thorn;": "\xfe",
"tilde;": "\u02dc",
"times": "\xd7",
"times;": "\xd7",
"timesb;": "\u22a0",
"timesbar;": "\u2a31",
"timesd;": "\u2a30",
"tint;": "\u222d",
"toea;": "\u2928",
"top;": "\u22a4",
"topbot;": "\u2336",
"topcir;": "\u2af1",
"topf;": "\U0001d565",
"topfork;": "\u2ada",
"tosa;": "\u2929",
"tprime;": "\u2034",
"trade;": "\u2122",
"triangle;": "\u25b5",
"triangledown;": "\u25bf",
"triangleleft;": "\u25c3",
"trianglelefteq;": "\u22b4",
"triangleq;": "\u225c",
"triangleright;": "\u25b9",
"trianglerighteq;": "\u22b5",
"tridot;": "\u25ec",
"trie;": "\u225c",
"triminus;": "\u2a3a",
"triplus;": "\u2a39",
"trisb;": "\u29cd",
"tritime;": "\u2a3b",
"trpezium;": "\u23e2",
"tscr;": "\U0001d4c9",
"tscy;": "\u0446",
"tshcy;": "\u045b",
"tstrok;": "\u0167",
"twixt;": "\u226c",
"twoheadleftarrow;": "\u219e",
"twoheadrightarrow;": "\u21a0",
"uArr;": "\u21d1",
"uHar;": "\u2963",
"uacute": "\xfa",
"uacute;": "\xfa",
"uarr;": "\u2191",
"ubrcy;": "\u045e",
"ubreve;": "\u016d",
"ucirc": "\xfb",
"ucirc;": "\xfb",
"ucy;": "\u0443",
"udarr;": "\u21c5",
"udblac;": "\u0171",
"udhar;": "\u296e",
"ufisht;": "\u297e",
"ufr;": "\U0001d532",
"ugrave": "\xf9",
"ugrave;": "\xf9",
"uharl;": "\u21bf",
"uharr;": "\u21be",
"uhblk;": "\u2580",
"ulcorn;": "\u231c",
"ulcorner;": "\u231c",
"ulcrop;": "\u230f",
"ultri;": "\u25f8",
"umacr;": "\u016b",
"uml": "\xa8",
"uml;": "\xa8",
"uogon;": "\u0173",
"uopf;": "\U0001d566",
"uparrow;": "\u2191",
"updownarrow;": "\u2195",
"upharpoonleft;": "\u21bf",
"upharpoonright;": "\u21be",
"uplus;": "\u228e",
"upsi;": "\u03c5",
"upsih;": "\u03d2",
"upsilon;": "\u03c5",
"upuparrows;": "\u21c8",
"urcorn;": "\u231d",
"urcorner;": "\u231d",
"urcrop;": "\u230e",
"uring;": "\u016f",
"urtri;": "\u25f9",
"uscr;": "\U0001d4ca",
"utdot;": "\u22f0",
"utilde;": "\u0169",
"utri;": "\u25b5",
"utrif;": "\u25b4",
"uuarr;": "\u21c8",
"uuml": "\xfc",
"uuml;": "\xfc",
"uwangle;": "\u29a7",
"vArr;": "\u21d5",
"vBar;": "\u2ae8",
"vBarv;": "\u2ae9",
"vDash;": "\u22a8",
"vangrt;": "\u299c",
"varepsilon;": "\u03f5",
"varkappa;": "\u03f0",
"varnothing;": "\u2205",
"varphi;": "\u03d5",
"varpi;": "\u03d6",
"varpropto;": "\u221d",
"varr;": "\u2195",
"varrho;": "\u03f1",
"varsigma;": "\u03c2",
"varsubsetneq;": "\u228a\ufe00",
"varsubsetneqq;": "\u2acb\ufe00",
"varsupsetneq;": "\u228b\ufe00",
"varsupsetneqq;": "\u2acc\ufe00",
"vartheta;": "\u03d1",
"vartriangleleft;": "\u22b2",
"vartriangleright;": "\u22b3",
"vcy;": "\u0432",
"vdash;": "\u22a2",
"vee;": "\u2228",
"veebar;": "\u22bb",
"veeeq;": "\u225a",
"vellip;": "\u22ee",
"verbar;": "|",
"vert;": "|",
"vfr;": "\U0001d533",
"vltri;": "\u22b2",
"vnsub;": "\u2282\u20d2",
"vnsup;": "\u2283\u20d2",
"vopf;": "\U0001d567",
"vprop;": "\u221d",
"vrtri;": "\u22b3",
"vscr;": "\U0001d4cb",
"vsubnE;": "\u2acb\ufe00",
"vsubne;": "\u228a\ufe00",
"vsupnE;": "\u2acc\ufe00",
"vsupne;": "\u228b\ufe00",
"vzigzag;": "\u299a",
"wcirc;": "\u0175",
"wedbar;": "\u2a5f",
"wedge;": "\u2227",
"wedgeq;": "\u2259",
"weierp;": "\u2118",
"wfr;": "\U0001d534",
"wopf;": "\U0001d568",
"wp;": "\u2118",
"wr;": "\u2240",
"wreath;": "\u2240",
"wscr;": "\U0001d4cc",
"xcap;": "\u22c2",
"xcirc;": "\u25ef",
"xcup;": "\u22c3",
"xdtri;": "\u25bd",
"xfr;": "\U0001d535",
"xhArr;": "\u27fa",
"xharr;": "\u27f7",
"xi;": "\u03be",
"xlArr;": "\u27f8",
"xlarr;": "\u27f5",
"xmap;": "\u27fc",
"xnis;": "\u22fb",
"xodot;": "\u2a00",
"xopf;": "\U0001d569",
"xoplus;": "\u2a01",
"xotime;": "\u2a02",
"xrArr;": "\u27f9",
"xrarr;": "\u27f6",
"xscr;": "\U0001d4cd",
"xsqcup;": "\u2a06",
"xuplus;": "\u2a04",
"xutri;": "\u25b3",
"xvee;": "\u22c1",
"xwedge;": "\u22c0",
"yacute": "\xfd",
"yacute;": "\xfd",
"yacy;": "\u044f",
"ycirc;": "\u0177",
"ycy;": "\u044b",
"yen": "\xa5",
"yen;": "\xa5",
"yfr;": "\U0001d536",
"yicy;": "\u0457",
"yopf;": "\U0001d56a",
"yscr;": "\U0001d4ce",
"yucy;": "\u044e",
"yuml": "\xff",
"yuml;": "\xff",
"zacute;": "\u017a",
"zcaron;": "\u017e",
"zcy;": "\u0437",
"zdot;": "\u017c",
"zeetrf;": "\u2128",
"zeta;": "\u03b6",
"zfr;": "\U0001d537",
"zhcy;": "\u0436",
"zigrarr;": "\u21dd",
"zopf;": "\U0001d56b",
"zscr;": "\U0001d4cf",
"zwj;": "\u200d",
"zwnj;": "\u200c",
}
# Replacements for numeric character references that must not be interpreted
# literally: NUL becomes U+FFFD, and code points in the 0x80-0x9F (C1) range
# are remapped through the windows-1252 code page, as the HTML5 spec requires.
replacementCharacters = {
    0x0: "\uFFFD",
    0x0d: "\u000D",
    0x80: "\u20AC",
    0x81: "\u0081",
    0x82: "\u201A",
    0x83: "\u0192",
    0x84: "\u201E",
    0x85: "\u2026",
    0x86: "\u2020",
    0x87: "\u2021",
    0x88: "\u02C6",
    0x89: "\u2030",
    0x8A: "\u0160",
    0x8B: "\u2039",
    0x8C: "\u0152",
    0x8D: "\u008D",
    0x8E: "\u017D",
    0x8F: "\u008F",
    0x90: "\u0090",
    0x91: "\u2018",
    0x92: "\u2019",
    0x93: "\u201C",
    0x94: "\u201D",
    0x95: "\u2022",
    0x96: "\u2013",
    0x97: "\u2014",
    0x98: "\u02DC",
    0x99: "\u2122",
    0x9A: "\u0161",
    0x9B: "\u203A",
    0x9C: "\u0153",
    0x9D: "\u009D",
    0x9E: "\u017E",
    0x9F: "\u0178",
}
# Numeric identifiers for the token types produced by the tokenizer.
tokenTypes = {
    "Doctype": 0,
    "Characters": 1,
    "SpaceCharacters": 2,
    "StartTag": 3,
    "EndTag": 4,
    "EmptyTag": 5,
    "Comment": 6,
    "ParseError": 7
}
# The subset of token types that represent tags (start, end, or empty/void).
tagTokenTypes = frozenset([tokenTypes["StartTag"], tokenTypes["EndTag"],
                           tokenTypes["EmptyTag"]])
# Inverse of the `namespaces` mapping (namespace URI -> prefix).  The MathML
# entry is set explicitly afterwards so that URI always maps to "math".
prefixes = dict([(v, k) for k, v in namespaces.items()])
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
class DataLossWarning(UserWarning):
    """Warning emitted when the current tree is unable to represent the input data."""
    pass
class _ReparseException(Exception):
    """Internal control-flow exception signalling that the input must be parsed again."""
    pass
| mpl-2.0 |
tvolkert/engine | testing/run_tests.py | 1 | 17247 | #!/usr/bin/env python
# Copyright 2013 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A top level harness to run all unit-tests in a specific engine build.
"""
import argparse
import glob
import os
import re
import subprocess
import sys
import time
# Absolute path of the engine buildroot (three levels above this script).
buildroot_dir = os.path.abspath(os.path.join(os.path.realpath(__file__), '..', '..', '..'))
out_dir = os.path.join(buildroot_dir, 'out')
golden_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'resources')
fonts_dir = os.path.join(buildroot_dir, 'flutter', 'third_party', 'txt', 'third_party', 'fonts')
roboto_font_path = os.path.join(fonts_dir, 'Roboto-Regular.ttf')
dart_tests_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'dart',)
font_subset_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'font-subset')

# gtest filter that excludes the time-sensitive FML tests.
fml_unittests_filter = '--gtest_filter=-*TimeSensitiveTest*'
def PrintDivider(char='='):
    """Print a visual divider (four 80-character lines of *char*) to stdout.

    Used to separate the output of consecutive test commands in the log.
    """
    # Use print() calls (consistent with the rest of this file and valid on
    # both Python 2 and 3, unlike the previous `print` statements), and
    # string repetition instead of a join over a throwaway comprehension.
    print('\n')
    for _ in range(4):
        print(char * 80)
    print('\n')
def RunCmd(cmd, **kwargs):
    """Run *cmd* (a list of arguments), echoing it and timing the execution.

    Extra keyword arguments are forwarded to subprocess.Popen (e.g. cwd=...,
    shell=...).  Output goes straight to this process's stdout/stderr.

    Raises:
        Exception: if the command exits with a non-zero return code.
    """
    command_string = ' '.join(cmd)
    PrintDivider('>')
    # print() calls instead of Python-2-only print statements, consistent
    # with the print() usage elsewhere in this file.
    print('Running command "%s"' % command_string)
    start_time = time.time()
    process = subprocess.Popen(cmd, stdout=sys.stdout, stderr=sys.stderr, **kwargs)
    process.communicate()
    end_time = time.time()
    if process.returncode != 0:
        PrintDivider('!')
        raise Exception('Command "%s" exited with code %d' % (command_string, process.returncode))
    PrintDivider('<')
    print('Command run successfully in %.2f seconds: %s' % (end_time - start_time, command_string))
def IsMac():
    """Return True when the host platform is macOS (Darwin)."""
    return 'darwin' == sys.platform
def IsLinux():
    """Return True when the host platform is Linux."""
    return sys.platform[:5] == 'linux'
def IsWindows():
    """Return True on native Windows or Cygwin hosts."""
    return sys.platform.startswith('cygwin') or sys.platform.startswith('win')
def ExecutableSuffix():
    """Return the platform-specific executable suffix ('.exe' on Windows)."""
    if IsWindows():
        return '.exe'
    return ''
def FindExecutablePath(path):
    """Return the path of an existing executable, trying Windows suffixes.

    Checks *path* itself first; on Windows, also tries *path*.exe and
    *path*.bat in that order.

    Raises:
        Exception: if none of the candidate paths exist.
    """
    if os.path.exists(path):
        return path
    if IsWindows():
        exe_path = path + '.exe'
        if os.path.exists(exe_path):
            return exe_path
        bat_path = path + '.bat'
        if os.path.exists(bat_path):
            return bat_path
    raise Exception('Executable %s does not exist!' % path)
def RunEngineExecutable(build_dir, executable_name, filter, flags=None, cwd=buildroot_dir):
    """Run a single engine test executable, honoring the optional name filter.

    Args:
        build_dir: engine output directory containing the executable.
        executable_name: name of the test binary (platform suffix resolved
            via FindExecutablePath).
        filter: optional collection of executable names; when given and this
            executable is not in it, the run is skipped.
        flags: optional list of extra command-line flags for the executable.
        cwd: working directory for the test process.

    Raises:
        Exception: if the executable is missing or exits non-zero (via RunCmd).
    """
    # None instead of a mutable default argument; `flags=[]` would be a
    # single list object shared across all calls.
    if flags is None:
        flags = []
    if filter is not None and executable_name not in filter:
        print('Skipping %s due to filter.' % executable_name)
        return
    executable = FindExecutablePath(os.path.join(build_dir, executable_name))
    print('Running %s in %s' % (executable_name, cwd))
    test_command = [executable] + flags
    print(' '.join(test_command))
    RunCmd(test_command, cwd=cwd)
def RunCCTests(build_dir, filter):
    """Run the C++ (gtest) engine unit tests appropriate for this platform.

    Tests are shuffled and repeated twice; *filter* limits which executables
    run (see RunEngineExecutable).
    """
    print("Running Engine Unit-tests.")
    shuffle_flags = [
        "--gtest_shuffle",
        "--gtest_repeat=2",
    ]
    RunEngineExecutable(build_dir, 'client_wrapper_glfw_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'common_cpp_core_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'common_cpp_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'client_wrapper_unittests', filter, shuffle_flags)
    # https://github.com/flutter/flutter/issues/36294
    if not IsWindows():
        RunEngineExecutable(build_dir, 'embedder_unittests', filter, shuffle_flags)
    else:
        RunEngineExecutable(build_dir, 'flutter_windows_unittests', filter, shuffle_flags)
        RunEngineExecutable(build_dir, 'client_wrapper_windows_unittests', filter, shuffle_flags)
    # The golden-comparison flags are only usable on Linux; elsewhere the
    # gold test is excluded instead.
    flow_flags = ['--gtest_filter=-PerformanceOverlayLayer.Gold']
    if IsLinux():
        flow_flags = [
            '--golden-dir=%s' % golden_dir,
            '--font-file=%s' % roboto_font_path,
        ]
    RunEngineExecutable(build_dir, 'flow_unittests', filter, flow_flags + shuffle_flags)
    # TODO(44614): Re-enable after https://github.com/flutter/flutter/issues/44614 has been addressed.
    # RunEngineExecutable(build_dir, 'fml_unittests', filter, [ fml_unittests_filter ] + shuffle_flags)
    RunEngineExecutable(build_dir, 'runtime_unittests', filter, shuffle_flags)
    if not IsWindows():
        # https://github.com/flutter/flutter/issues/36295
        RunEngineExecutable(build_dir, 'shell_unittests', filter, shuffle_flags)
        # https://github.com/google/googletest/issues/2490
        RunEngineExecutable(build_dir, 'android_external_view_embedder_unittests', filter, shuffle_flags)
        RunEngineExecutable(build_dir, 'jni_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'ui_unittests', filter, shuffle_flags)
    RunEngineExecutable(build_dir, 'testing_unittests', filter, shuffle_flags)
    # These unit-tests are Objective-C and can only run on Darwin.
    if IsMac():
        RunEngineExecutable(build_dir, 'flutter_channels_unittests', filter, shuffle_flags)
    # https://github.com/flutter/flutter/issues/36296
    if IsLinux():
        RunEngineExecutable(build_dir, 'txt_unittests', filter, shuffle_flags)
    if IsLinux():
        RunEngineExecutable(build_dir, 'flutter_linux_unittests', filter, shuffle_flags)
def RunEngineBenchmarks(build_dir, filter):
    """Run the engine benchmark executables (txt benchmarks are Linux-only)."""
    print("Running Engine Benchmarks.")
    RunEngineExecutable(build_dir, 'shell_benchmarks', filter)
    RunEngineExecutable(build_dir, 'fml_benchmarks', filter)
    RunEngineExecutable(build_dir, 'ui_benchmarks', filter)
    if IsLinux():
        RunEngineExecutable(build_dir, 'txt_benchmarks', filter)
def SnapshotTest(build_dir, dart_file, kernel_file_output, verbose_dart_snapshot):
    """Compile *dart_file* to a kernel snapshot via the frontend server.

    Args:
        build_dir: engine output directory containing the Dart SDK and tools.
        dart_file: path of the Dart test source to compile.
        kernel_file_output: destination path of the generated .dill file.
        verbose_dart_snapshot: when True, echo the compiler's output.
    """
    print("Generating snapshot for test %s" % dart_file)
    dart = os.path.join(build_dir, 'dart-sdk', 'bin', 'dart')
    frontend_server = os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot')
    flutter_patched_sdk = os.path.join(build_dir, 'flutter_patched_sdk')
    test_packages = os.path.join(dart_tests_dir, '.packages')
    # Fail fast if any required build artifact is missing.
    assert os.path.exists(dart)
    assert os.path.exists(frontend_server)
    assert os.path.exists(flutter_patched_sdk)
    assert os.path.exists(test_packages)
    snapshot_command = [
        dart,
        frontend_server,
        '--enable-experiment=non-nullable',
        '--no-sound-null-safety',
        '--sdk-root',
        flutter_patched_sdk,
        '--incremental',
        '--target=flutter',
        '--packages',
        test_packages,
        '--output-dill',
        kernel_file_output,
        dart_file
    ]
    if verbose_dart_snapshot:
        RunCmd(snapshot_command, cwd=buildroot_dir)
    else:
        # Suppress compiler chatter unless verbose output was requested.
        subprocess.check_output(snapshot_command, cwd=buildroot_dir)
    assert os.path.exists(kernel_file_output)
def RunDartTest(build_dir, dart_file, verbose_dart_snapshot, multithreaded):
    """Compile one Dart test to a kernel file and execute it in flutter_tester.

    When *multithreaded* is True, flutter_tester runs with
    --force-multithreading.
    """
    kernel_file_name = os.path.basename(dart_file) + '.kernel.dill'
    kernel_file_output = os.path.join(out_dir, kernel_file_name)
    SnapshotTest(build_dir, dart_file, kernel_file_output, verbose_dart_snapshot)
    command_args = [
        '--disable-observatory',
        '--use-test-fonts',
        kernel_file_output
    ]
    if multithreaded:
        threading = 'multithreaded'
        command_args.insert(0, '--force-multithreading')
    else:
        threading = 'single-threaded'
    print("Running test '%s' using 'flutter_tester' (%s)" % (kernel_file_name, threading))
    RunEngineExecutable(build_dir, 'flutter_tester', None, command_args)
def RunPubGet(build_dir, directory):
    """Run `pub get` (using the Dart SDK in *build_dir*) inside *directory*."""
    # Bug fix: the log message previously printed the global dart_tests_dir
    # even though the command actually runs in the *directory* argument.
    print("Running 'pub get' in the tests directory %s" % directory)
    pub_get_command = [
        os.path.join(build_dir, 'dart-sdk', 'bin', 'pub'),
        'get'
    ]
    RunCmd(pub_get_command, cwd=directory)
def EnsureDebugUnoptSkyPackagesAreBuilt():
    """Build flutter/sky/packages in the host_debug_unopt variant.

    The Dart tests' pubspec hardcodes paths under host_debug_unopt (see
    RunDartTests), so that target must exist regardless of which variant is
    being tested.
    """
    variant_out_dir = os.path.join(out_dir, 'host_debug_unopt')
    ninja_command = [
        'autoninja',
        '-C',
        variant_out_dir,
        'flutter/sky/packages'
    ]
    # Attempt running Ninja if the out directory exists.
    # We don't want to blow away any custom GN args the caller may have already set.
    if os.path.exists(variant_out_dir):
        RunCmd(ninja_command, cwd=buildroot_dir)
        return
    # Otherwise generate the build directory with GN first, then build.
    gn_command = [
        os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
        '--runtime-mode',
        'debug',
        '--unopt',
        '--no-lto',
    ]
    RunCmd(gn_command, cwd=buildroot_dir)
    RunCmd(ninja_command, cwd=buildroot_dir)
def EnsureJavaTestsAreBuilt(android_out_dir):
    """Builds the engine variant and the test jar containing the JUnit tests"""
    ninja_command = [
        'autoninja',
        '-C',
        android_out_dir,
        'flutter/shell/platform/android:robolectric_tests'
    ]
    # Attempt running Ninja if the out directory exists.
    # We don't want to blow away any custom GN args the caller may have already set.
    if os.path.exists(android_out_dir):
        RunCmd(ninja_command, cwd=buildroot_dir)
        return
    # Refuse to auto-generate the default variant directory; the caller is
    # expected to have run GN for it already.
    assert android_out_dir != "out/android_debug_unopt", "%s doesn't exist. Run GN to generate the directory first" % android_out_dir
    # Otherwise prepare the directory first, then build the test.
    gn_command = [
        os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
        '--android',
        '--unoptimized',
        '--runtime-mode=debug',
        '--no-lto',
    ]
    RunCmd(gn_command, cwd=buildroot_dir)
    RunCmd(ninja_command, cwd=buildroot_dir)
def EnsureIosTestsAreBuilt(ios_out_dir):
    """Builds the engine variant and the test dylib containing the XCTests"""
    ninja_command = [
        'autoninja',
        '-C',
        ios_out_dir,
        'ios_test_flutter'
    ]
    # Attempt running Ninja if the out directory exists.
    # We don't want to blow away any custom GN args the caller may have already set.
    if os.path.exists(ios_out_dir):
        RunCmd(ninja_command, cwd=buildroot_dir)
        return
    # Refuse to auto-generate the default variant directory; the caller is
    # expected to have run GN for it already.
    assert ios_out_dir != "out/ios_debug_sim_unopt", "%s doesn't exist. Run GN to generate the directory first" % ios_out_dir
    # Otherwise prepare the directory first, then build the test.
    gn_command = [
        os.path.join(buildroot_dir, 'flutter', 'tools', 'gn'),
        '--ios',
        '--unoptimized',
        '--runtime-mode=debug',
        '--no-lto',
        '--simulator'
    ]
    RunCmd(gn_command, cwd=buildroot_dir)
    RunCmd(ninja_command, cwd=buildroot_dir)
def AssertExpectedJavaVersion():
    """Checks that the user has Java 8 which is the supported Java version for Android"""
    EXPECTED_VERSION = '1.8'
    # `java -version` is output to stderr. https://bugs.java.com/bugdatabase/view_bug.do?bug_id=4380614
    version_output = subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
    # NOTE(review): under Python 3, check_output returns bytes, so searching it
    # with a str pattern would raise TypeError — this assumes Python 2.
    match = bool(re.compile('version "%s' % EXPECTED_VERSION).search(version_output))
    message = "JUnit tests need to be run with Java %s. Check the `java -version` on your PATH." % EXPECTED_VERSION
    assert match, message
def AssertExpectedXcodeVersion():
    """Checks that the user has a recent version of Xcode installed"""
    EXPECTED_MAJOR_VERSION = '11'
    version_output = subprocess.check_output(['xcodebuild', '-version'])
    # NOTE(review): under Python 3, check_output returns bytes, so the `in`
    # check against a str below assumes Python 2 output.
    message = "Xcode must be installed to run the iOS embedding unit tests"
    assert "Xcode %s." % EXPECTED_MAJOR_VERSION in version_output, message
def RunJavaTests(filter, android_variant='android_debug_unopt'):
    """Runs the Java JUnit unit tests for the Android embedding

    Args:
        filter: a single fully-qualified test class name, or falsy to run the
            whole io.flutter.FlutterTestSuite.
        android_variant: engine build variant containing the Android artifacts.
    """
    AssertExpectedJavaVersion()
    android_out_dir = os.path.join(out_dir, android_variant)
    EnsureJavaTestsAreBuilt(android_out_dir)
    embedding_deps_dir = os.path.join(buildroot_dir, 'third_party', 'android_embedding_dependencies', 'lib')
    classpath = map(str, [
        os.path.join(buildroot_dir, 'third_party', 'android_tools', 'sdk', 'platforms', 'android-29', 'android.jar'),
        os.path.join(embedding_deps_dir, '*'),  # Wildcard for all jars in the directory
        os.path.join(android_out_dir, 'flutter.jar'),
        os.path.join(android_out_dir, 'robolectric_tests.jar')
    ])
    test_class = filter if filter else 'io.flutter.FlutterTestSuite'
    command = [
        'java',
        '-Drobolectric.offline=true',
        '-Drobolectric.dependency.dir=' + embedding_deps_dir,
        '-classpath', ':'.join(classpath),
        '-Drobolectric.logging=stdout',
        'org.junit.runner.JUnitCore',
        test_class
    ]
    RunCmd(command)
def RunObjcTests(ios_variant='ios_debug_sim_unopt'):
    """Runs Objective-C XCTest unit tests for the iOS embedding"""
    AssertExpectedXcodeVersion()
    ios_out_dir = os.path.join(out_dir, ios_variant)
    EnsureIosTestsAreBuilt(ios_out_dir)
    # Pipe through xcpretty when available; `which` exits non-zero (truthy
    # here) when the tool is missing, in which case fall back to plain cat.
    pretty = "cat" if subprocess.call(["which", "xcpretty"]) else "xcpretty"
    ios_unit_test_dir = os.path.join(buildroot_dir, 'flutter', 'testing', 'ios', 'IosUnitTests')
    # A single shell string (note the implicit string-literal concatenation),
    # run with shell=True so the pipe works.
    command = [
        'xcodebuild '
        '-sdk iphonesimulator '
        '-scheme IosUnitTests '
        "-destination platform='iOS Simulator,name=iPhone 8' "
        'test '
        'FLUTTER_ENGINE=' + ios_variant +
        ' | ' + pretty
    ]
    RunCmd(command, cwd=ios_unit_test_dir, shell=True)
def RunDartTests(build_dir, filter, verbose_dart_snapshot):
    """Run every Dart test in the tests directory, in both threading modes.

    Tests whose basename is not in *filter* (when given) are skipped.
    """
    # This one is a bit messy. The pubspec.yaml at flutter/testing/dart/pubspec.yaml
    # has dependencies that are hardcoded to point to the sky packages at host_debug_unopt/
    # Before running Dart tests, make sure to run just that target (NOT the whole engine)
    EnsureDebugUnoptSkyPackagesAreBuilt();
    # Now that we have the Sky packages at the hardcoded location, run `pub get`.
    RunEngineExecutable(build_dir, os.path.join('dart-sdk', 'bin', 'pub'), None, flags=['get'], cwd=dart_tests_dir)
    dart_tests = glob.glob('%s/*.dart' % dart_tests_dir)
    for dart_test_file in dart_tests:
        if filter is not None and os.path.basename(dart_test_file) not in filter:
            print("Skipping %s due to filter." % dart_test_file)
        else:
            # Run each test with and without --force-multithreading.
            print("Testing dart file %s" % dart_test_file)
            RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, True)
            RunDartTest(build_dir, dart_test_file, verbose_dart_snapshot, False)
def RunFrontEndServerTests(build_dir):
    """Run the flutter_frontend_server Dart tests against the built snapshot."""
    test_dir = os.path.join(buildroot_dir, 'flutter', 'flutter_frontend_server')
    dart_tests = glob.glob('%s/test/*_test.dart' % test_dir)
    for dart_test_file in dart_tests:
        # Each test receives the frontend server snapshot and patched SDK
        # paths as positional arguments.
        opts = [
            dart_test_file,
            os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'),
            os.path.join(build_dir, 'flutter_patched_sdk')]
        RunEngineExecutable(
            build_dir,
            os.path.join('dart-sdk', 'bin', 'dart'),
            None,
            flags=opts,
            cwd=test_dir)
def RunConstFinderTests(build_dir):
    """Run the const_finder tool's Dart test with the built frontend server."""
    test_dir = os.path.join(buildroot_dir, 'flutter', 'tools', 'const_finder', 'test')
    opts = [
        os.path.join(test_dir, 'const_finder_test.dart'),
        os.path.join(build_dir, 'gen', 'frontend_server.dart.snapshot'),
        os.path.join(build_dir, 'flutter_patched_sdk')]
    RunEngineExecutable(build_dir, os.path.join('dart-sdk', 'bin', 'dart'), None, flags=opts, cwd=test_dir)
def main():
    """Parse command-line options and run the selected engine test suites."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--variant', dest='variant', action='store',
                        default='host_debug_unopt', help='The engine build variant to run the tests for.');
    parser.add_argument('--type', type=str, default='all')
    parser.add_argument('--engine-filter', type=str, default='',
                        help='A list of engine test executables to run.')
    parser.add_argument('--dart-filter', type=str, default='',
                        help='A list of Dart test scripts to run.')
    parser.add_argument('--java-filter', type=str, default='',
                        help='A single Java test class to run.')
    parser.add_argument('--android-variant', dest='android_variant', action='store',
                        default='android_debug_unopt',
                        help='The engine build variant to run java tests for')
    parser.add_argument('--ios-variant', dest='ios_variant', action='store',
                        default='ios_debug_sim_unopt',
                        help='The engine build variant to run objective-c tests for')
    parser.add_argument('--verbose-dart-snapshot', dest='verbose_dart_snapshot', action='store_true',
                        default=False, help='Show extra dart snapshot logging.')
    args = parser.parse_args()
    if args.type == 'all':
        types = ['engine', 'dart', 'benchmarks', 'java', 'objc', 'font-subset']
    else:
        types = args.type.split(',')
    build_dir = os.path.join(out_dir, args.variant)
    # Java tests use their own Android variant directory, so only require the
    # host build directory for the other suites.
    if args.type != 'java':
        assert os.path.exists(build_dir), 'Build variant directory %s does not exist!' % build_dir
    engine_filter = args.engine_filter.split(',') if args.engine_filter else None
    if 'engine' in types:
        RunCCTests(build_dir, engine_filter)
    if 'dart' in types:
        assert not IsWindows(), "Dart tests can't be run on windows. https://github.com/flutter/flutter/issues/36301."
        dart_filter = args.dart_filter.split(',') if args.dart_filter else None
        RunDartTests(build_dir, dart_filter, args.verbose_dart_snapshot)
        RunConstFinderTests(build_dir)
        RunFrontEndServerTests(build_dir)
    if 'java' in types:
        assert not IsWindows(), "Android engine files can't be compiled on Windows."
        java_filter = args.java_filter
        if ',' in java_filter or '*' in java_filter:
            # JUnitCore is invoked with one class name; lists and globs are
            # not supported, so the filter is ignored.
            print('Can only filter JUnit4 tests by single entire class name, eg "io.flutter.SmokeTest". Ignoring filter=' + java_filter)
            java_filter = None
        RunJavaTests(java_filter, args.android_variant)
    if 'objc' in types:
        assert IsMac(), "iOS embedding tests can only be run on macOS."
        RunObjcTests(args.ios_variant)
    # https://github.com/flutter/flutter/issues/36300
    if 'benchmarks' in types and not IsWindows():
        RunEngineBenchmarks(build_dir, engine_filter)
    if ('engine' in types or 'font-subset' in types) and args.variant != 'host_release':
        RunCmd(['python', 'test.py'], cwd=font_subset_dir)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
disqus/overseer | overseer/forms.py | 2 | 1113 | """
overseer.forms
~~~~~~~~~~~~~~
:copyright: (c) 2011 DISQUS.
:license: Apache License 2.0, see LICENSE for more details.
"""
from django import forms
from overseer.models import Service, Subscription, UnverifiedSubscription
class BaseSubscriptionForm(forms.ModelForm):
    """Base form for choosing which services a subscription follows."""

    # Render the service choices as checkboxes so several can be selected.
    services = forms.ModelMultipleChoiceField(queryset=Service.objects.all(), widget=forms.CheckboxSelectMultiple())

    class Meta:
        fields = ('services',)
        model = Subscription
class NewSubscriptionForm(BaseSubscriptionForm):
    """Signup form creating a not-yet-verified e-mail subscription."""

    email = forms.EmailField(widget=forms.TextInput(attrs={'placeholder': 'you@example.com'}))

    class Meta:
        fields = ('email', 'services',)
        model = UnverifiedSubscription

    def clean_email(self):
        # Normalize addresses to lowercase so comparisons are case-insensitive.
        value = self.cleaned_data.get('email')
        if value:
            value = value.lower()
        return value
class UpdateSubscriptionForm(BaseSubscriptionForm):
    """Form for updating an existing subscription or unsubscribing."""

    unsubscribe = forms.BooleanField(required=False)
    # Overrides the base field with required=False so the service selection
    # may be left empty.
    services = forms.ModelMultipleChoiceField(queryset=Service.objects.all(), widget=forms.CheckboxSelectMultiple(), required=False)
| apache-2.0 |
michkot/benchexec | benchexec/outputhandler.py | 1 | 27854 | """
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2015 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# prepare for Python 3
from __future__ import absolute_import, division, print_function, unicode_literals
import collections
import threading
import time
import sys
import os
import xml.etree.ElementTree as ET
from .model import MEMLIMIT, TIMELIMIT, SOFTTIMELIMIT, CORELIMIT
from . import filewriter
from . import result
from . import util as util
# DTD identifiers written into the result XML files.
RESULT_XML_PUBLIC_ID = '+//IDN sosy-lab.org//DTD BenchExec result 1.2//EN'
RESULT_XML_SYSTEM_ID = 'http://www.sosy-lab.org/benchexec/result-1.2.dtd'

# colors for column status in terminal (ANSI escape templates; format with
# the text to colorize)
USE_COLORS = True
COLOR_GREEN = "\033[32;1m{0}\033[m"
COLOR_RED = "\033[31;1m{0}\033[m"
COLOR_ORANGE = "\033[33;1m{0}\033[m"
COLOR_MAGENTA = "\033[35;1m{0}\033[m"
COLOR_DEFAULT = "{0}"
UNDERLINE = "\033[4m{0}\033[0m"

# Maps each result category to the color template used when printing status.
COLOR_DIC = {result.CATEGORY_CORRECT: COLOR_GREEN,
             result.CATEGORY_WRONG: COLOR_RED,
             result.CATEGORY_UNKNOWN: COLOR_ORANGE,
             result.CATEGORY_ERROR: COLOR_MAGENTA,
             result.CATEGORY_MISSING: COLOR_DEFAULT,
             None: COLOR_DEFAULT}

LEN_OF_STATUS = 22

# Escape sequence for setting the terminal title, selected by terminal type
# (empty when the terminal is not known to support titles).
TERMINAL_TITLE=''
_term = os.environ.get('TERM', '')
if _term.startswith(('xterm', 'rxvt')):
    TERMINAL_TITLE = "\033]0;Task {0}\007"
elif _term.startswith('screen'):
    TERMINAL_TITLE = "\033kTask {0}\033\\"

# the number of digits after the decimal separator for text output of time columns with times
TIME_PRECISION = 2
class OutputHandler:
"""
The class OutputHandler manages all outputs to the terminal and to files.
"""
print_lock = threading.Lock()
    def __init__(self, benchmark, sysinfo):
        """
        The constructor of OutputHandler collects information about the benchmark and the computer.

        Args:
            benchmark: the benchmark definition (name, tool, limits, columns).
            sysinfo: optional host-system description; when given it is stored
                in the result XML.
        """
        self.all_created_files = []
        self.benchmark = benchmark
        self.statistics = Statistics()
        version = self.benchmark.tool_version
        # Format configured resource limits for display; None means "not set".
        memlimit = None
        timelimit = None
        corelimit = None
        if MEMLIMIT in self.benchmark.rlimits:
            memlimit = str(self.benchmark.rlimits[MEMLIMIT]) + " MB"
        # Prefer the soft time limit for display when both limits are set.
        if SOFTTIMELIMIT in self.benchmark.rlimits:
            timelimit = str(self.benchmark.rlimits[SOFTTIMELIMIT]) + " s"
        elif TIMELIMIT in self.benchmark.rlimits:
            timelimit = str(self.benchmark.rlimits[TIMELIMIT]) + " s"
        if CORELIMIT in self.benchmark.rlimits:
            corelimit = str(self.benchmark.rlimits[CORELIMIT])
        # create folder for file-specific log-files.
        os.makedirs(benchmark.log_folder, exist_ok=True)
        self.store_header_in_xml(version, memlimit, timelimit, corelimit)
        self.write_header_to_log(version, memlimit, timelimit, corelimit, sysinfo)
        if sysinfo:
            # store systemInfo in XML
            self.store_system_info(sysinfo.os, sysinfo.cpu_model,
                                   sysinfo.cpu_number_of_cores, sysinfo.cpu_max_frequency,
                                   sysinfo.memory, sysinfo.hostname,
                                   environment=sysinfo.environment,
                                   cpu_turboboost=sysinfo.cpu_turboboost)
        self.xml_file_names = []
def store_system_info(self, opSystem, cpu_model, cpu_number_of_cores, cpu_max_frequency, memory, hostname,
runSet=None, environment={},
cpu_turboboost=None):
for systemInfo in self.xml_header.findall("systeminfo"):
if systemInfo.attrib["hostname"] == hostname:
return
osElem = ET.Element("os", {"name":opSystem})
cpuElem = ET.Element("cpu", {"model":cpu_model, "cores":cpu_number_of_cores, "frequency":cpu_max_frequency})
if cpu_turboboost is not None:
cpuElem.set("turboboostActive", str(cpu_turboboost).lower())
ramElem = ET.Element("ram", {"size":memory})
systemInfo = ET.Element("systeminfo", {"hostname":hostname})
systemInfo.append(osElem)
systemInfo.append(cpuElem)
systemInfo.append(ramElem)
env = ET.SubElement(systemInfo, "environment")
for var, value in sorted(environment.items()):
ET.SubElement(env, "var", name=var).text = value
self.xml_header.append(systemInfo)
if runSet:
# insert before <run> tags to conform with DTD
i = None
for i, elem in enumerate(runSet.xml):
if elem.tag == "run":
break
if i is None:
runSet.xml.append(systemInfo)
else:
runSet.xml.insert(i, systemInfo)
def set_error(self, msg, runSet=None):
"""
Mark the benchmark as erroneous, e.g., because the benchmarking tool crashed.
The message is intended as explanation for the user.
"""
self.xml_header.set('error', msg if msg else 'unknown error')
if runSet:
runSet.xml.set('error', msg if msg else 'unknown error')
def store_header_in_xml(self, version, memlimit, timelimit, corelimit):
    """
    Create self.xml_header with benchmark meta data, resource limits and
    column titles, plus the shared dummy <column> placeholder elements.
    """
    # benchmark meta data
    self.xml_header = ET.Element("result", {
        "benchmarkname": self.benchmark.name,
        "date": time.strftime("%Y-%m-%d %H:%M:%S %Z", self.benchmark.start_time),
        "tool": self.benchmark.tool_name,
        "version": version,
        "toolmodule": self.benchmark.tool_module,
    })

    # resource limits; '-' marks an unset limit
    for limit_key, limit_value in ((MEMLIMIT, memlimit), (TIMELIMIT, timelimit), (CORELIMIT, corelimit)):
        self.xml_header.set(limit_key, limit_value if limit_value else '-')

    # default columns shown in a default html-table from table-generator,
    # followed by the user-defined columns
    all_titles = ["status", "cputime", "walltime"] + [column.title for column in self.benchmark.columns]
    columntitlesElem = ET.Element("columns")
    for title in all_titles:
        columntitlesElem.append(ET.Element("column", {"title": title}))
    self.xml_header.append(columntitlesElem)

    # Build dummy entries for output, later replaced by the results.
    # The dummy XML elements are shared over all runs.
    self.xml_dummy_elements = [ET.Element("column", {"title": title, "value": ""})
                               for title in all_titles]
def write_header_to_log(self, version, memlimit, timelimit, corelimit, sysinfo):
    """
    This method writes information about benchmark and system into txt_file.
    """
    label_width = 20
    separator = "-" * 60 + "\n\n"

    def row(label, value):
        # one "label:   value" line of the header table
        return label.ljust(label_width) + value + "\n"

    header = " BENCHMARK INFORMATION\n"
    header += row("benchmark:", self.benchmark.name)
    header += row("date:", time.strftime("%a, %Y-%m-%d %H:%M:%S %Z", self.benchmark.start_time))
    header += row("tool:", self.benchmark.tool_name + " " + version)
    if memlimit:
        header += row("memlimit:", memlimit)
    if timelimit:
        header += row("timelimit:", timelimit)
    if corelimit:
        header += row("CPU cores used:", corelimit)
    header += separator

    if sysinfo:
        header += " SYSTEM INFORMATION\n"
        header += row("host:", sysinfo.hostname)
        header += row("os:", sysinfo.os)
        header += row("cpu:", sysinfo.cpu_model)
        header += row("- cores:", sysinfo.cpu_number_of_cores)
        header += row("- max frequency:", sysinfo.cpu_max_frequency)
        header += row("ram:", sysinfo.memory)
        header += separator

    self.description = header

    # in case there is only a single run set to execute, we can use its name
    # in the file name of the txt_file
    runSetName = None
    run_sets = [runSet for runSet in self.benchmark.run_sets if runSet.should_be_executed()]
    if len(run_sets) == 1:
        runSetName = run_sets[0].name

    # write to file
    txt_file_name = self.get_filename(runSetName, "txt")
    self.txt_file = filewriter.FileWriter(txt_file_name, self.description)
    self.all_created_files.append(txt_file_name)
def output_before_run_set(self, runSet):
    """
    The method output_before_run_set() calculates the length of the
    first column for the output in terminal and stores information
    about the runSet in XML.
    @param runSet: current run set
    """
    sourcefiles = [run.identifier for run in runSet.runs]

    # common prefix of file names; stripped from names in terminal output
    runSet.common_prefix = util.common_base_dir(sourcefiles) + os.path.sep

    # length of the first column in terminal (at least 20 characters)
    runSet.max_length_of_filename = max(len(file) for file in sourcefiles) if sourcefiles else 20
    runSet.max_length_of_filename = max(20, runSet.max_length_of_filename - len(runSet.common_prefix))

    # write run set name to terminal
    numberOfFiles = len(runSet.runs)
    numberOfFilesStr = (" (1 file)" if numberOfFiles == 1
                        else " ({0} files)".format(numberOfFiles))
    util.printOut("\nexecuting run set"
        + (" '" + runSet.name + "'" if runSet.name else "")
        + numberOfFilesStr
        + (TERMINAL_TITLE.format(runSet.full_name) if USE_COLORS and sys.stdout.isatty() else ""))

    # write information about the run set into txt_file
    self.writeRunSetInfoToLog(runSet)

    # prepare information for text output
    for run in runSet.runs:
        run.resultline = self.format_sourcefile_name(run.identifier, runSet)

        # prepare XML structure for each run and runSet
        run.xml = ET.Element("run",
                             {"name": run.identifier, "files": "[" + ", ".join(run.sourcefiles) + "]"})
        if run.specific_options:
            run.xml.set("options", " ".join(run.specific_options))
        if run.properties:
            run.xml.set("properties", " ".join(sorted(run.properties)))
        # NOTE(review): the dummy elements appear to be shared between all
        # runs (same objects, not copies) — confirm that they are never
        # mutated in place, only replaced later in add_values_to_run_xml.
        run.xml.extend(self.xml_dummy_elements)

    runSet.xml = self.runs_to_xml(runSet, runSet.runs)

    # write (empty) results to txt_file and XML
    self.txt_file.append(self.run_set_to_text(runSet), False)
    xml_file_name = self.get_filename(runSet.name, "xml")
    runSet.xml_file = filewriter.FileWriter(xml_file_name,
                   self._result_xml_to_string(runSet.xml))
    # lastModifiedTime is used later to rate-limit rewrites of the XML file
    runSet.xml_file.lastModifiedTime = time.time()
    self.all_created_files.append(xml_file_name)
    self.xml_file_names.append(xml_file_name)
def output_for_skipping_run_set(self, runSet, reason=None):
    """
    Announce on the terminal and in the txt_file that a run set is skipped.
    The XML result files intentionally contain no trace of skipped run sets.
    """
    # message for the terminal
    message_parts = ["\nSkipping run set"]
    if runSet.name:
        message_parts.append(" '" + runSet.name + "'")
    if reason:
        message_parts.append(" " + reason)
    util.printOut("".join(message_parts))

    # message for the txt_file
    runSetInfo = "\n\n"
    if runSet.name:
        runSetInfo += runSet.name + "\n"
    runSetInfo += "Run set {0} of {1}: skipped {2}\n".format(
        runSet.index, len(self.benchmark.run_sets), reason or "")
    self.txt_file.append(runSetInfo)
def writeRunSetInfoToLog(self, runSet):
    """
    Write the preamble for a run set (name, options, property file and the
    table header line) into the txt_file.
    """
    pieces = ["\n\n"]
    if runSet.name:
        pieces.append(runSet.name + "\n")
    pieces.append(
        "Run set {0} of {1} with options '{2}' and propertyfile '{3}'\n\n".format(
            runSet.index, len(self.benchmark.run_sets),
            " ".join(runSet.options),
            runSet.propertyfile))

    titleLine = self.create_output_line(runSet, "inputfile", "status", "cpu time",
                                        "wall time", "host", self.benchmark.columns, True)
    # remember the separator so result lines can reuse the same width
    runSet.simpleLine = "-" * (len(titleLine))
    pieces.append(titleLine + "\n" + runSet.simpleLine + "\n")

    # write into txt_file
    self.txt_file.append("".join(pieces))
def output_before_run(self, run):
    """
    Print the name of the run's input file to the terminal when the run
    starts, and register the run's log file in self.all_created_files.
    (Nothing is returned; an older docstring incorrectly claimed the
    logfile name was returned.)
    @param run: a Run object
    """
    runSet = run.runSet
    # the lock serializes terminal output of concurrent worker threads;
    # using it as a context manager guarantees the release
    with OutputHandler.print_lock:
        # lazily-created per-run-set counter of started runs
        runSet.started_runs = getattr(runSet, 'started_runs', 0) + 1

        timeStr = time.strftime("%H:%M:%S", time.localtime()) + " "
        progressIndicator = " ({0}/{1})".format(runSet.started_runs, len(runSet.runs))
        terminalTitle = TERMINAL_TITLE.format(runSet.full_name + progressIndicator) if USE_COLORS and sys.stdout.isatty() else ""
        if self.benchmark.num_of_threads == 1:
            # single-threaded: stay on the same line, result is appended later
            util.printOut(terminalTitle + timeStr + self.format_sourcefile_name(run.identifier, runSet), '')
        else:
            util.printOut(terminalTitle + timeStr + "starting " + self.format_sourcefile_name(run.identifier, runSet))

    # remember the file-specific log file
    self.all_created_files.append(run.log_file)
def output_after_run(self, run):
    """
    The method output_after_run() prints filename, result, time and status
    of a run to terminal and stores all data in XML
    """

    # format times, type is changed from float to string!
    cputime_str = util.format_number(run.cputime, TIME_PRECISION)
    walltime_str = util.format_number(run.walltime, TIME_PRECISION)

    # format numbers, number_of_digits is optional, so it can be None
    for column in run.columns:
        if column.number_of_digits is not None:

            # if the number ends with "s" or another letter, remove it
            # (checks the second-to-last char, so e.g. "1.5s" -> "1.5")
            if (not column.value.isdigit()) and column.value[-2:-1].isdigit():
                column.value = column.value[:-1]

            try:
                floatValue = float(column.value)
                column.value = util.format_number(floatValue, column.number_of_digits)
            except ValueError:  # if value is no float, don't format it
                pass

    # store information in run
    run.resultline = self.create_output_line(run.runSet, run.identifier, run.status,
                                             cputime_str, walltime_str, run.values.get('host'),
                                             run.columns)
    self.add_values_to_run_xml(run)

    # output in terminal/console
    if USE_COLORS and sys.stdout.isatty():  # is terminal, not file
        statusStr = COLOR_DIC[run.category].format(run.status.ljust(LEN_OF_STATUS))
    else:
        statusStr = run.status.ljust(LEN_OF_STATUS)

    # the lock serializes output of concurrent worker threads
    try:
        OutputHandler.print_lock.acquire()

        valueStr = statusStr + cputime_str.rjust(8) + walltime_str.rjust(8)
        if self.benchmark.num_of_threads == 1:
            util.printOut(valueStr)
        else:
            timeStr = time.strftime("%H:%M:%S", time.localtime()) + " "*14
            util.printOut(timeStr + self.format_sourcefile_name(run.identifier, run.runSet) + valueStr)

        # write result in txt_file and XML
        self.txt_file.append(self.run_set_to_text(run.runSet), False)
        self.statistics.add_result(run)

        # we don't want to write this file too often, it can slow down the whole script,
        # so we wait at least 60 seconds between two write-actions
        currentTime = time.time()
        if currentTime - run.runSet.xml_file.lastModifiedTime > 60:
            run.runSet.xml_file.replace(self._result_xml_to_string(run.runSet.xml))
            run.runSet.xml_file.lastModifiedTime = time.time()

    finally:
        OutputHandler.print_lock.release()
def output_after_run_set(self, runSet, cputime=None, walltime=None, energy=None):
    """
    The method output_after_run_set() stores the times of a run set in XML
    and writes the finished result files (XML, per-block XML, txt).
    @params cputime, walltime: accumulated times of the run set
    @param energy: dict of accumulated energy measurements (default: empty)
    """
    # Fix of the mutable-default-argument anti-pattern: use None as sentinel.
    if energy is None:
        energy = {}

    self.add_values_to_run_set_xml(runSet, cputime, walltime, energy)

    # write results to files
    runSet.xml_file.replace(self._result_xml_to_string(runSet.xml))

    # if the run set was partitioned into blocks, write one XML file per block
    if len(runSet.blocks) > 1:
        for block in runSet.blocks:
            blockFileName = self.get_filename(runSet.name, block.name + ".xml")
            util.write_file(
                self._result_xml_to_string(self.runs_to_xml(runSet, block.runs, block.name)),
                blockFileName
            )
            self.all_created_files.append(blockFileName)

    self.txt_file.append(self.run_set_to_text(runSet, True, cputime, walltime, energy))
def run_set_to_text(self, runSet, finished=False, cputime=0, walltime=0, energy=None):
    """
    Render the text-table block for a run set: one line per run, the
    separator line, and (if finished) a summary line with the total times.
    @param energy: accumulated energy dict; currently not shown in the
                   text output, kept for interface compatibility
    """
    # Fix of the mutable-default-argument anti-pattern: use None as sentinel.
    if energy is None:
        energy = {}

    lines = []

    # store values of each run
    for run in runSet.runs:
        lines.append(run.resultline)
    lines.append(runSet.simpleLine)

    # write endline into txt_file
    if finished:
        endline = ("Run set {0}".format(runSet.index))

        # format time, type is changed from float to string!
        cputime_str = "None" if cputime is None else util.format_number(cputime, TIME_PRECISION)
        walltime_str = "None" if walltime is None else util.format_number(walltime, TIME_PRECISION)
        lines.append(self.create_output_line(runSet, endline, "done", cputime_str,
                     walltime_str, "-", []))

    return "\n".join(lines) + "\n"
def runs_to_xml(self, runSet, runs, blockname=None):
    """
    This function creates the XML structure for a list of runs
    """
    # start from a copy of the header
    # (benchmark info, limits, column titles, system info)
    result_elem = util.copy_of_xml_element(self.xml_header)
    result_elem.set("options", " ".join(runSet.options))
    if blockname is not None:
        result_elem.set("block", blockname)
        name_prefix = (runSet.real_name + ".") if runSet.real_name else ""
        result_elem.set("name", name_prefix + blockname)
    elif runSet.real_name:
        result_elem.set("name", runSet.real_name)

    # append the XML element of every run
    for run in runs:
        result_elem.append(run.xml)
    return result_elem
def add_values_to_run_xml(self, run):
    """
    This function adds the result values to the XML representation of a run,
    replacing the dummy placeholder columns inserted before the run started.
    """
    runElem = run.xml
    # drop all existing (placeholder) children
    while len(runElem):
        del runElem[0]

    self.add_column_to_xml(runElem, 'status', run.status)
    if run.cputime is not None:
        self.add_column_to_xml(runElem, 'cputime', str(run.cputime) + 's')
    if run.walltime is not None:
        self.add_column_to_xml(runElem, 'walltime', str(run.walltime) + 's')
    self.add_column_to_xml(runElem, '@category', run.category)  # hidden
    self.add_column_to_xml(runElem, '', run.values)
    for column in run.columns:
        self.add_column_to_xml(runElem, column.title, column.value)
def add_values_to_run_set_xml(self, runSet, cputime, walltime, energy):
    """
    This function adds the result values to the XML representation of a runSet.
    """
    for title, value in (('cputime', cputime),
                         ('walltime', walltime),
                         ('energy', energy)):
        self.add_column_to_xml(runSet.xml, title, value)
def add_column_to_xml(self, xml, title, value, prefix=""):
    """
    Append a <column title="..." value="..."/> element to the given XML element.

    ``value`` may be:
      - None: nothing is added,
      - a dict: one column per entry is added recursively; the current
        title (combined with ``prefix``) becomes the prefix of the children,
      - any other non-string sequence: joined with ',',
      - anything else: converted with str().

    A resulting title that starts with '@' marks the column as hidden.
    """
    if value is None:
        return

    if isinstance(value, dict):
        # the children's prefix is loop-invariant, so compute it once
        # (previously recomputed on every iteration, with the loop variable
        # shadowing the ``value`` parameter)
        child_prefix = (prefix + '_' + title) if prefix else title
        for key, entry in value.items():
            self.add_column_to_xml(xml, key, entry, prefix=child_prefix)
        return

    # non-string sequences are stored as a comma-separated list
    if hasattr(value, '__getitem__') and not isinstance(value, (str, bytes)):
        value = ','.join(map(str, value))

    # default case: add one column element
    full_title = (prefix + '_' + title) if prefix else title
    if full_title.startswith('@'):
        # '@' prefix means "hidden column"; strip it from the stored title
        attributes = {"title": full_title[1:], "value": str(value), "hidden": "true"}
    else:
        attributes = {"title": full_title, "value": str(value)}
    xml.append(ET.Element("column", attributes))
def create_output_line(self, runSet, sourcefile, status, cputime_delta, walltime_delta, host, columns, isFirstLine=False):
    """
    @param sourcefile: title of a sourcefile
    @param status: status of programm
    @param cputime_delta: time from running the programm
    @param walltime_delta: time from running the programm
    @param columns: list of columns with a title or a value
    @param isFirstLine: boolean for different output of headline and other lines
    @return: a line for the outputFile
    """
    time_field_width = 12
    min_column_width = 8

    cells = [
        self.format_sourcefile_name(sourcefile, runSet),
        status.ljust(LEN_OF_STATUS),
        cputime_delta.rjust(time_field_width),
        walltime_delta.rjust(time_field_width),
        str(host).rjust(time_field_width),
    ]
    for column in columns:
        cell_width = max(min_column_width, len(column.title)) + 2
        cell = column.title if isFirstLine else column.value
        cells.append(str(cell).rjust(cell_width))
    return "".join(cells)
def output_after_benchmark(self, isStoppedByInterrupt):
    """
    Print final statistics and a hint on how to generate result tables
    after all run sets have finished.
    """
    self.statistics.print_to_terminal()

    if self.xml_file_names:
        def _find_file_relative(name):
            """
            Find a file with the given name in the same directory as this script.
            Returns a path relative to the current directory, or None.
            """
            path = os.path.join(os.path.dirname(sys.argv[0]), name)
            if not os.path.isfile(path):
                # fall back to the parent directory of this module
                path = os.path.join(os.path.dirname(__file__), os.path.pardir, name)
                if not os.path.isfile(path):
                    return None

            # use .get() so a missing PATH variable does not raise KeyError
            if os.path.dirname(path) in os.environ.get('PATH', '').split(os.pathsep):
                # in PATH, just use command name
                return os.path.basename(path)

            path = os.path.relpath(path)
            if path == name:
                path = './' + path  # for easier copy and paste into a shell
            return path

        tableGeneratorPath = _find_file_relative('table-generator.py') \
            or _find_file_relative('table-generator')
        if tableGeneratorPath:
            util.printOut("In order to get HTML and CSV tables, run\n{0} '{1}'"
                          .format(tableGeneratorPath, "' '".join(self.xml_file_names)))

    if isStoppedByInterrupt:
        util.printOut("\nScript was interrupted by user, some runs may not be done.\n")
def get_filename(self, runSetName, fileExtension):
    '''
    This function returns the name of the file for a run set
    with an extension ("txt", "xml"), e.g.
    "<output_base_name>.results.<runSetName>.<ext>".
    '''
    name_parts = [self.benchmark.output_base_name, "results"]
    if runSetName:
        name_parts.append(runSetName)
    name_parts.append(fileExtension)
    return ".".join(name_parts)
def format_sourcefile_name(self, fileName, runSet):
    '''
    Formats the file name of a program for printing on console:
    strips the run set's common directory prefix and pads the result
    so the terminal columns line up.
    '''
    name = fileName
    if name.startswith(runSet.common_prefix):
        name = name[len(runSet.common_prefix):]
    return name.ljust(runSet.max_length_of_filename + 4)
def _result_xml_to_string(self, xml):
    # Serialize a result tree to a string including the "result" DOCTYPE
    # that references the public/system IDs of the result DTD.
    return util.xml_to_string(xml, 'result', RESULT_XML_PUBLIC_ID, RESULT_XML_SYSTEM_ID)
class Statistics:
    """Collects result counts per category and the achieved/maximum score."""

    def __init__(self):
        # counts per category and per (category, result-class) pair
        self.dic = collections.defaultdict(int)
        self.counter = 0
        self.score = 0
        self.max_score = 0

    def add_result(self, run):
        """Record the outcome of one finished run."""
        self.counter += 1
        category = run.category
        self.dic[category] += 1
        self.dic[(category, result.get_result_classification(run.status))] += 1
        self.score += result.score_for_task(run.identifier, run.properties, category)
        # the maximum score assumes every run would have been correct
        # (counted unconditionally, i.e. also for runs without properties)
        self.max_score += result.score_for_task(run.identifier, run.properties, result.CATEGORY_CORRECT)

    def print_to_terminal(self):
        """Print the aggregated statistics to the terminal."""
        correct = self.dic[result.CATEGORY_CORRECT]
        correct_true = self.dic[(result.CATEGORY_CORRECT, result.RESULT_CLASS_TRUE)]
        incorrect = self.dic[result.CATEGORY_WRONG]
        incorrect_true = self.dic[(result.CATEGORY_WRONG, result.RESULT_CLASS_TRUE)]
        unknown = self.dic[result.CATEGORY_UNKNOWN] + self.dic[result.CATEGORY_ERROR]

        width = 6
        output = [
            '',
            'Statistics:' + str(self.counter).rjust(width + 9) + ' Files',
            ' correct: ' + str(correct).rjust(width),
            ' correct true: ' + str(correct_true).rjust(width),
            ' correct false: ' + str(correct - correct_true).rjust(width),
            ' incorrect: ' + str(incorrect).rjust(width),
            ' incorrect true: ' + str(incorrect_true).rjust(width),
            ' incorrect false:' + str(incorrect - incorrect_true).rjust(width),
            ' unknown: ' + str(unknown).rjust(width),
        ]
        if self.max_score:
            output.append(
                ' Score: ' + str(self.score).rjust(width) + ' (max: ' + str(self.max_score) + ')'
            )
        util.printOut('\n'.join(output) + '\n')
| apache-2.0 |
markYoungH/chromium.src | components/domain_reliability/bake_in_configs.py | 12 | 10951 | #!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Takes the JSON files in components/domain_reliability/baked_in_configs and
encodes their contents as an array of C strings that gets compiled in to Chrome
and loaded at runtime."""
import ast
import json
import os
import sys
# A whitelist of domains that the script will accept when baking configs in to
# Chrome, to ensure incorrect ones are not added accidentally. Subdomains of
# whitelist entries are also allowed (e.g. maps.google.com, ssl.gstatic.com).
DOMAIN_WHITELIST = (
'2mdn.net',
'admob.biz',
'admob.co.in',
'admob.co.kr',
'admob.co.nz',
'admob.co.uk',
'admob.co.za',
'admob.com',
'admob.com.br',
'admob.com.es',
'admob.com.fr',
'admob.com.mx',
'admob.com.pt',
'admob.de',
'admob.dk',
'admob.es',
'admob.fi',
'admob.fr',
'admob.gr',
'admob.hk',
'admob.ie',
'admob.in',
'admob.it',
'admob.jp',
'admob.kr',
'admob.mobi',
'admob.no',
'admob.ph',
'admob.pt',
'admob.sg',
'admob.tw',
'admob.us',
'admob.vn',
'dartmotif.com',
'doubleclick.com',
'doubleclick.ne.jp',
'doubleclick.net',
'doubleclickusercontent.com',
'g.co',
'ggpht.com',
'gmodules.com',
'goo.gl',
'google-analytics.com',
'google-syndication.com',
'google.ac',
'google.ad',
'google.ae',
'google.af',
'google.ag',
'google.al',
'google.am',
'google.as',
'google.at',
'google.az',
'google.ba',
'google.be',
'google.bf',
'google.bg',
'google.bi',
'google.bj',
'google.bs',
'google.bt',
'google.by',
'google.ca',
'google.cat',
'google.cc',
'google.cd',
'google.cf',
'google.cg',
'google.ch',
'google.ci',
'google.cl',
'google.cm',
'google.cn',
'google.co.ao',
'google.co.bw',
'google.co.ck',
'google.co.cr',
'google.co.hu',
'google.co.id',
'google.co.il',
'google.co.im',
'google.co.in',
'google.co.je',
'google.co.jp',
'google.co.ke',
'google.co.kr',
'google.co.ls',
'google.co.ma',
'google.co.mz',
'google.co.nz',
'google.co.th',
'google.co.tz',
'google.co.ug',
'google.co.uk',
'google.co.uz',
'google.co.ve',
'google.co.vi',
'google.co.za',
'google.co.zm',
'google.co.zw',
'google.com',
'google.com.af',
'google.com.ag',
'google.com.ai',
'google.com.ar',
'google.com.au',
'google.com.bd',
'google.com.bh',
'google.com.bn',
'google.com.bo',
'google.com.br',
'google.com.by',
'google.com.bz',
'google.com.cn',
'google.com.co',
'google.com.cu',
'google.com.cy',
'google.com.do',
'google.com.ec',
'google.com.eg',
'google.com.et',
'google.com.fj',
'google.com.ge',
'google.com.gh',
'google.com.gi',
'google.com.gr',
'google.com.gt',
'google.com.hk',
'google.com.iq',
'google.com.jm',
'google.com.jo',
'google.com.kh',
'google.com.kw',
'google.com.lb',
'google.com.ly',
'google.com.mm',
'google.com.mt',
'google.com.mx',
'google.com.my',
'google.com.na',
'google.com.nf',
'google.com.ng',
'google.com.ni',
'google.com.np',
'google.com.nr',
'google.com.om',
'google.com.pa',
'google.com.pe',
'google.com.pg',
'google.com.ph',
'google.com.pk',
'google.com.pl',
'google.com.pr',
'google.com.py',
'google.com.qa',
'google.com.ru',
'google.com.sa',
'google.com.sb',
'google.com.sg',
'google.com.sl',
'google.com.sv',
'google.com.tj',
'google.com.tn',
'google.com.tr',
'google.com.tw',
'google.com.ua',
'google.com.uy',
'google.com.vc',
'google.com.ve',
'google.com.vn',
'google.cv',
'google.cz',
'google.de',
'google.dj',
'google.dk',
'google.dm',
'google.dz',
'google.ee',
'google.es',
'google.fi',
'google.fm',
'google.fr',
'google.ga',
'google.ge',
'google.gg',
'google.gl',
'google.gm',
'google.gp',
'google.gr',
'google.gy',
'google.hk',
'google.hn',
'google.hr',
'google.ht',
'google.hu',
'google.ie',
'google.im',
'google.info',
'google.iq',
'google.ir',
'google.is',
'google.it',
'google.it.ao',
'google.je',
'google.jo',
'google.jobs',
'google.jp',
'google.kg',
'google.ki',
'google.kz',
'google.la',
'google.li',
'google.lk',
'google.lt',
'google.lu',
'google.lv',
'google.md',
'google.me',
'google.mg',
'google.mk',
'google.ml',
'google.mn',
'google.ms',
'google.mu',
'google.mv',
'google.mw',
'google.ne',
'google.ne.jp',
'google.net',
'google.ng',
'google.nl',
'google.no',
'google.nr',
'google.nu',
'google.off.ai',
'google.org',
'google.pk',
'google.pl',
'google.pn',
'google.ps',
'google.pt',
'google.ro',
'google.rs',
'google.ru',
'google.rw',
'google.sc',
'google.se',
'google.sh',
'google.si',
'google.sk',
'google.sm',
'google.sn',
'google.so',
'google.sr',
'google.st',
'google.td',
'google.tg',
'google.tk',
'google.tl',
'google.tm',
'google.tn',
'google.to',
'google.tt',
'google.us',
'google.uz',
'google.vg',
'google.vu',
'google.ws',
'googleadservices.com',
'googlealumni.com',
'googleapis.com',
'googleapps.com',
'googlecbs.com',
'googlecommerce.com',
'googledrive.com',
'googleenterprise.com',
'googlegoro.com',
'googlehosted.com',
'googlepayments.com',
'googlesource.com',
'googlesyndication.com',
'googletagmanager.com',
'googletagservices.com',
'googleusercontent.com',
'googlevideo.com',
'gstatic.com',
'gvt1.com',
'gvt2.com',
'withgoogle.com',
'youtu.be',
'youtube-3rd-party.com',
'youtube-nocookie.com',
'youtube.ae',
'youtube.al',
'youtube.am',
'youtube.at',
'youtube.az',
'youtube.ba',
'youtube.be',
'youtube.bg',
'youtube.bh',
'youtube.bo',
'youtube.ca',
'youtube.cat',
'youtube.ch',
'youtube.cl',
'youtube.co',
'youtube.co.ae',
'youtube.co.at',
'youtube.co.hu',
'youtube.co.id',
'youtube.co.il',
'youtube.co.in',
'youtube.co.jp',
'youtube.co.ke',
'youtube.co.kr',
'youtube.co.ma',
'youtube.co.nz',
'youtube.co.th',
'youtube.co.ug',
'youtube.co.uk',
'youtube.co.ve',
'youtube.co.za',
'youtube.com',
'youtube.com.ar',
'youtube.com.au',
'youtube.com.az',
'youtube.com.bh',
'youtube.com.bo',
'youtube.com.br',
'youtube.com.by',
'youtube.com.co',
'youtube.com.do',
'youtube.com.ee',
'youtube.com.eg',
'youtube.com.es',
'youtube.com.gh',
'youtube.com.gr',
'youtube.com.gt',
'youtube.com.hk',
'youtube.com.hr',
'youtube.com.jm',
'youtube.com.jo',
'youtube.com.kw',
'youtube.com.lb',
'youtube.com.lv',
'youtube.com.mk',
'youtube.com.mt',
'youtube.com.mx',
'youtube.com.my',
'youtube.com.ng',
'youtube.com.om',
'youtube.com.pe',
'youtube.com.ph',
'youtube.com.pk',
'youtube.com.pt',
'youtube.com.qa',
'youtube.com.ro',
'youtube.com.sa',
'youtube.com.sg',
'youtube.com.tn',
'youtube.com.tr',
'youtube.com.tw',
'youtube.com.ua',
'youtube.com.uy',
'youtube.com.ve',
'youtube.cz',
'youtube.de',
'youtube.dk',
'youtube.ee',
'youtube.es',
'youtube.fi',
'youtube.fr',
'youtube.ge',
'youtube.gr',
'youtube.gt',
'youtube.hk',
'youtube.hr',
'youtube.hu',
'youtube.ie',
'youtube.in',
'youtube.is',
'youtube.it',
'youtube.jo',
'youtube.jp',
'youtube.kr',
'youtube.lk',
'youtube.lt',
'youtube.lv',
'youtube.ma',
'youtube.md',
'youtube.me',
'youtube.mk',
'youtube.mx',
'youtube.my',
'youtube.ng',
'youtube.nl',
'youtube.no',
'youtube.pe',
'youtube.ph',
'youtube.pk',
'youtube.pl',
'youtube.pr',
'youtube.pt',
'youtube.qa',
'youtube.ro',
'youtube.rs',
'youtube.ru',
'youtube.sa',
'youtube.se',
'youtube.sg',
'youtube.si',
'youtube.sk',
'youtube.sn',
'youtube.tn',
'youtube.ua',
'youtube.ug',
'youtube.uy',
'youtube.vn',
'youtubeeducation.com',
'youtubemobilesupport.com',
'ytimg.com'
)
CC_HEADER = """// AUTOGENERATED FILE. DO NOT EDIT.
//
// (Update configs in components/domain_reliability/baked_in_configs and list
// configs in components/domain_reliability/baked_in_configs.gypi instead.)
#include "components/domain_reliability/baked_in_configs.h"
#include <stdlib.h>
namespace domain_reliability {
const char* const kBakedInJsonConfigs[] = {
"""
CC_FOOTER = """ NULL
};
} // namespace domain_reliability
"""
def read_json_files_from_gypi(gypi_file):
  """Parses the given .gypi file and returns its list of baked-in config
  file names (the 'baked_in_configs' variable)."""
  with open(gypi_file, 'r') as f:
    gypi_text = f.read()
  variables = ast.literal_eval(gypi_text)['variables']
  return variables['baked_in_configs']
def domain_is_whitelisted(domain):
  """Returns True if domain is a whitelist entry or a subdomain of one."""
  for entry in DOMAIN_WHITELIST:
    if domain == entry or domain.endswith('.' + entry):
      return True
  return False
def quote_and_wrap_text(text, width=79, prefix=' "', suffix='"'):
  """Encodes text as a quoted, escaped C string literal, breaking it into
  multiple adjacent literals so no line exceeds the given width."""
  escapes = {'"': '\\"', '\n': '\\n', '\\': '\\\\'}
  max_length = width - len(prefix) - len(suffix)
  pieces = [prefix]
  line_length = 0
  for ch in text:
    ch = escapes.get(ch, ch)
    # break the line before the current (escaped) character would overflow
    if line_length + len(ch) > max_length:
      pieces.append(suffix + "\n" + prefix)
      line_length = 0
    pieces.append(ch)
    line_length += len(ch)
  pieces.append(suffix)
  return "".join(pieces)
def main():
  # Entry point (Python 2): reads the list of JSON config files from the
  # .gypi file, validates each config (parseable JSON, monitored_domain
  # present and whitelisted) and writes them as C string literals into a
  # generated C++ source file.  Returns a process exit code (0 = success).
  if len(sys.argv) != 4:
    print >> sys.stderr, (('Usage: %s <JSON pathname base directory> ' +
                           '<input .gypi file> <output .cpp file>') %
                          sys.argv[0])
    print >> sys.stderr, sys.modules[__name__].__doc__
    return 1
  json_path = sys.argv[1]
  gypi_file = sys.argv[2]
  cpp_file = sys.argv[3]

  json_files = read_json_files_from_gypi(gypi_file)
  # resolve the config paths relative to the base directory
  json_files = [ os.path.join(json_path, f) for f in json_files ]
  json_files = [ os.path.normpath(f) for f in json_files ]

  cpp_code = CC_HEADER
  found_invalid_config = False

  for json_file in json_files:
    with open(json_file, 'r') as f:
      json_text = f.read()

    try:
      config = json.loads(json_text)
    except ValueError, e:
      print >> sys.stderr, "%s: error parsing JSON: %s" % (json_file, e)
      found_invalid_config = True
      continue

    # each config must name the (whitelisted) domain it monitors
    if 'monitored_domain' not in config:
      print >> sys.stderr, '%s: no monitored_domain found' % json_file
      found_invalid_config = True
      continue
    domain = config['monitored_domain']
    if not domain_is_whitelisted(domain):
      print >> sys.stderr, ('%s: monitored_domain "%s" not in whitelist' %
                            (json_file, domain))
      found_invalid_config = True
      continue

    # Re-dump JSON to get a more compact representation.
    dumped_json_text = json.dumps(config, separators=(',', ':'))

    cpp_code += " // " + json_file + ":\n"
    cpp_code += quote_and_wrap_text(dumped_json_text) + ",\n"
    cpp_code += "\n"

  cpp_code += CC_FOOTER

  # validation errors are reported for all files, then fail once at the end
  if found_invalid_config:
    return 1

  with open(cpp_file, 'wb') as f:
    f.write(cpp_code)

  return 0
| bsd-3-clause |
dvliman/jaikuengine | .google_appengine/lib/django-1.2/django/core/cache/backends/memcached.py | 64 | 3558 | "Memcached cache backend"
import time
from django.core.cache.backends.base import BaseCache, InvalidCacheBackendError
from django.utils.encoding import smart_unicode, smart_str
try:
import cmemcache as memcache
import warnings
warnings.warn(
"Support for the 'cmemcache' library has been deprecated. Please use python-memcached instead.",
PendingDeprecationWarning
)
except ImportError:
try:
import memcache
except:
raise InvalidCacheBackendError("Memcached cache backend requires either the 'memcache' or 'cmemcache' library")
class CacheClass(BaseCache):
    """Django cache backend backed by memcached.

    Works with either the python-memcached or (deprecated) cmemcache
    client library, whichever was imported above.
    """

    def __init__(self, server, params):
        # ``server`` may list several "host:port" entries separated by ';'.
        BaseCache.__init__(self, params)
        self._cache = memcache.Client(server.split(';'))

    def _get_memcache_timeout(self, timeout):
        """
        Memcached deals with long (> 30 days) timeouts in a special
        way. Call this function to obtain a safe value for your timeout.
        """
        timeout = timeout or self.default_timeout
        if timeout > 2592000: # 60*60*24*30, 30 days
            # See http://code.google.com/p/memcached/wiki/FAQ
            # "You can set expire times up to 30 days in the future. After that
            # memcached interprets it as a date, and will expire the item after
            # said date. This is a simple (but obscure) mechanic."
            #
            # This means that we have to switch to absolute timestamps.
            timeout += int(time.time())
        return timeout

    def add(self, key, value, timeout=0):
        # memcached cannot store unicode objects directly; encode to UTF-8.
        if isinstance(value, unicode):
            value = value.encode('utf-8')
        return self._cache.add(smart_str(key), value, self._get_memcache_timeout(timeout))

    def get(self, key, default=None):
        # Return the cached value, or ``default`` on a cache miss.
        val = self._cache.get(smart_str(key))
        if val is None:
            return default
        return val

    def set(self, key, value, timeout=0):
        self._cache.set(smart_str(key), value, self._get_memcache_timeout(timeout))

    def delete(self, key):
        self._cache.delete(smart_str(key))

    def get_many(self, keys):
        # Fetch several keys in a single round-trip to the server.
        return self._cache.get_multi(map(smart_str,keys))

    def close(self, **kwargs):
        self._cache.disconnect_all()

    def incr(self, key, delta=1):
        try:
            val = self._cache.incr(key, delta)

        # python-memcache responds to incr on non-existent keys by
        # raising a ValueError. Cmemcache returns None. In both
        # cases, we should raise a ValueError though.
        except ValueError:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)

        return val

    def decr(self, key, delta=1):
        try:
            val = self._cache.decr(key, delta)

        # python-memcache responds to decr on non-existent keys by
        # raising a ValueError. Cmemcache returns None. In both
        # cases, we should raise a ValueError though.
        except ValueError:
            val = None
        if val is None:
            raise ValueError("Key '%s' not found" % key)
        return val

    def set_many(self, data, timeout=0):
        # Encode unicode values once, then store everything with one call.
        safe_data = {}
        for key, value in data.items():
            if isinstance(value, unicode):
                value = value.encode('utf-8')
            safe_data[smart_str(key)] = value
        self._cache.set_multi(safe_data, self._get_memcache_timeout(timeout))

    def delete_many(self, keys):
        self._cache.delete_multi(map(smart_str, keys))

    def clear(self):
        self._cache.flush_all()
| apache-2.0 |
TigorC/zulip | zerver/models.py | 2 | 57542 | from __future__ import absolute_import
from typing import Any, Dict, List, Set, Tuple, TypeVar, Text, \
Union, Optional, Sequence, AbstractSet, Pattern, AnyStr
from typing.re import Match
from zerver.lib.str_utils import NonBinaryStr
from django.db import models
from django.db.models.query import QuerySet
from django.db.models import Manager
from django.conf import settings
from django.contrib.auth.models import AbstractBaseUser, UserManager, \
PermissionsMixin
import django.contrib.auth
from django.core.exceptions import ValidationError
from django.core.validators import URLValidator
from django.dispatch import receiver
from zerver.lib.cache import cache_with_key, flush_user_profile, flush_realm, \
user_profile_by_id_cache_key, user_profile_by_email_cache_key, \
generic_bulk_cached_fetch, cache_set, flush_stream, \
display_recipient_cache_key, cache_delete, \
get_stream_cache_key, active_user_dicts_in_realm_cache_key, \
active_bot_dicts_in_realm_cache_key, active_user_dict_fields, \
active_bot_dict_fields, flush_message
from zerver.lib.utils import make_safe_digest, generate_random_token
from zerver.lib.str_utils import ModelReprMixin
from django.db import transaction
from zerver.lib.camo import get_camo_url
from django.utils import timezone
from django.contrib.sessions.models import Session
from zerver.lib.timestamp import datetime_to_timestamp
from django.db.models.signals import pre_save, post_save, post_delete
from django.core.validators import MinLengthValidator, RegexValidator
from django.utils.translation import ugettext_lazy as _
from zerver.lib import cache
from bitfield import BitField
from bitfield.types import BitHandler
from collections import defaultdict
from datetime import timedelta
import pylibmc
import re
import logging
import time
import datetime
# Hard limits enforced on message subjects and bodies.
MAX_SUBJECT_LENGTH = 60
MAX_MESSAGE_LENGTH = 10000
MAX_LANGUAGE_ID_LENGTH = 50 # type: int

# Type variable covering the container types a set of stream names may use.
STREAM_NAMES = TypeVar('STREAM_NAMES', Sequence[Text], AbstractSet[Text])

# Doing 1000 remote cache requests to get_display_recipient is quite slow,
# so add a local cache as well as the remote cache cache.
per_request_display_recipient_cache = {} # type: Dict[int, List[Dict[str, Any]]]
def get_display_recipient_by_id(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, int) -> Union[Text, List[Dict[str, Any]]]
    """Fetch the display recipient, memoizing the result per request."""
    try:
        return per_request_display_recipient_cache[recipient_id]
    except KeyError:
        result = get_display_recipient_remote_cache(recipient_id, recipient_type,
                                                    recipient_type_id)
        per_request_display_recipient_cache[recipient_id] = result
        return result
def get_display_recipient(recipient):
    # type: (Recipient) -> Union[Text, List[Dict[str, Any]]]
    """Convenience wrapper: display recipient for a Recipient object."""
    return get_display_recipient_by_id(recipient.id, recipient.type,
                                       recipient.type_id)
def flush_per_request_caches():
    # type: () -> None
    # Reset both per-process caches between requests.  The dicts are
    # rebound (not cleared in place) so any stale references are dropped.
    global per_request_display_recipient_cache
    per_request_display_recipient_cache = {}
    global per_request_realm_filters_cache
    per_request_realm_filters_cache = {}
@cache_with_key(lambda *args: display_recipient_cache_key(args[0]),
                timeout=3600*24*7)
def get_display_recipient_remote_cache(recipient_id, recipient_type, recipient_type_id):
    # type: (int, int, int) -> Union[Text, List[Dict[str, Any]]]
    """
    returns: an appropriate object describing the recipient. For a
    stream this will be the stream name as a string. For a huddle or
    personal, it will be an array of dicts about each recipient.

    Results are stored in the remote cache for a week, keyed on recipient_id.
    """
    if recipient_type == Recipient.STREAM:
        stream = Stream.objects.get(id=recipient_type_id)
        return stream.name

    # We don't really care what the ordering is, just that it's deterministic.
    user_profile_list = (UserProfile.objects.filter(subscription__recipient_id=recipient_id)
                         .select_related()
                         .order_by('email'))
    return [{'email': user_profile.email,
             'domain': user_profile.realm.domain,
             'full_name': user_profile.full_name,
             'short_name': user_profile.short_name,
             'id': user_profile.id,
             'is_mirror_dummy': user_profile.is_mirror_dummy} for user_profile in user_profile_list]
def get_realm_emoji_cache_key(realm):
    # type: (Realm) -> Text
    """Remote-cache key under which a realm's custom emoji are stored."""
    return u'realm_emoji:{}'.format(realm.id)
class Realm(ModelReprMixin, models.Model):
    """An organization on this server; users, streams, bots, etc. each
    belong to exactly one realm."""
    # domain is a domain in the Internet sense. It must be structured like a
    # valid email domain. We use it to restrict access, identify bots, etc.
    domain = models.CharField(max_length=40, db_index=True, unique=True) # type: Text
    # name is the user-visible identifier for the realm. It has no required
    # structure.
    AUTHENTICATION_FLAGS = [u'Google', u'Email', u'GitHub', u'LDAP', u'Dev', u'RemoteUser']
    name = models.CharField(max_length=40, null=True) # type: Optional[Text]
    string_id = models.CharField(max_length=40, unique=True) # type: Text
    restricted_to_domain = models.BooleanField(default=False) # type: bool
    invite_required = models.BooleanField(default=True) # type: bool
    invite_by_admins_only = models.BooleanField(default=False) # type: bool
    create_stream_by_admins_only = models.BooleanField(default=False) # type: bool
    mandatory_topics = models.BooleanField(default=False) # type: bool
    show_digest_email = models.BooleanField(default=True) # type: bool
    name_changes_disabled = models.BooleanField(default=False) # type: bool
    allow_message_editing = models.BooleanField(default=True) # type: bool
    DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS = 600 # if changed, also change in admin.js
    message_content_edit_limit_seconds = models.IntegerField(default=DEFAULT_MESSAGE_CONTENT_EDIT_LIMIT_SECONDS) # type: int
    message_retention_days = models.IntegerField(null=True) # type: Optional[int]

    # Valid org_types are {CORPORATE, COMMUNITY}
    CORPORATE = 1
    COMMUNITY = 2
    org_type = models.PositiveSmallIntegerField(default=COMMUNITY) # type: int

    date_created = models.DateTimeField(default=timezone.now) # type: datetime.datetime
    notifications_stream = models.ForeignKey('Stream', related_name='+', null=True, blank=True) # type: Optional[Stream]
    deactivated = models.BooleanField(default=False) # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: Text
    # Bitfield whose flag names must stay in sync with AUTHENTICATION_FLAGS;
    # the default enables every flag.
    authentication_methods = BitField(flags=AUTHENTICATION_FLAGS,
                                      default=2**31 - 1) # type: BitHandler
    # Days a new member must wait before e.g. creating streams
    # (see can_create_streams); 0 disables the waiting period.
    waiting_period_threshold = models.PositiveIntegerField(default=0) # type: int
    DEFAULT_NOTIFICATION_STREAM_NAME = u'announce'

    def authentication_methods_dict(self):
        # type: () -> Dict[Text, bool]
        """Returns a mapping from authentication flags to their status,
        showing only those authentication flags that are supported on
        the current server (i.e. if EmailAuthBackend is not configured
        on the server, this will not return an entry for "Email")."""
        # This mapping needs to be imported from here due to the cyclic
        # dependency.
        from zproject.backends import AUTH_BACKEND_NAME_MAP
        ret = {} # type: Dict[Text, bool]
        supported_backends = {backend.__class__ for backend in django.contrib.auth.get_backends()}
        # NOTE(review): .iteritems() is django-bitfield's BitHandler API
        # (Python 2 era), not a plain dict method.
        for k, v in self.authentication_methods.iteritems():
            backend = AUTH_BACKEND_NAME_MAP[k]
            if backend in supported_backends:
                ret[k] = v
        return ret

    def __unicode__(self):
        # type: () -> Text
        return u"<Realm: %s %s>" % (self.domain, self.id)

    @cache_with_key(get_realm_emoji_cache_key, timeout=3600*24*7)
    def get_emoji(self):
        # type: () -> Dict[Text, Dict[str, Text]]
        # Cached wrapper around get_realm_emoji_uncached (one week TTL).
        return get_realm_emoji_uncached(self)

    @property
    def deployment(self):
        # type: () -> Any # returns a Deployment from zilencer.models
        # see https://github.com/zulip/zulip/issues/1845 before you
        # attempt to add test coverage for this method, as we may
        # be revisiting the deployments model soon
        try:
            return self._deployments.all()[0]
        except IndexError:
            return None

    @deployment.setter # type: ignore # https://github.com/python/mypy/issues/220
    def set_deployments(self, value):
        # type: (Any) -> None
        self._deployments = [value] # type: Any

    def get_admin_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_realm_admin=True,
                                          is_active=True).select_related()

    def get_active_users(self):
        # type: () -> Sequence[UserProfile]
        # TODO: Change return type to QuerySet[UserProfile]
        return UserProfile.objects.filter(realm=self, is_active=True).select_related()

    @property
    def subdomain(self):
        # type: () -> Optional[Text]
        # Only meaningful when the server runs with subdomains enabled.
        if settings.REALMS_HAVE_SUBDOMAINS:
            return self.string_id
        return None

    @property
    def uri(self):
        # type: () -> str
        # Canonical base URL for this realm.
        if settings.REALMS_HAVE_SUBDOMAINS and self.subdomain is not None:
            return '%s%s.%s' % (settings.EXTERNAL_URI_SCHEME,
                                self.subdomain, settings.EXTERNAL_HOST)
        return settings.SERVER_URI

    @property
    def host(self):
        # type: () -> str
        # Host (no scheme) for this realm.
        if settings.REALMS_HAVE_SUBDOMAINS and self.subdomain is not None:
            return "%s.%s" % (self.subdomain, settings.EXTERNAL_HOST)
        return settings.EXTERNAL_HOST

    @property
    def is_zephyr_mirror_realm(self):
        # type: () -> bool
        # The MIT Zephyr mirroring realm is identified by its domain.
        return self.domain == "mit.edu"

    @property
    def webathena_enabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    @property
    def presence_disabled(self):
        # type: () -> bool
        return self.is_zephyr_mirror_realm

    class Meta(object):
        permissions = (
            ('administer', "Administer a realm"),
            ('api_super_user', "Can send messages as other users for mirroring"),
        )
post_save.connect(flush_realm, sender=Realm)
def get_realm(domain):
    # type: (Text) -> Optional[Realm]
    """Look up a Realm by domain, case-insensitively; None if absent/empty."""
    if domain:
        try:
            return Realm.objects.get(domain__iexact=domain.strip())
        except Realm.DoesNotExist:
            pass
    return None
# Added to assist with the domain to string_id transition. Will eventually
# be renamed and replace get_realm.
def get_realm_by_string_id(string_id):
    # type: (Text) -> Optional[Realm]
    """Look up a Realm by its string_id; None if absent/empty."""
    if string_id:
        try:
            return Realm.objects.get(string_id=string_id)
        except Realm.DoesNotExist:
            pass
    return None
def completely_open(domain):
    # type: (Text) -> bool
    """Whether the realm for `domain` is completely open to everyone on
    the internet: email addresses need not match the domain and no
    invitation from an existing user is required."""
    realm = get_realm(domain)
    if realm is None:
        return False
    return not (realm.invite_required or realm.restricted_to_domain)
def get_unique_open_realm():
    # type: () -> Optional[Realm]
    """We only return a realm if there is a unique non-system-only realm,
    it is completely open, and there are no subdomains."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        return None
    # On production installations, the (usually "zulip.com") system
    # realm is an empty realm just used for system bots, so don't
    # include it in this accounting.
    candidates = Realm.objects.filter(deactivated=False).exclude(
        domain__in=settings.SYSTEM_ONLY_REALMS)
    if len(candidates) != 1:
        return None
    only_realm = candidates[0]
    if only_realm.invite_required or only_realm.restricted_to_domain:
        return None
    return only_realm
def name_changes_disabled(realm):
    # type: (Optional[Realm]) -> bool
    """Whether users may change their full name (server or realm setting)."""
    disabled_for_realm = realm is not None and realm.name_changes_disabled
    return settings.NAME_CHANGES_DISABLED or disabled_for_realm
class RealmAlias(models.Model):
    """An email domain associated with a realm (used by
    email_allowed_for_realm and get_realm_by_email_domain)."""
    realm = models.ForeignKey(Realm, null=True) # type: Optional[Realm]
    # should always be stored lowercase
    domain = models.CharField(max_length=80, db_index=True) # type: Text
def can_add_alias(domain):
    # type: (Text) -> bool
    """Whether a RealmAlias for `domain` may be created."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        # With subdomains enabled, aliases need not be globally unique.
        return True
    return not RealmAlias.objects.filter(domain=domain).exists()
# These functions should only be used on email addresses that have
# been validated via django.core.validators.validate_email
#
# Note that we need to use some care, since can you have multiple @-signs; e.g.
# "tabbott@test"@zulip.com
# is valid email address
def email_to_username(email):
    # type: (Text) -> Text
    """Return everything before the final '@' of `email`, lowercased."""
    local_part, _, _ = email.rpartition("@")
    return local_part.lower()
# Returns the raw domain portion of the desired email address
def email_to_domain(email):
    # type: (Text) -> Text
    """Return everything after the final '@' of `email`, lowercased."""
    return email.rpartition("@")[2].lower()
class GetRealmByDomainException(Exception):
    """Raised when realms cannot be resolved from an email domain
    (i.e. when REALMS_HAVE_SUBDOMAINS is enabled)."""
    pass
def get_realm_by_email_domain(email):
    # type: (Text) -> Optional[Realm]
    """Resolve the realm owning `email`'s domain via RealmAlias; None if none."""
    if settings.REALMS_HAVE_SUBDOMAINS:
        raise GetRealmByDomainException(
            "Cannot get realm from email domain when settings.REALMS_HAVE_SUBDOMAINS = True")
    query = RealmAlias.objects.select_related('realm')
    try:
        return query.get(domain=email_to_domain(email)).realm
    except RealmAlias.DoesNotExist:
        return None
# Is a user with the given email address allowed to be in the given realm?
# (This function does not check whether the user has been invited to the realm.
# So for invite-only realms, this is the test for whether a user can be invited,
# not whether the user can sign up currently.)
def email_allowed_for_realm(email, realm):
    # type: (Text, Realm) -> bool
    """True if `email`'s domain is acceptable for membership in `realm`."""
    if not realm.restricted_to_domain:
        return True
    return RealmAlias.objects.filter(
        realm=realm, domain=email_to_domain(email)).exists()
def list_of_domains_for_realm(realm):
    # type: (Realm) -> List[Text]
    """All alias domains registered for `realm`, as a plain list."""
    domains = RealmAlias.objects.filter(realm=realm).values_list('domain', flat=True)
    return list(domains)
class RealmEmoji(ModelReprMixin, models.Model):
    """A custom emoji uploaded for a particular realm."""
    realm = models.ForeignKey(Realm) # type: Realm
    # Second part of the regex (negative lookbehind) disallows names ending with one of the punctuation characters
    name = models.TextField(validators=[MinLengthValidator(1),
                                        RegexValidator(regex=r'^[0-9a-zA-Z.\-_]+(?<![.\-_])$',
                                                       message=_("Invalid characters in Emoji name"))]) # type: Text
    # URLs start having browser compatibility problem below 2000
    # characters, so 1000 seems like a safe limit.
    img_url = models.URLField(max_length=1000) # type: Text

    class Meta(object):
        unique_together = ("realm", "name")

    def __unicode__(self):
        # type: () -> Text
        return u"<RealmEmoji(%s): %s %s>" % (self.realm.domain, self.name, self.img_url)
def get_realm_emoji_uncached(realm):
    # type: (Realm) -> Dict[Text, Dict[str, Text]]
    """Map each of the realm's emoji names to its source and camo display URLs."""
    return {row.name: dict(source_url=row.img_url,
                           display_url=get_camo_url(row.img_url))
            for row in RealmEmoji.objects.filter(realm=realm)}
def flush_realm_emoji(sender, **kwargs):
    # type: (Any, **Any) -> None
    # Signal handler: recompute and re-store the realm's emoji cache
    # entry whenever a RealmEmoji row is saved or deleted.
    realm = kwargs['instance'].realm
    cache_set(get_realm_emoji_cache_key(realm),
              get_realm_emoji_uncached(realm),
              timeout=3600*24*7)

post_save.connect(flush_realm_emoji, sender=RealmEmoji)
post_delete.connect(flush_realm_emoji, sender=RealmEmoji)
def filter_pattern_validator(value):
    # type: (Text) -> None
    """Validate a RealmFilter pattern: it must look like PREFIX-(?P<id>.+)
    and must itself compile as a regular expression.

    Raises ValidationError on failure; returns None on success.
    """
    regex = re.compile(r'(?:[\w\-#]+)(\(\?P<\w+>.+\))')
    error_msg = 'Invalid filter pattern, you must use the following format PREFIX-(?P<id>.+)'

    if not regex.match(str(value)):
        raise ValidationError(error_msg)

    try:
        re.compile(value)
    except re.error:
        # Narrowed from a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; only a bad regex should be
        # reported as a validation error.
        raise ValidationError(error_msg)
def filter_format_validator(value):
    # type: (str) -> None
    """Validate a RealmFilter URL format string: URL-ish characters plus
    exactly one %(name)s substitution. Raises ValidationError on failure."""
    allowed = re.compile(r'^[\.\/:a-zA-Z0-9_-]+%\(([a-zA-Z0-9_-]+)\)s[a-zA-Z0-9_-]*$')
    if allowed.match(value) is None:
        raise ValidationError('URL format string must be in the following format: `https://example.com/%(\w+)s`')
class RealmFilter(models.Model):
    """A realm-specific "linkifier": messages matching `pattern` get
    linked using `url_format_string`."""
    realm = models.ForeignKey(Realm) # type: Realm
    pattern = models.TextField(validators=[filter_pattern_validator]) # type: Text
    # BUG FIX: validators must be *instances*.  Previously the URLValidator
    # class itself was listed, so "validating" just constructed a validator
    # object and the URL check silently never ran.
    url_format_string = models.TextField(validators=[URLValidator(), filter_format_validator]) # type: Text

    class Meta(object):
        unique_together = ("realm", "pattern")

    def __unicode__(self):
        # type: () -> Text
        return u"<RealmFilter(%s): %s %s>" % (self.realm.domain, self.pattern, self.url_format_string)
def get_realm_filters_cache_key(domain):
    # type: (Text) -> Text
    """Remote-cache key for the realm filters belonging to `domain`."""
    return u'all_realm_filters:{}'.format(domain)
# We have a per-process cache to avoid doing 1000 remote cache queries during page load
per_request_realm_filters_cache = {} # type: Dict[Text, List[Tuple[Text, Text, int]]]

def domain_in_local_realm_filters_cache(domain):
    # type: (Text) -> bool
    # True if this process has already fetched the filters for `domain`.
    return domain in per_request_realm_filters_cache
def realm_filters_for_domain(domain):
    # type: (Text) -> List[Tuple[Text, Text, int]]
    """Realm filters for `domain`, memoized in the per-process cache."""
    domain = domain.lower()
    try:
        return per_request_realm_filters_cache[domain]
    except KeyError:
        filters = realm_filters_for_domain_remote_cache(domain)
        per_request_realm_filters_cache[domain] = filters
        return filters
@cache_with_key(get_realm_filters_cache_key, timeout=3600*24*7)
def realm_filters_for_domain_remote_cache(domain):
    # type: (Text) -> List[Tuple[Text, Text, int]]
    """(pattern, url_format_string, id) triples for `domain`'s realm filters."""
    realm = get_realm(domain)
    return [(rf.pattern, rf.url_format_string, rf.id)
            for rf in RealmFilter.objects.filter(realm=realm)]
def all_realm_filters():
    # type: () -> Dict[Text, List[Tuple[Text, Text, int]]]
    """Every realm filter on the server, grouped by realm domain."""
    by_domain = defaultdict(list) # type: Dict[Text, List[Tuple[Text, Text, int]]]
    for rf in RealmFilter.objects.all():
        entry = (rf.pattern, rf.url_format_string, rf.id)
        by_domain[rf.realm.domain].append(entry)
    return by_domain
def flush_realm_filter(sender, **kwargs):
    # type: (Any, **Any) -> None
    """Signal handler: drop both caches of a realm's filters on change."""
    domain = kwargs['instance'].realm.domain
    cache_delete(get_realm_filters_cache_key(domain))
    # Discard the per-process entry too, if present.
    per_request_realm_filters_cache.pop(domain.lower(), None)
# Invalidate cached realm filters whenever a RealmFilter row changes.
post_save.connect(flush_realm_filter, sender=RealmFilter)
post_delete.connect(flush_realm_filter, sender=RealmFilter)
class UserProfile(ModelReprMixin, AbstractBaseUser, PermissionsMixin):
    """A user (human or bot) account, always attached to one Realm."""
    # Bot type constants (stored in bot_type below).
    DEFAULT_BOT = 1
    """
    Incoming webhook bots are limited to only sending messages via webhooks.
    Thus, it is less of a security risk to expose their API keys to third-party services,
    since they can't be used to read messages.
    """
    INCOMING_WEBHOOK_BOT = 2

    # Fields from models.AbstractUser minus last_name and first_name,
    # which we don't use; email is modified to make it indexed and unique.
    email = models.EmailField(blank=False, db_index=True, unique=True) # type: Text
    is_staff = models.BooleanField(default=False) # type: bool
    is_active = models.BooleanField(default=True, db_index=True) # type: bool
    is_realm_admin = models.BooleanField(default=False, db_index=True) # type: bool
    is_bot = models.BooleanField(default=False, db_index=True) # type: bool
    bot_type = models.PositiveSmallIntegerField(null=True, db_index=True) # type: Optional[int]
    is_api_super_user = models.BooleanField(default=False, db_index=True) # type: bool
    date_joined = models.DateTimeField(default=timezone.now) # type: datetime.datetime
    is_mirror_dummy = models.BooleanField(default=False) # type: bool
    bot_owner = models.ForeignKey('self', null=True, on_delete=models.SET_NULL) # type: Optional[UserProfile]

    USERNAME_FIELD = 'email'
    MAX_NAME_LENGTH = 100

    # Our custom site-specific fields
    full_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: Text
    short_name = models.CharField(max_length=MAX_NAME_LENGTH) # type: Text
    # pointer points to Message.id, NOT UserMessage.id.
    pointer = models.IntegerField() # type: int
    last_pointer_updater = models.CharField(max_length=64) # type: Text
    realm = models.ForeignKey(Realm) # type: Realm
    api_key = models.CharField(max_length=32) # type: Text
    tos_version = models.CharField(null=True, max_length=10) # type: Optional[Text]

    ### Notifications settings. ###
    # Stream notifications.
    enable_stream_desktop_notifications = models.BooleanField(default=False) # type: bool
    enable_stream_sounds = models.BooleanField(default=False) # type: bool
    # PM + @-mention notifications.
    enable_desktop_notifications = models.BooleanField(default=True) # type: bool
    enable_sounds = models.BooleanField(default=True) # type: bool
    enable_offline_email_notifications = models.BooleanField(default=True) # type: bool
    enable_offline_push_notifications = models.BooleanField(default=True) # type: bool
    enable_online_push_notifications = models.BooleanField(default=False) # type: bool
    enable_digest_emails = models.BooleanField(default=True) # type: bool
    # Old notification field superseded by existence of stream notification
    # settings.
    default_desktop_notifications = models.BooleanField(default=True) # type: bool
    ###

    last_reminder = models.DateTimeField(default=timezone.now, null=True) # type: Optional[datetime.datetime]
    rate_limits = models.CharField(default=u"", max_length=100) # type: Text # comma-separated list of range:max pairs

    # Default streams
    default_sending_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+') # type: Optional[Stream]
    default_events_register_stream = models.ForeignKey('zerver.Stream', null=True, related_name='+') # type: Optional[Stream]
    default_all_public_streams = models.BooleanField(default=False) # type: bool

    # UI vars
    enter_sends = models.NullBooleanField(default=True) # type: Optional[bool]
    autoscroll_forever = models.BooleanField(default=False) # type: bool
    left_side_userlist = models.BooleanField(default=False) # type: bool

    # display settings
    twenty_four_hour_time = models.BooleanField(default=False) # type: bool
    default_language = models.CharField(default=u'en', max_length=MAX_LANGUAGE_ID_LENGTH) # type: Text

    # Hours to wait before sending another email to a user
    EMAIL_REMINDER_WAITPERIOD = 24
    # Minutes to wait before warning a bot owner that her bot sent a message
    # to a nonexistent stream
    BOT_OWNER_STREAM_ALERT_WAITPERIOD = 1

    AVATAR_FROM_GRAVATAR = u'G'
    AVATAR_FROM_USER = u'U'
    AVATAR_SOURCES = (
        (AVATAR_FROM_GRAVATAR, 'Hosted by Gravatar'),
        (AVATAR_FROM_USER, 'Uploaded by user'),
    )
    avatar_source = models.CharField(default=AVATAR_FROM_GRAVATAR, choices=AVATAR_SOURCES, max_length=1) # type: Text

    TUTORIAL_WAITING = u'W'
    TUTORIAL_STARTED = u'S'
    TUTORIAL_FINISHED = u'F'
    TUTORIAL_STATES = ((TUTORIAL_WAITING, "Waiting"),
                       (TUTORIAL_STARTED, "Started"),
                       (TUTORIAL_FINISHED, "Finished"))
    tutorial_status = models.CharField(default=TUTORIAL_WAITING, choices=TUTORIAL_STATES, max_length=1) # type: Text

    # Contains serialized JSON of the form:
    # [("step 1", true), ("step 2", false)]
    # where the second element of each tuple is if the step has been
    # completed.
    onboarding_steps = models.TextField(default=u'[]') # type: Text

    invites_granted = models.IntegerField(default=0) # type: int
    invites_used = models.IntegerField(default=0) # type: int

    alert_words = models.TextField(default=u'[]') # type: Text # json-serialized list of strings

    # Contains serialized JSON of the form:
    # [["social", "mit"], ["devel", "ios"]]
    muted_topics = models.TextField(default=u'[]') # type: Text

    objects = UserManager() # type: UserManager

    def can_admin_user(self, target_user):
        # type: (UserProfile) -> bool
        """Returns whether this user has permission to modify target_user"""
        if target_user.bot_owner == self:
            return True
        elif self.is_realm_admin and self.realm == target_user.realm:
            return True
        else:
            return False

    def __unicode__(self):
        # type: () -> Text
        return u"<UserProfile: %s %s>" % (self.email, self.realm)

    @property
    def is_incoming_webhook(self):
        # type: () -> bool
        return self.bot_type == UserProfile.INCOMING_WEBHOOK_BOT

    @staticmethod
    def emails_from_ids(user_ids):
        # type: (Sequence[int]) -> Dict[int, Text]
        # Bulk id -> email lookup without materializing full UserProfiles.
        rows = UserProfile.objects.filter(id__in=user_ids).values('id', 'email')
        return {row['id']: row['email'] for row in rows}

    def can_create_streams(self):
        # type: () -> bool
        # Realm admins always can; otherwise the realm may restrict stream
        # creation to admins, or impose a new-member waiting period.
        diff = (timezone.now() - self.date_joined).days
        if self.is_realm_admin:
            return True
        elif self.realm.create_stream_by_admins_only:
            return False
        if diff >= self.realm.waiting_period_threshold:
            return True
        return False

    def major_tos_version(self):
        # type: () -> int
        # -1 means the user has not agreed to any Terms of Service version.
        if self.tos_version is not None:
            return int(self.tos_version.split('.')[0])
        else:
            return -1
def receives_offline_notifications(user_profile):
    # type: (UserProfile) -> bool
    """Whether this (non-bot) user gets email or push notifications while offline."""
    if user_profile.is_bot:
        return False
    return (user_profile.enable_offline_email_notifications or
            user_profile.enable_offline_push_notifications)
def receives_online_notifications(user_profile):
    # type: (UserProfile) -> bool
    """Whether this (non-bot) user gets push notifications while online."""
    if user_profile.is_bot:
        return False
    return user_profile.enable_online_push_notifications
def remote_user_to_email(remote_user):
    # type: (Text) -> Text
    """Map an SSO remote username to an email, appending SSO_APPEND_DOMAIN if set."""
    append_domain = settings.SSO_APPEND_DOMAIN
    if append_domain is not None:
        return remote_user + "@" + append_domain
    return remote_user
# Make sure we flush the UserProfile object from our remote cache
# whenever we save it.
post_save.connect(flush_user_profile, sender=UserProfile)
class PreregistrationUser(models.Model):
    """A signup or invitation in progress, before a UserProfile exists."""
    email = models.EmailField() # type: Text
    referred_by = models.ForeignKey(UserProfile, null=True) # type: Optional[UserProfile]
    # Streams the invitee will be auto-subscribed to on signup.
    streams = models.ManyToManyField('Stream') # type: Manager
    invited_at = models.DateTimeField(auto_now=True) # type: datetime.datetime
    # Whether this signup is creating a brand-new realm.
    realm_creation = models.BooleanField(default=False)
    # status: whether an object has been confirmed.
    # if confirmed, set to confirmation.settings.STATUS_ACTIVE
    status = models.IntegerField(default=0) # type: int
    realm = models.ForeignKey(Realm, null=True) # type: Optional[Realm]
class PushDeviceToken(models.Model):
    """A mobile device's push-notification registration (APNS or GCM)."""
    APNS = 1
    GCM = 2
    KINDS = (
        (APNS, 'apns'),
        (GCM, 'gcm'),
    )
    kind = models.PositiveSmallIntegerField(choices=KINDS) # type: int

    # The token is a unique device-specific token that is
    # sent to us from each device:
    #   - APNS token if kind == APNS
    #   - GCM registration id if kind == GCM
    token = models.CharField(max_length=4096, unique=True) # type: Text
    last_updated = models.DateTimeField(auto_now=True) # type: datetime.datetime

    # The user whose device this is
    user = models.ForeignKey(UserProfile, db_index=True) # type: UserProfile

    # [optional] Contains the app id of the device if it is an iOS device
    ios_app_id = models.TextField(null=True) # type: Optional[Text]
def generate_email_token_for_stream():
    # type: () -> Text
    # Random token used in a stream's inbound email-forwarding address
    # (default for Stream.email_token below).
    return generate_random_token(32)
class Stream(ModelReprMixin, models.Model):
    """A chat channel ("stream") within a realm."""
    MAX_NAME_LENGTH = 60
    name = models.CharField(max_length=MAX_NAME_LENGTH, db_index=True) # type: Text
    realm = models.ForeignKey(Realm, db_index=True) # type: Realm
    invite_only = models.NullBooleanField(default=False) # type: Optional[bool]
    # Used by the e-mail forwarder. The e-mail RFC specifies a maximum
    # e-mail length of 254, and our stream name length is capped at
    # MAX_NAME_LENGTH (60), so we have plenty of room for the token.
    email_token = models.CharField(
        max_length=32, default=generate_email_token_for_stream) # type: Text
    description = models.CharField(max_length=1024, default=u'') # type: Text
    date_created = models.DateTimeField(default=timezone.now) # type: datetime.datetime
    deactivated = models.BooleanField(default=False) # type: bool

    def __unicode__(self):
        # type: () -> Text
        return u"<Stream: %s>" % (self.name,)

    def is_public(self):
        # type: () -> bool
        # All streams are private in Zephyr mirroring realms.
        return not self.invite_only and not self.realm.is_zephyr_mirror_realm

    class Meta(object):
        unique_together = ("name", "realm")

    def num_subscribers(self):
        # type: () -> int
        # Count active users holding an active subscription to this stream.
        return Subscription.objects.filter(
            recipient__type=Recipient.STREAM,
            recipient__type_id=self.id,
            user_profile__is_active=True,
            active=True
        ).count()

    # This is stream information that is sent to clients
    def to_dict(self):
        # type: () -> Dict[str, Any]
        return dict(name=self.name,
                    stream_id=self.id,
                    description=self.description,
                    invite_only=self.invite_only)
# Keep cached stream data in sync on save/delete.
post_save.connect(flush_stream, sender=Stream)
post_delete.connect(flush_stream, sender=Stream)
def valid_stream_name(name):
    # type: (Text) -> bool
    """Any non-empty name is currently considered a valid stream name."""
    if name == "":
        return False
    return True
# The Recipient table is used to map Messages to the set of users who
# received the message. It is implemented as a set of triples (id,
# type_id, type). We have 3 types of recipients: Huddles (for group
# private messages), UserProfiles (for 1:1 private messages), and
# Streams. The recipient table maps a globally unique recipient id
# (used by the Message table) to the type-specific unique id (the
# stream id, user_profile id, or huddle id).
class Recipient(ModelReprMixin, models.Model):
    """Audience of a message: a stream, a 1:1 personal, or a huddle."""
    type_id = models.IntegerField(db_index=True) # type: int
    type = models.PositiveSmallIntegerField(db_index=True) # type: int
    # Valid types are {personal, stream, huddle}
    PERSONAL = 1
    STREAM = 2
    HUDDLE = 3

    class Meta(object):
        unique_together = ("type", "type_id")

    # N.B. If we used Django's choice=... we would get this for free (kinda)
    _type_names = {
        PERSONAL: 'personal',
        STREAM: 'stream',
        HUDDLE: 'huddle'}

    def type_name(self):
        # type: () -> str
        # Raises KeyError if invalid
        return self._type_names[self.type]

    def __unicode__(self):
        # type: () -> Text
        display_recipient = get_display_recipient(self)
        return u"<Recipient: %s (%d, %s)>" % (display_recipient, self.type_id, self.type)
class Client(ModelReprMixin, models.Model):
    """A kind of sending client (website, mobile apps, API scripts, ...),
    identified by name."""
    name = models.CharField(max_length=30, db_index=True, unique=True) # type: Text

    def __unicode__(self):
        # type: () -> Text
        return u"<Client: %s>" % (self.name,)
# Per-process cache of Client rows, keyed with the remote-cache prefix.
get_client_cache = {} # type: Dict[Text, Client]

def get_client(name):
    # type: (Text) -> Client
    """Get-or-create a Client by name, memoized per process."""
    # KEY_PREFIX is read through the module each call so we always see
    # its current (possibly updated) value.
    cache_name = cache.KEY_PREFIX + name
    try:
        return get_client_cache[cache_name]
    except KeyError:
        client = get_client_remote_cache(name)
        get_client_cache[cache_name] = client
        return client
def get_client_cache_key(name):
    # type: (Text) -> Text
    # Hash the client name so arbitrary characters are safe in the cache key.
    return u'get_client:%s' % (make_safe_digest(name),)
@cache_with_key(get_client_cache_key, timeout=3600*24*7)
def get_client_remote_cache(name):
    # type: (Text) -> Client
    # Get-or-create the Client row; result cached remotely for a week.
    (client, _) = Client.objects.get_or_create(name=name)
    return client
# get_stream_backend takes either a realm id or a realm
@cache_with_key(get_stream_cache_key, timeout=3600*24*7)
def get_stream_backend(stream_name, realm):
    # type: (Text, Realm) -> Stream
    # Case-insensitive lookup; raises Stream.DoesNotExist if not found.
    # NOTE(review): despite the comment above, the body reads realm.id,
    # so passing a bare realm id would fail here — confirm intent.
    return Stream.objects.select_related("realm").get(
        name__iexact=stream_name.strip(), realm_id=realm.id)
def get_active_streams(realm):
    # type: (Realm) -> QuerySet
    """
    Return all streams (including invite-only streams) that have not been deactivated.
    """
    # NOTE(review): bulk_get_streams passes a realm id here; Django accepts
    # a pk value for a FK filter, so both forms work.
    return Stream.objects.filter(realm=realm, deactivated=False)
def get_stream(stream_name, realm):
    # type: (Text, Realm) -> Optional[Stream]
    # Like get_stream_backend, but returns None instead of raising.
    try:
        return get_stream_backend(stream_name, realm)
    except Stream.DoesNotExist:
        return None
def bulk_get_streams(realm, stream_names):
    # type: (Realm, STREAM_NAMES) -> Dict[Text, Any]
    # Fetch many streams by (case-insensitive) name in one pass, going
    # through the remote cache; returns a dict keyed by lowercased name.
    def fetch_streams_by_name(stream_names):
        # type: (List[Text]) -> Sequence[Stream]
        #
        # This should be just
        #
        # Stream.objects.select_related("realm").filter(name__iexact__in=stream_names,
        #                                               realm_id=realm_id)
        #
        # But chaining __in and __iexact doesn't work with Django's
        # ORM, so we have the following hack to construct the relevant where clause
        if len(stream_names) == 0:
            return []
        # Placeholders are filled by the ORM via params=, so this is not
        # string-interpolated SQL injection.
        upper_list = ", ".join(["UPPER(%s)"] * len(stream_names))
        where_clause = "UPPER(zerver_stream.name::text) IN (%s)" % (upper_list,)
        return get_active_streams(realm.id).select_related("realm").extra(
            where=[where_clause],
            params=stream_names)

    return generic_bulk_cached_fetch(lambda stream_name: get_stream_cache_key(stream_name, realm),
                                     fetch_streams_by_name,
                                     [stream_name.lower() for stream_name in stream_names],
                                     id_fetcher=lambda stream: stream.name.lower())
def get_recipient_cache_key(type, type_id):
    # type: (int, int) -> Text
    """Remote-cache key for the Recipient row with this (type, type_id)."""
    return u"get_recipient:{}:{}".format(type, type_id)
@cache_with_key(get_recipient_cache_key, timeout=3600*24*7)
def get_recipient(type, type_id):
    # type: (int, int) -> Recipient
    # Cached (one week) lookup of the (type, type_id) -> Recipient mapping.
    return Recipient.objects.get(type_id=type_id, type=type)
def bulk_get_recipients(type, type_ids):
    # type: (int, List[int]) -> Dict[int, Any]
    # Fetch many Recipient rows of a single type at once via the remote
    # cache; returns a dict keyed by type_id.
    def cache_key_function(type_id):
        # type: (int) -> Text
        return get_recipient_cache_key(type, type_id)

    def query_function(type_ids):
        # type: (List[int]) -> Sequence[Recipient]
        # TODO: Change return type to QuerySet[Recipient]
        return Recipient.objects.filter(type=type, type_id__in=type_ids)

    return generic_bulk_cached_fetch(cache_key_function, query_function, type_ids,
                                     id_fetcher=lambda recipient: recipient.type_id)
def sew_messages_and_reactions(messages, reactions):
    # type: (List[Dict[str, Any]], List[Dict[str, Any]]) -> List[Dict[str, Any]]
    """Attach each reaction dict to its parent message's 'reactions' list.

    Every message gets a 'reactions' key (empty list when none apply).
    """
    by_id = {}
    for message in messages:
        message['reactions'] = []
        by_id[message['id']] = message
    for reaction in reactions:
        by_id[reaction['message_id']]['reactions'].append(reaction)
    return list(by_id.values())
class Message(ModelReprMixin, models.Model):
    """A single message, with cached rendering metadata and edit history."""
    sender = models.ForeignKey(UserProfile) # type: UserProfile
    recipient = models.ForeignKey(Recipient) # type: Recipient
    subject = models.CharField(max_length=MAX_SUBJECT_LENGTH, db_index=True) # type: Text
    content = models.TextField() # type: Text
    # Cached bugdown rendering of `content`, plus the renderer version
    # that produced it (see need_to_render_content).
    rendered_content = models.TextField(null=True) # type: Optional[Text]
    rendered_content_version = models.IntegerField(null=True) # type: Optional[int]
    pub_date = models.DateTimeField('date published', db_index=True) # type: datetime.datetime
    sending_client = models.ForeignKey(Client) # type: Client
    last_edit_time = models.DateTimeField(null=True) # type: Optional[datetime.datetime]
    edit_history = models.TextField(null=True) # type: Optional[Text]
    has_attachment = models.BooleanField(default=False, db_index=True) # type: bool
    has_image = models.BooleanField(default=False, db_index=True) # type: bool
    has_link = models.BooleanField(default=False, db_index=True) # type: bool
def topic_name(self):
# type: () -> Text
"""
Please start using this helper to facilitate an
eventual switch over to a separate topic table.
"""
return self.subject
    def __unicode__(self):
        # type: () -> Text
        # Debug repr: recipient / subject / sender.
        display_recipient = get_display_recipient(self.recipient)
        return u"<Message: %s / %s / %r>" % (display_recipient, self.subject, self.sender)
    def get_realm(self):
        # type: () -> Realm
        # A message's realm is defined as its sender's realm.
        return self.sender.realm
    def save_rendered_content(self):
        # type: () -> None
        # Persist only the rendering columns, leaving other fields untouched.
        self.save(update_fields=["rendered_content", "rendered_content_version"])
@staticmethod
def need_to_render_content(rendered_content, rendered_content_version, bugdown_version):
# type: (Optional[Text], int, int) -> bool
return rendered_content is None or rendered_content_version < bugdown_version
def to_log_dict(self):
# type: () -> Dict[str, Any]
return dict(
id = self.id,
sender_id = self.sender.id,
sender_email = self.sender.email,
sender_domain = self.sender.realm.domain,
sender_full_name = self.sender.full_name,
sender_short_name = self.sender.short_name,
sending_client = self.sending_client.name,
type = self.recipient.type_name(),
recipient = get_display_recipient(self.recipient),
subject = self.topic_name(),
content = self.content,
timestamp = datetime_to_timestamp(self.pub_date))
@staticmethod
def get_raw_db_rows(needed_ids):
# type: (List[int]) -> List[Dict[str, Any]]
# This is a special purpose function optimized for
# callers like get_old_messages_backend().
fields = [
'id',
'subject',
'pub_date',
'last_edit_time',
'edit_history',
'content',
'rendered_content',
'rendered_content_version',
'recipient_id',
'recipient__type',
'recipient__type_id',
'sender_id',
'sending_client__name',
'sender__email',
'sender__full_name',
'sender__short_name',
'sender__realm__id',
'sender__realm__domain',
'sender__avatar_source',
'sender__is_mirror_dummy',
]
messages = Message.objects.filter(id__in=needed_ids).values(*fields)
"""Adding one-many or Many-Many relationship in values results in N X
results.
Link: https://docs.djangoproject.com/en/1.8/ref/models/querysets/#values
"""
reactions = Reaction.get_raw_db_rows(needed_ids)
return sew_messages_and_reactions(messages, reactions)
def sent_by_human(self):
# type: () -> bool
sending_client = self.sending_client.name.lower()
return (sending_client in ('zulipandroid', 'zulipios', 'zulipdesktop',
'website', 'ios', 'android')) or (
'desktop app' in sending_client)
@staticmethod
def content_has_attachment(content):
# type: (Text) -> Match
return re.search(r'[/\-]user[\-_]uploads[/\.-]', content)
@staticmethod
def content_has_image(content):
# type: (Text) -> bool
return bool(re.search(r'[/\-]user[\-_]uploads[/\.-]\S+\.(bmp|gif|jpg|jpeg|png|webp)', content, re.IGNORECASE))
@staticmethod
def content_has_link(content):
# type: (Text) -> bool
return ('http://' in content or
'https://' in content or
'/user_uploads' in content or
(settings.ENABLE_FILE_LINKS and 'file:///' in content))
@staticmethod
def is_status_message(content, rendered_content):
# type: (Text, Text) -> bool
"""
Returns True if content and rendered_content are from 'me_message'
"""
if content.startswith('/me ') and '\n' not in content:
if rendered_content.startswith('<p>') and rendered_content.endswith('</p>'):
return True
return False
def update_calculated_fields(self):
# type: () -> None
# TODO: rendered_content could also be considered a calculated field
content = self.content
self.has_attachment = bool(Message.content_has_attachment(content))
self.has_image = bool(Message.content_has_image(content))
self.has_link = bool(Message.content_has_link(content))
@receiver(pre_save, sender=Message)
def pre_save_message(sender, **kwargs):
    # type: (Any, **Any) -> None
    """Keep the denormalized has_* fields in sync whenever a save may
    touch ``content`` (full saves, or update_fields including content)."""
    update_fields = kwargs['update_fields']
    if update_fields is None or "content" in update_fields:
        kwargs['instance'].update_calculated_fields()
def get_context_for_message(message):
    # type: (Message) -> Sequence[Message]
    """Up to 10 earlier messages in the same topic and recipient, sent
    within the 15 minutes preceding ``message``, newest first."""
    # TODO: Change return type to QuerySet[Message]
    recency_cutoff = message.pub_date - timedelta(minutes=15)
    candidates = Message.objects.filter(
        recipient_id=message.recipient_id,
        subject=message.subject,
        id__lt=message.id,
        pub_date__gt=recency_cutoff,
    )
    return candidates.order_by('-id')[:10]
# Invalidate cached message data whenever a Message row is written.
post_save.connect(flush_message, sender=Message)
class Reaction(ModelReprMixin, models.Model):
    """An emoji reaction by a single user on a single message."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    message = models.ForeignKey(Message) # type: Message
    emoji_name = models.TextField() # type: Text
    class Meta(object):
        # A user can react to a message with a given emoji at most once.
        unique_together = ("user_profile", "message", "emoji_name")
    @staticmethod
    def get_raw_db_rows(needed_ids):
        # type: (List[int]) -> List[Dict[str, Any]]
        """Reaction rows (as plain dicts) for the given message ids;
        consumed by Message.get_raw_db_rows()."""
        fields = ['message_id', 'emoji_name', 'user_profile__email',
                  'user_profile__id', 'user_profile__full_name']
        return Reaction.objects.filter(message_id__in=needed_ids).values(*fields)
# Whenever a message is sent, for each user current subscribed to the
# corresponding Recipient object, we add a row to the UserMessage
# table, which has has columns (id, user profile id, message id,
# flags) indicating which messages each user has received. This table
# allows us to quickly query any user's last 1000 messages to generate
# the home view.
#
# Additionally, the flags field stores metadata like whether the user
# has read the message, starred the message, collapsed or was
# mentioned the message, etc.
#
# UserMessage is the largest table in a Zulip installation, even
# though each row is only 4 integers.
class UserMessage(ModelReprMixin, models.Model):
    """Per-(user, message) delivery record carrying that user's flags
    (read, starred, mentioned, ...) for the message."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    message = models.ForeignKey(Message) # type: Message
    # We're not using the archived field for now, but create it anyway
    # since this table will be an unpleasant one to do schema changes
    # on later
    # Bit positions are significant and must never be reordered;
    # parse_usermessage_flags() below depends on this exact ordering.
    ALL_FLAGS = ['read', 'starred', 'collapsed', 'mentioned', 'wildcard_mentioned',
                 'summarize_in_home', 'summarize_in_stream', 'force_expand', 'force_collapse',
                 'has_alert_word', "historical", 'is_me_message']
    flags = BitField(flags=ALL_FLAGS, default=0) # type: BitHandler
    class Meta(object):
        unique_together = ("user_profile", "message")
    def __unicode__(self):
        # type: () -> Text
        display_recipient = get_display_recipient(self.message.recipient)
        return u"<UserMessage: %s / %s (%s)>" % (display_recipient, self.user_profile.email, self.flags_list())
    def flags_list(self):
        # type: () -> List[str]
        """Names of the flags currently set on this row."""
        return [flag for flag in self.flags.keys() if getattr(self.flags, flag).is_set]
def parse_usermessage_flags(val):
    # type: (int) -> List[str]
    """Decode a raw flags bitmask into the list of set flag names,
    in UserMessage.ALL_FLAGS order (bit i is ALL_FLAGS[i])."""
    return [flag for position, flag in enumerate(UserMessage.ALL_FLAGS)
            if val & (1 << position)]
class Attachment(ModelReprMixin, models.Model):
    """A user-uploaded file, linked to the messages that reference it."""
    file_name = models.TextField(db_index=True) # type: Text
    # path_id is a storage location agnostic representation of the path of the file.
    # If the path of a file is http://localhost:9991/user_uploads/a/b/abc/temp_file.py
    # then its path_id will be a/b/abc/temp_file.py.
    path_id = models.TextField(db_index=True) # type: Text
    owner = models.ForeignKey(UserProfile) # type: UserProfile
    realm = models.ForeignKey(Realm, blank=True, null=True) # type: Realm
    # NOTE(review): set elsewhere; appears to control whether the file may
    # be served realm-wide -- confirm with callers.
    is_realm_public = models.BooleanField(default=False) # type: bool
    messages = models.ManyToManyField(Message) # type: Manager
    create_time = models.DateTimeField(default=timezone.now, db_index=True) # type: datetime.datetime
    def __unicode__(self):
        # type: () -> Text
        return u"<Attachment: %s>" % (self.file_name,)
    def is_claimed(self):
        # type: () -> bool
        """True if at least one message references this attachment."""
        return self.messages.count() > 0
def get_old_unclaimed_attachments(weeks_ago):
    # type: (int) -> Sequence[Attachment]
    """Attachments created at least ``weeks_ago`` weeks ago that no
    message references (candidates for deletion)."""
    # TODO: Change return type to QuerySet[Attachment]
    cutoff = timezone.now() - datetime.timedelta(weeks=weeks_ago)
    return Attachment.objects.filter(messages=None, create_time__lt=cutoff)
class Subscription(ModelReprMixin, models.Model):
    """A user's subscription to a Recipient, plus their per-subscription
    display and notification settings."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    recipient = models.ForeignKey(Recipient) # type: Recipient
    # Whether this subscription is currently in effect.
    active = models.BooleanField(default=True) # type: bool
    in_home_view = models.NullBooleanField(default=True) # type: Optional[bool]
    DEFAULT_STREAM_COLOR = u"#c2c2c2"
    # Stream color shown in the UI, as a hex string like DEFAULT_STREAM_COLOR.
    color = models.CharField(max_length=10, default=DEFAULT_STREAM_COLOR) # type: Text
    pin_to_top = models.BooleanField(default=False) # type: bool
    desktop_notifications = models.BooleanField(default=True) # type: bool
    audible_notifications = models.BooleanField(default=True) # type: bool
    # Combination desktop + audible notifications superseded by the
    # above.
    notifications = models.BooleanField(default=False) # type: bool
    class Meta(object):
        unique_together = ("user_profile", "recipient")
    def __unicode__(self):
        # type: () -> Text
        return u"<Subscription: %r -> %s>" % (self.user_profile, self.recipient)
@cache_with_key(user_profile_by_id_cache_key, timeout=3600*24*7)
def get_user_profile_by_id(uid):
    # type: (int) -> UserProfile
    """Fetch a UserProfile by id, through a week-long cache."""
    return UserProfile.objects.select_related().get(id=uid)
@cache_with_key(user_profile_by_email_cache_key, timeout=3600*24*7)
def get_user_profile_by_email(email):
    # type: (Text) -> UserProfile
    """Fetch a UserProfile by email (case-insensitive, whitespace
    stripped), through a week-long cache."""
    return UserProfile.objects.select_related().get(email__iexact=email.strip())
@cache_with_key(active_user_dicts_in_realm_cache_key, timeout=3600*24*7)
def get_active_user_dicts_in_realm(realm):
    # type: (Realm) -> List[Dict[str, Any]]
    """Cached dicts (active_user_dict_fields) for all active users in realm."""
    return UserProfile.objects.filter(realm=realm, is_active=True) \
                              .values(*active_user_dict_fields)
@cache_with_key(active_bot_dicts_in_realm_cache_key, timeout=3600*24*7)
def get_active_bot_dicts_in_realm(realm):
    # type: (Realm) -> List[Dict[str, Any]]
    """Cached dicts (active_bot_dict_fields) for all active bots in realm."""
    return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=True) \
                              .values(*active_bot_dict_fields)
def get_owned_bot_dicts(user_profile, include_all_realm_bots_if_admin=True):
    # type: (UserProfile, bool) -> List[Dict[str, Any]]
    """API-shaped dicts for the bots owned by ``user_profile`` (or, for
    realm admins, every active bot in the realm by default)."""
    if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
        result = get_active_bot_dicts_in_realm(user_profile.realm)
    else:
        result = UserProfile.objects.filter(realm=user_profile.realm, is_active=True, is_bot=True,
                                            bot_owner=user_profile).values(*active_bot_dict_fields)
    # TODO: Remove this import cycle
    from zerver.lib.avatar import get_avatar_url
    bot_dicts = []
    for botdict in result:
        bot_dicts.append({
            'email': botdict['email'],
            'user_id': botdict['id'],
            'full_name': botdict['full_name'],
            'api_key': botdict['api_key'],
            'default_sending_stream': botdict['default_sending_stream__name'],
            'default_events_register_stream': botdict['default_events_register_stream__name'],
            'default_all_public_streams': botdict['default_all_public_streams'],
            'owner': botdict['bot_owner__email'],
            'avatar_url': get_avatar_url(botdict['avatar_source'], botdict['email']),
        })
    return bot_dicts
def get_prereg_user_by_email(email):
    # type: (Text) -> PreregistrationUser
    """Return the most recent invite for this email (case-insensitive)."""
    # A user can be invited many times, so only return the result of the latest
    # invite.
    return PreregistrationUser.objects.filter(email__iexact=email.strip()).latest("invited_at")
def get_cross_realm_emails():
    # type: () -> Set[Text]
    """Emails of bots permitted to operate across realms, from settings."""
    return set(settings.CROSS_REALM_BOT_EMAILS)
# The Huddle class represents a group of individuals who have had a
# Group Private Message conversation together. The actual membership
# of the Huddle is stored in the Subscription table just like with
# Streams, and a hash of that list is stored in the huddle_hash field
# below, to support efficiently mapping from a set of users to the
# corresponding Huddle object.
class Huddle(models.Model):
    """A group private-message conversation; membership lives in the
    Subscription table, keyed via this row's Recipient."""
    # TODO: We should consider whether using
    # CommaSeparatedIntegerField would be better.
    # Digest of the sorted, comma-joined member user-id list; see
    # get_huddle_hash() below.
    huddle_hash = models.CharField(max_length=40, db_index=True, unique=True) # type: Text
def get_huddle_hash(id_list):
    # type: (List[int]) -> Text
    """Canonical digest for a set of user ids; insensitive to order and
    duplicates in ``id_list``."""
    unique_ids = sorted(set(id_list))
    return make_safe_digest(",".join(str(user_id) for user_id in unique_ids))
def huddle_hash_cache_key(huddle_hash):
    # type: (Text) -> Text
    """Cache key under which the Huddle for ``huddle_hash`` is stored."""
    return u"huddle_by_hash:{}".format(huddle_hash)
def get_huddle(id_list):
    # type: (List[int]) -> Huddle
    """Fetch-or-create the Huddle for this set of user ids (cached by hash)."""
    huddle_hash = get_huddle_hash(id_list)
    return get_huddle_backend(huddle_hash, id_list)
@cache_with_key(lambda huddle_hash, id_list: huddle_hash_cache_key(huddle_hash), timeout=3600*24*7)
def get_huddle_backend(huddle_hash, id_list):
    # type: (Text, List[int]) -> Huddle
    """Fetch-or-create the Huddle for ``huddle_hash``, creating its
    Recipient and member Subscriptions on first use.

    ``huddle_hash`` must equal get_huddle_hash(id_list); the cache
    decorator keys on the hash alone.
    """
    (huddle, created) = Huddle.objects.get_or_create(huddle_hash=huddle_hash)
    if created:
        # NOTE(review): the Huddle row itself is created outside the
        # transaction below, so a failure while creating the Recipient or
        # Subscriptions would leave an orphaned Huddle -- confirm whether
        # this is intentional.
        with transaction.atomic():
            recipient = Recipient.objects.create(type_id=huddle.id,
                                                 type=Recipient.HUDDLE)
            subs_to_create = [Subscription(recipient=recipient,
                                           user_profile=get_user_profile_by_id(user_profile_id))
                              for user_profile_id in id_list]
            Subscription.objects.bulk_create(subs_to_create)
    return huddle
def clear_database():
    # type: () -> None
    """Wipe the local memcached instance and delete every row from the
    core tables.

    Destructive development/test helper -- must never be run against
    production data.
    """
    pylibmc.Client(['127.0.0.1']).flush_all()
    model = None # type: Any
    for model in [Message, Stream, UserProfile, Recipient,
                  Realm, Subscription, Huddle, UserMessage, Client,
                  DefaultStream]:
        model.objects.all().delete()
    Session.objects.all().delete()
class UserActivity(models.Model):
    """Count and last-use time of a given API query per (user, client)."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    client = models.ForeignKey(Client) # type: Client
    # Name of the API endpoint/query being tracked.
    query = models.CharField(max_length=50, db_index=True) # type: Text
    count = models.IntegerField() # type: int
    last_visit = models.DateTimeField('last visit') # type: datetime.datetime
    class Meta(object):
        unique_together = ("user_profile", "client", "query")
class UserActivityInterval(models.Model):
    """A contiguous time interval during which a user was active."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    start = models.DateTimeField('start time', db_index=True) # type: datetime.datetime
    end = models.DateTimeField('end time', db_index=True) # type: datetime.datetime
class UserPresence(models.Model):
    """Latest known presence state per (user, client) pair."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    client = models.ForeignKey(Client) # type: Client
    # Valid statuses
    ACTIVE = 1
    IDLE = 2
    timestamp = models.DateTimeField('presence changed') # type: datetime.datetime
    status = models.PositiveSmallIntegerField(default=ACTIVE) # type: int
    @staticmethod
    def status_to_string(status):
        # type: (int) -> str
        """Map a status constant to its wire name.

        Implicitly returns None for values other than ACTIVE/IDLE; callers
        are expected to pass only those constants.
        """
        if status == UserPresence.ACTIVE:
            return 'active'
        elif status == UserPresence.IDLE:
            return 'idle'
    @staticmethod
    def get_status_dict_by_realm(realm_id):
        # type: (int) -> defaultdict[Any, Dict[Any, Any]]
        """Presence dicts for every active human user in the realm, keyed
        by email and then by client name."""
        user_statuses = defaultdict(dict) # type: defaultdict[Any, Dict[Any, Any]]
        query = UserPresence.objects.filter(
            user_profile__realm_id=realm_id,
            user_profile__is_active=True,
            user_profile__is_bot=False
        ).values(
            'client__name',
            'status',
            'timestamp',
            'user_profile__email',
            'user_profile__id',
            'user_profile__enable_offline_push_notifications',
            'user_profile__is_mirror_dummy',
        )
        # Ids of users in this realm with a registered mobile push device.
        # Bug fix: this query previously filtered on the hard-coded realm
        # id 1 instead of the realm_id argument, so has_push_devices was
        # wrong for every other realm.  Use a set for O(1) membership tests.
        mobile_user_ids = {row['user'] for row in PushDeviceToken.objects.filter(
            user__realm_id=realm_id,
            user__is_active=True,
            user__is_bot=False,
        ).distinct("user").values("user")}
        for row in query:
            info = UserPresence.to_presence_dict(
                client_name=row['client__name'],
                status=row['status'],
                dt=row['timestamp'],
                push_enabled=row['user_profile__enable_offline_push_notifications'],
                has_push_devices=row['user_profile__id'] in mobile_user_ids,
                is_mirror_dummy=row['user_profile__is_mirror_dummy'],
            )
            user_statuses[row['user_profile__email']][row['client__name']] = info
        return user_statuses
    @staticmethod
    def to_presence_dict(client_name=None, status=None, dt=None, push_enabled=None,
                         has_push_devices=None, is_mirror_dummy=None):
        # type: (Optional[Text], Optional[int], Optional[datetime.datetime], Optional[bool], Optional[bool], Optional[bool]) -> Dict[str, Any]
        """Wire-format presence dict for a single (client, status) sample."""
        presence_val = UserPresence.status_to_string(status)
        timestamp = datetime_to_timestamp(dt)
        return dict(
            client=client_name,
            status=presence_val,
            timestamp=timestamp,
            # Pushable only if the user enabled push AND has a device.
            pushable=(push_enabled and has_push_devices),
        )
    def to_dict(self):
        # type: () -> Dict[str, Any]
        """Wire-format dict for this row (push fields left unset)."""
        return UserPresence.to_presence_dict(
            client_name=self.client.name,
            status=self.status,
            dt=self.timestamp
        )
    @staticmethod
    def status_from_string(status):
        # type: (NonBinaryStr) -> Optional[int]
        """Inverse of status_to_string(); None for unknown names."""
        if status == 'active':
            status_val = UserPresence.ACTIVE
        elif status == 'idle':
            status_val = UserPresence.IDLE
        else:
            status_val = None
        return status_val
    class Meta(object):
        unique_together = ("user_profile", "client")
class DefaultStream(models.Model):
    """Marks a stream as one of its realm's default streams."""
    realm = models.ForeignKey(Realm) # type: Realm
    stream = models.ForeignKey(Stream) # type: Stream
    class Meta(object):
        unique_together = ("realm", "stream")
class Referral(models.Model):
    """Record of a user referring someone by email address."""
    user_profile = models.ForeignKey(UserProfile) # type: UserProfile
    email = models.EmailField(blank=False, null=False) # type: Text
    timestamp = models.DateTimeField(auto_now_add=True, null=False) # type: datetime.datetime
# This table only gets used on Zulip Voyager instances
# For reasons of deliverability (and sending from multiple email addresses),
# we will still send from mandrill when we send things from the (staging.)zulip.com install
class ScheduledJob(models.Model):
    """A job to be executed at ``scheduled_timestamp`` (currently only
    scheduled emails)."""
    scheduled_timestamp = models.DateTimeField(auto_now_add=False, null=False) # type: datetime.datetime
    type = models.PositiveSmallIntegerField() # type: int
    # Valid types are {email}
    # for EMAIL, filter_string is recipient_email
    EMAIL = 1
    # JSON representation of the job's data. Be careful, as we are not relying on Django to do validation
    data = models.TextField() # type: Text
    # Kind if like a ForeignKey, but table is determined by type.
    filter_id = models.IntegerField(null=True) # type: Optional[int]
    filter_string = models.CharField(max_length=100) # type: Text
| apache-2.0 |
openelections/openelections-core | openelex/us/oh/datasource.py | 1 | 8326 | """
Standardize names of data files on Ohio Secretary of State.
File-name conventions on OH site vary widely according to election, but typically there is a single precinct file, a race-wide (county) file
and additional files for absentee and provisional ballots. Earlier election results are in HTML and have no precinct files. This example is from
the 2012 election:
general election
precinct: yyyymmddprecinct.xlsx
county: FinalResults.xlsx
provisional: provisional.xlsx
absentee: absentee.xlsx
primary election
precinct: 2012precinct.xlsx
county: [per-race csv files]
provisional: provisional.xlsx
absentee: absentee.xlsx
Exceptions: 2000 & 2002 are HTML pages with no precinct-level results.
The elections object created from the Dashboard API includes a portal link to the main page of results (needed for scraping results links) and a
direct link to the most detailed data for that election (precinct-level, if available, for general and primary elections or county level otherwise).
If precinct-level results file is available, grab that. If not, run the _url_paths function to load details about the location and scope of HTML (aspx)
files. Use the path attribute and the base_url to construct the full raw results URLs.
"""
from future import standard_library
standard_library.install_aliases()
import os
from os.path import join
import json
import unicodecsv
import urllib.parse
from openelex import PROJECT_ROOT
from openelex.base.datasource import BaseDatasource
class Datasource(BaseDatasource):
    """Maps Ohio SoS raw results files to standardized filenames/URLs."""
    # PUBLIC INTERFACE
    def mappings(self, year=None):
        """Return array of dicts containing source url and
        standardized filename for raw results file, along
        with other pieces of metadata
        """
        mappings = []
        for yr, elecs in list(self.elections(year).items()):
            mappings.extend(self._build_metadata(yr, elecs))
        return mappings
    def target_urls(self, year=None):
        "Get list of source data urls, optionally filtered by year"
        return [item['raw_url'] for item in self.mappings(year)]
    def filename_url_pairs(self, year=None):
        """(generated filename, raw url) pairs, optionally filtered by year."""
        return [(item['generated_filename'], item['raw_url'])
                for item in self.mappings(year)]
    # PRIVATE METHODS
    def _build_metadata(self, year, elections):
        """Build mapping metadata for one year's elections.

        Elections with precinct-level files get spreadsheet mappings via
        _precinct_meta(); the rest are resolved through the url_paths CSV
        into per-race HTML result files.
        """
        precinct_elections = [e for e in elections if e['precinct_level'] == True]
        other_elections = [e for e in elections if e['precinct_level'] == False]
        # Bug fix: meta was previously only assigned inside the
        # precinct_elections branch, raising UnboundLocalError for years
        # with only non-precinct elections (or with no elections at all).
        meta = []
        if precinct_elections:
            meta = self._precinct_meta(year, precinct_elections)
        for election in other_elections:
            results = [x for x in self._url_paths() if x['date'] == election['start_date']]
            for result in results:
                meta.append({
                    "generated_filename":
                        self._generate_office_filename(election['direct_links'][0], election['start_date'], election['race_type'], result),
                    "raw_url": self._build_raw_url(year, result['path']),
                    "ocd_id": 'ocd-division/country:us/state:oh',
                    "name": 'Ohio',
                    "election": election['slug']
                })
        return meta
    def _build_raw_url(self, year, path):
        """Full results URL for a path fragment from the url_paths CSV."""
        return "http://www.sos.state.oh.us/sos/elections/Research/electResultsMain/%sElectionsResults/%s" % (year, path)
    def _precinct_meta(self, year, elections):
        """Mappings for the precinct- and county-level spreadsheet files
        of a year's general and primary elections."""
        payload = []
        meta = {
            'ocd_id': 'ocd-division/country:us/state:oh',
            'name': 'Ohio',
        }
        # Bug fix: these were bare "except:" clauses; only the empty-list
        # IndexError from [0] is the expected "no such election" case.
        try:
            general = [e for e in elections if e['race_type'] == 'general'][0]
        except IndexError:
            general = None
        try:
            primary = [e for e in elections if e['race_type'] == 'primary'][0]
        except IndexError:
            primary = None
        if general:
            # Add General meta to payload
            general_url = [g for g in general['direct_links'] if 'precinct' in g][0]
            general_filename = self._generate_precinct_filename(general_url, general['start_date'], 'general')
            gen_meta = meta.copy()
            gen_meta.update({
                'raw_url': general_url,
                'generated_filename': general_filename,
                'election': general['slug']
            })
            payload.append(gen_meta)
            general_county_url = [g for g in general['direct_links'] if 'esults' in g][0]
            general_county_filename = self._generate_county_filename(general_county_url, general['start_date'], 'general', None)
            gen_county_meta = meta.copy()
            gen_county_meta.update({
                'raw_url': general_county_url,
                'generated_filename': general_county_filename,
                'election': general['slug']
            })
            payload.append(gen_county_meta)
        # Add Primary meta to payload
        if primary and int(year) > 2000:
            pri_meta = meta.copy()
            primary_url = [p for p in primary['direct_links'] if 'precinct' in p][0]
            primary_filename = self._generate_precinct_filename(primary_url, primary['start_date'], 'primary')
            pri_meta.update({
                'raw_url': primary_url,
                'generated_filename': primary_filename,
                'election': primary['slug']
            })
            payload.append(pri_meta)
            # check url_paths for county files
            results = [x for x in self._url_paths() if x['date'] == primary['start_date']]
            for result in results:
                payload.append({
                    "generated_filename":
                        self._generate_county_filename(result['url'], result['date'], 'primary', result['party']),
                    "raw_url": result['url'],
                    "ocd_id": 'ocd-division/country:us/state:oh',
                    "name": 'Ohio',
                    "election": primary['slug']
                })
        return payload
    def _generate_precinct_filename(self, url, start_date, election_type):
        """Standardized name for a precinct-level spreadsheet."""
        # example: 20121106__oh__general__precinct.xlsx
        bits = [
            start_date.replace('-',''),
            self.state.lower(),
            election_type,
            'precinct'
        ]
        path = urllib.parse.urlparse(url).path
        ext = os.path.splitext(path)[1]
        name = "__".join(bits) + ext
        return name
    def _generate_county_filename(self, url, start_date, election_type, party):
        """Standardized name for a county-level spreadsheet."""
        # example: 20121106__oh__general__county.xlsx
        if party:
            election_type = party + "__" + election_type
        bits = [
            start_date.replace('-',''),
            self.state.lower(),
            election_type,
            'county'
        ]
        path = urllib.parse.urlparse(url).path
        ext = os.path.splitext(path)[1]
        name = "__".join(bits) + ext
        return name
    def _generate_office_filename(self, url, start_date, election_type, result):
        """Standardized name for a per-office HTML results page."""
        # example: 20021105__oh__general__gov.aspx
        if result['district'] == '':
            office = result['office']
        else:
            office = result['office'] + '__' + result['district']
        if result['special'] == '1':
            election_type = 'special__' + election_type
        if result['race_type'] == 'general':
            bits = [
                start_date.replace('-',''),
                self.state.lower(),
                election_type,
                office
            ]
        else:
            bits = [
                start_date.replace('-',''),
                self.state.lower(),
                result['party'],
                election_type,
                office
            ]
        path = urllib.parse.urlparse(url).path
        name = "__".join(bits) + '.html'
        return name
    def _jurisdictions(self):
        """Ohio counties"""
        m = self.jurisdiction_mappings()
        mappings = [x for x in m if x['results_name'] != ""]
        return mappings
| mit |
brianwoo/django-tutorial | build/Django/build/lib.linux-x86_64-2.7/django/core/serializers/base.py | 181 | 6813 | """
Module for abstract serializer/unserializer base classes.
"""
import warnings
from django.db import models
from django.utils import six
from django.utils.deprecation import RemovedInDjango19Warning
class SerializerDoesNotExist(KeyError):
    """The requested serializer was not found.

    Subclasses KeyError so lookups can catch either exception type.
    """
    pass
class SerializationError(Exception):
    """Something bad happened during serialization."""
    # Raised by serializers; deserialization failures use
    # DeserializationError instead.
    pass
class DeserializationError(Exception):
    """Something bad happened during deserialization."""
    # Counterpart of SerializationError for the read path.
    pass
class Serializer(object):
    """
    Abstract serializer base class.

    serialize() drives iteration over the queryset and its fields;
    subclasses implement the start/end/handle_* hooks for their format.
    """
    # Indicates if the implemented serializer is only available for
    # internal Django use.
    internal_use_only = False
    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Options consumed here: ``stream``, ``fields``, ``use_natural_keys``
        (deprecated), ``use_natural_foreign_keys``,
        ``use_natural_primary_keys``.  Whatever remains in ``options`` is
        stored on ``self.options`` for subclasses.
        """
        self.options = options
        self.stream = options.pop("stream", six.StringIO())
        self.selected_fields = options.pop("fields", None)
        self.use_natural_keys = options.pop("use_natural_keys", False)
        if self.use_natural_keys:
            warnings.warn("``use_natural_keys`` is deprecated; use ``use_natural_foreign_keys`` instead.",
                RemovedInDjango19Warning)
        # The deprecated flag implies the newer foreign-key flag.
        self.use_natural_foreign_keys = options.pop('use_natural_foreign_keys', False) or self.use_natural_keys
        self.use_natural_primary_keys = options.pop('use_natural_primary_keys', False)
        self.start_serialization()
        # self.first is True only while the first object is being
        # serialized; subclasses may consult it (e.g. for separators).
        self.first = True
        for obj in queryset:
            self.start_object(obj)
            # Use the concrete parent class' _meta instead of the object's _meta
            # This is to avoid local_fields problems for proxy models. Refs #17717.
            concrete_model = obj._meta.concrete_model
            for field in concrete_model._meta.local_fields:
                if field.serialize:
                    if field.rel is None:
                        if self.selected_fields is None or field.attname in self.selected_fields:
                            self.handle_field(obj, field)
                    else:
                        # For FKs, attname is the "<name>_id" column; strip
                        # the 3-char suffix to match the declared field name.
                        if self.selected_fields is None or field.attname[:-3] in self.selected_fields:
                            self.handle_fk_field(obj, field)
            for field in concrete_model._meta.many_to_many:
                if field.serialize:
                    if self.selected_fields is None or field.attname in self.selected_fields:
                        self.handle_m2m_field(obj, field)
            self.end_object(obj)
            if self.first:
                self.first = False
        self.end_serialization()
        return self.getvalue()
    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError('subclasses of Serializer must provide a start_serialization() method')
    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass
    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError('subclasses of Serializer must provide a start_object() method')
    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass
    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError('subclasses of Serializer must provide an handle_field() method')
    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError('subclasses of Serializer must provide an handle_fk_field() method')
    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError('subclasses of Serializer must provide an handle_m2m_field() method')
    def getvalue(self):
        """
        Return the fully serialized queryset (or None if the output stream is
        not seekable).
        """
        if callable(getattr(self.stream, 'getvalue', None)):
            return self.stream.getvalue()
class Deserializer(six.Iterator):
    """
    Abstract base deserializer class.

    Iterating yields the deserialized items (subclasses typically yield
    DeserializedObject instances).
    """
    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string
        """
        self.options = options
        # Accept either a file-like stream or a raw string; wrap strings
        # in a StringIO so the rest of the code only deals with streams.
        if isinstance(stream_or_string, six.string_types):
            self.stream = six.StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
    def __iter__(self):
        return self
    def __next__(self):
        """Iteration interface -- return the next item in the stream"""
        raise NotImplementedError('subclasses of Deserializer must provide a __next__() method')
class DeserializedObject(object):
    """
    A deserialized model.
    Basically a container for holding the pre-saved deserialized data along
    with the many-to-many data saved with the object.
    Call ``save()`` to save the object (with the many-to-many data) to the
    database; call ``save(save_m2m=False)`` to save just the object fields
    (and not touch the many-to-many stuff.)
    """
    def __init__(self, obj, m2m_data=None):
        self.object = obj
        # Mapping of m2m accessor name -> list of related objects, or None.
        self.m2m_data = m2m_data
    def __repr__(self):
        return "<DeserializedObject: %s.%s(pk=%s)>" % (
            self.object._meta.app_label, self.object._meta.object_name, self.object.pk)
    def save(self, save_m2m=True, using=None):
        # Call save on the Model baseclass directly. This bypasses any
        # model-defined save. The save is also forced to be raw.
        # raw=True is passed to any pre/post_save signals.
        models.Model.save_base(self.object, using=using, raw=True)
        if self.m2m_data and save_m2m:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)
        # prevent a second (possibly accidental) call to save() from saving
        # the m2m data twice.
        self.m2m_data = None
def build_instance(Model, data, db):
    """
    Build a model instance from deserialized field data.

    If the instance has no primary key and the model supports natural
    keys, look up the matching pk in database ``db`` so that saving
    updates the existing row instead of inserting a duplicate.
    """
    obj = Model(**data)
    supports_natural_keys = (hasattr(Model, 'natural_key') and
                             hasattr(Model._default_manager, 'get_by_natural_key'))
    if obj.pk is None and supports_natural_keys:
        natural_key = obj.natural_key()
        manager = Model._default_manager.db_manager(db)
        try:
            obj.pk = manager.get_by_natural_key(*natural_key).pk
        except Model.DoesNotExist:
            pass
    return obj
| gpl-3.0 |
taniho0707/auto-sl-stage | write_main.py | 1 | 1979 | #!/usr/bin/python2
# coding=utf-8
# Flash bin/main.bin to an STM32 board over a serial port using
# stm32writer (Python 2 script).
import sys, os, platform, subprocess
try:
    ################################################################################
    # Baud rate (ST-guaranteed baud rates: 1200 - 115200 bps)
    baudrate = "230400"
    # Serial port
    if platform.system() == "Windows":
        port = "com4"
    elif platform.system() == "Linux":
        port = "/dev/ttyUSB0"
    else: # other OSes
        port = "/dev/ttyUSB0"
    # Motorola S-record file to write (here actually a raw .bin,
    # passed to stm32writer with the --binary flag below)
    mot_files = os.path.join("bin", "main.bin")
    # Use the command-line argument instead, if one is given
    if len(sys.argv) > 1:
        mot_files = sys.argv[1]
    # Terminal software
    if platform.system() == "Windows":
        termsoft = "C:/PROGRA~1/teraterm/ttermpro.exe"
        # termsoft = "C:/PROGRA~2/teraterm/ttermpro.exe" # 64bit-Windows
    elif platform.system() == "Linux":
        termsoft = "gtkterm"
    else: # other OSes
        termsoft = "kermit"
    ################################################################################
    # Change to the directory containing this script
    this_dir = os.path.dirname(os.path.realpath(__file__))
    os.chdir(this_dir)
    # Flash the firmware
    command = os.path.join("stm32writer", "stm32writer.py")
    command += " --port=%s" % port
    command += " --baudrate=%s" % baudrate
    command += " --show-erase-page"
    # command += " --compare"
    command += " --go"
    command += " --time"
    command += " --binary"
    command += " " + mot_files
    print command
    print
    p = subprocess.Popen(command, shell=True) # run the command
    ret = p.wait()
    print
    if ret: sys.exit(1)
    # Launch the terminal software
    # print termsoft
    # print
    # p = subprocess.Popen(termsoft, shell=True) # run the command
    # ret = p.wait()
    # print
    # if ret: sys.exit(1)
except KeyboardInterrupt:
    sys.exit(1)
finally:
    # On Windows, wait for a key press so the console window does not close
    if platform.system() == "Windows": raw_input("press enter to exit")
| gpl-3.0 |
jarshwah/django | django/utils/ipv6.py | 71 | 8143 | # This code was mostly based on ipaddr-py
# Copyright 2007 Google Inc. https://github.com/google/ipaddr-py
# Licensed under the Apache License, Version 2.0 (the "License").
import re
from django.core.exceptions import ValidationError
from django.utils.six.moves import range
from django.utils.translation import ugettext_lazy as _
def clean_ipv6_address(ip_str, unpack_ipv4=False,
                       error_message=_("This is not a valid IPv6 address.")):
    """
    Cleans an IPv6 address string.
    Validity is checked by calling is_valid_ipv6_address() - if an
    invalid address is passed, ValidationError is raised.
    Replaces the longest continuous zero-sequence with "::" and
    removes leading zeroes and makes sure all hextets are lowercase.
    Args:
        ip_str: A valid IPv6 address.
        unpack_ipv4: if an IPv4-mapped address is found,
            return the plain IPv4 address (default=False).
        error_message: An error message used in the ValidationError.
    Returns:
        A compressed IPv6 address, or the same value
    """
    # Trackers for the longest run of all-zero hextets seen so far
    # (best_*) and the run currently being scanned (doublecolon_*).
    best_doublecolon_start = -1
    best_doublecolon_len = 0
    doublecolon_start = -1
    doublecolon_len = 0
    if not is_valid_ipv6_address(ip_str):
        raise ValidationError(error_message, code='invalid')
    # This algorithm can only handle fully exploded
    # IP strings
    ip_str = _explode_shorthand_ip_string(ip_str)
    ip_str = _sanitize_ipv4_mapping(ip_str)
    # If needed, unpack the IPv4 and return straight away
    # - no need in running the rest of the algorithm
    if unpack_ipv4:
        ipv4_unpacked = _unpack_ipv4(ip_str)
        if ipv4_unpacked:
            return ipv4_unpacked
    hextets = ip_str.split(":")
    for index in range(len(hextets)):
        # Remove leading zeroes
        # (skip hextets containing '.': that is the embedded IPv4 part)
        if '.' not in hextets[index]:
            hextets[index] = hextets[index].lstrip('0')
            if not hextets[index]:
                hextets[index] = '0'
        # Determine best hextet to compress
        if hextets[index] == '0':
            doublecolon_len += 1
            if doublecolon_start == -1:
                # Start of a sequence of zeros.
                doublecolon_start = index
            if doublecolon_len > best_doublecolon_len:
                # This is the longest sequence of zeros so far.
                best_doublecolon_len = doublecolon_len
                best_doublecolon_start = doublecolon_start
        else:
            doublecolon_len = 0
            doublecolon_start = -1
    # Compress the most suitable hextet
    # (only runs of 2+ zero hextets are worth replacing with "::")
    if best_doublecolon_len > 1:
        best_doublecolon_end = (best_doublecolon_start +
                                best_doublecolon_len)
        # For zeros at the end of the address.
        if best_doublecolon_end == len(hextets):
            hextets += ['']
        hextets[best_doublecolon_start:best_doublecolon_end] = ['']
        # For zeros at the beginning of the address.
        if best_doublecolon_start == 0:
            hextets = [''] + hextets
    result = ":".join(hextets)
    return result.lower()
def _sanitize_ipv4_mapping(ip_str):
"""
Sanitize IPv4 mapping in an expanded IPv6 address.
This converts ::ffff:0a0a:0a0a to ::ffff:10.10.10.10.
If there is nothing to sanitize, returns an unchanged
string.
Args:
ip_str: A string, the expanded IPv6 address.
Returns:
The sanitized output string, if applicable.
"""
if not ip_str.lower().startswith('0000:0000:0000:0000:0000:ffff:'):
# not an ipv4 mapping
return ip_str
hextets = ip_str.split(':')
if '.' in hextets[-1]:
# already sanitized
return ip_str
ipv4_address = "%d.%d.%d.%d" % (
int(hextets[6][0:2], 16),
int(hextets[6][2:4], 16),
int(hextets[7][0:2], 16),
int(hextets[7][2:4], 16),
)
result = ':'.join(hextets[0:6])
result += ':' + ipv4_address
return result
def _unpack_ipv4(ip_str):
"""
Unpack an IPv4 address that was mapped in a compressed IPv6 address.
This converts 0000:0000:0000:0000:0000:ffff:10.10.10.10 to 10.10.10.10.
If there is nothing to sanitize, returns None.
Args:
ip_str: A string, the expanded IPv6 address.
Returns:
The unpacked IPv4 address, or None if there was nothing to unpack.
"""
if not ip_str.lower().startswith('0000:0000:0000:0000:0000:ffff:'):
return None
return ip_str.rsplit(':', 1)[1]
def is_valid_ipv6_address(ip_str):
    """
    Ensure we have a valid IPv6 address.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A boolean, True if this is a valid IPv6 address.
    """
    from django.core.validators import validate_ipv4_address

    # Only hex digits, ':' and '.' may ever appear in an IPv6 address.
    if re.match('^[0-9a-fA-F:.]+$', ip_str) is None:
        return False
    # At least one ':' is mandatory.
    if ':' not in ip_str:
        return False
    # Only a single '::' shortener is allowed.
    if ip_str.count('::') > 1:
        return False
    # '::' should be encompassed by start, digits or end.
    if ':::' in ip_str:
        return False
    # A lone leading or trailing colon (not part of '::') is invalid.
    starts_badly = ip_str.startswith(':') and not ip_str.startswith('::')
    ends_badly = ip_str.endswith(':') and not ip_str.endswith('::')
    if starts_badly or ends_badly:
        return False
    # More than 7 ':' is never valid (1::2:3:4:5:6:7:8 is invalid).
    if ip_str.count(':') > 7:
        return False
    # Without '::' there must be exactly 8 fields (7 colons), unless the
    # tail is an IPv4 dotted quad.
    if '::' not in ip_str and ip_str.count(':') != 7:
        if ip_str.count('.') != 3:
            return False

    ip_str = _explode_shorthand_ip_string(ip_str)

    # Each hextet must be in the range 0x0..0xFFFF; a dotted quad may only
    # occur as the final field.
    for hextet in ip_str.split(':'):
        if hextet.count('.') == 3:
            if ip_str.split(':')[-1] != hextet:
                return False
            try:
                validate_ipv4_address(hextet)
            except ValidationError:
                return False
        else:
            try:
                # A ValueError here means a bad hextet such as 0xzzzz.
                value = int(hextet, 16)
            except ValueError:
                return False
            if value < 0x0 or value > 0xFFFF:
                return False
    return True
def _explode_shorthand_ip_string(ip_str):
    """
    Expand a shortened IPv6 address.

    Args:
        ip_str: A string, the IPv6 address.

    Returns:
        A string, the expanded IPv6 address.
    """
    if not _is_shorthand_ip(ip_str):
        # Already a longhand address - nothing to expand.
        return ip_str

    # A dotted quad in the last field means a v4-mapped address, which has
    # only 7 colon-separated fields instead of 8.
    if '.' in ip_str.split(':')[-1]:
        fill_to = 7
    else:
        fill_to = 8

    halves = ip_str.split('::')
    if len(halves) > 1:
        # Pad the gap left by '::' with enough '0000' hextets.
        left = halves[0].split(':')
        right = halves[1].split(':')
        missing = fill_to - (len(left) + len(right))
        new_ip = left + ['0000'] * missing + right
    else:
        new_ip = ip_str.split(':')

    # Left-pad every hextet to 4 characters and lowercase it.
    return ':'.join(('0' * (4 - len(h)) + h).lower() for h in new_ip)
def _is_shorthand_ip(ip_str):
"""Determine if the address is shortened.
Args:
ip_str: A string, the IPv6 address.
Returns:
A boolean, True if the address is shortened.
"""
if ip_str.count('::') == 1:
return True
if any(len(x) < 4 for x in ip_str.split(':')):
return True
return False
| bsd-3-clause |
shi2wei3/virt-test | virttest/libvirt_xml/devices/controller.py | 19 | 2217 | """
controller device support class(es)
http://libvirt.org/formatdomain.html#elementsControllers
"""
from virttest.libvirt_xml import accessors
from virttest.libvirt_xml.devices import base, librarian
class Controller(base.TypedDeviceBase):
    """
    Accessor class for the libvirt <controller> device XML element.

    http://libvirt.org/formatdomain.html#elementsControllers
    """

    # Properties backed by accessors created in __init__.
    __slots__ = ('type', 'index', 'model', 'ports', 'vectors', 'driver',
                 'address', 'pcihole64')

    def __init__(self, type_name, virsh_instance=base.base.virsh):
        # Register this instance as a <controller type='...'> element.
        super(Controller, self).__init__(device_tag='controller',
                                         type_name=type_name,
                                         virsh_instance=virsh_instance)
        # Simple XML attributes on the <controller> element itself.
        accessors.XMLAttribute('type', self, parent_xpath='/',
                               tag_name='controller', attribute='type')
        accessors.XMLAttribute('index', self, parent_xpath='/',
                               tag_name='controller', attribute='index')
        accessors.XMLAttribute('model', self, parent_xpath='/',
                               tag_name='controller', attribute='model')
        accessors.XMLAttribute('ports', self, parent_xpath='/',
                               tag_name='controller', attribute='ports')
        accessors.XMLAttribute('vectors', self, parent_xpath='/',
                               tag_name='controller', attribute='vectors')
        # <pcihole64> is a text sub-element, <driver> an attribute dict.
        accessors.XMLElementText('pcihole64', self, parent_xpath='/',
                                 tag_name='pcihole64')
        accessors.XMLElementDict('driver', self, parent_xpath='/',
                                 tag_name='driver')
        # 'address' is a nested <address type='pci'/> sub-element handled
        # by the shared address device class.
        accessors.XMLElementNest('address', self, parent_xpath='/',
                                 tag_name='address', subclass=self.Address,
                                 subclass_dargs={'type_name': 'pci',
                                                 'virsh_instance': virsh_instance})

    # Shared address element class, resolved once at class-definition time.
    Address = librarian.get('address')

    def new_controller_address(self, **dargs):
        """
        Return a new controller Address instance and set properties from dargs
        """
        new_one = self.Address("pci", virsh_instance=self.virsh)
        for key, value in dargs.items():
            setattr(new_one, key, value)
        return new_one
| gpl-2.0 |
nuigroup/pymt-widgets | pymt/parser.py | 2 | 3534 | '''
Parser: default parser from string to special type
Used specially for CSS
'''
__all__ = ('parse_image', 'parse_color', 'parse_int', 'parse_float',
'parse_string', 'parse_bool', 'parse_int2',
'parse_float4', 'parse_filename')
import re
from pymt.logger import pymt_logger
from pymt.resources import resource_find
from pymt.core.image import Image
from pymt.core.svg import Svg
def parse_filename(filename):
    '''Parse a filename, and search inside resource if exist.
    If we haven't found it, just return the name.
    '''
    name = parse_string(filename)
    found = resource_find(name)
    if found is None:
        # Not in any registered resource directory; log and fall back
        # to the raw name below.
        pymt_logger.error('Resource: unable to found <%s>' % name)
    return found or name
def parse_image(filename):
    '''Parse a filename to load an image or svg.

    Returns None for an empty/None filename, an Svg instance for
    ``*.svg`` files, and an Image instance for everything else.
    '''
    filename = parse_filename(filename)
    if filename in (None, 'None', u'None'):
        return None
    if filename.endswith('.svg'):
        return Svg(filename)
    # Original code had an unreachable `raise Exception(...)` after this
    # point (both branches returned); it has been removed.
    return Image(filename)
def parse_color(text):
    '''Parse a text color to a pymt color. Format supported are :
    * rgb(r, g, b)
    * rgba(r, g, b, a)
    * #aaa
    * #rrggbb

    Returns a list [r, g, b, a] with each component in 0..1.
    '''
    value = [1, 1, 1, 1]
    if text.startswith('rgb'):
        res = re.match(r'rgba?\((.*)\)', text)
        # CSS components are 0-255; pymt colors are 0-1.
        # List comprehension (not map) so .append below also works on
        # Python 3, where map() returns an iterator.
        value = [int(x) / 255. for x in re.split(r',\ ?', res.groups()[0])]
        if len(value) == 3:
            value.append(1.)
    elif text.startswith('#'):
        res = text[1:]
        if len(res) == 3:
            # #abc is shorthand for #aabbcc
            res = ''.join(c + c for c in res)
        value = [int(x, 16) / 255. for x in re.split(
            '([0-9a-f]{2})', res) if x != '']
        if len(value) == 3:
            value.append(1.)
    return value
def parse_bool(text):
    '''Parse a string to a boolean'''
    lowered = text.lower()
    if lowered in ('true', '1'):
        return True
    if lowered in ('false', '0'):
        return False
    raise Exception('Invalid boolean: %s' % text)
def parse_string(text):
    '''Parse a string to a string (remove quotes and double-quotes)'''
    quote_chars = ('"', "'")
    is_quoted = (len(text) >= 2
                 and text[0] in quote_chars
                 and text[-1] in quote_chars)
    if is_quoted:
        text = text[1:-1]
    return text.strip()
def parse_int2(text):
    '''Parse a string to a integer with exactly 2 number

    >>> parse_int2("12 54")
    [12, 54]
    '''
    texts = [x for x in text.split(' ') if x.strip() != '']
    # List comprehension rather than map(): map() returns an iterator on
    # Python 3, which would break the len() checks below. int() is used
    # directly since parse_int (module bottom) is a plain alias of int.
    value = [int(x) for x in texts]
    if len(value) < 1:
        raise Exception('Invalid format int2 for %s' % text)
    elif len(value) == 1:
        # A single value applies to both components.
        return [value[0], value[0]]
    elif len(value) > 2:
        raise Exception('Too much value in %s : %s' % (text, str(value)))
    return value
def parse_float4(text):
    '''Parse a string to a float with exactly 4 floats

    >>> parse_float4('54 87. 35 0')
    [54.0, 87.0, 35.0, 0.0]
    '''
    texts = [x for x in text.split(' ') if x.strip() != '']
    # List comprehension rather than map(): map() returns an iterator on
    # Python 3, breaking the len() checks below. float() is used directly
    # since parse_float (module bottom) is a plain alias of float.
    value = [float(x) for x in texts]
    if len(value) < 1:
        raise Exception('Invalid format float4 for %s' % text)
    elif len(value) == 1:
        # One value fills all four slots (CSS-style shorthand).
        return [value[0]] * 4
    elif len(value) == 2:
        # Two values: vertical / horizontal pair.
        return [value[0], value[1], value[0], value[1]]
    elif len(value) == 3:
        # ambigous case!
        return [value[0], value[1], value[0], value[2]]
    elif len(value) > 4:
        raise Exception('Too much value in %s' % text)
    return value
# Trivial parsers: a plain int()/float() conversion is sufficient, so the
# names are simple aliases of the builtins.
parse_int = int
parse_float = float
| lgpl-3.0 |
UniversalMasterEgg8679/ansible | lib/ansible/module_utils/known_hosts.py | 46 | 7491 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
import hmac
import re
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
try:
from hashlib import sha1
except ImportError:
import sha as sha1
# Prefix marking a hashed entry in a known_hosts file ("|1|<salt>|<hash>").
HASHED_KEY_MAGIC = "|1|"
def add_git_host_key(module, url, accept_hostkey=True, create_dir=True):
    """ idempotently add a git url hostkey """

    if not is_ssh_url(url):
        # Non-ssh URLs need no host key handling.
        return

    fqdn, port = get_fqdn_and_port(url)
    if not fqdn:
        return

    if check_hostkey(module, fqdn):
        # Key already present - idempotent no-op.
        return

    if accept_hostkey:
        rc, out, err = add_host_key(module, fqdn, port=port, create_dir=create_dir)
        if rc != 0:
            module.fail_json(msg="failed to add %s hostkey: %s" % (fqdn, out + err))
    else:
        module.fail_json(msg="%s has an unknown hostkey. Set accept_hostkey to True "
                             "or manually add the hostkey prior to running the git module" % fqdn)
def is_ssh_url(url):
    """ check if url is ssh """
    # scp-like syntax: user@host:path (no scheme separator).
    if "@" in url and "://" not in url:
        return True
    # Explicit ssh schemes.
    return url.startswith(("ssh://", "git+ssh://", "ssh+git://"))
def get_fqdn_and_port(repo_url):
    """ chop the hostname and port out of a url

    Returns (fqdn, port); either may be None when not present. A bracketed
    IPv6 literal is returned with its brackets intact.
    """
    fqdn = None
    port = None
    # Raw string: the original non-raw pattern relied on '\[' surviving as
    # an invalid escape, which warns (and will eventually error) on Python 3.
    ipv6_re = re.compile(r'(\[[^]]*\])(?::([0-9]+))?')
    if "@" in repo_url and "://" not in repo_url:
        # most likely an user@host:path or user@host/path type URL
        repo_url = repo_url.split("@", 1)[1]
        match = ipv6_re.match(repo_url)
        # For this type of URL, colon specifies the path, not the port
        if match:
            fqdn, path = match.groups()
        elif ":" in repo_url:
            fqdn = repo_url.split(":")[0]
        elif "/" in repo_url:
            fqdn = repo_url.split("/")[0]
    elif "://" in repo_url:
        # this should be something we can parse with urlparse
        parts = urlparse.urlparse(repo_url)
        # parts[1] will be empty on python2.4 on ssh:// or git:// urls, so
        # ensure we actually have a parts[1] before continuing.
        if parts[1] != '':
            fqdn = parts[1]
            if "@" in fqdn:
                fqdn = fqdn.split("@", 1)[1]
            match = ipv6_re.match(fqdn)
            if match:
                fqdn, port = match.groups()
            elif ":" in fqdn:
                fqdn, port = fqdn.split(":")[0:2]
    return fqdn, port
def check_hostkey(module, fqdn):
    # True when fqdn already appears in one of the known_hosts files.
    return not not_in_host_file(module, fqdn)
# this is a variant of code found in connection_plugins/paramiko.py and we should modify
# the paramiko code to import and use this.

def not_in_host_file(self, host):
    """Return True when *host* has no entry in any known_hosts file.

    NOTE: the first parameter is named 'self' only because this function
    was lifted from connection_plugins/paramiko.py; it is unused here.
    """
    if 'USER' in os.environ:
        user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
    else:
        user_host_file = "~/.ssh/known_hosts"
    user_host_file = os.path.expanduser(user_host_file)

    # Candidate known_hosts locations, user file first.
    host_file_list = []
    host_file_list.append(user_host_file)
    host_file_list.append("/etc/ssh/ssh_known_hosts")
    host_file_list.append("/etc/ssh/ssh_known_hosts2")
    host_file_list.append("/etc/openssh/ssh_known_hosts")

    hfiles_not_found = 0
    for hf in host_file_list:
        if not os.path.exists(hf):
            hfiles_not_found += 1
            continue
        try:
            host_fh = open(hf)
        except IOError:
            hfiles_not_found += 1
            continue
        else:
            data = host_fh.read()
            host_fh.close()

        for line in data.split("\n"):
            # Skip blanks and lines without a key field.
            if line is None or " " not in line:
                continue
            tokens = line.split()
            if tokens[0].find(HASHED_KEY_MAGIC) == 0:
                # this is a hashed known host entry
                try:
                    (kn_salt,kn_host) = tokens[0][len(HASHED_KEY_MAGIC):].split("|",2)
                    # NOTE(review): str.decode('base64') exists only on
                    # Python 2; on Python 3 this raises AttributeError and
                    # the bare except below silently skips every hashed
                    # entry. Confirm the intended interpreter version.
                    hash = hmac.new(kn_salt.decode('base64'), digestmod=sha1)
                    hash.update(host)
                    if hash.digest() == kn_host.decode('base64'):
                        return False
                except:
                    # invalid hashed host key, skip it
                    continue
            else:
                # standard host file entry
                if host in tokens[0]:
                    return False

    return True
def add_host_key(module, fqdn, port=22, key_type="rsa", create_dir=False):
    """ use ssh-keyscan to add the hostkey """

    keyscan_cmd = module.get_bin_path('ssh-keyscan', True)

    if 'USER' in os.environ:
        user_ssh_dir = os.path.expandvars("~${USER}/.ssh/")
        user_host_file = os.path.expandvars("~${USER}/.ssh/known_hosts")
    else:
        user_ssh_dir = "~/.ssh/"
        user_host_file = "~/.ssh/known_hosts"
    user_ssh_dir = os.path.expanduser(user_ssh_dir)

    if not os.path.exists(user_ssh_dir):
        if create_dir:
            try:
                # int('700', 8) == 0o700: owner-only permissions.
                os.makedirs(user_ssh_dir, int('700', 8))
            except:
                module.fail_json(msg="failed to create host key directory: %s" % user_ssh_dir)
        else:
            module.fail_json(msg="%s does not exist" % user_ssh_dir)
    elif not os.path.isdir(user_ssh_dir):
        module.fail_json(msg="%s is not a directory" % user_ssh_dir)

    if port:
        this_cmd = "%s -t %s -p %s %s" % (keyscan_cmd, key_type, port, fqdn)
    else:
        this_cmd = "%s -t %s %s" % (keyscan_cmd, key_type, fqdn)

    rc, out, err = module.run_command(this_cmd)
    # ssh-keyscan gives a 0 exit code and prints nothing on timeout,
    # so treat empty output as failure too.
    if rc != 0 or not out:
        module.fail_json(msg='failed to get the hostkey for %s' % fqdn)
    module.append_to_file(user_host_file, out)

    return rc, out, err
| gpl-3.0 |
1nsect/ScrabbleStream | main.py | 1 | 2291 | import sys
import time #sleep function
import numpy as np
from matplotlib import pyplot as plt
np.set_printoptions(threshold=np.nan)
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import cv2
import math #to use absolute value function 'fabs'
import pytesseract
from PIL import Image
#from pyimagesearch import imutils #can't find that modul...
import ToolboxScrabble as ts
import PictureAcquisition as pa
import ReadBoard as rb
#Setting - Setting - Setting - Setting - Setting - Setting - Setting - Setting - Setting - Setting - Setting -

ImageSize = 1000  # size of the board's image (pixels, square)
EdgeRatio = float(31)/float(32)  # fraction of the image inside the border
Margin=ImageSize-ImageSize*EdgeRatio  # border width in pixels
CellSize=int(round((ImageSize-2*Margin)/15))  # a Scrabble board is 15x15 cells

# This value will need calibrating (original note: "Il faudra calibrer cette valeur")
Threshold = 110

#get coordinates of all the columns
X_ = ts.getColumnsPixelPosition(Margin,CellSize)
print X_

TimeToSkip= 100   # ms to display debug windows before auto-closing
TimeToWait = 4000

#Init - Init - Init - Init - Init - Init - Init - Init - Init - Init - Init - Init - Init - Init - Init - Init -

'''Take picture from camera
im=pa.takePicture()
im = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
ts.ShowImage('coucou',im,0)
'''

#Create the survey matrix (15x15 board state, one entry per cell)
boardState = np.zeros((15, 15), dtype=object)

# load the query image
# to the new height, clone it, and resize it
im = cv2.imread('PlateauO.jpg')
orig = im.copy()
# NOTE(review): in cv2.resize(src, dsize, dst, fx, fy, ...) the third
# positional argument is 'dst', so ImageSize lands in the dst slot here;
# this probably intended only fx/fy scaling - confirm against cv2 docs.
im = cv2.resize(im,None,ImageSize,0.5,0.5, interpolation = cv2.INTER_AREA)

# convert the image to grayscale
gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
ts.ShowImage('title',gray,TimeToSkip)

#croping: warp the photographed board to a flat, square view
perspective = pa.CropBoard(gray, ImageSize, TimeToSkip)
ts.ShowImage('Perspective',perspective,TimeToSkip)

#Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop - Loop -

#Scan the new board state and extract new caramels (newly placed tiles)
newFilledCells = rb.getFilledCells(perspective,X_,boardState,CellSize,Threshold)
print newFilledCells

#add the new filled cells to the boardState matrix
boardState = boardState + newFilledCells

#draw line to know where the columns are
rb.drawGrid(perspective.copy(), X_, CellSize)
rb.ReadBoard(perspective,boardState,X_,CellSize)
print boardState

#letter = rb.getChar(perspective, 7, 7, CellSize)
#print letter

'''
While():
    Protocole de calibration
'''

print("End")
| gpl-3.0 |
int19h/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/mac_latin2.py | 647 | 8565 | """ Python Character Mapping Codec generated from 'LATIN2.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    # Stateless charmap codec driven by the module-level mapping tables.

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
    def encode(self, input, final=False):
        # [0] drops the "characters consumed" count from the result tuple.
        return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    def decode(self, input, final=False):
        # [0] drops the "bytes consumed" count from the result tuple.
        return codecs.charmap_decode(input,self.errors,decoding_map)[0]
class StreamWriter(Codec,codecs.StreamWriter):
    # Codec supplies encode/decode; StreamWriter supplies the stream API.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Codec supplies encode/decode; StreamReader supplies the stream API.
    pass
### encodings module API
def getregentry():
    """Return the codecs.CodecInfo registry entry for 'mac-latin2'."""
    codec = Codec()
    return codecs.CodecInfo(
        name='mac-latin2',
        encode=codec.encode,
        decode=codec.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
0x0081: 0x0100, # LATIN CAPITAL LETTER A WITH MACRON
0x0082: 0x0101, # LATIN SMALL LETTER A WITH MACRON
0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
0x0084: 0x0104, # LATIN CAPITAL LETTER A WITH OGONEK
0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
0x0087: 0x00e1, # LATIN SMALL LETTER A WITH ACUTE
0x0088: 0x0105, # LATIN SMALL LETTER A WITH OGONEK
0x0089: 0x010c, # LATIN CAPITAL LETTER C WITH CARON
0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
0x008b: 0x010d, # LATIN SMALL LETTER C WITH CARON
0x008c: 0x0106, # LATIN CAPITAL LETTER C WITH ACUTE
0x008d: 0x0107, # LATIN SMALL LETTER C WITH ACUTE
0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
0x008f: 0x0179, # LATIN CAPITAL LETTER Z WITH ACUTE
0x0090: 0x017a, # LATIN SMALL LETTER Z WITH ACUTE
0x0091: 0x010e, # LATIN CAPITAL LETTER D WITH CARON
0x0092: 0x00ed, # LATIN SMALL LETTER I WITH ACUTE
0x0093: 0x010f, # LATIN SMALL LETTER D WITH CARON
0x0094: 0x0112, # LATIN CAPITAL LETTER E WITH MACRON
0x0095: 0x0113, # LATIN SMALL LETTER E WITH MACRON
0x0096: 0x0116, # LATIN CAPITAL LETTER E WITH DOT ABOVE
0x0097: 0x00f3, # LATIN SMALL LETTER O WITH ACUTE
0x0098: 0x0117, # LATIN SMALL LETTER E WITH DOT ABOVE
0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
0x009b: 0x00f5, # LATIN SMALL LETTER O WITH TILDE
0x009c: 0x00fa, # LATIN SMALL LETTER U WITH ACUTE
0x009d: 0x011a, # LATIN CAPITAL LETTER E WITH CARON
0x009e: 0x011b, # LATIN SMALL LETTER E WITH CARON
0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
0x00a0: 0x2020, # DAGGER
0x00a1: 0x00b0, # DEGREE SIGN
0x00a2: 0x0118, # LATIN CAPITAL LETTER E WITH OGONEK
0x00a4: 0x00a7, # SECTION SIGN
0x00a5: 0x2022, # BULLET
0x00a6: 0x00b6, # PILCROW SIGN
0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
0x00a8: 0x00ae, # REGISTERED SIGN
0x00aa: 0x2122, # TRADE MARK SIGN
0x00ab: 0x0119, # LATIN SMALL LETTER E WITH OGONEK
0x00ac: 0x00a8, # DIAERESIS
0x00ad: 0x2260, # NOT EQUAL TO
0x00ae: 0x0123, # LATIN SMALL LETTER G WITH CEDILLA
0x00af: 0x012e, # LATIN CAPITAL LETTER I WITH OGONEK
0x00b0: 0x012f, # LATIN SMALL LETTER I WITH OGONEK
0x00b1: 0x012a, # LATIN CAPITAL LETTER I WITH MACRON
0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
0x00b4: 0x012b, # LATIN SMALL LETTER I WITH MACRON
0x00b5: 0x0136, # LATIN CAPITAL LETTER K WITH CEDILLA
0x00b6: 0x2202, # PARTIAL DIFFERENTIAL
0x00b7: 0x2211, # N-ARY SUMMATION
0x00b8: 0x0142, # LATIN SMALL LETTER L WITH STROKE
0x00b9: 0x013b, # LATIN CAPITAL LETTER L WITH CEDILLA
0x00ba: 0x013c, # LATIN SMALL LETTER L WITH CEDILLA
0x00bb: 0x013d, # LATIN CAPITAL LETTER L WITH CARON
0x00bc: 0x013e, # LATIN SMALL LETTER L WITH CARON
0x00bd: 0x0139, # LATIN CAPITAL LETTER L WITH ACUTE
0x00be: 0x013a, # LATIN SMALL LETTER L WITH ACUTE
0x00bf: 0x0145, # LATIN CAPITAL LETTER N WITH CEDILLA
0x00c0: 0x0146, # LATIN SMALL LETTER N WITH CEDILLA
0x00c1: 0x0143, # LATIN CAPITAL LETTER N WITH ACUTE
0x00c2: 0x00ac, # NOT SIGN
0x00c3: 0x221a, # SQUARE ROOT
0x00c4: 0x0144, # LATIN SMALL LETTER N WITH ACUTE
0x00c5: 0x0147, # LATIN CAPITAL LETTER N WITH CARON
0x00c6: 0x2206, # INCREMENT
0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
0x00ca: 0x00a0, # NO-BREAK SPACE
0x00cb: 0x0148, # LATIN SMALL LETTER N WITH CARON
0x00cc: 0x0150, # LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
0x00cd: 0x00d5, # LATIN CAPITAL LETTER O WITH TILDE
0x00ce: 0x0151, # LATIN SMALL LETTER O WITH DOUBLE ACUTE
0x00cf: 0x014c, # LATIN CAPITAL LETTER O WITH MACRON
0x00d0: 0x2013, # EN DASH
0x00d1: 0x2014, # EM DASH
0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x00d6: 0x00f7, # DIVISION SIGN
0x00d7: 0x25ca, # LOZENGE
0x00d8: 0x014d, # LATIN SMALL LETTER O WITH MACRON
0x00d9: 0x0154, # LATIN CAPITAL LETTER R WITH ACUTE
0x00da: 0x0155, # LATIN SMALL LETTER R WITH ACUTE
0x00db: 0x0158, # LATIN CAPITAL LETTER R WITH CARON
0x00dc: 0x2039, # SINGLE LEFT-POINTING ANGLE QUOTATION MARK
0x00dd: 0x203a, # SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
0x00de: 0x0159, # LATIN SMALL LETTER R WITH CARON
0x00df: 0x0156, # LATIN CAPITAL LETTER R WITH CEDILLA
0x00e0: 0x0157, # LATIN SMALL LETTER R WITH CEDILLA
0x00e1: 0x0160, # LATIN CAPITAL LETTER S WITH CARON
0x00e2: 0x201a, # SINGLE LOW-9 QUOTATION MARK
0x00e3: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x00e4: 0x0161, # LATIN SMALL LETTER S WITH CARON
0x00e5: 0x015a, # LATIN CAPITAL LETTER S WITH ACUTE
0x00e6: 0x015b, # LATIN SMALL LETTER S WITH ACUTE
0x00e7: 0x00c1, # LATIN CAPITAL LETTER A WITH ACUTE
0x00e8: 0x0164, # LATIN CAPITAL LETTER T WITH CARON
0x00e9: 0x0165, # LATIN SMALL LETTER T WITH CARON
0x00ea: 0x00cd, # LATIN CAPITAL LETTER I WITH ACUTE
0x00eb: 0x017d, # LATIN CAPITAL LETTER Z WITH CARON
0x00ec: 0x017e, # LATIN SMALL LETTER Z WITH CARON
0x00ed: 0x016a, # LATIN CAPITAL LETTER U WITH MACRON
0x00ee: 0x00d3, # LATIN CAPITAL LETTER O WITH ACUTE
0x00ef: 0x00d4, # LATIN CAPITAL LETTER O WITH CIRCUMFLEX
0x00f0: 0x016b, # LATIN SMALL LETTER U WITH MACRON
0x00f1: 0x016e, # LATIN CAPITAL LETTER U WITH RING ABOVE
0x00f2: 0x00da, # LATIN CAPITAL LETTER U WITH ACUTE
0x00f3: 0x016f, # LATIN SMALL LETTER U WITH RING ABOVE
0x00f4: 0x0170, # LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
0x00f5: 0x0171, # LATIN SMALL LETTER U WITH DOUBLE ACUTE
0x00f6: 0x0172, # LATIN CAPITAL LETTER U WITH OGONEK
0x00f7: 0x0173, # LATIN SMALL LETTER U WITH OGONEK
0x00f8: 0x00dd, # LATIN CAPITAL LETTER Y WITH ACUTE
0x00f9: 0x00fd, # LATIN SMALL LETTER Y WITH ACUTE
0x00fa: 0x0137, # LATIN SMALL LETTER K WITH CEDILLA
0x00fb: 0x017b, # LATIN CAPITAL LETTER Z WITH DOT ABOVE
0x00fc: 0x0141, # LATIN CAPITAL LETTER L WITH STROKE
0x00fd: 0x017c, # LATIN SMALL LETTER Z WITH DOT ABOVE
0x00fe: 0x0122, # LATIN CAPITAL LETTER G WITH CEDILLA
0x00ff: 0x02c7, # CARON
})
### Encoding Map

# Inverse of decoding_map, built once at import time.
encoding_map = codecs.make_encoding_map(decoding_map)
| apache-2.0 |
vicky2135/lucious | oscar/lib/python2.7/site-packages/phonenumbers/shortdata/__init__.py | 2 | 2170 | """Auto-generated file, do not edit by hand."""
# Copyright (C) 2010-2017 The Libphonenumber Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..phonemetadata import PhoneMetadata
_AVAILABLE_REGION_CODES = ['AC','AD','AE','AF','AG','AI','AL','AM','AO','AR','AS','AT','AU','AW','AX','AZ','BA','BB','BD','BE','BF','BG','BH','BI','BJ','BL','BM','BN','BO','BQ','BR','BS','BT','BW','BY','BZ','CA','CC','CD','CF','CG','CH','CI','CK','CL','CM','CN','CO','CR','CU','CV','CW','CX','CY','CZ','DE','DJ','DK','DM','DO','DZ','EC','EE','EG','EH','ER','ES','ET','FI','FJ','FK','FM','FO','FR','GA','GB','GD','GE','GF','GG','GH','GI','GL','GM','GN','GP','GR','GT','GU','GW','GY','HK','HN','HR','HT','HU','ID','IE','IL','IM','IN','IQ','IR','IS','IT','JE','JM','JO','JP','KE','KG','KH','KI','KM','KN','KP','KR','KW','KY','KZ','LA','LB','LC','LI','LK','LR','LS','LT','LU','LV','LY','MA','MC','MD','ME','MF','MG','MH','MK','ML','MM','MN','MO','MP','MQ','MR','MS','MT','MU','MV','MW','MX','MY','MZ','NA','NC','NE','NF','NG','NI','NL','NO','NP','NR','NU','NZ','OM','PA','PE','PF','PG','PH','PK','PL','PM','PR','PT','PW','PY','QA','RE','RO','RS','RU','RW','SA','SB','SC','SD','SE','SG','SH','SI','SJ','SK','SL','SM','SN','SO','SR','ST','SV','SX','SY','SZ','TC','TD','TG','TH','TJ','TL','TM','TN','TO','TR','TT','TV','TW','TZ','UA','UG','US','UY','UZ','VA','VC','VE','VG','VI','VN','VU','WF','WS','YE','YT','ZA','ZM','ZW']
def _load_region(code):
    """Import the per-region metadata module (relative import), which
    registers PHONE_METADATA_<code> as a side effect."""
    __import__("region_%s" % code, globals(), locals(),
               fromlist=["PHONE_METADATA_%s" % code], level=1)
# Register a lazy loader for every supported region; the actual metadata
# module is only imported on first use.
for region_code in _AVAILABLE_REGION_CODES:
    PhoneMetadata.register_short_region_loader(region_code, _load_region)
| bsd-3-clause |
40223117cda/2015cdaw13 | static/Brython3.1.1-20150328-091302/Lib/socket.py | 730 | 14913 | # Wrapper module for _socket, providing some additional facilities
# implemented in Python.
"""\
This module provides socket operations and some related functions.
On Unix, it supports IP (Internet Protocol) and Unix domain sockets.
On other systems, it only supports IP. Functions specific for a
socket are available as methods of the socket object.
Functions:
socket() -- create a new socket object
socketpair() -- create a pair of new socket objects [*]
fromfd() -- create a socket object from an open file descriptor [*]
fromshare() -- create a socket object from data received from socket.share() [*]
gethostname() -- return the current hostname
gethostbyname() -- map a hostname to its IP number
gethostbyaddr() -- map an IP number or hostname to DNS info
getservbyname() -- map a service name and a protocol name to a port number
getprotobyname() -- map a protocol name (e.g. 'tcp') to a number
ntohs(), ntohl() -- convert 16, 32 bit int from network to host byte order
htons(), htonl() -- convert 16, 32 bit int from host to network byte order
inet_aton() -- convert IP addr string (123.45.67.89) to 32-bit packed format
inet_ntoa() -- convert 32-bit packed format IP to string (123.45.67.89)
socket.getdefaulttimeout() -- get the default timeout value
socket.setdefaulttimeout() -- set the default timeout value
create_connection() -- connects to an address, with an optional timeout and
optional source address.
[*] not available on all platforms!
Special objects:
SocketType -- type object for socket objects
error -- exception raised for I/O errors
has_ipv6 -- boolean value indicating if IPv6 is supported
Integer constants:
AF_INET, AF_UNIX -- socket domains (first argument to socket() call)
SOCK_STREAM, SOCK_DGRAM, SOCK_RAW -- socket types (second argument)
Many other constants may be defined; these may be used in calls to
the setsockopt() and getsockopt() methods.
"""
import _socket
from _socket import *
import os, sys, io
try:
import errno
except ImportError:
errno = None
EBADF = getattr(errno, 'EBADF', 9)
EAGAIN = getattr(errno, 'EAGAIN', 11)
EWOULDBLOCK = getattr(errno, 'EWOULDBLOCK', 11)
__all__ = ["getfqdn", "create_connection"]
__all__.extend(os._get_exports_list(_socket))
_realsocket = socket
# WSA error codes
if sys.platform.lower().startswith("win"):
    # Map Winsock (WSA) error numbers to human-readable messages;
    # only defined (and exported) on Windows platforms.
    errorTab = {}
    errorTab[10004] = "The operation was interrupted."
    errorTab[10009] = "A bad file handle was passed."
    errorTab[10013] = "Permission denied."
    errorTab[10014] = "A fault occurred on the network??" # WSAEFAULT
    errorTab[10022] = "An invalid operation was attempted."
    errorTab[10035] = "The socket operation would block"
    errorTab[10036] = "A blocking operation is already in progress."
    errorTab[10048] = "The network address is in use."
    errorTab[10054] = "The connection has been reset."
    errorTab[10058] = "The network has been shut down."
    errorTab[10060] = "The operation timed out."
    errorTab[10061] = "Connection refused."
    errorTab[10063] = "The name is too long."
    errorTab[10064] = "The host is down."
    errorTab[10065] = "The host is unreachable."
    __all__.append("errorTab")
class socket(_socket.socket):

    """A subclass of _socket.socket adding the makefile() method."""

    __slots__ = ["__weakref__", "_io_refs", "_closed"]

    def __init__(self, family=AF_INET, type=SOCK_STREAM, proto=0, fileno=None):
        _socket.socket.__init__(self, family, type, proto, fileno)
        # Count of SocketIO objects handed out by makefile(); the real
        # close is deferred until all of them are gone (see close()).
        self._io_refs = 0
        self._closed = False

    def __enter__(self):
        return self

    def __exit__(self, *args):
        if not self._closed:
            self.close()

    def __repr__(self):
        """Wrap __repr__() to reveal the real class name."""
        s = _socket.socket.__repr__(self)
        if s.startswith("<socket object"):
            s = "<%s.%s%s%s" % (self.__class__.__module__,
                                self.__class__.__name__,
                                getattr(self, '_closed', False) and " [closed] " or "",
                                s[7:])
        return s

    def __getstate__(self):
        raise TypeError("Cannot serialize socket object")

    def dup(self):
        """dup() -> socket object

        Return a new socket object connected to the same system resource.
        """
        fd = dup(self.fileno())
        sock = self.__class__(self.family, self.type, self.proto, fileno=fd)
        sock.settimeout(self.gettimeout())
        return sock

    def accept(self):
        """accept() -> (socket object, address info)

        Wait for an incoming connection.  Return a new socket
        representing the connection, and the address of the client.
        For IP sockets, the address info is a pair (hostaddr, port).
        """
        fd, addr = self._accept()
        sock = socket(self.family, self.type, self.proto, fileno=fd)
        # Issue #7995: if no default timeout is set and the listening
        # socket had a (non-zero) timeout, force the new socket in blocking
        # mode to override platform-specific socket flags inheritance.
        if getdefaulttimeout() is None and self.gettimeout():
            sock.setblocking(True)
        return sock, addr

    def makefile(self, mode="r", buffering=None, *,
                 encoding=None, errors=None, newline=None):
        """makefile(...) -> an I/O stream connected to the socket

        The arguments are as for io.open() after the filename,
        except the only mode characters supported are 'r', 'w' and 'b'.
        The semantics are similar too.  (XXX refactor to share code?)
        """
        for c in mode:
            if c not in {"r", "w", "b"}:
                # BUGFIX: the original omitted "% mode", so the error
                # message never actually showed the offending mode string.
                raise ValueError("invalid mode %r (only r, w, b allowed)" % mode)
        writing = "w" in mode
        reading = "r" in mode or not writing
        assert reading or writing
        binary = "b" in mode
        rawmode = ""
        if reading:
            rawmode += "r"
        if writing:
            rawmode += "w"
        raw = SocketIO(self, rawmode)
        self._io_refs += 1
        if buffering is None:
            buffering = -1
        if buffering < 0:
            buffering = io.DEFAULT_BUFFER_SIZE
        if buffering == 0:
            if not binary:
                raise ValueError("unbuffered streams must be binary")
            return raw
        if reading and writing:
            buffer = io.BufferedRWPair(raw, raw, buffering)
        elif reading:
            buffer = io.BufferedReader(raw, buffering)
        else:
            assert writing
            buffer = io.BufferedWriter(raw, buffering)
        if binary:
            return buffer
        text = io.TextIOWrapper(buffer, encoding, errors, newline)
        text.mode = mode
        return text

    def _decref_socketios(self):
        # Called by SocketIO.close(); performs the deferred close once the
        # last makefile() stream is gone.
        if self._io_refs > 0:
            self._io_refs -= 1
        if self._closed:
            self.close()

    def _real_close(self, _ss=_socket.socket):
        # This function should not reference any globals. See issue #808164.
        _ss.close(self)

    def close(self):
        # This function should not reference any globals. See issue #808164.
        self._closed = True
        if self._io_refs <= 0:
            self._real_close()

    def detach(self):
        """detach() -> file descriptor

        Close the socket object without closing the underlying file descriptor.
        The object cannot be used after this call, but the file descriptor
        can be reused for other purposes.  The file descriptor is returned.
        """
        self._closed = True
        return super().detach()
def fromfd(fd, family, type, proto=0):
    """ fromfd(fd, family, type[, proto]) -> socket object
    Create a socket object from a duplicate of the given file
    descriptor. The remaining arguments are the same as for socket().
    """
    # Duplicate first so the caller keeps ownership of the original fd.
    return socket(family, type, proto, fileno=dup(fd))
if hasattr(_socket.socket, "share"):
    # Windows only: _socket.socket.share() exists solely on platforms with
    # WSADuplicateSocket support, hence the hasattr guard.
    def fromshare(info):
        """ fromshare(info) -> socket object
        Create a socket object from the bytes object returned by
        socket.share(pid).
        """
        return socket(0, 0, 0, info)
if hasattr(_socket, "socketpair"):
    def socketpair(family=None, type=SOCK_STREAM, proto=0):
        """socketpair([family[, type[, proto]]]) -> (socket object, socket object)
        Create a pair of socket objects from the sockets returned by the platform
        socketpair() function.
        The arguments are the same as for socket() except the default family is
        AF_UNIX if defined on the platform; otherwise, the default is AF_INET.
        """
        if family is None:
            try:
                family = AF_UNIX
            except NameError:
                # AF_UNIX is not exported on this platform (e.g. Windows).
                family = AF_INET
        a, b = _socket.socketpair(family, type, proto)
        # detach() the low-level sockets and re-wrap their fds in this
        # module's socket subclass so makefile() etc. are available.
        a = socket(family, type, proto, a.detach())
        b = socket(family, type, proto, b.detach())
        return a, b
# errno values that merely mean "would block" on a non-blocking socket;
# SocketIO translates these into a None return instead of raising.
_blocking_errnos = { EAGAIN, EWOULDBLOCK }
class SocketIO(io.RawIOBase):
    """Raw I/O implementation for stream sockets.
    This class supports the makefile() method on sockets. It provides
    the raw I/O interface on top of a socket object.
    """
    # One might wonder why not let FileIO do the job instead. There are two
    # main reasons why FileIO is not adapted:
    # - it wouldn't work under Windows (where you can't used read() and
    #   write() on a socket handle)
    # - it wouldn't work with socket timeouts (FileIO would ignore the
    #   timeout and consider the socket non-blocking)
    # XXX More docs
    def __init__(self, sock, mode):
        if mode not in ("r", "w", "rw", "rb", "wb", "rwb"):
            raise ValueError("invalid mode: %r" % mode)
        io.RawIOBase.__init__(self)
        self._sock = sock
        # Internally the mode always carries "b": this is a binary stream.
        if "b" not in mode:
            mode += "b"
        self._mode = mode
        self._reading = "r" in mode
        self._writing = "w" in mode
        # Once a recv times out, the stream is poisoned for further reads
        # (buffered data may have been partially consumed).
        self._timeout_occurred = False
    def readinto(self, b):
        """Read up to len(b) bytes into the writable buffer *b* and return
        the number of bytes read. If the socket is non-blocking and no bytes
        are available, None is returned.
        If *b* is non-empty, a 0 return value indicates that the connection
        was shutdown at the other end.
        """
        self._checkClosed()
        self._checkReadable()
        if self._timeout_occurred:
            raise IOError("cannot read from timed out object")
        while True:
            try:
                return self._sock.recv_into(b)
            except timeout:
                self._timeout_occurred = True
                raise
            except InterruptedError:
                # Retry on EINTR.
                continue
            except error as e:
                # EAGAIN/EWOULDBLOCK -> "no data right now" on a
                # non-blocking socket, reported as None per RawIOBase.
                if e.args[0] in _blocking_errnos:
                    return None
                raise
    def write(self, b):
        """Write the given bytes or bytearray object *b* to the socket
        and return the number of bytes written. This can be less than
        len(b) if not all data could be written. If the socket is
        non-blocking and no bytes could be written None is returned.
        """
        self._checkClosed()
        self._checkWritable()
        try:
            return self._sock.send(b)
        except error as e:
            # XXX what about EINTR?
            if e.args[0] in _blocking_errnos:
                return None
            raise
    def readable(self):
        """True if the SocketIO is open for reading.
        """
        if self.closed:
            raise ValueError("I/O operation on closed socket.")
        return self._reading
    def writable(self):
        """True if the SocketIO is open for writing.
        """
        if self.closed:
            raise ValueError("I/O operation on closed socket.")
        return self._writing
    def seekable(self):
        """True if the SocketIO is open for seeking.
        """
        if self.closed:
            raise ValueError("I/O operation on closed socket.")
        return super().seekable()
    def fileno(self):
        """Return the file descriptor of the underlying socket.
        """
        self._checkClosed()
        return self._sock.fileno()
    @property
    def name(self):
        # Mirrors the file-object convention: -1 once closed.
        if not self.closed:
            return self.fileno()
        else:
            return -1
    @property
    def mode(self):
        return self._mode
    def close(self):
        """Close the SocketIO object. This doesn't close the underlying
        socket, except if all references to it have disappeared.
        """
        if self.closed:
            return
        io.RawIOBase.close(self)
        # Tell the owning socket one stream reference is gone; it closes
        # itself only when its own close() was requested and the count
        # reaches zero.
        self._sock._decref_socketios()
        self._sock = None
def getfqdn(name=''):
    """Get fully qualified domain name from name.
    An empty argument is interpreted as meaning the local host.
    First the hostname returned by gethostbyaddr() is checked, then
    possibly existing aliases. In case no FQDN is available, hostname
    from gethostname() is returned.
    """
    name = name.strip()
    if not name or name == '0.0.0.0':
        name = gethostname()
    try:
        hostname, aliases, ipaddrs = gethostbyaddr(name)
    except error:
        # Resolution failed: fall back to the (possibly local) name as-is.
        return name
    # Prefer the first candidate containing a dot, scanning the canonical
    # hostname first and then the aliases; otherwise use the canonical name.
    for candidate in [hostname] + aliases:
        if '.' in candidate:
            return candidate
    return hostname
# Sentinel meaning "use the global default timeout" -- distinct from None,
# which callers may pass to request a blocking socket explicitly.
_GLOBAL_DEFAULT_TIMEOUT = object()
def create_connection(address, timeout=_GLOBAL_DEFAULT_TIMEOUT,
                      source_address=None):
    """Connect to *address* and return the socket object.
    Convenience function. Connect to *address* (a 2-tuple ``(host,
    port)``) and return the socket object. Passing the optional
    *timeout* parameter will set the timeout on the socket instance
    before attempting to connect. If no *timeout* is supplied, the
    global default timeout setting returned by :func:`getdefaulttimeout`
    is used. If *source_address* is set it must be a tuple of (host, port)
    for the socket to bind as a source address before making the connection.
    An host of '' or port 0 tells the OS to use the default.
    """
    host, port = address
    err = None
    # Try each resolved address (e.g. IPv6 then IPv4) until one connects;
    # remember the last failure so it can be re-raised if all attempts fail.
    for res in getaddrinfo(host, port, 0, SOCK_STREAM):
        af, socktype, proto, canonname, sa = res
        sock = None
        try:
            sock = socket(af, socktype, proto)
            if timeout is not _GLOBAL_DEFAULT_TIMEOUT:
                sock.settimeout(timeout)
            if source_address:
                sock.bind(source_address)
            sock.connect(sa)
            return sock
        except error as _:
            err = _
            # Don't leak the fd of a half-constructed socket.
            if sock is not None:
                sock.close()
    if err is not None:
        raise err
    else:
        raise error("getaddrinfo returns an empty list")
| gpl-3.0 |
ralphbean/ansible | v2/ansible/playbook/base.py | 2 | 11947 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import uuid

from functools import partial
from inspect import getmembers
from io import FileIO

from six import iteritems, string_types, text_type

from jinja2.exceptions import UndefinedError

from ansible.errors import AnsibleParserError
from ansible.parsing import DataLoader
from ansible.playbook.attribute import Attribute, FieldAttribute
from ansible.template import Templar
from ansible.utils.boolean import boolean
from ansible.utils.debug import debug
from ansible.template import template
class Base:
    '''
    Common base for playbook objects (tasks, blocks, plays, ...): provides
    Attribute bookkeeping, data loading, validation, templated
    post-validation and (de)serialization support.
    '''
    def __init__(self):
        # initialize the data loader and variable manager, which will be provided
        # later when the object is actually loaded
        self._loader = None
        self._variable_manager = None
        # every object gets a random uuid:
        self._uuid = uuid.uuid4()
        # and initialize the base attributes
        self._initialize_base_attributes()
    # The following three functions are used to programatically define data
    # descriptors (aka properties) for the Attributes of all of the playbook
    # objects (tasks, blocks, plays, etc).
    #
    # The function signature is a little strange because of how we define
    # them. We use partial to give each method the name of the Attribute that
    # it is for. Since partial prefills the positional arguments at the
    # beginning of the function we end up with the first positional argument
    # being allocated to the name instead of to the class instance (self) as
    # normal. To deal with that we make the property name field the first
    # positional argument and self the second arg.
    #
    # Because these methods are defined inside of the class, they get bound to
    # the instance when the object is created. After we run partial on them
    # and put the result back into the class as a property, they get bound
    # a second time. This leads to self being placed in the arguments twice.
    # To work around that, we mark the functions as @staticmethod so that the
    # first binding to the instance doesn't happen.
    @staticmethod
    def _generic_g(prop_name, self):
        # Property getter: prefer a per-attribute override (_get_attr_<name>)
        # when the subclass defines one.
        method = "_get_attr_%s" % prop_name
        if method in dir(self):
            return getattr(self, method)()
        return self._attributes[prop_name]
    @staticmethod
    def _generic_s(prop_name, self, value):
        # Property setter: store directly into the attribute dict.
        self._attributes[prop_name] = value
    @staticmethod
    def _generic_d(prop_name, self):
        # Property deleter.
        del self._attributes[prop_name]
    def _get_base_attributes(self):
        '''
        Returns the list of attributes for this class (or any subclass thereof).
        If the attribute name starts with an underscore, it is removed
        '''
        base_attributes = dict()
        for (name, value) in getmembers(self.__class__):
            if isinstance(value, Attribute):
                if name.startswith('_'):
                    name = name[1:]
                base_attributes[name] = value
        return base_attributes
    def _initialize_base_attributes(self):
        # each class knows attributes set upon it, see Task.py for example
        self._attributes = dict()
        for (name, value) in self._get_base_attributes().items():
            getter = partial(self._generic_g, name)
            setter = partial(self._generic_s, name)
            deleter = partial(self._generic_d, name)
            # Place the property into the class so that cls.name is the
            # property functions.
            setattr(Base, name, property(getter, setter, deleter))
            # Place the value into the instance so that the property can
            # process and hold that value/
            setattr(self, name, value.default)
    def preprocess_data(self, ds):
        ''' infrequently used method to do some pre-processing of legacy terms '''
        # Walk the MRO so the most-derived _preprocess_data_<class> hook wins.
        for base_class in self.__class__.mro():
            method = getattr(self, "_preprocess_data_%s" % base_class.__name__.lower(), None)
            if method:
                return method(ds)
        return ds
    def load_data(self, ds, variable_manager=None, loader=None):
        ''' walk the input datastructure and assign any values '''
        assert ds is not None
        # the variable manager class is used to manage and merge variables
        # down to a single dictionary for reference in templating, etc.
        self._variable_manager = variable_manager
        # the data loader class is used to parse data from strings and files
        if loader is not None:
            self._loader = loader
        else:
            self._loader = DataLoader()
        if isinstance(ds, string_types) or isinstance(ds, FileIO):
            ds = self._loader.load(ds)
        # call the preprocess_data() function to massage the data into
        # something we can more easily parse, and then call the validation
        # function on it to ensure there are no incorrect key values
        ds = self.preprocess_data(ds)
        self._validate_attributes(ds)
        # Walk all attributes in the class.
        #
        # FIXME: we currently don't do anything with private attributes but
        #        may later decide to filter them out of 'ds' here.
        for name in self._get_base_attributes():
            # copy the value over unless a _load_field method is defined
            if name in ds:
                method = getattr(self, '_load_%s' % name, None)
                if method:
                    self._attributes[name] = method(name, ds[name])
                else:
                    self._attributes[name] = ds[name]
        # run early, non-critical validation
        self.validate()
        # cache the datastructure internally
        setattr(self, '_ds', ds)
        # return the constructed object
        return self
    def get_ds(self):
        # Returns the cached raw datastructure, or None before load_data().
        try:
            return getattr(self, '_ds')
        except AttributeError:
            return None
    def get_loader(self):
        return self._loader
    def get_variable_manager(self):
        return self._variable_manager
    def _validate_attributes(self, ds):
        '''
        Ensures that there are no keys in the datastructure which do
        not map to attributes for this object.
        '''
        valid_attrs = frozenset(name for name in self._get_base_attributes())
        for key in ds:
            if key not in valid_attrs:
                raise AnsibleParserError("'%s' is not a valid attribute for a %s" % (key, self.__class__.__name__), obj=ds)
    def validate(self, all_vars=None):
        ''' validation that is done at parse time, not load time '''
        # Bug fix: the default used to be the mutable literal dict(), which
        # is shared across every call; use None as the sentinel instead.
        if all_vars is None:
            all_vars = dict()
        # walk all fields in the object
        for (name, attribute) in iteritems(self._get_base_attributes()):
            # run validator only if present
            method = getattr(self, '_validate_%s' % name, None)
            if method:
                method(attribute, name, getattr(self, name))
    def copy(self):
        '''
        Create a copy of this object and return it.
        '''
        new_me = self.__class__()
        for name in self._get_base_attributes():
            setattr(new_me, name, getattr(self, name))
        new_me._loader = self._loader
        new_me._variable_manager = self._variable_manager
        return new_me
    def post_validate(self, all_vars=None, fail_on_undefined=True):
        '''
        we can't tell that everything is of the right type until we have
        all the variables.  Run basic types (from isa) as well as
        any _post_validate_<foo> functions.
        '''
        # Bug fix: avoid a shared mutable default argument (was dict()).
        if all_vars is None:
            all_vars = dict()
        templar = Templar(loader=self._loader, variables=all_vars, fail_on_undefined=fail_on_undefined)
        for (name, attribute) in iteritems(self._get_base_attributes()):
            if getattr(self, name) is None:
                if not attribute.required:
                    continue
                else:
                    raise AnsibleParserError("the field '%s' is required but was not set" % name)
            # Bug fix: read the raw value before the try block so the except
            # clauses below can always report it -- previously `value` was
            # unbound when templar.template() itself raised.
            value = getattr(self, name)
            try:
                # if the attribute contains a variable, template it now
                value = templar.template(value)
                # run the post-validator if present
                method = getattr(self, '_post_validate_%s' % name, None)
                if method:
                    value = method(attribute, value, all_vars, fail_on_undefined)
                else:
                    # otherwise, just make sure the attribute is of the type it should be
                    if attribute.isa == 'string':
                        # Bug fix: the py2-only builtin `unicode` is a
                        # NameError on py3; six.text_type works on both.
                        value = text_type(value)
                    elif attribute.isa == 'int':
                        value = int(value)
                    elif attribute.isa == 'bool':
                        value = boolean(value)
                    elif attribute.isa == 'list':
                        if not isinstance(value, list):
                            value = [ value ]
                    elif attribute.isa == 'dict' and not isinstance(value, dict):
                        raise TypeError()
                # and assign the massaged value back to the attribute field
                setattr(self, name, value)
            except (TypeError, ValueError) as e:
                raise AnsibleParserError("the field '%s' has an invalid value (%s), and could not be converted to an %s. Error was: %s" % (name, value, attribute.isa, e), obj=self.get_ds())
            except UndefinedError as e:
                if fail_on_undefined:
                    raise AnsibleParserError("the field '%s' has an invalid value, which appears to include a variable that is undefined. The error was: %s" % (name,e), obj=self.get_ds())
    def serialize(self):
        '''
        Serializes the object derived from the base object into
        a dictionary of values. This only serializes the field
        attributes for the object, so this may need to be overridden
        for any classes which wish to add additional items not stored
        as field attributes.
        '''
        repr = dict()
        for name in self._get_base_attributes():
            repr[name] = getattr(self, name)
        # serialize the uuid field
        repr['uuid'] = getattr(self, '_uuid')
        return repr
    def deserialize(self, data):
        '''
        Given a dictionary of values, load up the field attributes for
        this object. As with serialize(), if there are any non-field
        attribute data members, this method will need to be overridden
        and extended.
        '''
        assert isinstance(data, dict)
        for (name, attribute) in iteritems(self._get_base_attributes()):
            if name in data:
                setattr(self, name, data[name])
            else:
                setattr(self, name, attribute.default)
        # restore the UUID field
        setattr(self, '_uuid', data.get('uuid'))
    def __getstate__(self):
        return self.serialize()
    def __setstate__(self, data):
        self.__init__()
        self.deserialize(data)
| gpl-3.0 |
Beeblio/django | django/forms/formsets.py | 8 | 17458 | from __future__ import unicode_literals
from django.core.exceptions import ValidationError
from django.forms import Form
from django.forms.fields import IntegerField, BooleanField
from django.forms.utils import ErrorList
from django.forms.widgets import HiddenInput
from django.utils.encoding import python_2_unicode_compatible
from django.utils.functional import cached_property
from django.utils.safestring import mark_safe
from django.utils import six
from django.utils.six.moves import xrange
from django.utils.translation import ungettext, ugettext as _
__all__ = ('BaseFormSet', 'formset_factory', 'all_valid')
# special field names
# (keys of the hidden ManagementForm fields rendered with every formset)
TOTAL_FORM_COUNT = 'TOTAL_FORMS'
INITIAL_FORM_COUNT = 'INITIAL_FORMS'
MIN_NUM_FORM_COUNT = 'MIN_NUM_FORMS'
MAX_NUM_FORM_COUNT = 'MAX_NUM_FORMS'
ORDERING_FIELD_NAME = 'ORDER'
DELETION_FIELD_NAME = 'DELETE'
# default minimum number of forms in a formset
DEFAULT_MIN_NUM = 0
# default maximum number of forms in a formset, to prevent memory exhaustion
DEFAULT_MAX_NUM = 1000
class ManagementForm(Form):
    """
    ``ManagementForm`` is used to keep track of how many form instances
    are displayed on the page. If adding new forms via javascript, you should
    increment the count field of this form as well.
    """
    def __init__(self, *args, **kwargs):
        # NOTE(review): these assignments go through ``self`` but mutate the
        # class-level ``base_fields`` dict shared by every ManagementForm
        # instance; they always (re)write the same keys so the effect is
        # idempotent -- confirm before refactoring.
        self.base_fields[TOTAL_FORM_COUNT] = IntegerField(widget=HiddenInput)
        self.base_fields[INITIAL_FORM_COUNT] = IntegerField(widget=HiddenInput)
        # MIN_NUM_FORM_COUNT and MAX_NUM_FORM_COUNT are output with the rest of
        # the management form, but only for the convenience of client-side
        # code. The POST value of them returned from the client is not checked.
        self.base_fields[MIN_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput)
        self.base_fields[MAX_NUM_FORM_COUNT] = IntegerField(required=False, widget=HiddenInput)
        super(ManagementForm, self).__init__(*args, **kwargs)
@python_2_unicode_compatible
class BaseFormSet(object):
    """
    A collection of instances of the same Form class.
    """
    def __init__(self, data=None, files=None, auto_id='id_%s', prefix=None,
                 initial=None, error_class=ErrorList):
        # data/files being non-None is what distinguishes a bound formset.
        self.is_bound = data is not None or files is not None
        self.prefix = prefix or self.get_default_prefix()
        self.auto_id = auto_id
        self.data = data or {}
        self.files = files or {}
        self.initial = initial
        self.error_class = error_class
        # Both error caches are populated lazily by full_clean().
        self._errors = None
        self._non_form_errors = None
    def __str__(self):
        return self.as_table()
    def __iter__(self):
        """Yields the forms in the order they should be rendered"""
        return iter(self.forms)
    def __getitem__(self, index):
        """Returns the form at the given index, based on the rendering order"""
        return self.forms[index]
    def __len__(self):
        return len(self.forms)
    def __bool__(self):
        """All formsets have a management form which is not included in the length"""
        return True
    def __nonzero__(self):    # Python 2 compatibility
        return type(self).__bool__(self)
    @property
    def management_form(self):
        """Returns the ManagementForm instance for this FormSet."""
        if self.is_bound:
            form = ManagementForm(self.data, auto_id=self.auto_id, prefix=self.prefix)
            if not form.is_valid():
                raise ValidationError(
                    _('ManagementForm data is missing or has been tampered with'),
                    code='missing_management_form',
                )
        else:
            form = ManagementForm(auto_id=self.auto_id, prefix=self.prefix, initial={
                TOTAL_FORM_COUNT: self.total_form_count(),
                INITIAL_FORM_COUNT: self.initial_form_count(),
                MIN_NUM_FORM_COUNT: self.min_num,
                MAX_NUM_FORM_COUNT: self.max_num
            })
        return form
    def total_form_count(self):
        """Returns the total number of forms in this FormSet."""
        if self.is_bound:
            # return absolute_max if it is lower than the actual total form
            # count in the data; this is DoS protection to prevent clients
            # from forcing the server to instantiate arbitrary numbers of
            # forms
            return min(self.management_form.cleaned_data[TOTAL_FORM_COUNT], self.absolute_max)
        else:
            initial_forms = self.initial_form_count()
            total_forms = initial_forms + self.extra
            # Allow all existing related objects/inlines to be displayed,
            # but don't allow extra beyond max_num.
            if initial_forms > self.max_num >= 0:
                total_forms = initial_forms
            elif total_forms > self.max_num >= 0:
                total_forms = self.max_num
        return total_forms
    def initial_form_count(self):
        """Returns the number of forms that are required in this FormSet."""
        if self.is_bound:
            return self.management_form.cleaned_data[INITIAL_FORM_COUNT]
        else:
            # Use the length of the initial data if it's there, 0 otherwise.
            initial_forms = len(self.initial) if self.initial else 0
        return initial_forms
    @cached_property
    def forms(self):
        """
        Instantiate forms at first property access.
        """
        # DoS protection is included in total_form_count()
        forms = [self._construct_form(i) for i in xrange(self.total_form_count())]
        return forms
    def _construct_form(self, i, **kwargs):
        """
        Instantiates and returns the i-th form instance in a formset.
        """
        defaults = {
            'auto_id': self.auto_id,
            'prefix': self.add_prefix(i),
            'error_class': self.error_class,
        }
        if self.is_bound:
            defaults['data'] = self.data
            defaults['files'] = self.files
        if self.initial and not 'initial' in kwargs:
            try:
                defaults['initial'] = self.initial[i]
            except IndexError:
                # Fewer initial dicts than forms: remaining forms get none.
                pass
        # Allow extra forms to be empty.
        if i >= self.initial_form_count():
            defaults['empty_permitted'] = True
        # Caller-supplied kwargs win over the computed defaults.
        defaults.update(kwargs)
        form = self.form(**defaults)
        self.add_fields(form, i)
        return form
    @property
    def initial_forms(self):
        """Return a list of all the initial forms in this formset."""
        return self.forms[:self.initial_form_count()]
    @property
    def extra_forms(self):
        """Return a list of all the extra forms in this formset."""
        return self.forms[self.initial_form_count():]
    @property
    def empty_form(self):
        form = self.form(
            auto_id=self.auto_id,
            prefix=self.add_prefix('__prefix__'),
            empty_permitted=True,
        )
        self.add_fields(form, None)
        return form
    @property
    def cleaned_data(self):
        """
        Returns a list of form.cleaned_data dicts for every form in self.forms.
        """
        if not self.is_valid():
            raise AttributeError("'%s' object has no attribute 'cleaned_data'" % self.__class__.__name__)
        return [form.cleaned_data for form in self.forms]
    @property
    def deleted_forms(self):
        """
        Returns a list of forms that have been marked for deletion.
        """
        if not self.is_valid() or not self.can_delete:
            return []
        # construct _deleted_form_indexes which is just a list of form indexes
        # that have had their deletion widget set to True
        if not hasattr(self, '_deleted_form_indexes'):
            self._deleted_form_indexes = []
            for i in range(0, self.total_form_count()):
                form = self.forms[i]
                # if this is an extra form and hasn't changed, don't consider it
                if i >= self.initial_form_count() and not form.has_changed():
                    continue
                if self._should_delete_form(form):
                    self._deleted_form_indexes.append(i)
        return [self.forms[i] for i in self._deleted_form_indexes]
    @property
    def ordered_forms(self):
        """
        Returns a list of form in the order specified by the incoming data.
        Raises an AttributeError if ordering is not allowed.
        """
        if not self.is_valid() or not self.can_order:
            raise AttributeError("'%s' object has no attribute 'ordered_forms'" % self.__class__.__name__)
        # Construct _ordering, which is a list of (form_index, order_field_value)
        # tuples. After constructing this list, we'll sort it by order_field_value
        # so we have a way to get to the form indexes in the order specified
        # by the form data.
        if not hasattr(self, '_ordering'):
            self._ordering = []
            for i in range(0, self.total_form_count()):
                form = self.forms[i]
                # if this is an extra form and hasn't changed, don't consider it
                if i >= self.initial_form_count() and not form.has_changed():
                    continue
                # don't add data marked for deletion to self.ordered_data
                if self.can_delete and self._should_delete_form(form):
                    continue
                self._ordering.append((i, form.cleaned_data[ORDERING_FIELD_NAME]))
            # After we're done populating self._ordering, sort it.
            # A sort function to order things numerically ascending, but
            # None should be sorted below anything else. Allowing None as
            # a comparison value makes it so we can leave ordering fields
            # blank.
            def compare_ordering_key(k):
                if k[1] is None:
                    return (1, 0) # +infinity, larger than any number
                return (0, k[1])
            self._ordering.sort(key=compare_ordering_key)
        # Return a list of form.cleaned_data dicts in the order specified by
        # the form data.
        return [self.forms[i[0]] for i in self._ordering]
    @classmethod
    def get_default_prefix(cls):
        return 'form'
    def non_form_errors(self):
        """
        Returns an ErrorList of errors that aren't associated with a particular
        form -- i.e., from formset.clean(). Returns an empty ErrorList if there
        are none.
        """
        if self._non_form_errors is None:
            self.full_clean()
        return self._non_form_errors
    @property
    def errors(self):
        """
        Returns a list of form.errors for every form in self.forms.
        """
        if self._errors is None:
            self.full_clean()
        return self._errors
    def total_error_count(self):
        """
        Returns the number of errors across all forms in the formset.
        """
        return len(self.non_form_errors()) +\
            sum(len(form_errors) for form_errors in self.errors)
    def _should_delete_form(self, form):
        """
        Returns whether or not the form was marked for deletion.
        """
        return form.cleaned_data.get(DELETION_FIELD_NAME, False)
    def is_valid(self):
        """
        Returns True if every form in self.forms is valid.
        """
        if not self.is_bound:
            return False
        # We loop over every form.errors here rather than short circuiting on the
        # first failure to make sure validation gets triggered for every form.
        forms_valid = True
        # This triggers a full clean.
        self.errors
        for i in range(0, self.total_form_count()):
            form = self.forms[i]
            if self.can_delete:
                if self._should_delete_form(form):
                    # This form is going to be deleted so any of its errors
                    # should not cause the entire formset to be invalid.
                    continue
            forms_valid &= form.is_valid()
        return forms_valid and not bool(self.non_form_errors())
    def full_clean(self):
        """
        Cleans all of self.data and populates self._errors and
        self._non_form_errors.
        """
        self._errors = []
        self._non_form_errors = self.error_class()
        if not self.is_bound: # Stop further processing.
            return
        for i in range(0, self.total_form_count()):
            form = self.forms[i]
            self._errors.append(form.errors)
        try:
            # Validate the formset-wide min/max form counts before running
            # the user-defined cross-form clean() hook.
            if (self.validate_max and
                    self.total_form_count() - len(self.deleted_forms) > self.max_num) or \
                    self.management_form.cleaned_data[TOTAL_FORM_COUNT] > self.absolute_max:
                raise ValidationError(ungettext(
                    "Please submit %d or fewer forms.",
                    "Please submit %d or fewer forms.", self.max_num) % self.max_num,
                    code='too_many_forms',
                )
            if (self.validate_min and
                    self.total_form_count() - len(self.deleted_forms) < self.min_num):
                raise ValidationError(ungettext(
                    "Please submit %d or more forms.",
                    "Please submit %d or more forms.", self.min_num) % self.min_num,
                    code='too_few_forms')
            # Give self.clean() a chance to do cross-form validation.
            self.clean()
        except ValidationError as e:
            self._non_form_errors = self.error_class(e.error_list)
    def clean(self):
        """
        Hook for doing any extra formset-wide cleaning after Form.clean() has
        been called on every form. Any ValidationError raised by this method
        will not be associated with a particular form; it will be accessible
        via formset.non_form_errors()
        """
        pass
    def has_changed(self):
        """
        Returns true if data in any form differs from initial.
        """
        return any(form.has_changed() for form in self)
    def add_fields(self, form, index):
        """A hook for adding extra fields on to each form instance."""
        if self.can_order:
            # Only pre-fill the ordering field for initial forms.
            if index is not None and index < self.initial_form_count():
                form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), initial=index + 1, required=False)
            else:
                form.fields[ORDERING_FIELD_NAME] = IntegerField(label=_('Order'), required=False)
        if self.can_delete:
            form.fields[DELETION_FIELD_NAME] = BooleanField(label=_('Delete'), required=False)
    def add_prefix(self, index):
        return '%s-%s' % (self.prefix, index)
    def is_multipart(self):
        """
        Returns True if the formset needs to be multipart, i.e. it
        has FileInput. Otherwise, False.
        """
        if self.forms:
            return self.forms[0].is_multipart()
        else:
            return self.empty_form.is_multipart()
    @property
    def media(self):
        # All the forms on a FormSet are the same, so you only need to
        # interrogate the first form for media.
        if self.forms:
            return self.forms[0].media
        else:
            return self.empty_form.media
    def as_table(self):
        "Returns this formset rendered as HTML <tr>s -- excluding the <table></table>."
        # XXX: there is no semantic division between forms here, there
        # probably should be. It might make sense to render each form as a
        # table row with each field as a td.
        forms = ' '.join(form.as_table() for form in self)
        return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
    def as_p(self):
        "Returns this formset rendered as HTML <p>s."
        forms = ' '.join(form.as_p() for form in self)
        return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
    def as_ul(self):
        "Returns this formset rendered as HTML <li>s."
        forms = ' '.join(form.as_ul() for form in self)
        return mark_safe('\n'.join([six.text_type(self.management_form), forms]))
def formset_factory(form, formset=BaseFormSet, extra=1, can_order=False,
                    can_delete=False, max_num=None, validate_max=False,
                    min_num=None, validate_min=False):
    """Return a FormSet for the given form class."""
    if min_num is None:
        min_num = DEFAULT_MIN_NUM
    if max_num is None:
        max_num = DEFAULT_MAX_NUM
    # hard limit on forms instantiated, to prevent memory-exhaustion attacks
    # limit is simply max_num + DEFAULT_MAX_NUM (which is 2*DEFAULT_MAX_NUM
    # if max_num is None in the first place)
    attrs = {
        'form': form,
        # min_num forms are always shown, on top of the requested extras.
        'extra': extra + min_num,
        'can_order': can_order,
        'can_delete': can_delete,
        'min_num': min_num,
        'max_num': max_num,
        'absolute_max': max_num + DEFAULT_MAX_NUM,
        'validate_min': validate_min,
        'validate_max': validate_max,
    }
    # str() keeps the generated class name a native str on both py2/py3.
    return type(form.__name__ + str('FormSet'), (formset,), attrs)
def all_valid(formsets):
    """Validate every formset and report whether all of them passed.

    ``is_valid()`` is intentionally called on each formset with no
    short-circuiting, so that error lists are populated on all of them.
    """
    results = [formset.is_valid() for formset in formsets]
    return all(results)
| bsd-3-clause |
qrkourier/ansible | lib/ansible/modules/cloud/ovirt/ovirt_group_facts.py | 74 | 3548 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_group_facts
short_description: Retrieve facts about one or more oVirt/RHV groups
author: "Ondra Machacek (@machacekondra)"
version_added: "2.3"
description:
- "Retrieve facts about one or more oVirt/RHV groups."
notes:
- "This module creates a new top-level C(ovirt_groups) fact, which
contains a list of groups."
options:
pattern:
description:
- "Search term which is accepted by oVirt/RHV search backend."
- "For example to search group X use following pattern: name=X"
extends_documentation_fragment: ovirt_facts
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Gather facts about all groups which names start with C(admin):
- ovirt_group_facts:
pattern: name=admin*
- debug:
var: ovirt_groups
'''
RETURN = '''
ovirt_groups:
description: "List of dictionaries describing the groups. Group attribues are mapped to dictionary keys,
all groups attributes can be found at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/group."
returned: On success.
type: list
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
check_sdk,
create_connection,
get_dict_of_struct,
ovirt_facts_full_argument_spec,
)
def main():
    """Entry point: query oVirt/RHV groups and expose them as Ansible facts.

    Builds the module argument spec, queries the engine's groups service
    with the user-supplied search pattern, and exits with an
    ``ovirt_groups`` fact (list of group dicts). Any failure is reported
    via ``fail_json`` with a traceback.
    """
    argument_spec = ovirt_facts_full_argument_spec(
        pattern=dict(default='', required=False),
    )
    module = AnsibleModule(argument_spec)
    if module._name == 'ovirt_groups_facts':
        # deprecate() expects the removal version as a string, not a float.
        module.deprecate("The 'ovirt_groups_facts' module is being renamed 'ovirt_group_facts'", version='2.8')
    check_sdk(module)
    # Initialize before the try block so the finally clause cannot hit a
    # NameError (masking the real exception) when create_connection fails.
    auth = None
    connection = None
    try:
        auth = module.params.pop('auth')
        connection = create_connection(auth)
        groups_service = connection.system_service().groups_service()
        groups = groups_service.list(search=module.params['pattern'])
        module.exit_json(
            changed=False,
            ansible_facts=dict(
                ovirt_groups=[
                    get_dict_of_struct(
                        struct=c,
                        connection=connection,
                        fetch_nested=module.params.get('fetch_nested'),
                        attributes=module.params.get('nested_attributes'),
                    ) for c in groups
                ],
            ),
        )
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        # Only log out of sessions we actually opened ourselves.
        if connection is not None:
            connection.close(logout=auth.get('token') is None)


if __name__ == '__main__':
    main()
| gpl-3.0 |
mbodenhamer/fmap | tests/test_fmap.py | 1 | 3102 | import os
import sys
from fmap import fmap
from tempfile import mkstemp
from subprocess import Popen, PIPE
DIR = os.path.abspath(os.path.dirname(__file__))
TESTDIR = os.path.join(DIR, 'tree')
#-------------------------------------------------------------------------------
def test_fmap():
    """Exercise fmap()'s depth, directory and filtering options on the
    bundled test tree."""
    collected = []

    def record(name):
        collected.append(name)

    def run(expected, **kwargs):
        # Reset the accumulator, apply fmap, and compare the visited names.
        del collected[:]
        fmap(TESTDIR, record, **kwargs)
        assert set(collected) == expected

    run({'c', 'd', 'f', 'g', 'h'})
    run({'c', 'd'}, max_depth=0)
    run({'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'}, apply_dirs=True)
    run({'a', 'b', 'c', 'd'}, apply_dirs=True, max_depth=0)
    run({'c', 'd', 'h'}, excludes=['a'])
    run({'d', 'h'}, patterns=['g', 'd', 'h'], excludes=['a'])
#-------------------------------------------------------------------------------
def test_main():
    """End-to-end test of fmap.main.main: each matched file name is echoed
    (appended) into a temp file by the shell command, then read back."""
    from fmap import main
    main = main.main
    # Smoke test: exclude everything so the command matches nothing.
    main('-x', '*', 'echo')
    _, path = mkstemp()
    tf = open(path, 'rt')
    # {{}} survives .format() as a literal {} placeholder for fmap to fill.
    cmd = 'echo {{}} >> {}'.format(path)
    def seen():
        # Collect the accumulated basenames, then truncate the file so the
        # next main() invocation starts from an empty log.
        tf.seek(0)
        out = tf.read()
        seen = map(os.path.basename, filter(bool, out.split('\n')))
        with open(path, 'wt') as f:
            f.write('')
        return set(seen)
    main('-r', TESTDIR, cmd)
    assert seen() == {'c', 'd', 'f', 'g', 'h'}
    main('-r', TESTDIR, '-z0', cmd)
    assert seen() == {'c', 'd'}
    # NOTE(review): '-p' appears to suppress execution entirely (empty
    # expectation) -- confirm the flag's semantics against fmap's docs.
    main('-r', TESTDIR, '-z0', '-p', cmd)
    assert seen() == set()
    # With no explicit args, main() falls back to sys.argv.
    argv = sys.argv
    sys.argv = ['', '-r', TESTDIR, cmd]
    main()
    assert seen() == {'c', 'd', 'f', 'g', 'h'}
    sys.argv = argv
    tf.close()
    os.remove(path)
#-------------------------------------------------------------------------------
def test_fmap_invocation():
    """Invoke the installed ``fmap`` command (and ``python -m fmap``)
    through a shell and check which file names each variant echoes."""

    def output_names(proc):
        out = proc.communicate()[0].decode('utf-8')
        return set(map(os.path.basename, filter(bool, out.split('\n'))))

    cases = [
        ('fmap -r {} echo', {'c', 'd', 'f', 'g', 'h'}),
        ('fmap -r {} -z0 echo', {'c', 'd'}),
        ('fmap -r {} -d echo', {'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h'}),
        ('fmap -r {} -d -z0 echo', {'a', 'b', 'c', 'd'}),
        ('fmap -r {} -x a echo', {'c', 'd', 'h'}),
        ('fmap -r {} -x a echo g d h', {'d', 'h'}),
        ('python -m fmap -r {} -x a echo g d h', {'d', 'h'}),
    ]
    for template, expected in cases:
        proc = Popen(template.format(TESTDIR), stdout=PIPE, shell=True)
        assert output_names(proc) == expected
#-------------------------------------------------------------------------------
| mit |
nlholdem/icodoom | .venv/lib/python2.7/site-packages/tensorflow/contrib/learn/python/learn/learn_io/data_feeder.py | 88 | 31139 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Implementations of different data feeders to provide data for TF trainer."""
# TODO(ipolosukhin): Replace this module with feed-dict queue runners & queues.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import itertools
import math
import numpy as np
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import tf_logging as logging
# pylint: disable=g-multiple-import,g-bad-import-order
from .pandas_io import HAS_PANDAS, extract_pandas_data, extract_pandas_matrix, extract_pandas_labels
from .dask_io import HAS_DASK, extract_dask_data, extract_dask_labels
# pylint: enable=g-multiple-import,g-bad-import-order
def _get_in_out_shape(x_shape, y_shape, n_classes, batch_size=None):
"""Returns shape for input and output of the data feeder."""
x_is_dict, y_is_dict = isinstance(
x_shape, dict), y_shape is not None and isinstance(y_shape, dict)
if y_is_dict and n_classes is not None:
assert (isinstance(n_classes, dict))
if batch_size is None:
batch_size = list(x_shape.values())[0][0] if x_is_dict else x_shape[0]
elif batch_size <= 0:
raise ValueError('Invalid batch_size %d.' % batch_size)
if x_is_dict:
input_shape = {}
for k, v in list(x_shape.items()):
input_shape[k] = [batch_size] + (list(v[1:]) if len(v) > 1 else [1])
else:
x_shape = list(x_shape[1:]) if len(x_shape) > 1 else [1]
input_shape = [batch_size] + x_shape
if y_shape is None:
return input_shape, None, batch_size
def out_el_shape(out_shape, num_classes):
out_shape = list(out_shape[1:]) if len(out_shape) > 1 else []
# Skip first dimension if it is 1.
if out_shape and out_shape[0] == 1:
out_shape = out_shape[1:]
if num_classes is not None and num_classes > 1:
return [batch_size] + out_shape + [num_classes]
else:
return [batch_size] + out_shape
if not y_is_dict:
output_shape = out_el_shape(y_shape, n_classes)
else:
output_shape = dict([
(k, out_el_shape(v, n_classes[k]
if n_classes is not None and k in n_classes else None))
for k, v in list(y_shape.items())
])
return input_shape, output_shape, batch_size
def _data_type_filter(x, y):
  """Normalize supported container types (dask, pandas) into raw data."""
  # Each optional backend gets a chance to unwrap its own container types;
  # labels are only touched when present.
  if HAS_DASK:
    x = extract_dask_data(x)
    y = None if y is None else extract_dask_labels(y)
  if HAS_PANDAS:
    x = extract_pandas_data(x)
    y = None if y is None else extract_pandas_labels(y)
  return x, y
def _is_iterable(x):
return hasattr(x, 'next') or hasattr(x, '__next__')
def setup_train_data_feeder(x,
                            y,
                            n_classes,
                            batch_size=None,
                            shuffle=True,
                            epochs=None):
  """Build a data feeder appropriate for the given training inputs.

  Iterator inputs yield a `StreamingDataFeeder`, dask frames a
  `DaskDataFeeder`, and everything else the in-memory `DataFeeder`.

  Args:
    x: features: numpy/pandas/dask matrix, dict thereof, or an iterator.
    y: labels in the same container kinds as `x`, or None.
    n_classes: number of classes; must be a dict keyed like `y` when `y`
      is a dict (or an iterator yielding dicts).
    batch_size: mini-batch size to split data into; must be >= 1.
    shuffle: whether to shuffle the inputs.
    epochs: number of epochs to run.

  Returns:
    A DataFeeder-like object that produces training batches.

  Raises:
    ValueError: if exactly one of `x` and `y` is an iterator.
  """
  x, y = _data_type_filter(x, y)
  feeder_cls = DataFeeder
  if HAS_DASK:
    import dask.dataframe as dd  # pylint: disable=g-import-not-at-top
    x_is_dask = isinstance(x, (dd.Series, dd.DataFrame))
    y_is_dask_or_none = y is None or isinstance(y, (dd.Series, dd.DataFrame))
    if x_is_dask and y_is_dask_or_none:
      feeder_cls = DaskDataFeeder
  if _is_iterable(x):
    if y is not None and not _is_iterable(y):
      raise ValueError('Both x and y should be iterators for '
                       'streaming learning to work.')
    return StreamingDataFeeder(x, y, n_classes, batch_size)
  return feeder_cls(
      x, y, n_classes, batch_size, shuffle=shuffle, epochs=epochs)
def _batch_data(x, batch_size=None):
  """Generator: accumulate elements from iterator `x` into batches.

  Elements may be plain arrays or dicts of arrays; batches are yielded as
  numpy matrices (or dicts of matrices) of up to `batch_size` rows.
  """
  if (batch_size is not None) and (batch_size <= 0):
    raise ValueError('Invalid batch_size %d.' % batch_size)
  # Peek at the first element to learn the structure, then re-chain it so
  # it is not lost.
  x_first_el = six.next(x)
  x = itertools.chain([x_first_el], x)
  chunk = dict([(k, []) for k in list(x_first_el.keys())]) if isinstance(
      x_first_el, dict) else []
  chunk_filled = False
  for data in x:
    if isinstance(data, dict):
      for k, v in list(data.items()):
        chunk[k].append(v)
        if (batch_size is not None) and (len(chunk[k]) >= batch_size):
          chunk[k] = np.matrix(chunk[k])
          chunk_filled = True
      if chunk_filled:
        yield chunk
        chunk = dict([(k, []) for k in list(x_first_el.keys())]) if isinstance(
            x_first_el, dict) else []
        chunk_filled = False
    else:
      chunk.append(data)
      if (batch_size is not None) and (len(chunk) >= batch_size):
        yield np.matrix(chunk)
        chunk = []
  # Flush the trailing partial chunk.
  # NOTE(review): when the input length is an exact multiple of batch_size
  # this still yields one final empty chunk -- confirm callers tolerate
  # that before changing. The dict branch also iterates `data.items()`
  # (the last element seen) purely for its keys; `v` is unused.
  if isinstance(x_first_el, dict):
    for k, v in list(data.items()):
      chunk[k] = np.matrix(chunk[k])
    yield chunk
  else:
    yield np.matrix(chunk)
def setup_predict_data_feeder(x, batch_size=None):
  """Prepare `x` for the predict step as a list/iterator of batches.

  Args:
    x: numpy, pandas, dask array (or dict thereof); iterators are also
      supported.
    batch_size: size of each batch; None means one batch of full size.

  Returns:
    A list of array parts (or a generator of batches for iterator input).

  Raises:
    ValueError: if `batch_size` <= 0.
  """
  if HAS_DASK:
    x = extract_dask_data(x)
  if HAS_PANDAS:
    x = extract_pandas_data(x)
  if _is_iterable(x):
    return _batch_data(x, batch_size)
  # Promote 1-D input to a column vector.
  if len(x.shape) == 1:
    x = np.reshape(x, (-1, 1))
  if batch_size is None:
    return [x]
  if batch_size <= 0:
    raise ValueError('Invalid batch_size %d.' % batch_size)
  n_batches = int(math.ceil(float(len(x)) / batch_size))
  return [x[b * batch_size:(b + 1) * batch_size] for b in xrange(n_batches)]
def setup_processor_data_feeder(x):
  """Return `x` with pandas containers unwrapped into raw matrices.

  Args:
    x: numpy array, pandas container, or other iterable.

  Returns:
    Iterable of data to process.
  """
  return extract_pandas_matrix(x) if HAS_PANDAS else x
def check_array(array, dtype):
  """Coerce `array` to `dtype` when it is a plain ndarray or list.

  Args:
    array: input array-like.
    dtype: expected dtype.

  Returns:
    The converted array, or the original object untouched.
  """
  if not isinstance(array, (np.ndarray, list)):
    # Pass through other classes (e.g. h5py.Dataset) unchanged: converting
    # would copy the whole dataset into memory.
    return array
  return np.array(array, dtype=dtype, order=None, copy=False)
def _access(data, iloc):
  """Positional (integer-location) access that also understands pandas.

  Args:
    data: array-like collection to index into.
    iloc: `int` or `list` of `int`s, the location(s) to fetch.

  Returns:
    The element(s) of `data` at `iloc`.
  """
  if HAS_PANDAS:
    import pandas as pd  # pylint: disable=g-import-not-at-top
    if isinstance(data, (pd.Series, pd.DataFrame)):
      return data.iloc[iloc]
  return data[iloc]
def _check_dtype(dtype):
  """Warn about float64 input and hand the dtype back unchanged."""
  # float64 silently degrades many estimators, so nudge users to float32.
  if dtypes.as_dtype(dtype) == dtypes.float64:
    logging.warn(
        'float64 is not supported by many models, consider casting to float32.')
  return dtype
class DataFeeder(object):
  """Data feeder is an example class to sample data for TF trainer."""
  def __init__(self,
               x,
               y,
               n_classes,
               batch_size=None,
               shuffle=True,
               random_state=None,
               epochs=None):
    """Initializes a DataFeeder instance.
    Args:
      x: One feature sample which can either Nd numpy matrix of shape
        `[n_samples, n_features, ...]` or dictionary of Nd numpy matrix.
      y: label vector, either floats for regression or class id for
        classification. If matrix, will consider as a sequence of labels.
        Can be `None` for unsupervised setting. Also supports dictionary of
        labels.
      n_classes: Number of classes, 0 and 1 are considered regression, `None`
        will pass through the input labels without one-hot conversion. Also, if
        `y` is `dict`, then `n_classes` must be `dict` such that
        `n_classes[key] = n_classes for label y[key]`, `None` otherwise.
      batch_size: Mini-batch size to accumulate samples in one mini batch.
      shuffle: Whether to shuffle `x`.
      random_state: Numpy `RandomState` object to reproduce sampling.
      epochs: Number of times to iterate over input data before raising
        `StopIteration` exception.
    Attributes:
      x: Input features (ndarray or dictionary of ndarrays).
      y: Input label (ndarray or dictionary of ndarrays).
      n_classes: Number of classes (if `None`, pass through indices without
        one-hot conversion).
      batch_size: Mini-batch size to accumulate.
      input_shape: Shape of the input (or dictionary of shapes).
      output_shape: Shape of the output (or dictionary of shapes).
      input_dtype: DType of input (or dictionary of shapes).
      output_dtype: DType of output (or dictionary of shapes.
    """
    x_is_dict, y_is_dict = isinstance(x, dict), y is not None and isinstance(
        y, dict)
    if isinstance(y, list):
      y = np.array(y)
    self._x = dict([(k, check_array(v, v.dtype)) for k, v in list(x.items())
                   ]) if x_is_dict else check_array(x, x.dtype)
    # NOTE(review): the conditional below tests `x_is_dict`, not
    # `y_is_dict`; this looks wrong when exactly one of x/y is a dict --
    # confirm against upstream before relying on mixed inputs here.
    self._y = None if y is None else \
        dict([(k, check_array(v, v.dtype)) for k, v in list(y.items())]) if x_is_dict else check_array(y, y.dtype)
    # self.n_classes is not None means we're converting raw target indices
    # to one-hot; multi-class targets become int64 indices, otherwise
    # float32 regression values.
    if n_classes is not None:
      if not y_is_dict:
        y_dtype = (np.int64
                   if n_classes is not None and n_classes > 1 else np.float32)
        self._y = (None if y is None else check_array(y, dtype=y_dtype))
    self.n_classes = n_classes
    self.max_epochs = epochs
    x_shape = dict([(k, v.shape) for k, v in list(self._x.items())
                   ]) if x_is_dict else self._x.shape
    y_shape = dict([(k, v.shape) for k, v in list(self._y.items())
                   ]) if y_is_dict else None if y is None else self._y.shape
    self.input_shape, self.output_shape, self._batch_size = _get_in_out_shape(
        x_shape, y_shape, n_classes, batch_size)
    # Input dtype matches dtype of x.
    self._input_dtype = dict([(k, _check_dtype(v.dtype)) for k, v in list(self._x.items())]) if x_is_dict \
        else _check_dtype(self._x.dtype)
    # note: self._output_dtype = np.float32 when y is None
    self._output_dtype = dict([(k, _check_dtype(v.dtype)) for k, v in list(self._y.items())]) if y_is_dict \
        else _check_dtype(self._y.dtype) if y is not None else np.float32
    # With dict-valued labels, any label that will be one-hot encoded gets
    # a float32 output dtype.
    if n_classes is not None and y_is_dict:
      for key in list(n_classes.keys()):
        if key in self._output_dtype:
          self._output_dtype[key] = np.float32
    self._shuffle = shuffle
    # Fixed seed 42 by default keeps sampling reproducible across runs.
    self.random_state = np.random.RandomState(
        42) if random_state is None else random_state
    num_samples = list(self._x.values())[0].shape[
        0] if x_is_dict else self._x.shape[0]
    if self._shuffle:
      self.indices = self.random_state.permutation(num_samples)
    else:
      self.indices = np.array(range(num_samples))
    self.offset = 0
    self.epoch = 0
    self._epoch_placeholder = None
  @property
  def x(self):
    """Input features (ndarray or dict of ndarrays)."""
    return self._x
  @property
  def y(self):
    """Input labels (ndarray or dict of ndarrays), or None."""
    return self._y
  @property
  def shuffle(self):
    """Whether batches are sampled in shuffled order."""
    return self._shuffle
  @property
  def input_dtype(self):
    """DType of the input (or dict of dtypes)."""
    return self._input_dtype
  @property
  def output_dtype(self):
    """DType of the output (or dict of dtypes)."""
    return self._output_dtype
  @property
  def batch_size(self):
    """Effective mini-batch size."""
    return self._batch_size
  def make_epoch_variable(self):
    """Adds a placeholder variable for the epoch to the graph.
    Returns:
      The epoch placeholder.
    """
    self._epoch_placeholder = array_ops.placeholder(
        dtypes.int32, [1], name='epoch')
    return self._epoch_placeholder
  def input_builder(self):
    """Builds inputs in the graph.
    Returns:
      Two placeholders for inputs and outputs.
    """
    def get_placeholder(shape, dtype, name_prepend):
      # Mirrors the shape structure: a dict of shapes yields a dict of
      # placeholders; the batch dimension is left as None.
      if shape is None:
        return None
      if isinstance(shape, dict):
        placeholder = {}
        for key in list(shape.keys()):
          placeholder[key] = array_ops.placeholder(
              dtypes.as_dtype(dtype[key]), [None] + shape[key][1:],
              name=name_prepend + '_' + key)
      else:
        placeholder = array_ops.placeholder(
            dtypes.as_dtype(dtype), [None] + shape[1:], name=name_prepend)
      return placeholder
    self._input_placeholder = get_placeholder(self.input_shape,
                                              self._input_dtype, 'input')
    self._output_placeholder = get_placeholder(self.output_shape,
                                               self._output_dtype, 'output')
    return self._input_placeholder, self._output_placeholder
  def set_placeholders(self, input_placeholder, output_placeholder):
    """Sets placeholders for this data feeder.
    Args:
      input_placeholder: Placeholder for `x` variable. Should match shape
        of the examples in the x dataset.
      output_placeholder: Placeholder for `y` variable. Should match
        shape of the examples in the y dataset. Can be `None`.
    """
    self._input_placeholder = input_placeholder
    self._output_placeholder = output_placeholder
  def get_feed_params(self):
    """Function returns a `dict` with data feed params while training.
    Returns:
      A `dict` with data feed params while training.
    """
    return {
        'epoch': self.epoch,
        'offset': self.offset,
        'batch_size': self._batch_size
    }
  def get_feed_dict_fn(self):
    """Returns a function that samples data into given placeholders.
    Returns:
      A function that when called samples a random subset of batch size
      from `x` and `y`.
    """
    x_is_dict, y_is_dict = isinstance(
        self._x, dict), self._y is not None and isinstance(self._y, dict)
    # Assign input features from random indices.
    def extract(data, indices):
      # 1-D data is reshaped into a column vector of the batch length.
      return (np.array(_access(data, indices)).reshape((indices.shape[0], 1)) if
              len(data.shape) == 1 else _access(data, indices))
    # Assign (and, when n_classes > 1, one-hot encode) labels for the
    # sampled indices.
    def assign_label(data, shape, dtype, n_classes, indices):
      # NOTE(review): this mutates shape[0] in place; `shape` aliases
      # self.output_shape (or one of its dict values) -- confirm that is
      # intended before refactoring.
      shape[0] = indices.shape[0]
      out = np.zeros(shape, dtype=dtype)
      for i in xrange(out.shape[0]):
        sample = indices[i]
        # self.n_classes is None means we're passing in raw target indices
        if n_classes is None:
          out[i] = _access(data, sample)
        else:
          if n_classes > 1:
            if len(shape) == 2:
              out.itemset((i, int(_access(data, sample))), 1.0)
            else:
              for idx, value in enumerate(_access(data, sample)):
                out.itemset(tuple([i, idx, value]), 1.0)
          else:
            out[i] = _access(data, sample)
      return out
    def _feed_dict_fn():
      """Function that samples data into given placeholders."""
      if self.max_epochs is not None and self.epoch + 1 > self.max_epochs:
        raise StopIteration
      assert self._input_placeholder is not None
      feed_dict = {}
      if self._epoch_placeholder is not None:
        feed_dict[self._epoch_placeholder.name] = [self.epoch]
      # Take next batch of indices.
      x_len = list(self._x.values())[0].shape[
          0] if x_is_dict else self._x.shape[0]
      end = min(x_len, self.offset + self._batch_size)
      batch_indices = self.indices[self.offset:end]
      # adding input placeholder
      feed_dict.update(
          dict([(self._input_placeholder[k].name, extract(v, batch_indices))
                for k, v in list(self._x.items())]) if x_is_dict else
          {self._input_placeholder.name: extract(self._x, batch_indices)})
      # Move the offset; when an epoch is exhausted, reshuffle (if enabled)
      # and bump the epoch counter.
      self.offset += self._batch_size
      if self.offset >= x_len:
        self.indices = self.random_state.permutation(
            x_len) if self._shuffle else np.array(range(x_len))
        self.offset = 0
        self.epoch += 1
      # return early if there are no labels
      if self._output_placeholder is None:
        return feed_dict
      # adding output placeholders
      if y_is_dict:
        for k, v in list(self._y.items()):
          n_classes = (self.n_classes[k] if k in self.n_classes else
                       None) if self.n_classes is not None else None
          shape, dtype = self.output_shape[k], self._output_dtype[k]
          feed_dict.update({
              self._output_placeholder[k].name:
                  assign_label(v, shape, dtype, n_classes, batch_indices)
          })
      else:
        shape, dtype, n_classes = self.output_shape, self._output_dtype, self.n_classes
        feed_dict.update({
            self._output_placeholder.name:
                assign_label(self._y, shape, dtype, n_classes, batch_indices)
        })
      return feed_dict
    return _feed_dict_fn
class StreamingDataFeeder(DataFeeder):
  """Data feeder for TF trainer that reads data from iterator.
  Streaming data feeder allows to read data as it comes it from disk or
  somewhere else. It's custom to have this iterators rotate infinetly over
  the dataset, to allow control of how much to learn on the trainer side.
  """
  def __init__(self, x, y, n_classes, batch_size):
    """Initializes a StreamingDataFeeder instance.
    Args:
      x: iterator each element of which returns one feature sample. Sample can
        be a Nd numpy matrix or dictionary of Nd numpy matrices.
      y: iterator each element of which returns one label sample. Sample can be
        a Nd numpy matrix or dictionary of Nd numpy matrices with 1 or many
        classes regression values.
      n_classes: indicator of how many classes the corresponding label sample
        has for the purposes of one-hot conversion of label. In case where `y`
        is a dictionary, `n_classes` must be dictionary (with same keys as `y`)
        of how many classes there are in each label in `y`. If key is
        present in `y` and missing in `n_classes`, the value is assumed `None`
        and no one-hot conversion will be applied to the label with that key.
      batch_size: Mini batch size to accumulate samples in one batch. If set
        `None`, then assumes that iterator to return already batched element.
    Attributes:
      x: input features (or dictionary of input features).
      y: input label (or dictionary of output features).
      n_classes: number of classes.
      batch_size: mini batch size to accumulate.
      input_shape: shape of the input (can be dictionary depending on `x`).
      output_shape: shape of the output (can be dictionary depending on `y`).
      input_dtype: dtype of input (can be dictionary depending on `x`).
      output_dtype: dtype of output (can be dictionary depending on `y`).
    """
    # pylint: disable=invalid-name,super-init-not-called
    # Pull the first element of each iterator to learn shape/dtype, then
    # re-chain it so no data is lost.
    x_first_el = six.next(x)
    self._x = itertools.chain([x_first_el], x)
    if y is not None:
      y_first_el = six.next(y)
      self._y = itertools.chain([y_first_el], y)
    else:
      y_first_el = None
      self._y = None
    self.n_classes = n_classes
    x_is_dict = isinstance(x_first_el, dict)
    y_is_dict = y is not None and isinstance(y_first_el, dict)
    if y_is_dict and n_classes is not None:
      assert isinstance(n_classes, dict)
    # extract shapes for first_elements
    if x_is_dict:
      x_first_el_shape = dict(
          [(k, [1] + list(v.shape)) for k, v in list(x_first_el.items())])
    else:
      x_first_el_shape = [1] + list(x_first_el.shape)
    if y_is_dict:
      y_first_el_shape = dict(
          [(k, [1] + list(v.shape)) for k, v in list(y_first_el.items())])
    elif y is None:
      y_first_el_shape = None
    else:
      y_first_el_shape = ([1] + list(y_first_el[0].shape if isinstance(
          y_first_el, list) else y_first_el.shape))
    self.input_shape, self.output_shape, self._batch_size = _get_in_out_shape(
        x_first_el_shape, y_first_el_shape, n_classes, batch_size)
    # Input dtype of x_first_el.
    if x_is_dict:
      self._input_dtype = dict(
          [(k, _check_dtype(v.dtype)) for k, v in list(x_first_el.items())])
    else:
      self._input_dtype = _check_dtype(x_first_el.dtype)
    # Output dtype of y_first_el.
    def check_y_dtype(el):
      # Recurses into lists; scalars are mapped through numpy's dtype of
      # their Python type.
      if isinstance(el, np.ndarray):
        return el.dtype
      elif isinstance(el, list):
        return check_y_dtype(el[0])
      else:
        return _check_dtype(np.dtype(type(el)))
    # Output types are floats, due to both softmaxes and regression req.
    if n_classes is not None and (y is None or not y_is_dict) and n_classes > 0:
      self._output_dtype = np.float32
    elif y_is_dict:
      self._output_dtype = dict(
          [(k, check_y_dtype(v)) for k, v in list(y_first_el.items())])
    elif y is None:
      self._output_dtype = None
    else:
      self._output_dtype = check_y_dtype(y_first_el)
  def get_feed_params(self):
    """Function returns a `dict` with data feed params while training.
    Returns:
      A `dict` with data feed params while training.
    """
    return {'batch_size': self._batch_size}
  def get_feed_dict_fn(self):
    """Returns a function, that will sample data and provide it to placeholders.
    Returns:
      A function that when called samples a random subset of batch size
      from x and y.
    """
    self.stopped = False
    def _feed_dict_fn():
      """Samples data and provides it to placeholders.
      Returns:
        `dict` of input and output tensors.
      """
      def init_array(shape, dtype):
        """Initialize array of given shape or dict of shapes and dtype."""
        if shape is None:
          return None
        elif isinstance(shape, dict):
          return dict([(k, np.zeros(shape[k], dtype[k]))
                       for k in list(shape.keys())])
        else:
          return np.zeros(shape, dtype=dtype)
      def put_data_array(dest, index, source=None, n_classes=None):
        """Puts data array into container."""
        if source is None:
          # Input exhausted mid-batch: truncate the container to the
          # rows actually filled.
          dest = dest[:index]
        elif n_classes is not None and n_classes > 1:
          if len(self.output_shape) == 2:
            dest.itemset((index, source), 1.0)
          else:
            for idx, value in enumerate(source):
              dest.itemset(tuple([index, idx, value]), 1.0)
        else:
          if len(dest.shape) > 1:
            dest[index, :] = source
          else:
            dest[index] = source[0] if isinstance(source, list) else source
        return dest
      def put_data_array_or_dict(holder, index, data=None, n_classes=None):
        """Puts data array or data dictionary into container."""
        if holder is None:
          return None
        if isinstance(holder, dict):
          if data is None:
            data = {k: None for k in holder.keys()}
          assert isinstance(data, dict)
          for k in holder.keys():
            num_classes = n_classes[k] if (n_classes is not None and
                                           k in n_classes) else None
            holder[k] = put_data_array(holder[k], index, data[k], num_classes)
        else:
          holder = put_data_array(holder, index, data, n_classes)
        return holder
      if self.stopped:
        raise StopIteration
      inp = init_array(self.input_shape, self._input_dtype)
      out = init_array(self.output_shape, self._output_dtype)
      for i in xrange(self._batch_size):
        # Add handling when queue ends.
        try:
          next_inp = six.next(self._x)
          inp = put_data_array_or_dict(inp, i, next_inp, None)
        except StopIteration:
          # Remember exhaustion so the next call raises immediately; an
          # empty first row means there is nothing left to yield at all.
          self.stopped = True
          if i == 0:
            raise
          inp = put_data_array_or_dict(inp, i, None, None)
          out = put_data_array_or_dict(out, i, None, None)
          break
        if self._y is not None:
          next_out = six.next(self._y)
          out = put_data_array_or_dict(out, i, next_out, self.n_classes)
      # creating feed_dict
      if isinstance(inp, dict):
        feed_dict = dict([(self._input_placeholder[k].name, inp[k])
                          for k in list(self._input_placeholder.keys())])
      else:
        feed_dict = {self._input_placeholder.name: inp}
      if self._y is not None:
        if isinstance(out, dict):
          feed_dict.update(
              dict([(self._output_placeholder[k].name, out[k])
                    for k in list(self._output_placeholder.keys())]))
        else:
          feed_dict.update({self._output_placeholder.name: out})
      return feed_dict
    return _feed_dict_fn
class DaskDataFeeder(object):
  """Data feeder for that reads data from dask.Series and dask.DataFrame.
  Numpy arrays can be serialized to disk and it's possible to do random seeks
  into them. DaskDataFeeder will remove requirement to have full dataset in the
  memory and still do random seeks for sampling of batches.
  """
  def __init__(self,
               x,
               y,
               n_classes,
               batch_size,
               shuffle=True,
               random_state=None,
               epochs=None):
    """Initializes a DaskDataFeeder instance.
    Args:
      x: iterator that returns for each element, returns features.
      y: iterator that returns for each element, returns 1 or many classes /
        regression values.
      n_classes: indicator of how many classes the label has.
      batch_size: Mini batch size to accumulate.
      shuffle: Whether to shuffle the inputs.
      random_state: random state for RNG. Note that it will mutate so use a
        int value for this if you want consistent sized batches.
      epochs: Number of epochs to run.
    Attributes:
      x: input features.
      y: input label.
      n_classes: number of classes.
      batch_size: mini batch size to accumulate.
      input_shape: shape of the input.
      output_shape: shape of the output.
      input_dtype: dtype of input.
      output_dtype: dtype of output.
    Raises:
      ValueError: if `x` or `y` are `dict`, as they are not supported currently.
    """
    if isinstance(x, dict) or isinstance(y, dict):
      raise ValueError(
          'DaskDataFeeder does not support dictionaries at the moment.')
    # pylint: disable=invalid-name,super-init-not-called
    import dask.dataframe as dd  # pylint: disable=g-import-not-at-top
    # TODO(terrytangyuan): check x and y dtypes in dask_io like pandas
    self._x = x
    self._y = y
    # save column names
    self._x_columns = list(x.columns)
    if isinstance(y.columns[0], str):
      self._y_columns = list(y.columns)
    else:
      # deal with cases where two DFs have overlapped default numeric colnames
      self._y_columns = len(self._x_columns) + 1
      self._y = self._y.rename(columns={y.columns[0]: self._y_columns})
    # TODO(terrytangyuan): deal with unsupervised cases
    # combine into a data frame
    self.df = dd.multi.concat([self._x, self._y], axis=1)
    self.n_classes = n_classes
    x_count = x.count().compute()[0]
    x_shape = (x_count, len(self._x.columns))
    y_shape = (x_count, len(self._y.columns))
    # TODO(terrytangyuan): Add support for shuffle and epochs.
    self._shuffle = shuffle
    self.epochs = epochs
    self.input_shape, self.output_shape, self._batch_size = _get_in_out_shape(
        x_shape, y_shape, n_classes, batch_size)
    # Fraction of the frame sampled per batch by random_split below.
    self.sample_fraction = self._batch_size / float(x_count)
    self._input_dtype = _check_dtype(self._x.dtypes[0])
    self._output_dtype = _check_dtype(self._y.dtypes[self._y_columns])
    if random_state is None:
      # Fixed default seed keeps sampling reproducible.
      self.random_state = 66
    else:
      self.random_state = random_state
  def get_feed_params(self):
    """Function returns a `dict` with data feed params while training.
    Returns:
      A `dict` with data feed params while training.
    """
    return {'batch_size': self._batch_size}
  def get_feed_dict_fn(self, input_placeholder, output_placeholder):
    """Returns a function, that will sample data and provide it to placeholders.
    Args:
      input_placeholder: tf.Placeholder for input features mini batch.
      output_placeholder: tf.Placeholder for output labels.
    Returns:
      A function that when called samples a random subset of batch size
      from x and y.
    """
    def _feed_dict_fn():
      """Samples data and provides it to placeholders."""
      # TODO(ipolosukhin): option for with/without replacement (dev version of
      # dask)
      sample = self.df.random_split(
          [self.sample_fraction, 1 - self.sample_fraction],
          random_state=self.random_state)
      inp = extract_pandas_matrix(sample[0][self._x_columns].compute()).tolist()
      out = extract_pandas_matrix(sample[0][self._y_columns].compute())
      # convert to correct dtype
      inp = np.array(inp, dtype=self._input_dtype)
      # one-hot encode out for each class for cross entropy loss
      if HAS_PANDAS:
        import pandas as pd  # pylint: disable=g-import-not-at-top
        if not isinstance(out, pd.Series):
          out = out.flatten()
      # NOTE(review): assumes labels are non-negative integers in
      # [0, max]; out_max+1 columns are allocated -- confirm callers
      # guarantee this.
      out_max = self._y.max().compute().values[0]
      encoded_out = np.zeros((out.size, out_max + 1), dtype=self._output_dtype)
      encoded_out[np.arange(out.size), out] = 1
      return {input_placeholder.name: inp, output_placeholder.name: encoded_out}
    return _feed_dict_fn
| gpl-3.0 |
egafford/sahara | sahara/utils/ssh_remote.py | 2 | 33705 | # Copyright (c) 2013 Mirantis Inc.
# Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper methods for executing commands on nodes via SSH.
The main access point is method get_remote(instance), it returns
InstanceInteropHelper object which does the actual work. See the
class for the list of available methods.
It is a context manager, so it could be used with 'with' statement
like that:
with get_remote(instance) as r:
r.execute_command(...)
Note that the module offloads the ssh calls to a child process.
It was implemented that way because we found no way to run paramiko
and eventlet together. The private high-level module methods are
implementations which are run in a separate process.
"""
import copy
import os
import shlex
import sys
import threading
import time
from eventlet.green import subprocess as e_subprocess
from eventlet import semaphore
from eventlet import timeout as e_timeout
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import uuidutils
import paramiko
import requests
from requests import adapters
import six
from sahara import context
from sahara import exceptions as ex
from sahara.i18n import _
from sahara.i18n import _LE
from sahara.service import trusts
from sahara.utils import crypto
from sahara.utils import network as net_utils
from sahara.utils.openstack import neutron
from sahara.utils import procutils
from sahara.utils import remote
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# Tunable SSH timeouts, registered with oslo.config below.
ssh_config_options = [
    cfg.IntOpt(
        'ssh_timeout_common', default=300, min=1,
        help="Overrides timeout for common ssh operations, in seconds"),
    cfg.IntOpt(
        'ssh_timeout_interactive', default=1800, min=1,
        help="Overrides timeout for interactive ssh operations, in seconds"),
    cfg.IntOpt(
        'ssh_timeout_files', default=120, min=1,
        help="Overrides timeout for ssh operations with files, in seconds"),
]

CONF.register_opts(ssh_config_options)

# Module-level state.  _ssh/_proxy_ssh live inside the child process that
# performs the actual paramiko calls (see module docstring).
_ssh = None
_proxy_ssh = None
# Cache of HTTP sessions keyed by (host, port).
_sessions = {}

INFRA = None

# Maps an executor function name to the config option holding its timeout;
# anything not listed falls back to 'ssh_timeout_files'.
SSH_TIMEOUTS_MAPPING = {
    '_execute_command': 'ssh_timeout_common',
    '_execute_command_interactive': 'ssh_timeout_interactive'
}

_global_remote_semaphore = None
def _default_timeout(func):
    """Return the configured default timeout, in seconds, for *func*."""
    option_name = SSH_TIMEOUTS_MAPPING.get(func.__name__, 'ssh_timeout_files')
    return getattr(CONF, option_name, CONF.ssh_timeout_common)
def _get_ssh_timeout(func, timeout):
return timeout if timeout else _default_timeout(func)
def _connect(host, username, private_key, proxy_command=None,
             gateway_host=None, gateway_image_username=None):
    """Open the global SSH connection to *host* (runs in the child process).

    Optionally tunnels through a user-supplied proxy command and/or a
    gateway node.  NOTE(review): the same private key is used for both the
    gateway and the target host -- confirm that is the deployment contract.
    """
    global _ssh
    global _proxy_ssh

    LOG.debug('Creating SSH connection')
    if isinstance(private_key, six.string_types):
        private_key = crypto.to_paramiko_private_key(private_key)

    _ssh = paramiko.SSHClient()
    _ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

    proxy = None
    if proxy_command:
        LOG.debug('Creating proxy using command: {command}'.format(
            command=proxy_command))
        proxy = paramiko.ProxyCommand(proxy_command)

    if gateway_host:
        # First hop: SSH to the gateway (possibly through proxy_command),
        # then run netcat from there to the target's SSH port and use that
        # channel as the socket for the real connection.
        _proxy_ssh = paramiko.SSHClient()
        _proxy_ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())

        LOG.debug('Connecting to proxy gateway at: {gateway}'.format(
            gateway=gateway_host))
        _proxy_ssh.connect(gateway_host, username=gateway_image_username,
                           pkey=private_key, sock=proxy)

        proxy = _proxy_ssh.get_transport().open_session()
        proxy.exec_command("nc {0} 22".format(host))

    _ssh.connect(host, username=username, pkey=private_key, sock=proxy)
def _cleanup():
    """Close the global SSH client and, when present, the gateway client."""
    global _ssh
    global _proxy_ssh

    _ssh.close()
    if _proxy_ssh is not None:
        _proxy_ssh.close()
def _read_paramimko_stream(recv_func):
result = ''
buf = recv_func(1024)
while buf != '':
result += buf
buf = recv_func(1024)
return result
def _escape_quotes(command):
command = command.replace('\\', '\\\\')
command = command.replace('"', '\\"')
command = command.replace('`', '\\`')
return command
def _execute_command(cmd, run_as_root=False, get_stderr=False,
                     raise_when_error=True):
    """Run *cmd* on the already-connected global SSH client.

    Returns (ret_code, stdout), or (ret_code, stdout, stderr) when
    get_stderr is True.  Raises RemoteCommandException on a non-zero exit
    status unless raise_when_error is False.
    """
    global _ssh

    chan = _ssh.get_transport().open_session()
    if run_as_root:
        # Wrap in a root shell; quotes/backticks must be escaped first.
        chan.exec_command('sudo bash -c "%s"' % _escape_quotes(cmd))
    else:
        chan.exec_command(cmd)

    # TODO(dmitryme): that could hang if stderr buffer overflows
    stdout = _read_paramimko_stream(chan.recv)
    stderr = _read_paramimko_stream(chan.recv_stderr)

    ret_code = chan.recv_exit_status()

    if ret_code and raise_when_error:
        raise ex.RemoteCommandException(cmd=cmd, ret_code=ret_code,
                                        stdout=stdout, stderr=stderr)

    if get_stderr:
        return ret_code, stdout, stderr
    else:
        return ret_code, stdout
def _execute_command_interactive(cmd, run_as_root=False):
    """Run *cmd* while bridging local stdin/stdout to the remote channel.

    Unlike _execute_command, the output is not captured; the session is
    proxied via _proxy_shell and the SSH connection is closed when the
    command finishes.
    """
    global _ssh

    chan = _ssh.get_transport().open_session()
    if run_as_root:
        chan.exec_command('sudo bash -c "%s"' % _escape_quotes(cmd))
    else:
        chan.exec_command(cmd)

    _proxy_shell(chan)

    _ssh.close()
def _proxy_shell(chan):
    """Pump data between the local terminal and *chan* until it exits."""
    def readall():
        # Forward local keystrokes to the remote side one char at a time,
        # stopping on local EOF or remote command completion.
        while True:
            d = sys.stdin.read(1)
            if not d or chan.exit_status_ready():
                break
            chan.send(d)

    reader = threading.Thread(target=readall)
    reader.start()

    # Pump remote output to the local terminal in the current thread.
    while True:
        data = chan.recv(256)
        if not data or chan.exit_status_ready():
            break
        sys.stdout.write(data)
        sys.stdout.flush()
def _get_http_client(host, port, proxy_command=None, gateway_host=None,
                     gateway_username=None, gateway_private_key=None):
    """Return a requests session for host:port, proxied when configured.

    Proxied sessions are rebuilt on every call (their adapters share
    connection pools per host/port); plain sessions are cached in the
    module-level _sessions dict.
    """
    global _sessions

    _http_session = _sessions.get((host, port), None)
    LOG.debug('Cached HTTP session for {host}:{port} is {session}'.format(
        host=host, port=port, session=_http_session))
    if not _http_session:
        if gateway_host:
            _http_session = _get_proxy_gateway_http_session(
                gateway_host, gateway_username,
                gateway_private_key, host, port, proxy_command)
            LOG.debug('Created ssh proxied HTTP session for {host}:{port}'
                      .format(host=host, port=port))
        elif proxy_command:
            # can return a new session here because it actually uses
            # the same adapter (and same connection pools) for a given
            # host and port tuple
            _http_session = _get_proxied_http_session(
                proxy_command, host, port=port)
            LOG.debug('Created proxied HTTP session for {host}:{port}'
                      .format(host=host, port=port))
        else:
            # need to cache the sessions that are not proxied through
            # HTTPRemoteWrapper so that a new session with a new HTTPAdapter
            # and associated pools is not recreated for each HTTP invocation
            _http_session = requests.Session()
            LOG.debug('Created standard HTTP session for {host}:{port}'
                      .format(host=host, port=port))

            adapter = requests.adapters.HTTPAdapter()
            for prefix in ['http://', 'https://']:
                _http_session.mount(prefix + '%s:%s' % (host, port),
                                    adapter)

            LOG.debug('Caching session {session} for {host}:{port}'
                      .format(session=_http_session, host=host, port=port))
            _sessions[(host, port)] = _http_session

    return _http_session
def _write_fl(sftp, remote_file, data):
fl = sftp.file(remote_file, 'w')
fl.write(data)
fl.close()
def _append_fl(sftp, remote_file, data):
fl = sftp.file(remote_file, 'a')
fl.write(data)
fl.close()
def _write_file(sftp, remote_file, data, run_as_root):
    """Write *data* to *remote_file*, staging through a temp file for root."""
    if not run_as_root:
        _write_fl(sftp, remote_file, data)
        return
    # Root-owned targets: upload as the ssh user, then move with sudo.
    staging = 'temp-file-%s' % uuidutils.generate_uuid()
    _write_fl(sftp, staging, data)
    _execute_command(
        'mv %s %s' % (staging, remote_file), run_as_root=True)
def _append_file(sftp, remote_file, data, run_as_root):
    """Append *data* to *remote_file*, staging through a temp file for root."""
    if not run_as_root:
        _append_fl(sftp, remote_file, data)
        return
    # Root-owned targets: upload as the ssh user, then append with sudo.
    staging = 'temp-file-%s' % uuidutils.generate_uuid()
    _write_fl(sftp, staging, data)
    _execute_command(
        'cat %s >> %s' % (staging, remote_file), run_as_root=True)
    _execute_command('rm -f %s' % staging)
def _write_file_to(remote_file, data, run_as_root=False):
    """Write *data* to *remote_file* over the global SSH connection."""
    global _ssh

    _write_file(_ssh.open_sftp(), remote_file, data, run_as_root)


def _write_files_to(files, run_as_root=False):
    """Write a {path: data} mapping, reusing a single SFTP session."""
    global _ssh

    sftp = _ssh.open_sftp()

    for fl, data in six.iteritems(files):
        _write_file(sftp, fl, data, run_as_root)


def _append_to_file(remote_file, data, run_as_root=False):
    """Append *data* to *remote_file* over the global SSH connection."""
    global _ssh

    _append_file(_ssh.open_sftp(), remote_file, data, run_as_root)


def _append_to_files(files, run_as_root=False):
    """Append a {path: data} mapping, reusing a single SFTP session."""
    global _ssh

    sftp = _ssh.open_sftp()

    for fl, data in six.iteritems(files):
        _append_file(sftp, fl, data, run_as_root)
def _read_file(sftp, remote_file):
fl = sftp.file(remote_file, 'r')
data = fl.read()
fl.close()
return data
def _read_file_from(remote_file, run_as_root=False):
    """Read *remote_file* via the global SSH connection.

    For run_as_root the file is first copied to a temp file readable by
    the ssh user; the copy is removed afterwards (best effort).
    """
    global _ssh

    fl = remote_file
    if run_as_root:
        fl = 'temp-file-%s' % (uuidutils.generate_uuid())
        _execute_command('cp %s %s' % (remote_file, fl), run_as_root=True)

    try:
        return _read_file(_ssh.open_sftp(), fl)
    except IOError:
        # Bug fix: the previous message had no placeholder, so the filename
        # passed to .format() was silently dropped from the log.
        LOG.error(_LE("Can't read file {filename}").format(
            filename=remote_file))
        raise
    finally:
        if run_as_root:
            _execute_command(
                'rm %s' % fl, run_as_root=True, raise_when_error=False)
def __get_python_to_execute():
    """Probe the remote node for python3, falling back to python2."""
    try:
        _execute_command('python3 --version')
        return 'python3'
    except Exception:
        # python3 probe failed; any error from the python2 probe propagates.
        _execute_command('python2 --version')
        return 'python2'
def _get_os_distrib():
    """Return the lower-cased distro name reported by the remote python."""
    python_version = __get_python_to_execute()
    probe = ('printf "import platform\nprint(platform.linux_distribution('
             'full_distribution_name=0)[0])" | {}').format(python_version)
    stdout = _execute_command(probe, run_as_root=False)[1]
    return stdout.lower().strip()
def _get_os_version():
    """Return the distro version string reported by the remote python."""
    python_version = __get_python_to_execute()
    probe = ('printf "import platform\nprint(platform.linux_distribution()[1])"'
             ' | {}').format(python_version)
    return _execute_command(probe, run_as_root=False)[1].strip()
def _install_packages(packages):
    """Install *packages* with the remote node's native package manager.

    Raises NotImplementedException for unrecognized distributions.
    """
    distrib = _get_os_distrib()
    if distrib == 'ubuntu':
        cmd = 'RUNLEVEL=1 apt-get install -y %(pkgs)s'
    elif distrib == 'fedora':
        # Bug fix: _get_os_version() returns a string, so `fversion >= 22`
        # compared str to int (TypeError on py3, always-True on py2).
        # Compare numerically instead; Fedora versions parse as floats.
        fversion = float(_get_os_version())
        if fversion >= 22:
            cmd = 'dnf install -y %(pkgs)s'
        else:
            cmd = 'yum install -y %(pkgs)s'
    elif distrib in ('redhat', 'centos'):
        cmd = 'yum install -y %(pkgs)s'
    else:
        raise ex.NotImplementedException(
            _('Package Installation'),
            _('%(fmt)s is not implemented for OS %(distrib)s') % {
                'fmt': '%s', 'distrib': distrib})

    cmd = cmd % {'pkgs': ' '.join(packages)}
    _execute_command(cmd, run_as_root=True)
def _update_repository():
    """Refresh the remote node's package-manager metadata/cache.

    Raises NotImplementedException for unrecognized distributions.
    """
    distrib = _get_os_distrib()
    if distrib == 'ubuntu':
        cmd = 'apt-get update'
    elif distrib == 'fedora':
        # Bug fix: _get_os_version() returns a string, so `fversion >= 22`
        # compared str to int (TypeError on py3, always-True on py2).
        fversion = float(_get_os_version())
        if fversion >= 22:
            cmd = 'dnf clean all'
        else:
            cmd = 'yum clean all'
    elif distrib in ('redhat', 'centos'):
        cmd = 'yum clean all'
    else:
        raise ex.NotImplementedException(
            _('Repository Update'),
            _('%(fmt)s is not implemented for OS %(distrib)s') % {
                'fmt': '%s', 'distrib': distrib})

    _execute_command(cmd, run_as_root=True)
def _replace_remote_string(remote_file, old_str, new_str):
    """Replace every occurrence of *old_str* with *new_str* in a remote file."""
    def _quote(text):
        # The sed expression sits inside single quotes; double any
        # embedded single quote so it survives shell parsing.
        return text.replace("'", "''")

    cmd = "sudo sed -i 's,%s,%s,g' %s" % (
        _quote(old_str), _quote(new_str), remote_file)
    _execute_command(cmd)
def _execute_on_vm_interactive(cmd, matcher):
    """Run *cmd* in a remote shell, answering prompts via *matcher*.

    *matcher* must provide get_response(buf) -> response-or-None and
    is_eof(buf) -> bool (see InstanceInteropHelper.execute_on_vm_interactive).
    """
    global _ssh

    buf = ''

    channel = _ssh.invoke_shell()
    LOG.debug('Channel is {channel}'.format(channel=channel))
    try:
        LOG.debug('Sending cmd {command}'.format(command=cmd))
        channel.send(cmd + '\n')
        while not matcher.is_eof(buf):
            buf += channel.recv(4096)
            response = matcher.get_response(buf)
            if response is not None:
                channel.send(response + '\n')
                # Start a fresh buffer after answering a prompt so the same
                # prompt text is not matched twice.
                buf = ''
    finally:
        LOG.debug('Closing channel')
        channel.close()
def _acquire_remote_semaphore():
    """Take both the per-context and the global remote-operation slots."""
    context.current().remote_semaphore.acquire()
    _global_remote_semaphore.acquire()


def _release_remote_semaphore():
    """Release the slots in reverse acquisition order."""
    _global_remote_semaphore.release()
    context.current().remote_semaphore.release()
def _get_proxied_http_session(proxy_command, host, port=None):
    """Build a requests session tunnelled through *proxy_command*."""
    session = requests.Session()
    process_factory = _simple_exec_func(shlex.split(proxy_command))
    adapter = ProxiedHTTPAdapter(process_factory, host, port)
    session.mount('http://{0}:{1}'.format(host, adapter.port), adapter)
    return session
def _get_proxy_gateway_http_session(gateway_host, gateway_username,
                                    gateway_private_key, host, port=None,
                                    proxy_command=None):
    """Build a requests session tunnelled through an SSH proxy gateway."""
    session = requests.Session()
    process_factory = _proxy_gateway_func(gateway_host, gateway_username,
                                          gateway_private_key, host,
                                          port, proxy_command)
    adapter = ProxiedHTTPAdapter(process_factory, host, port)
    session.mount('http://{0}:{1}'.format(host, port), adapter)
    return session
def _simple_exec_func(cmd):
    """Return a zero-argument factory that launches *cmd* with piped stdio."""
    def func():
        pipes = {'stdin': e_subprocess.PIPE,
                 'stdout': e_subprocess.PIPE,
                 'stderr': e_subprocess.PIPE}
        return e_subprocess.Popen(cmd, **pipes)

    return func
def _proxy_gateway_func(gateway_host, gateway_username,
                        gateway_private_key, host,
                        port, proxy_command):
    """Return a factory that opens an SSH-to-gateway netcat pipe to host:port.

    Each call to the returned factory starts a child process, connects it
    to the gateway over SSH and leaves an interactive `nc host port`
    running inside it; the process object is returned to the caller.
    """
    def func():
        proc = procutils.start_subprocess()

        try:
            conn_params = (gateway_host, gateway_username, gateway_private_key,
                           proxy_command, None, None)
            procutils.run_in_subprocess(proc, _connect, conn_params)
            cmd = "nc {host} {port}".format(host=host, port=port)
            procutils.run_in_subprocess(
                proc, _execute_command_interactive, (cmd,), interactive=True)
            return proc
        except Exception:
            with excutils.save_and_reraise_exception():
                procutils.shutdown_subprocess(proc, _cleanup)

    return func
class ProxiedHTTPAdapter(adapters.HTTPAdapter):
    """HTTPAdapter whose connections are backed by proxy subprocesses.

    create_process_func launches the subprocess (e.g. netcat or an SSH
    gateway pipe) that actually carries the bytes; see NetcatSocket.
    """

    def __init__(self, create_process_func, host, port):
        super(ProxiedHTTPAdapter, self).__init__()
        LOG.debug('HTTP adapter created for {host}:{port}'.format(host=host,
                                                                  port=port))
        self.create_process_func = create_process_func
        self.port = port
        self.host = host

    def get_connection(self, url, proxies=None):
        pool_conn = (
            super(ProxiedHTTPAdapter, self).get_connection(url, proxies))
        # NOTE(review): reaches into urllib3 pool internals (_get_conn /
        # _put_conn) -- sensitive to the vendored urllib3 version.
        if hasattr(pool_conn, '_get_conn'):
            http_conn = pool_conn._get_conn()
            if http_conn.sock is None:
                # Fresh connection: give it a subprocess-backed socket
                # before urllib3 tries to connect directly.
                if hasattr(http_conn, 'connect'):
                    sock = self._connect()
                    LOG.debug('HTTP connection {connection} getting new '
                              'netcat socket {socket}'.format(
                                  connection=http_conn, socket=sock))
                    http_conn.sock = sock
            else:
                if hasattr(http_conn.sock, 'is_netcat_socket'):
                    LOG.debug('Pooled http connection has existing '
                              'netcat socket. resetting pipe')
                    http_conn.sock.reset()

            pool_conn._put_conn(http_conn)

        return pool_conn

    def close(self):
        LOG.debug('Closing HTTP adapter for {host}:{port}'
                  .format(host=self.host, port=self.port))
        super(ProxiedHTTPAdapter, self).close()

    def _connect(self):
        """Create a new NetcatSocket bound to this adapter's subprocess."""
        LOG.debug('Returning netcat socket for {host}:{port}'
                  .format(host=self.host, port=self.port))
        rootwrap_command = CONF.rootwrap_command if CONF.use_rootwrap else ''
        return NetcatSocket(self.create_process_func, rootwrap_command)
class NetcatSocket(object):
    """Socket-like object backed by a proxy subprocess's stdin/stdout."""

    def _create_process(self):
        self.process = self.create_process_func()

    def __init__(self, create_process_func, rootwrap_command=None):
        self.create_process_func = create_process_func
        self.rootwrap_command = rootwrap_command
        self._create_process()

    def send(self, content):
        try:
            self.process.stdin.write(content)
            self.process.stdin.flush()
        except IOError as e:
            raise ex.SystemError(e)
        return len(content)

    def sendall(self, content):
        return self.send(content)

    def makefile(self, mode, *arg):
        if mode.startswith('r'):
            return self.process.stdout
        if mode.startswith('w'):
            return self.process.stdin
        raise ex.IncorrectStateError(_("Unknown file mode %s") % mode)

    def recv(self, size):
        try:
            return os.read(self.process.stdout.fileno(), size)
        except IOError as e:
            raise ex.SystemError(e)

    def _terminate(self):
        if self.rootwrap_command:
            # The subprocess may run with elevated privileges, so kill it
            # through rootwrap when configured.
            os.system('{0} kill {1}'.format(self.rootwrap_command,  # nosec
                                            self.process.pid))
        else:
            self.process.terminate()

    def close(self):
        LOG.debug('Socket close called')
        self._terminate()

    def settimeout(self, timeout):
        # Timeouts are not supported for the subprocess-backed transport.
        pass

    def fileno(self):
        return self.process.stdin.fileno()

    def is_netcat_socket(self):
        # Marker method checked by ProxiedHTTPAdapter.get_connection().
        return True

    def reset(self):
        """Restart the backing subprocess to get a clean pipe."""
        self._terminate()
        self._create_process()
class InstanceInteropHelper(remote.Remote):
    """Remote-access API for a single cluster instance.

    Every operation is offloaded to a short-lived child process that owns
    the paramiko connection (see module docstring); used as a context
    manager it yields a BulkInstanceInteropHelper that keeps one child
    process alive across calls.
    """

    def __init__(self, instance):
        self.instance = instance

    def __enter__(self):
        _acquire_remote_semaphore()
        try:
            self.bulk = BulkInstanceInteropHelper(self.instance)
            return self.bulk
        except Exception:
            with excutils.save_and_reraise_exception():
                _release_remote_semaphore()

    def __exit__(self, *exc_info):
        try:
            self.bulk.close()
        finally:
            _release_remote_semaphore()

    def get_neutron_info(self, instance=None):
        """Collect the Neutron data needed to build proxied connections."""
        if not instance:
            instance = self.instance
        neutron_info = dict()
        neutron_info['network'] = instance.cluster.neutron_management_network
        ctx = context.current()
        neutron_info['token'] = context.get_auth_token()
        neutron_info['tenant'] = ctx.tenant_name
        neutron_info['host'] = instance.management_ip

        # Never log the auth token.
        log_info = copy.deepcopy(neutron_info)
        del log_info['token']
        LOG.debug('Returning neutron info: {info}'.format(info=log_info))
        return neutron_info

    def _build_proxy_command(self, command, instance=None, port=None,
                             info=None, rootwrap_command=None):
        """Substitute the template keywords in a proxy command string."""
        # Accepted keywords in the proxy command template:
        # {host}, {port}, {tenant_id}, {network_id}, {router_id}
        keywords = {}

        if not info:
            info = self.get_neutron_info(instance)
        keywords['tenant_id'] = context.current().tenant_id
        keywords['network_id'] = info['network']

        # Query Neutron only if needed
        if '{router_id}' in command:
            auth = trusts.get_os_admin_auth_plugin(instance.cluster)
            client = neutron.NeutronClient(info['network'], info['token'],
                                           info['tenant'], auth=auth)
            keywords['router_id'] = client.get_router()

        keywords['host'] = instance.management_ip
        keywords['port'] = port

        try:
            command = command.format(**keywords)
        except KeyError as e:
            LOG.error(_LE('Invalid keyword in proxy_command: {result}').format(
                result=e))
            # Do not give more details to the end-user
            raise ex.SystemError('Misconfiguration')
        if rootwrap_command:
            command = '{0} {1}'.format(rootwrap_command, command)
        return command

    def _get_conn_params(self):
        """Return the argument tuple passed to _connect in the subprocess."""
        host_ng = self.instance.node_group
        cluster = host_ng.cluster
        access_instance = self.instance
        proxy_gateway_node = cluster.get_proxy_gateway_node()

        gateway_host = None
        gateway_image_username = None

        if proxy_gateway_node and not host_ng.is_proxy_gateway:
            # tmckay-fp in other words, if we are going to connect
            # through the proxy instead of the node we are actually
            # trying to reach
            # okay, the node group that supplies the proxy gateway
            # must have fps, but if a proxy is used the other
            # nodes are not required to have an fp.

            # so, this instance is assumed not to have a floating
            # ip and we are going to get to it through the proxy
            access_instance = proxy_gateway_node
            gateway_host = proxy_gateway_node.management_ip
            ng = proxy_gateway_node.node_group
            gateway_image_username = ng.image_username

        proxy_command = None
        if CONF.proxy_command:
            # Build a session through a user-defined socket
            proxy_command = CONF.proxy_command

        # tmckay-fp we have the node_group for the instance right here
        # okay, this test here whether access_instance.management_ip is an
        # fp -- just compare to internal?
        # in the neutron case, we check the node group for the
        # access_instance and look for fp
        # in the nova case, we compare management_ip to internal_ip or even
        # use the nova interface
        elif CONF.use_namespaces and not net_utils.has_floating_ip(
                access_instance):
            # Build a session through a netcat socket in the Neutron namespace
            proxy_command = (
                'ip netns exec qrouter-{router_id} nc {host} {port}')

        # proxy_command is currently a template, turn it into a real command
        # i.e. dereference {host}, {port}, etc.
        if proxy_command:
            rootwrap = CONF.rootwrap_command if CONF.use_rootwrap else ''
            proxy_command = self._build_proxy_command(
                proxy_command, instance=access_instance, port=22,
                info=None, rootwrap_command=rootwrap)

        return (self.instance.management_ip,
                host_ng.image_username,
                cluster.management_private_key,
                proxy_command,
                gateway_host,
                gateway_image_username)

    def _run(self, func, *args, **kwargs):
        """Run *func* in a fresh child process with an SSH connection."""
        proc = procutils.start_subprocess()

        try:
            procutils.run_in_subprocess(proc, _connect,
                                        self._get_conn_params())
            return procutils.run_in_subprocess(proc, func, args, kwargs)
        except Exception:
            with excutils.save_and_reraise_exception():
                procutils.shutdown_subprocess(proc, _cleanup)
        finally:
            # NOTE(review): on the exception path shutdown_subprocess runs
            # twice (here and in the except clause above) -- confirm it is
            # safe to call on an already shut-down process.
            procutils.shutdown_subprocess(proc, _cleanup)

    def _run_with_log(self, func, timeout, description, *args, **kwargs):
        """Run *func* under a timeout, logging the elapsed time."""
        start_time = time.time()
        try:
            with e_timeout.Timeout(timeout,
                                   ex.TimeoutException(timeout,
                                                       op_name=description)):
                return self._run(func, *args, **kwargs)
        finally:
            self._log_command('"%s" took %.1f seconds to complete' % (
                description, time.time() - start_time))

    def _run_s(self, func, timeout, description, *args, **kwargs):
        """Like _run_with_log, but gated by the remote-operation semaphores."""
        timeout = _get_ssh_timeout(func, timeout)
        _acquire_remote_semaphore()
        try:
            return self._run_with_log(func, timeout,
                                      description, *args, **kwargs)
        finally:
            _release_remote_semaphore()

    def get_http_client(self, port, info=None):
        """Return an HTTP session to this instance, proxied when needed."""
        self._log_command('Retrieving HTTP session for {0}:{1}'.format(
            self.instance.management_ip, port))

        host_ng = self.instance.node_group
        cluster = host_ng.cluster
        access_instance = self.instance
        access_port = port
        proxy_gateway_node = cluster.get_proxy_gateway_node()

        gateway_host = None
        gateway_username = None
        gateway_private_key = None
        if proxy_gateway_node and not host_ng.is_proxy_gateway:
            # Route through the proxy gateway's SSH port instead.
            access_instance = proxy_gateway_node
            access_port = 22
            gateway_host = proxy_gateway_node.management_ip
            gateway_username = proxy_gateway_node.node_group.image_username
            gateway_private_key = cluster.management_private_key

        proxy_command = None
        if CONF.proxy_command:
            # Build a session through a user-defined socket
            proxy_command = CONF.proxy_command

        # tmckay-fp again we can check the node group for the instance
        # what are the implications for nova here? None, because use_namespaces
        # is synonomous with use_neutron
        # this is a test on whether access_instance has a floating_ip
        # in the neutron case, we check the node group for the
        # access_instance and look for fp
        # in the nova case, we compare management_ip to internal_ip or even
        # use the nova interface
        elif (CONF.use_namespaces and not net_utils.has_floating_ip(
                access_instance)):
            # need neutron info
            if not info:
                info = self.get_neutron_info(access_instance)
            # Build a session through a netcat socket in the Neutron namespace
            proxy_command = (
                'ip netns exec qrouter-{router_id} nc {host} {port}')

        # proxy_command is currently a template, turn it into a real command
        # i.e. dereference {host}, {port}, etc.
        if proxy_command:
            rootwrap = CONF.rootwrap_command if CONF.use_rootwrap else ''
            proxy_command = self._build_proxy_command(
                proxy_command, instance=access_instance, port=access_port,
                info=info, rootwrap_command=rootwrap)

        return _get_http_client(self.instance.management_ip, port,
                                proxy_command, gateway_host,
                                gateway_username,
                                gateway_private_key)

    def close_http_session(self, port):
        """Close and evict the cached HTTP session for this instance:port."""
        global _sessions

        host = self.instance.management_ip
        self._log_command(_("Closing HTTP session for %(host)s:%(port)s") % {
            'host': host, 'port': port})

        session = _sessions.get((host, port), None)
        if session is None:
            raise ex.NotFoundException(
                {'host': host, 'port': port},
                _('Session for %(host)s:%(port)s not cached'))

        session.close()
        del _sessions[(host, port)]

    def execute_command(self, cmd, run_as_root=False, get_stderr=False,
                        raise_when_error=True, timeout=None):
        """Run *cmd* on the instance; see _execute_command for semantics."""
        description = _('Executing "%s"') % cmd
        self._log_command(description)
        return self._run_s(_execute_command, timeout, description,
                           cmd, run_as_root, get_stderr, raise_when_error)

    def write_file_to(self, remote_file, data, run_as_root=False,
                      timeout=None):
        description = _('Writing file "%s"') % remote_file
        self._log_command(description)
        self._run_s(_write_file_to, timeout, description,
                    remote_file, data, run_as_root)

    def write_files_to(self, files, run_as_root=False, timeout=None):
        description = _('Writing files "%s"') % list(files)
        self._log_command(description)
        self._run_s(_write_files_to, timeout, description, files, run_as_root)

    def append_to_file(self, r_file, data, run_as_root=False, timeout=None):
        description = _('Appending to file "%s"') % r_file
        self._log_command(description)
        self._run_s(_append_to_file, timeout, description,
                    r_file, data, run_as_root)

    def append_to_files(self, files, run_as_root=False, timeout=None):
        description = _('Appending to files "%s"') % list(files)
        self._log_command(description)
        self._run_s(_append_to_files, timeout, description, files, run_as_root)

    def read_file_from(self, remote_file, run_as_root=False, timeout=None):
        description = _('Reading file "%s"') % remote_file
        self._log_command(description)
        return self._run_s(_read_file_from, timeout, description,
                           remote_file, run_as_root)

    def get_os_distrib(self, timeout=None):
        """Return the instance's distro name (e.g. 'ubuntu', 'centos')."""
        return self._run_s(_get_os_distrib, timeout, "get_os_distrib")

    def get_os_version(self, timeout=None):
        """Return the instance's distro version string."""
        return self._run_s(_get_os_version, timeout, "get_os_version")

    def install_packages(self, packages, timeout=None):
        description = _('Installing packages "%s"') % list(packages)
        self._log_command(description)
        self._run_s(_install_packages, timeout, description, packages)

    def update_repository(self, timeout=None):
        description = _('Updating repository')
        self._log_command(description)
        self._run_s(_update_repository, timeout, description)

    def replace_remote_string(self, remote_file, old_str, new_str,
                              timeout=None):
        description = _('In file "%(file)s" replacing string '
                        '"%(old_string)s" with "%(new_string)s"') % {
            "file": remote_file, "old_string": old_str, "new_string": new_str}
        self._log_command(description)
        self._run_s(_replace_remote_string, timeout, description,
                    remote_file, old_str, new_str)

    def execute_on_vm_interactive(self, cmd, matcher, timeout=None):
        """Runs given command and responds to prompts.

        'cmd' is a command to execute.

        'matcher' is an object which provides responses on command's
        prompts. It should have two methods implemented:
         * get_response(buf) - returns response on prompt if it is
             found  in 'buf' string, which is a part of command output.
             If no prompt is found, the method should return None.
         * is_eof(buf) - returns True if current 'buf' indicates that
             the command is finished. False should be returned
             otherwise.
        """
        description = _('Executing interactively "%s"') % cmd
        self._log_command(description)
        self._run_s(_execute_on_vm_interactive, timeout,
                    description, cmd, matcher)

    def _log_command(self, str):
        # NOTE: the parameter name shadows the builtin 'str'; kept for
        # compatibility with existing callers/overrides.
        with context.set_current_instance_id(self.instance.instance_id):
            LOG.debug(str)
class BulkInstanceInteropHelper(InstanceInteropHelper):
    """Remote helper that keeps one connected child process for many calls.

    Created by InstanceInteropHelper.__enter__; the caller must invoke
    close() (done by __exit__) to shut the subprocess down.
    """

    def __init__(self, instance):
        super(BulkInstanceInteropHelper, self).__init__(instance)
        self.proc = procutils.start_subprocess()
        try:
            procutils.run_in_subprocess(self.proc, _connect,
                                        self._get_conn_params())
        except Exception:
            with excutils.save_and_reraise_exception():
                procutils.shutdown_subprocess(self.proc, _cleanup)

    def close(self):
        procutils.shutdown_subprocess(self.proc, _cleanup)

    def _run(self, func, *args, **kwargs):
        # Reuse the long-lived subprocess instead of spawning a new one.
        return procutils.run_in_subprocess(self.proc, func, args, kwargs)

    def _run_s(self, func, timeout, description, *args, **kwargs):
        # No semaphore handling here: the slots were taken by
        # InstanceInteropHelper.__enter__ for the whole bulk session.
        timeout = _get_ssh_timeout(func, timeout)
        return self._run_with_log(func, timeout, description, *args, **kwargs)
class SshRemoteDriver(remote.RemoteDriver):
    """Remote driver implementation based on SSH (see module docstring)."""

    def get_type_and_version(self):
        # Identifier in "<type>.<major>.<minor>" form.
        return "ssh.1.0"

    def setup_remote(self, engine):
        global _global_remote_semaphore
        global INFRA

        _global_remote_semaphore = semaphore.Semaphore(
            CONF.global_remote_threshold)
        INFRA = engine

    def get_remote(self, instance):
        return InstanceInteropHelper(instance)

    def get_userdata_template(self):
        # SSH does not need any instance customization
        return ""
| apache-2.0 |
serviceagility/boto | boto/mashups/__init__.py | 782 | 1108 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
| mit |
arikpoz/mxnet | example/speech_recognition/stt_datagenerator.py | 11 | 10656 | from __future__ import absolute_import, division, print_function
import json
import random
import numpy as np
from stt_utils import calc_feat_dim, spectrogram_from_file
from config_util import generate_file_path
from log_util import LogUtil
from label_util import LabelUtil
from stt_bi_graphemes_util import generate_bi_graphemes_label
class DataGenerator(object):
def __init__(self, save_dir, model_name, step=10, window=20, max_freq=8000, desc_file=None):
    """
    Params:
        save_dir (str): Directory used when persisting generated artifacts
        model_name (str): Name used to build file paths for this model
        step (int): Step size in milliseconds between windows
        window (int): FFT window size in milliseconds
        max_freq (int): Only FFT bins corresponding to frequencies between
            [0, max_freq] are returned
        desc_file (str, optional): Path to a JSON-line file that contains
            labels and paths to the audio files. If this is None, then
            load metadata right away
    """
    #calc_feat_dim returns int(0.001*window*max_freq)+1
    super(DataGenerator, self).__init__()
    # feat_dim=0.001*20*8000+1=161
    self.feat_dim = calc_feat_dim(window, max_freq)
    # 1d 161 length of array filled with zeros
    self.feats_mean = np.zeros((self.feat_dim,))
    # 1d 161 length of array filled with 1s
    self.feats_std = np.ones((self.feat_dim,))
    self.max_input_length = 0
    self.max_length_list_in_batch = []
    # 1d 161 length of array filled with random value
    #[0.0, 1.0)
    self.rng = random.Random()
    # NOTE(review): metadata is loaded before step/window/max_freq are
    # stored on self -- confirm load_metadata_from_desc_file never reads
    # those attributes.
    if desc_file is not None:
        self.load_metadata_from_desc_file(desc_file)
    self.step = step
    self.window = window
    self.max_freq = max_freq
    self.save_dir = save_dir
    self.model_name = model_name
def get_meta_from_file(self, feats_mean, feats_std):
    """Install externally computed normalization statistics."""
    self.feats_mean, self.feats_std = feats_mean, feats_std
def featurize(self, audio_clip, overwrite=False):
    """ For a given audio clip, calculate the log of its Fourier Transform
    Params:
        audio_clip(str): Path to the audio clip
        overwrite(bool): Forwarded to spectrogram_from_file -- presumably
            forces recomputation of a cached spectrogram; confirm against
            stt_utils.
    """
    # Window/step/max_freq come from this generator's configuration.
    return spectrogram_from_file(
        audio_clip, step=self.step, window=self.window,
        max_freq=self.max_freq, overwrite=overwrite)
def load_metadata_from_desc_file(self, desc_file, partition='train',
max_duration=16.0,):
""" Read metadata from the description file
(possibly takes long, depending on the filesize)
Params:
desc_file (str): Path to a JSON-line file that contains labels and
paths to the audio files
partition (str): One of 'train', 'validation' or 'test'
max_duration (float): In seconds, the maximum duration of
utterances to train or test on
"""
logger = LogUtil().getlogger()
logger.info('Reading description file: {} for partition: {}'
.format(desc_file, partition))
audio_paths, durations, texts = [], [], []
with open(desc_file) as json_line_file:
for line_num, json_line in enumerate(json_line_file):
try:
spec = json.loads(json_line)
if float(spec['duration']) > max_duration:
continue
audio_paths.append(spec['key'])
durations.append(float(spec['duration']))
texts.append(spec['text'])
except Exception as e:
# Change to (KeyError, ValueError) or
# (KeyError,json.decoder.JSONDecodeError), depending on
# json module version
logger.warn('Error reading line #{}: {}'
.format(line_num, json_line))
logger.warn(str(e))
if partition == 'train':
self.count = len(audio_paths)
self.train_audio_paths = audio_paths
self.train_durations = durations
self.train_texts = texts
elif partition == 'validation':
self.val_audio_paths = audio_paths
self.val_durations = durations
self.val_texts = texts
self.val_count = len(audio_paths)
elif partition == 'test':
self.test_audio_paths = audio_paths
self.test_durations = durations
self.test_texts = texts
else:
raise Exception("Invalid partition to load metadata. "
"Must be train/validation/test")
def load_train_data(self, desc_file):
self.load_metadata_from_desc_file(desc_file, 'train')
def load_validation_data(self, desc_file):
self.load_metadata_from_desc_file(desc_file, 'validation')
@staticmethod
def sort_by_duration(durations, audio_paths, texts):
return zip(*sorted(zip(durations, audio_paths, texts)))
def normalize(self, feature, eps=1e-14):
return (feature - self.feats_mean) / (self.feats_std + eps)
def get_max_label_length(self, partition, is_bi_graphemes=False):
if partition == 'train':
texts = self.train_texts + self.val_texts
elif partition == 'test':
texts = self.train_texts
else:
raise Exception("Invalid partition to load metadata. "
"Must be train/validation/test")
if is_bi_graphemes:
self.max_label_length = max([len(generate_bi_graphemes_label(text)) for text in texts])
else:
self.max_label_length = max([len(text) for text in texts])
return self.max_label_length
def get_max_seq_length(self, partition):
if partition == 'train':
audio_paths = self.train_audio_paths + self.val_audio_paths
durations = self.train_durations + self.val_durations
elif partition == 'test':
audio_paths = self.train_audio_paths
durations = self.train_durations
else:
raise Exception("Invalid partition to load metadata. "
"Must be train/validation/test")
max_duration_indexes = durations.index(max(durations))
max_seq_length = self.featurize(audio_paths[max_duration_indexes]).shape[0]
self.max_seq_length=max_seq_length
return max_seq_length
def prepare_minibatch(self, audio_paths, texts, overwrite=False, is_bi_graphemes=False):
""" Featurize a minibatch of audio, zero pad them and return a dictionary
Params:
audio_paths (list(str)): List of paths to audio files
texts (list(str)): List of texts corresponding to the audio files
Returns:
dict: See below for contents
"""
assert len(audio_paths) == len(texts),\
"Inputs and outputs to the network must be of the same number"
# Features is a list of (timesteps, feature_dim) arrays
# Calculate the features for each audio clip, as the log of the
# Fourier Transform of the audio
features = [self.featurize(a, overwrite=overwrite) for a in audio_paths]
input_lengths = [f.shape[0] for f in features]
feature_dim = features[0].shape[1]
mb_size = len(features)
# Pad all the inputs so that they are all the same length
x = np.zeros((mb_size, self.max_seq_length, feature_dim))
y = np.zeros((mb_size, self.max_label_length))
labelUtil = LabelUtil.getInstance()
label_lengths = []
for i in range(mb_size):
feat = features[i]
feat = self.normalize(feat) # Center using means and std
x[i, :feat.shape[0], :] = feat
if is_bi_graphemes:
label = generate_bi_graphemes_label(texts[i])
label = labelUtil.convert_bi_graphemes_to_num(label)
y[i, :len(label)] = label
else:
label = labelUtil.convert_word_to_num(texts[i])
y[i, :len(texts[i])] = label
label_lengths.append(len(label))
return {
'x': x, # (0-padded features of shape(mb_size,timesteps,feat_dim)
'y': y, # list(int) Flattened labels (integer sequences)
'texts': texts, # list(str) Original texts
'input_lengths': input_lengths, # list(int) Length of each input
'label_lengths': label_lengths, # list(int) Length of each label
}
def iterate_test(self, minibatch_size=16):
return self.iterate(self.test_audio_paths, self.test_texts,
minibatch_size)
def iterate_validation(self, minibatch_size=16):
return self.iterate(self.val_audio_paths, self.val_texts,
minibatch_size)
def sample_normalize(self, k_samples=1000, overwrite=False):
""" Estimate the mean and std of the features from the training set
Params:
k_samples (int): Use this number of samples for estimation
"""
# if k_samples is negative then it goes through total dataset
if k_samples < 0:
audio_paths_iter = iter(self.audio_paths)
# using sample
else:
k_samples = min(k_samples, len(self.train_audio_paths))
samples = self.rng.sample(self.train_audio_paths, k_samples)
audio_paths_iter = iter(samples)
audio_clip = audio_paths_iter.next()
feat = self.featurize(audio_clip=audio_clip, overwrite=overwrite)
feat_squared = np.square(feat)
count = float(feat.shape[0])
dim = feat.shape[1]
for iter_index in range(len(samples) - 1):
next_feat = self.featurize(audio_clip=audio_paths_iter.next(), overwrite=overwrite)
next_feat_squared = np.square(next_feat)
feat_vertically_stacked = np.concatenate((feat, next_feat)).reshape(-1, dim)
feat = np.sum(feat_vertically_stacked, axis=0, keepdims=True)
feat_squared_vertically_stacked = np.concatenate((feat_squared, next_feat_squared)).reshape(-1, dim)
feat_squared = np.sum(feat_squared_vertically_stacked, axis=0, keepdims=True)
count = count + float(next_feat.shape[0])
self.feats_mean = feat / float(count)
self.feats_std = np.sqrt(feat_squared / float(count) - np.square(self.feats_mean))
np.savetxt(generate_file_path(self.save_dir, self.model_name, 'feats_mean'), self.feats_mean)
np.savetxt(generate_file_path(self.save_dir, self.model_name, 'feats_std'), self.feats_std)
| apache-2.0 |
kamcpp/tensorflow | tensorflow/models/image/cifar10/cifar10_input.py | 20 | 9629 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Routine for decoding the CIFAR-10 binary file format."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
# Process images of this size. Note that this differs from the original CIFAR
# image size of 32 x 32. If one alters this number, then the entire model
# architecture will change and any model would need to be retrained.
IMAGE_SIZE = 24
# Global constants describing the CIFAR-10 data set.
NUM_CLASSES = 10
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 50000
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 10000
def read_cifar10(filename_queue):
  """Reads and parses examples from CIFAR10 data files.
  Recommendation: if you want N-way read parallelism, call this function
  N times. This will give you N independent Readers reading different
  files & positions within those files, which will give better mixing of
  examples.
  Args:
    filename_queue: A queue of strings with the filenames to read from.
  Returns:
    An object representing a single example, with the following fields:
      height: number of rows in the result (32)
      width: number of columns in the result (32)
      depth: number of color channels in the result (3)
      key: a scalar string Tensor describing the filename & record number
        for this example.
      label: an int32 Tensor with the label in the range 0..9.
      uint8image: a [height, width, depth] uint8 Tensor with the image data
  """
  class CIFAR10Record(object):
    """Plain attribute container for the fields documented above."""
    pass
  result = CIFAR10Record()
  # Dimensions of the images in the CIFAR-10 dataset.
  # See http://www.cs.toronto.edu/~kriz/cifar.html for a description of the
  # input format.
  label_bytes = 1 # 2 for CIFAR-100
  result.height = 32
  result.width = 32
  result.depth = 3
  image_bytes = result.height * result.width * result.depth
  # Every record consists of a label followed by the image, with a
  # fixed number of bytes for each.
  record_bytes = label_bytes + image_bytes
  # Read a record, getting filenames from the filename_queue. No
  # header or footer in the CIFAR-10 format, so we leave header_bytes
  # and footer_bytes at their default of 0.
  reader = tf.FixedLengthRecordReader(record_bytes=record_bytes)
  result.key, value = reader.read(filename_queue)
  # Convert from a string to a vector of uint8 that is record_bytes long.
  # NOTE: this rebinds record_bytes from the int above to a uint8 tensor.
  record_bytes = tf.decode_raw(value, tf.uint8)
  # The first bytes represent the label, which we convert from uint8->int32.
  result.label = tf.cast(
      tf.slice(record_bytes, [0], [label_bytes]), tf.int32)
  # The remaining bytes after the label represent the image, which we reshape
  # from [depth * height * width] to [depth, height, width].
  depth_major = tf.reshape(tf.slice(record_bytes, [label_bytes], [image_bytes]),
                           [result.depth, result.height, result.width])
  # Convert from [depth, height, width] to [height, width, depth].
  result.uint8image = tf.transpose(depth_major, [1, 2, 0])
  return result
def _generate_image_and_label_batch(image, label, min_queue_examples,
                                    batch_size, shuffle):
  """Construct a queued batch of images and labels.
  Args:
    image: 3-D Tensor of [height, width, 3] of type.float32.
    label: 1-D Tensor of type.int32
    min_queue_examples: int32, minimum number of samples to retain
      in the queue that provides of batches of examples.
    batch_size: Number of images per batch.
    shuffle: boolean indicating whether to use a shuffling queue.
  Returns:
    images: Images. 4D tensor of [batch_size, height, width, 3] size.
    labels: Labels. 1D tensor of [batch_size] size.
  """
  # Fan example preprocessing out across several threads, then dequeue
  # 'batch_size' (image, label) pairs from the shared example queue.
  num_preprocess_threads = 16
  common_kwargs = dict(
      batch_size=batch_size,
      num_threads=num_preprocess_threads,
      capacity=min_queue_examples + 3 * batch_size)
  if shuffle:
    images, label_batch = tf.train.shuffle_batch(
        [image, label],
        min_after_dequeue=min_queue_examples,
        **common_kwargs)
  else:
    images, label_batch = tf.train.batch([image, label], **common_kwargs)
  # Display the training images in the visualizer.
  tf.image_summary('images', images)
  return images, tf.reshape(label_batch, [batch_size])
def distorted_inputs(data_dir, batch_size):
  """Construct distorted input for CIFAR training using the Reader ops.
  Args:
    data_dir: Path to the CIFAR-10 data directory.
    batch_size: Number of images per batch.
  Returns:
    images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
    labels: Labels. 1D tensor of [batch_size] size.
  """
  # CIFAR-10 ships its training set as five files data_batch_1..5.bin.
  filenames = [os.path.join(data_dir, 'data_batch_%d.bin' % i)
               for i in xrange(1, 6)]
  # Fail fast if any of the expected batch files is missing.
  for f in filenames:
    if not tf.gfile.Exists(f):
      raise ValueError('Failed to find file: ' + f)
  # Create a queue that produces the filenames to read.
  filename_queue = tf.train.string_input_producer(filenames)
  # Read examples from files in the filename queue.
  read_input = read_cifar10(filename_queue)
  reshaped_image = tf.cast(read_input.uint8image, tf.float32)
  height = IMAGE_SIZE
  width = IMAGE_SIZE
  # Image processing for training the network. Note the many random
  # distortions applied to the image.
  # Randomly crop a [height, width] section of the image.
  distorted_image = tf.random_crop(reshaped_image, [height, width, 3])
  # Randomly flip the image horizontally.
  distorted_image = tf.image.random_flip_left_right(distorted_image)
  # Because these operations are not commutative, consider randomizing
  # the order their operation.
  distorted_image = tf.image.random_brightness(distorted_image,
                                               max_delta=63)
  distorted_image = tf.image.random_contrast(distorted_image,
                                             lower=0.2, upper=1.8)
  # Subtract off the mean and divide by the variance of the pixels.
  float_image = tf.image.per_image_whitening(distorted_image)
  # Ensure that the random shuffling has good mixing properties.
  min_fraction_of_examples_in_queue = 0.4
  min_queue_examples = int(NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN *
                           min_fraction_of_examples_in_queue)
  print ('Filling queue with %d CIFAR images before starting to train. '
         'This will take a few minutes.' % min_queue_examples)
  # Generate a batch of images and labels by building up a queue of examples.
  return _generate_image_and_label_batch(float_image, read_input.label,
                                         min_queue_examples, batch_size,
                                         shuffle=True)
def inputs(eval_data, data_dir, batch_size):
  """Construct input for CIFAR evaluation using the Reader ops.
  Args:
    eval_data: bool, indicating if one should use the train or eval data set.
    data_dir: Path to the CIFAR-10 data directory.
    batch_size: Number of images per batch.
  Returns:
    images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
    labels: Labels. 1D tensor of [batch_size] size.
  """
  # Pick the training batches or the single test batch depending on
  # eval_data, along with the matching epoch size.
  if not eval_data:
    filenames = [os.path.join(data_dir, 'data_batch_%d.bin' % i)
                 for i in xrange(1, 6)]
    num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
  else:
    filenames = [os.path.join(data_dir, 'test_batch.bin')]
    num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
  # Fail fast if any expected data file is missing.
  for f in filenames:
    if not tf.gfile.Exists(f):
      raise ValueError('Failed to find file: ' + f)
  # Create a queue that produces the filenames to read.
  filename_queue = tf.train.string_input_producer(filenames)
  # Read examples from files in the filename queue.
  read_input = read_cifar10(filename_queue)
  reshaped_image = tf.cast(read_input.uint8image, tf.float32)
  height = IMAGE_SIZE
  width = IMAGE_SIZE
  # Image processing for evaluation: deterministic center crop only (no
  # random distortions, unlike distorted_inputs()).
  # Crop the central [height, width] of the image.
  resized_image = tf.image.resize_image_with_crop_or_pad(reshaped_image,
                                                         width, height)
  # Subtract off the mean and divide by the variance of the pixels.
  float_image = tf.image.per_image_whitening(resized_image)
  # Ensure that the random shuffling has good mixing properties.
  min_fraction_of_examples_in_queue = 0.4
  min_queue_examples = int(num_examples_per_epoch *
                           min_fraction_of_examples_in_queue)
  # Generate a batch of images and labels by building up a queue of examples.
  return _generate_image_and_label_batch(float_image, read_input.label,
                                         min_queue_examples, batch_size,
                                         shuffle=False)
| apache-2.0 |
xph906/SDN-NW | example/graphDeps.py | 148 | 2081 | #!/usr/bin/python
import urllib2
import json
import sys
def simple_json_get(url):
    # Fetch *url* and decode the whole response body as JSON.
    response = urllib2.urlopen(url)
    return json.loads(response.read())
def shorten(s):
    # Abbreviate the long Java-style package prefixes so graph labels
    # stay compact.
    abbreviations = (('net.floodlightcontroller', 'n.f'),
                     ('com.bigswitch', 'c.b'))
    for long_name, short_name in abbreviations:
        s = s.replace(long_name, short_name)
    return s
def usage(s):
    # Print the usage help plus the specific error *s* to stderr, then
    # terminate with a non-zero exit status.
    message = ("Usage:\ngrahDeps.py hostname [port]\n%s" % s
               + "\n\n\n\n writes data to 'hostname.dot' for use with graphviz\n")
    sys.stderr.write(message)
    sys.exit(1)
if __name__ == '__main__':
    # Defaults; overridden by the positional command-line arguments.
    host='localhost'
    port=8080
    if len(sys.argv) == 1 or sys.argv[1] == '-h' or sys.argv[1] == '--help':
        usage("need to specify hostname")
    host = sys.argv[1]
    if len(sys.argv) > 2:
        port = int(sys.argv[2])
    sys.stderr.write("Connecting to %s:%d ..." % (host,port))
    # Floodlight REST endpoint listing the loaded controller modules.
    URL="http://%s:%d/wm/core/module/loaded/json" % (host,port)
    deps = simple_json_get(URL)
    serviceMap = {}
    nodeMap = {}
    nodeCount = 0
    sys.stderr.write("Writing to %s.dot ..." % (host))
    f = open("%s.dot" % host, 'w')
    f.write( "digraph Deps {\n")
    # First pass: emit one graphviz node per module, labeled with the
    # services it provides, and remember which module implements which
    # service.  NOTE: dict.iteritems() makes this script Python 2 only.
    for mod, info in deps.iteritems():
        # sys.stderr.write("Discovered module %s\n" % mod)
        nodeMap[mod] = "n%d" % nodeCount
        nodeCount += 1
        label = shorten(mod) + "\\n"
        for service, serviceImpl in info['provides'].iteritems():
            # sys.stderr.write(" Discovered service %s implemented with %s\n" % (service,serviceImpl))
            label += "\\nService=%s" % shorten(service)
            serviceMap[serviceImpl] = mod
        f.write(" %s [ label=\"%s\", color=\"blue\"];\n" % (nodeMap[mod], label))
    f.write("\n") # for readability
    # Second pass: emit one edge per dependency, resolving each required
    # service implementation back to its providing module.
    for mod, info in deps.iteritems():
        for dep, serviceImpl in info['depends'].iteritems():
            f.write(" %s -> %s [ label=\"%s\"];\n" % (
                nodeMap[mod],
                shorten(nodeMap[serviceMap[serviceImpl]]),
                shorten(dep)))
    f.write("}\n")
    f.close();
    sys.stderr.write("Now type\ndot -Tpdf -o %s.pdf %s.dot\n" % (
        host, host))
| apache-2.0 |
alexharmenta/Inventationery | Inventationery/apps/Vendor/views.py | 1 | 12840 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author: Alex
# @Date: 2015-11-16 19:22:12
# @Last Modified by: Alex
# @Last Modified time: 2016-01-03 15:34:16
# views.py
from django.contrib.auth.decorators import login_required, permission_required
from django.utils.decorators import method_decorator
from django.contrib import messages
from django.shortcuts import render_to_response, get_object_or_404
from django.forms import inlineformset_factory
from django.http import HttpResponseRedirect, HttpResponse
from django.template import RequestContext
from django.views.generic import ListView, DeleteView
import csv
import time
from io import BytesIO
from reportlab.platypus import SimpleDocTemplate, Paragraph, TableStyle
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib import colors
from reportlab.lib.pagesizes import letter
from reportlab.platypus import Table
from reportlab.lib.pagesizes import landscape
from .models import VendorModel
from Inventationery.apps.DirParty.models import DirPartyModel
from Inventationery.apps.LogisticsPostalAddress.models import (
LogisticsPostalAddressModel)
from Inventationery.apps.LogisticsElectronicAddress.models import (
LogisticsElectronicAddressModel)
from .forms import (VendorForm,
PartyForm,)
# CBV: View to list all vendors ordered by AccountNum
# ----------------------------------------------------------------------------
class VendorListView(ListView):
    """List every vendor, ordered by account number (login required)."""
    model = VendorModel
    template_name = 'Vendor/VendorList.html'
    context_object_name = 'vendors'

    def get_queryset(self):
        # The original called super().get_queryset() and immediately threw
        # the result away; build the ordered queryset directly instead.
        return VendorModel.objects.all().order_by('AccountNum')

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # BUG FIX: the original used super(self.__class__, self), which
        # recurses infinitely as soon as this view is subclassed; always
        # name the class explicitly in Python 2 style super() calls.
        return super(VendorListView, self).dispatch(request, *args, **kwargs)
# FBV: View for create new Vendor
@login_required
@permission_required('Compras')
def createVendorView(request):
    """Create a vendor together with its party, contacts and addresses.

    GET renders the empty forms; POST validates the vendor/party forms plus
    two inline formsets (electronic contacts, postal addresses), saves
    everything and redirects to the vendor's update page.
    """
    vendor = VendorModel()
    party = DirPartyModel()
    # Up to 5 inline contact / address rows attached to the party.
    ElectronicFormSet = inlineformset_factory(
        DirPartyModel, LogisticsElectronicAddressModel,
        extra=1, max_num=5, fields='__all__')
    PostalFormSet = inlineformset_factory(
        DirPartyModel, LogisticsPostalAddressModel,
        extra=1, max_num=5, fields='__all__')
    if request.method == 'POST':
        # Form submitted: bind the request data.
        vendor_form = VendorForm(request.POST)
        party_form = PartyForm(request.POST)
        electronic_formset = ElectronicFormSet(
            request.POST, instance=party, prefix='efs')
        postal_formset = PostalFormSet(
            request.POST, instance=party, prefix='pfs')
        if vendor_form.is_valid() and party_form.is_valid():
            # Re-bind the formsets so they attach to the saved party below.
            electronic_formset = ElectronicFormSet(
                request.POST, instance=party, prefix='efs')
            postal_formset = PostalFormSet(
                request.POST, instance=party, prefix='pfs')
            party = party_form.save()
            vendor = vendor_form.save(commit=False)
            vendor.Party = party
            vendor.save()
            messages.success(request, "Proveedor creado correctamente")
            # Persist only contact rows carrying both a description and a
            # contact value; warn about invalid rows.
            for electronic_form in electronic_formset:
                if electronic_form.is_valid():
                    description = electronic_form.cleaned_data.get(
                        'Description')
                    contact = electronic_form.cleaned_data.get('Contact')
                    if description and contact:
                        Electronic = electronic_form.save(commit=False)
                        Electronic.Party = party
                        Electronic.save()
                else:
                    messages.warning(
                        request, 'Revise la información de contacto')
            # Same treatment for postal address rows (description only).
            for postal_form in postal_formset:
                if postal_form.is_valid():
                    description = postal_form.cleaned_data.get(
                        'Description')
                    if description:
                        Postal = postal_form.save(commit=False)
                        Postal.Party = party
                        Postal.save()
                else:
                    messages.warning(
                        request, 'Revise la información de la dirección')
            redirect_url = "/vendor/update/" + str(vendor.AccountNum)
            return HttpResponseRedirect(redirect_url)
        else:
            messages.error(
                request, "Ocurrió un error al crear el proveedor")
    else:
        # Initial GET: render unbound forms.
        vendor_form = VendorForm(instance=vendor)
        party_form = PartyForm(instance=party)
        electronic_formset = ElectronicFormSet(
            prefix='efs', instance=party)
        postal_formset = PostalFormSet(prefix='pfs', instance=party)
    return render_to_response('Vendor/CreateVendor.html',
                              {'vendor_form': vendor_form,
                               'party_form': party_form,
                               'electronic_formset': electronic_formset,
                               'postal_formset': postal_formset, },
                              context_instance=RequestContext(request))
# FBV: View for update an existing Vendor
@login_required
@permission_required('Compras')
def updateVendorView(request, AccountNum):
    """Edit an existing vendor plus its party, contacts and addresses.

    Inline rows that are no longer submitted are deleted: after saving the
    valid rows, any DB row of the party whose pk was not re-submitted is
    removed.
    """
    Vendor = get_object_or_404(VendorModel, AccountNum=AccountNum)
    Party = Vendor.Party
    # pks of the inline rows that survive this submission; every other row
    # belonging to the party is deleted below.
    EA_list = []
    PA_list = []
    ElectronicFormSet = inlineformset_factory(
        DirPartyModel, LogisticsElectronicAddressModel,
        extra=1, max_num=5, fields='__all__')
    PostalFormSet = inlineformset_factory(
        DirPartyModel, LogisticsPostalAddressModel,
        extra=1, max_num=5, fields='__all__')
    if request.method == 'POST':
        # Form submitted: bind the request data to the existing records.
        vendor_form = VendorForm(request.POST, instance=Vendor)
        party_form = PartyForm(request.POST, instance=Party)
        if vendor_form.is_valid() and party_form.is_valid():
            party = party_form.save()
            vendor = vendor_form.save(commit=False)
            vendor.Party = party
            vendor.save()
            messages.success(request, "Proveedor actualizado correctamente")
            electronic_formset = ElectronicFormSet(
                request.POST, prefix='efs', instance=Party)
            postal_formset = PostalFormSet(
                request.POST, prefix='pfs', instance=Party)
            # Save complete contact rows, collecting the surviving pks.
            for electronic_form in electronic_formset.forms:
                if electronic_form.is_valid():
                    description = electronic_form.cleaned_data.get(
                        'Description')
                    contact = electronic_form.cleaned_data.get('Contact')
                    if description and contact:
                        EA = electronic_form.save()
                        EA_list.append(EA.pk)
                else:
                    messages.warning(
                        request, 'Revise la información de contacto')
            if electronic_formset.is_valid():
                # Drop contact rows removed from the form.
                lea = LogisticsElectronicAddressModel.objects.filter(
                    Party=party)
                lea.exclude(pk__in=list(EA_list)).delete()
            for postal_form in postal_formset:
                if postal_form.is_valid():
                    description = postal_form.cleaned_data.get(
                        'Description')
                    if description:
                        PA = postal_form.save()
                        PA_list.append(PA.pk)
                else:
                    messages.warning(
                        request, 'Revise la información de dirección')
            if postal_formset.is_valid():
                # Drop address rows removed from the form.
                lpa = LogisticsPostalAddressModel.objects.filter(Party=Party)
                lpa.exclude(pk__in=list(PA_list)).delete()
        else:
            electronic_formset = ElectronicFormSet(
                request.POST, prefix='efs')
            postal_formset = PostalFormSet(
                request.POST, prefix='pfs')
            messages.error(
                request, "Ocurrió un error al actualizar el proveedor")
    else:
        # Initial GET: render forms bound to the current DB state.
        vendor_form = VendorForm(instance=Vendor)
        party_form = PartyForm(instance=Vendor.Party)
        electronic_formset = ElectronicFormSet(
            prefix='efs', instance=Vendor.Party)
        postal_formset = PostalFormSet(
            prefix='pfs', instance=Vendor.Party)
    return render_to_response('Vendor/UpdateVendor.html',
                              {'vendor_form': vendor_form,
                               'party_form': party_form,
                               'electronic_formset': electronic_formset,
                               'postal_formset': postal_formset,
                               'Vendor': Vendor, },
                              context_instance=RequestContext(request))
# CBV: View to delete an existing vendor
# ----------------------------------------------------------------------------
class DeleteVendorView(DeleteView):
    """Confirm and delete a vendor's party record, then redirect to the
    vendor list with a success message."""
    model = DirPartyModel
    template_name = 'Vendor/DeleteVendor.html'
    success_url = '/vendor'
    success_message = 'Se ha eliminado el proveedor'

    def delete(self, request, *args, **kwargs):
        """
        Calls the delete() method on the fetched object and then
        redirects to the success URL.
        """
        self.object = self.get_object()
        success_url = self.get_success_url()
        self.object.delete()
        messages.success(self.request,
                         'Se ha eliminado el proveedor',
                         extra_tags='msg')
        return HttpResponseRedirect(success_url)

    @method_decorator(login_required)
    def dispatch(self, request, *args, **kwargs):
        # BUG FIX: the original used super(self.__class__, self), which
        # recurses infinitely when the view is subclassed; name the class
        # explicitly instead.
        return super(DeleteVendorView, self).dispatch(
            request, *args, **kwargs)
# FBV: Export to csv
@login_required
@permission_required('Compras')
def export_csv(request):
    """Return every vendor as a CSV attachment named proveedores.csv."""
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="proveedores.csv"'
    writer = csv.writer(response)
    # Header row, then one row per vendor in DB order.
    writer.writerow(['Número de cuenta',
                     'Nombre',
                     'Tipo de proveedor',
                     'RFC',
                     'Moneda',
                     'Direccion principal',
                     'Contacto principal'])
    for vendor in VendorModel.objects.all():
        row = [vendor.AccountNum,
               vendor.Party.NameAlias,
               vendor.AccountType,
               vendor.VATNum,
               vendor.CurrencyCode,
               vendor.get_PrimaryAddress(),
               vendor.get_PrimaryElectronic()]
        writer.writerow(row)
    return response
# FBV: Export to pdf
@login_required
@permission_required('Compras')
def export_pdf(request):
    """Render the full vendor list as an inline landscape-letter PDF."""
    response = HttpResponse(content_type='application/pdf')
    # pdf_name = "proveedores.pdf" # llamado proveedores
    # response['Content-Disposition'] = 'attachment; filename=%s' % pdf_name
    buff = BytesIO()
    doc = SimpleDocTemplate(buff,
                            pagesize=landscape(letter),
                            rightMargin=20,
                            leftMargin=20,
                            topMargin=30,
                            bottomMargin=20,
                            )
    # Flowables that make up the document, in render order.
    proveedores = []
    styles = getSampleStyleSheet()
    # Header: report title on the left, today's date on the right.
    title = Paragraph("Listado de proveedores", styles['Heading2'])
    date = Paragraph(time.strftime("%d/%m/%Y"), styles['Heading2'])
    header = (title, date)
    t = Table([''] + [header] + [''])
    t.setStyle(TableStyle(
        [
            ('ALIGN', (1, 1), (1, 1), 'RIGHT'),
            ('TEXTCOLOR', (0, 1), (0, 0), colors.green),
        ]
    ))
    proveedores.append(t)
    # Data table: one heading row plus one row per vendor.
    headings = ('Número de cuenta', 'Nombre', 'Tipo de proveedor',
                'RFC', 'Moneda', 'Direccion principal', 'Contacto principal')
    vendors = [(v.AccountNum, v.Party.NameAlias, v.AccountType,
                v.VATNum, v.CurrencyCode, v.get_PrimaryAddress(),
                v.get_PrimaryElectronic())
               for v in VendorModel.objects.all()]
    t = Table([headings] + vendors)
    t.setStyle(TableStyle(
        [
            ('GRID', (0, 0), (6, -1), 0.5, colors.dodgerblue),
            ('LINEBELOW', (0, 0), (-1, 0), 0.5, colors.darkblue),
            ('BACKGROUND', (0, 0), (-1, 0), colors.dodgerblue)
        ]
    ))
    proveedores.append(t)
    # Render the PDF into the buffer and copy its bytes to the response.
    doc.build(proveedores)
    response.write(buff.getvalue())
    buff.close()
    return response
| bsd-3-clause |
lpakula/django-ftp-deploy | ftp_deploy/utils/deploy.py | 1 | 5535 | import os
import json
import tempfile
from celery import current_task
from ftp_deploy.conf import *
from ftp_deploy.models import Log, Task
from .core import LockError
from .repo import commits_parser, repository_parser
from .ftp import ftp_connection
from .email import notification_success, notification_fail
from .curl import curl_connection
class Deploy(object):
    """Deploy method responsible for perform deploying"""
    def __init__(self, host, payload, service, task_name):
        # host: site host used when building notification links.
        # payload: raw JSON webhook body; kept verbatim for logging/emails.
        # service: ftp_deploy service describing the repo and FTP target.
        # task_name: name of the queued Task row tracking this deploy.
        self.host = host
        self.service = service
        self.task = Task.objects.get(name=task_name)
        self.data = json.loads(payload)
        self.json_string = payload
        # Total number of files touched across all commits in the payload;
        # used for progress-percentage reporting.
        self.files_count = commits_parser(
            self.data['commits'], self.service.repo_source).files_count()
        self.ftp_host = self.service.ftp_host
        self.ftp_username = self.service.ftp_username
        self.ftp_password = self.service.ftp_password
        self.ftp_path = self.service.ftp_path
        self.repo = repository_parser(self.data, self.service)
        self.repo_username, self.repo_password = self.repo.credentials()
        self.user = self.repo.deploy_name()
    def perform(self):
        """
        Perform ftp connection and choose repository perform method
        (bitbucket or github)
        """
        # A 'Restore' deploy wipes the service's log tree before replaying.
        if self.user == 'Restore':
            self.service.get_logs_tree().delete()
        self.log = Log()
        self.log.payload = self.json_string
        self.log.service = self.service
        self.log.save()
        try:
            self.ftp = ftp_connection(
                self.ftp_host, self.ftp_username, self.ftp_password,
                self.ftp_path)
            if self.service.lock():
                raise LockError()
            self.task.active = True
            self.task.save()
            self.ftp.connect()
        except LockError as e:
            self.set_fail('Service Locked', e)
        except Exception as e:
            self.set_fail('FTP Connection', e)
        else:
            # Connection established: run the repo-specific sync.  The inner
            # finally always closes the FTP session and re-validates the
            # service, whether the sync succeeded or not.
            # NOTE(review): on the lock/connect failure paths above this
            # else (and its finally) never runs, so the Task row is not
            # deleted there -- confirm cleanup happens elsewhere.
            try:
                if self.service.repo_source == 'bb':
                    self.perform_bitbucket()
                if self.service.repo_source == 'gh':
                    self.perform_github()
            except Exception as e:
                self.set_fail(self.user, e)
            else:
                self.log.user = self.user
                self.log.status = True
                self.log.save()
                notification_success(self.host, self.service, self.json_string)
            finally:
                self.ftp.quit()
                self.service.validate()
                self.service.save()
                self.task.delete()
    def perform_bitbucket(self):
        """perform bitbucket deploy"""
        curl = curl_connection(self.repo_username, self.repo_password)
        curl.authenticate()
        i = 0
        for commit in self.data['commits']:
            for files in commit['files']:
                file_path = files['file']
                self.update_progress(i, file_path)
                i += 1
                if files['type'] == 'removed':
                    self.ftp.remove_file(file_path)
                else:
                    # Fetch the raw file at this commit from Bitbucket's 1.0
                    # API and mirror it onto the FTP target.
                    url = 'https://api.bitbucket.org/1.0/repositories%sraw/%s/%s' % (
                        self.data['repository']['absolute_url'],
                        commit['node'], file_path)
                    value = curl.perform(url)
                    self.create_file(file_path, value)
        curl.close()
    def perform_github(self):
        # GitHub variant: removed files are listed separately from
        # added/modified ones, and raw content comes from raw.github.com.
        curl = curl_connection(self.repo_username, self.repo_password)
        curl.authenticate()
        i = 0
        for commit in self.data['commits']:
            for file in commit['removed']:
                self.update_progress(i, file)
                i += 1
                self.ftp.remove_file(file)
            for file in commit['added'] + commit['modified']:
                self.update_progress(i, file)
                i += 1
                url = 'https://raw.github.com/%s/%s/%s/%s' % (
                    self.data['repository']['owner']['name'],
                    self.data['repository']['name'], commit['id'], file)
                value = curl.perform(url)
                self.create_file(file, value)
        curl.close()
    def update_progress(self, i, file_path):
        # Push a PROGRESS state (percent + current filename) to Celery so
        # the UI can poll deployment status.
        progress_percent = int(100 * float(i) / float(self.files_count))
        current_task.update_state(state='PROGRESS', meta={
            'status': progress_percent,
            'file': os.path.basename(file_path)})
    def create_file(self, file_path, value):
        # Stage the downloaded content in a named temp file, upload it
        # (creating missing remote directories first), then clean up.
        temp_file = tempfile.NamedTemporaryFile(mode='w+b', delete=False)
        # Python 2 support
        try:
            temp_file.write(bytes(value, 'utf-8'))
        except TypeError:
            temp_file.write(value)
        temp_file.close()
        temp_file = open(temp_file.name, 'rb')
        self.ftp.make_dirs(file_path)
        self.ftp.create_file(file_path, temp_file)
        temp_file.close()
        os.unlink(temp_file.name)
    def set_fail(self, user, message):
        # Record the failure on the log entry and send the failure email.
        self.log.user = user
        self.log.status_message = message
        self.log.status = False
        self.log.save()
        notification_fail(self.host, self.service, self.json_string, message)
| mit |
X-dark/wallabag | docs/fr/conf.py | 3 | 1403 | # -*- coding: utf-8 -*-
#
# wallabag documentation build configuration file, created by
# sphinx-quickstart on Fri Oct 16 06:47:23 2015.
import sys
import os
# Sphinx extension list; guzzle_sphinx_theme is appended at the bottom.
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'wallabag-fr'
copyright = u'2013-2016, Nicolas Lœuillet - MIT Licence'
version = '2.1.0'
release = version
exclude_patterns = ['_build']
pygments_style = 'sphinx'
# NOTE: overridden further down once the Guzzle theme is imported.
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = 'wallabagfrdoc'
latex_elements = {
}
latex_documents = [
    ('index', 'wallabag-fr.tex', u'wallabag Documentation',
     u'Nicolas Lœuillet', 'manual'),
]
man_pages = [
    ('index', 'wallabagfr', u'wallabag Documentation',
     [u'Nicolas Lœuillet'], 1)
]
texinfo_documents = [
    ('index', 'wallabag', u'wallabag Documentation',
     u'Nicolas Lœuillet', 'wallabag', 'wallabag is an opensource read-it-later.',
     'Miscellaneous'),
]
##### Guzzle sphinx theme
import guzzle_sphinx_theme
html_translator_class = 'guzzle_sphinx_theme.HTMLTranslator'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = 'guzzle_sphinx_theme'
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
    '**': ['logo-text.html', 'globaltoc.html', 'searchbox.html']
}
# Register the theme as an extension to generate a sitemap.xml
extensions.append("guzzle_sphinx_theme")
zasdfgbnm/tensorflow | tensorflow/contrib/keras/api/keras/preprocessing/text/__init__.py | 69 | 1146 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras data preprocessing utils for text data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Re-export the public text-preprocessing API from the internal implementation.
from tensorflow.python.keras._impl.keras.preprocessing.text import one_hot
from tensorflow.python.keras._impl.keras.preprocessing.text import text_to_word_sequence
from tensorflow.python.keras._impl.keras.preprocessing.text import Tokenizer

# Remove the __future__ names so they are not exposed as module attributes.
del absolute_import
del division
del print_function
| apache-2.0 |
surajssd/kuma | vendor/packages/logilab/astng/test/unittest_inspector.py | 25 | 3699 | # Copyright (c) 2003-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
for the visitors.diadefs module
"""
import sys
from os.path import join, abspath, dirname
from logilab.common.testlib import TestCase, unittest_main
from logilab.astng import nodes, inspector
from logilab.astng.bases import Instance, YES
from logilab.astng.manager import ASTNGManager, _silent_no_wrap
MANAGER = ASTNGManager()
def astng_wrapper(func, modname):
    """Call `func` with `modname` directly (no progress/exception wrapping)."""
    return func(modname)
DATA2 = join(dirname(abspath(__file__)), 'data2')
class LinkerTC(TestCase):
    """Checks for the inspector.Linker visitor over the 'data2' test project."""

    def setUp(self):
        self.project = MANAGER.project_from_files([DATA2], astng_wrapper)
        self.linker = inspector.Linker(self.project)
        self.linker.visit(self.project)

    def test_class_implements(self):
        ancestor = self.project.get_module('data2.clientmodule_test')['Ancestor']
        self.assertTrue(hasattr(ancestor, 'implements'))
        self.assertEqual(len(ancestor.implements), 1)
        self.assertTrue(isinstance(ancestor.implements[0], nodes.Class))
        self.assertEqual(ancestor.implements[0].name, "Interface")
        specialization = self.project.get_module('data2.clientmodule_test')['Specialization']
        self.assertTrue(hasattr(specialization, 'implements'))
        self.assertEqual(len(specialization.implements), 0)

    def test_locals_assignment_resolution(self):
        specialization = self.project.get_module('data2.clientmodule_test')['Specialization']
        self.assertTrue(hasattr(specialization, 'locals_type'))
        locals_types = specialization.locals_type
        self.assertEqual(len(locals_types), 2)
        self.assertEqual(sorted(locals_types.keys()), ['TYPE', 'top'])
        self.assertEqual(len(locals_types['TYPE']), 1)
        self.assertEqual(locals_types['TYPE'][0].value, 'final class')
        self.assertEqual(len(locals_types['top']), 1)
        self.assertEqual(locals_types['top'][0].value, 'class')

    def test_instance_attrs_resolution(self):
        specialization = self.project.get_module('data2.clientmodule_test')['Specialization']
        self.assertTrue(hasattr(specialization, 'instance_attrs_type'))
        attr_types = specialization.instance_attrs_type
        self.assertEqual(len(attr_types), 3)
        self.assertEqual(sorted(attr_types.keys()), ['_id', 'relation', 'toto'])
        self.assertTrue(isinstance(attr_types['relation'][0], Instance), attr_types['relation'])
        self.assertEqual(attr_types['relation'][0].name, 'DoNothing')
        self.assertTrue(isinstance(attr_types['toto'][0], Instance), attr_types['toto'])
        self.assertEqual(attr_types['toto'][0].name, 'Toto')
        self.assertIs(attr_types['_id'][0], YES)
class LinkerTC2(LinkerTC):
    """Same assertions as LinkerTC, but the project is built without wrapping."""

    def setUp(self):
        project = MANAGER.project_from_files([DATA2], func_wrapper=_silent_no_wrap)
        self.project = project
        self.linker = inspector.Linker(project)
        self.linker.visit(project)
# Public test cases exported by this module.
__all__ = ('LinkerTC', 'LinkerTC2')

if __name__ == '__main__':
    # Allow running this module's test cases directly.
    unittest_main()
| mpl-2.0 |
spaceone/mils-secure | app/cached_counter.py | 3 | 4447 | # By Bill Katz, modified from code suggested by Nick Johnson
from google.appengine.ext import db
from google.appengine.api import memcache
from google.appengine.api.capabilities import CapabilitySet
import logging
class Counter(db.Model):
    """A databased-backed counter."""
    # Current persisted count value.
    count = db.IntegerProperty(default=0)
    # Set when a memcache failure may have made the stored count inexact
    # (see CachedCounter.incr, which sets it via update_count).
    error_possible = db.BooleanProperty(default=False)
class CachedCounter(object):
    """A memcached datastore-backed counter that handles high concurrency
    without sharding.

    Gated so won't do puts more frequently than once every UPDATE_INTERVAL
    seconds. Also uses CapabilitySet to force non-memcached approach if
    memcached is planned to be down during update interval.

    In cases of unidirectional incrementing, e.g., a monotonically increasing
    unique ID generator, this routine is guaranteed to underestimate the true
    value in worst case failure.

    Based on App Engine Cookbook recipe:
    "High-concurrency counters without sharding"

    Usage:
    >>> counter = CachedCounter('MyModel')
    >>> counter.incr()
    1
    >>> counter.incr()
    2
    >>> counter.count
    2
    >>> counter.incr(3)
    5
    >>> counter.count
    5
    """

    def __init__(self, name, update_interval=2):
        # `name` doubles as the datastore key_name of the backing Counter.
        self._name = name
        # Memcache keys: lock to gate datastore writes, pending increment,
        # and the cached datastore count.
        self._lock_key = "CounterLock:%s" % (name,)
        self._incr_key = "CounterIncr:%s" % (name,)
        self._count_key = "CounterValue:%s" % (name,)
        self._update_interval = update_interval

    @property
    def count(self):
        """Current value: cached datastore count plus pending memcache increment."""
        value = memcache.get(self._count_key)
        incr = int(memcache.get(self._incr_key) or 0)
        if value is None:
            # Cache miss: read (or create) the datastore entity.
            entity = Counter.get_or_insert(key_name=self._name)
            value = entity.count + incr
            # NOTE(review): the cached value already includes `incr`, but
            # later reads add `incr` on top again — looks like a possible
            # transient over-count until the next datastore flush; confirm.
            memcache.set(self._count_key, value)
            return int(value)
        return int(value + incr)

    def incr(self, value=1):
        """Increment the counter by `value` (>= 0) and return the new count."""
        if value < 0:
            raise ValueError('CachedCounter cannot handle negative numbers.')

        def update_count(name, incr, error_possible=False):
            # Transactional datastore update; returns the stored count.
            entity = Counter.get_by_key_name(name)
            if entity:
                entity.count += incr
                logging.debug("incr(%s): update_count on retrieved entity by %d to %d", name, incr, entity.count)
            else:
                entity = Counter(key_name=name, count=incr)
                logging.debug("incr(%s): update_count on new entity set to %d", name, incr)
            if error_possible:
                entity.error_possible = True
            entity.put()
            return entity.count

        # If memcache may go down within the update window, bypass it and
        # write straight to the datastore.
        look_ahead_time = 10 + self._update_interval
        memcache_ops = CapabilitySet('memcache', methods=['add'])
        memcache_down = not memcache_ops.will_remain_enabled_for(look_ahead_time)
        if memcache_down or memcache.add(self._lock_key, None, time=self._update_interval):
            # Update the datastore
            incr = int(memcache.get(self._incr_key) or 0) + value
            logging.debug("incr(%s): updating datastore with %d", self._name, incr)
            memcache.set(self._incr_key, 0)
            try:
                stored_count = db.run_in_transaction(update_count, self._name, incr)
            except:
                # Transaction failed: put the pending increment back so it
                # is not lost, then re-raise.
                memcache.set(self._incr_key, incr)
                logging.error('Counter(%s): unable to update datastore counter.', self._name)
                raise
            memcache.set(self._count_key, stored_count)
            return stored_count
        else:
            incr = memcache.get(self._incr_key)
            if incr is None:
                # _incr_key in memcache should be set. If not, two possibilities:
                # 1) memcache has failed between last datastore update.
                # 2) this branch has executed before memcache set in update branch (unlikely)
                stored_count = db.run_in_transaction(update_count,
                                                     self._name, value, error_possible=True)
                memcache.set(self._count_key, stored_count)
                memcache.set(self._incr_key, 0)
                logging.error('Counter(%s): possible memcache failure in update interval.',
                              self._name)
                return stored_count
            else:
                # Common fast path: accumulate the increment in memcache only.
                memcache.incr(self._incr_key, delta=value)
                logging.debug("incr(%s): incrementing memcache with %d", self._name, value)
                return self.count
| mit |
dturner-tw/pants | src/python/pants/pantsd/service/pants_service.py | 10 | 1196 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import threading
from abc import abstractmethod
from pants.util.meta import AbstractClass
class PantsService(AbstractClass):
    """Pants daemon service base class."""

    class ServiceError(Exception): pass

    def __init__(self):
        super(PantsService, self).__init__()
        # Human-readable service name, derived from the subclass.
        self.name = self.__class__.__name__
        # One-shot event flipped by terminate() to request shutdown.
        self._kill_switch = threading.Event()

    @property
    def is_killed(self):
        """A `threading.Event`-checking property to facilitate graceful shutdown of services.

        Subclasses should check this property for a True value in their core runtime. If True, the
        service should teardown and gracefully exit. This represents a fatal/one-time event for the
        service.
        """
        return self._kill_switch.is_set()

    @abstractmethod
    def run(self):
        """The main entry-point for the service called by the service runner."""

    def terminate(self):
        # Signal the service to shut down; irreversible for this instance.
        self._kill_switch.set()
| apache-2.0 |
Mirafratechnologies/Automation | Mirafra_automation_framework/LAN_showInterfaces/showInterfaces.py | 1 | 1287 | import unittest
import subprocess
import time
import paramiko # SSH Support
import commonData
cmd = "ifconfig -a"
class showAllInterfaces(unittest.TestCase):
def setUp(self):
self.startTime = time.time()
global ssh
ssh=paramiko.SSHClient()
ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
# ssh.connect(HOST, port, user, password)
ssh.connect(commonData.Ipaddr, commonData.Port, commonData.User, commonData.Password)
print "\nProgressStatus@ShowInterfaces@Started\n"
def tearDown(self):
t = time.time() - self.startTime
print "%s: %.3f %d" % (self.id(), t, self.status)
f = open(commonData.TestcaseResult, "a")
f.write("%d\t\t\t %.3f\t\t %s\n" % (self.status, t, "ShowInterfaces"))
f.close()
ssh.close()
if(self.status == 0):
print "ProgressStatus@ShowInterfaces@Completed@PASS\n"
else:
print "ProgressStatus@ShowInterfaces@Completed@FAIL\n"
def runTest(self):
stdin, stdout, stderr = ssh.exec_command(cmd)
ping = stdout.readlines()
resp=''.join(ping)
self.status = stdout.channel.recv_exit_status()
self.assertEqual(stdout.channel.recv_exit_status(), 0, "Command execution failed")
print(resp)
| gpl-3.0 |
MoisesTedeschi/python | Scripts-Python/Modulos-Diversos/python-com-scrapy/Lib/site-packages/hamcrest/library/collection/issequence_containing.py | 6 | 3001 | __author__ = "Jon Reid"
__copyright__ = "Copyright 2011 hamcrest.org"
__license__ = "BSD, see License.txt"
from hamcrest.core.base_matcher import BaseMatcher
from hamcrest.core.core.allof import all_of
from hamcrest.core.helpers.hasmethod import hasmethod
from hamcrest.core.helpers.wrap_matcher import wrap_matcher
class IsSequenceContaining(BaseMatcher):
    """Matches a sequence if any of its elements satisfies a given matcher."""

    def __init__(self, element_matcher):
        self.element_matcher = element_matcher

    def _matches(self, sequence):
        """Return True if any item of `sequence` satisfies the matcher.

        Non-iterable input is treated as a non-match rather than an error.
        """
        try:
            for item in sequence:
                if self.element_matcher.matches(item):
                    return True
            # Bug fix: the original fell off the end here and returned an
            # implicit None; return False explicitly (as upstream PyHamcrest
            # does) so the method honors its boolean contract.
            return False
        except TypeError:  # not a sequence
            return False

    def describe_to(self, description):
        description.append_text('a sequence containing ') \
                   .append_description_of(self.element_matcher)
class IsSequenceContainingEvery(BaseMatcher):
    """Matches a sequence if every given matcher is satisfied by some element.

    It'd be great to hand the sequence to all_of directly, but the input might
    be a one-time iterable (like a generator); see issue #20. So _matches
    first materializes the sequence as a concrete list before delegating to
    the all_of matcher.
    """

    def __init__(self, *element_matchers):
        self.matcher = all_of(*[has_item(m) for m in element_matchers])

    def _matches(self, sequence):
        try:
            return self.matcher.matches(list(sequence))
        except TypeError:  # not iterable
            return False

    def describe_mismatch(self, item, mismatch_description):
        self.matcher.describe_mismatch(item, mismatch_description)

    def describe_to(self, description):
        self.matcher.describe_to(description)
def has_item(match):
    """Matches if any element of sequence satisfies a given matcher.

    :param match: The matcher to satisfy, or an expected value for
        :py:func:`~hamcrest.core.core.isequal.equal_to` matching.

    This matcher iterates the evaluated sequence, searching for any element
    that satisfies a given matcher. If a matching element is found,
    ``has_item`` is satisfied.

    If the ``match`` argument is not a matcher, it is implicitly wrapped in an
    :py:func:`~hamcrest.core.core.isequal.equal_to` matcher to check for
    equality.
    """
    element_matcher = wrap_matcher(match)
    return IsSequenceContaining(element_matcher)
def has_items(*items):
    """Matches if all of the given matchers are satisfied by any elements of
    the sequence.

    :param match1,...: A comma-separated list of matchers.

    This matcher iterates the given matchers, searching for any elements in the
    evaluated sequence that satisfy them. If each matcher is satisfied, then
    ``has_items`` is satisfied.

    Any argument that is not a matcher is implicitly wrapped in an
    :py:func:`~hamcrest.core.core.isequal.equal_to` matcher to check for
    equality.
    """
    # Idiom: build the matcher list with a comprehension instead of a
    # manual append loop.
    matchers = [wrap_matcher(item) for item in items]
    return IsSequenceContainingEvery(*matchers)
| gpl-3.0 |
cg31/tensorflow | tensorflow/contrib/learn/python/learn/tests/dataframe/estimator_utils_test.py | 14 | 6500 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests of the DataFrame class."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.contrib.layers import feature_column
from tensorflow.contrib.learn.python import learn
from tensorflow.contrib.learn.python.learn.dataframe import estimator_utils
from tensorflow.contrib.learn.python.learn.tests.dataframe import mocks
from tensorflow.python.framework import tensor_shape
def setup_test_df():
  """Create a dataframe populated with some test columns."""
  df = learn.DataFrame()
  # Each column wraps a mock input series in a two-output mock transform;
  # the trailing "out1"/"out2" selects which transform output the column is.
  df["a"] = learn.TransformedSeries(
      [mocks.MockSeries("foobar", mocks.MockTensor("Tensor a", tf.int32))],
      mocks.MockTwoOutputTransform("iue", "eui", "snt"), "out1")
  df["b"] = learn.TransformedSeries(
      [mocks.MockSeries("foobar", mocks.MockTensor("Tensor b", tf.int32))],
      mocks.MockTwoOutputTransform("iue", "eui", "snt"), "out2")
  df["c"] = learn.TransformedSeries(
      [mocks.MockSeries("foobar", mocks.MockTensor("Tensor c", tf.int32))],
      mocks.MockTwoOutputTransform("iue", "eui", "snt"), "out1")
  return df
def setup_test_df_3layer():
  """Create a dataframe populated with some test columns."""
  df = learn.DataFrame()
  # Layer 1: four base series (dense a/c, sparse b/d).
  df["a"] = mocks.MockSeries("a_series", mocks.MockTensor("Tensor a", tf.int32))
  df["b"] = mocks.MockSeries("b_series",
                             mocks.MockSparseTensor("SparseTensor b", tf.int32))
  df["c"] = mocks.MockSeries("c_series", mocks.MockTensor("Tensor c", tf.int32))
  df["d"] = mocks.MockSeries("d_series",
                             mocks.MockSparseTensor("SparseTensor d", tf.int32))
  # Layer 2: columns derived from pairs of base columns.
  df["e"] = learn.TransformedSeries([df["a"], df["b"]],
                                    mocks.Mock2x2Transform("iue", "eui", "snt"),
                                    "out1")
  df["f"] = learn.TransformedSeries([df["c"], df["d"]],
                                    mocks.Mock2x2Transform("iue", "eui", "snt"),
                                    "out2")
  # Layer 3: a column derived from the two layer-2 columns.
  df["g"] = learn.TransformedSeries([df["e"], df["f"]],
                                    mocks.Mock2x2Transform("iue", "eui", "snt"),
                                    "out1")
  return df
class EstimatorUtilsTest(tf.test.TestCase):
  """Test of estimator utils."""

  def test_to_feature_columns_and_input_fn(self):
    # Builds feature columns and an input_fn from a 3-layer mock dataframe,
    # selecting "a", "b", "f" as features and "g" as the target.
    df = setup_test_df_3layer()
    feature_columns, input_fn = (
        estimator_utils.to_feature_columns_and_input_fn(
            df,
            base_input_keys_with_defaults={"a": 1,
                                           "b": 2,
                                           "c": 3,
                                           "d": 4},
            target_keys=["g"],
            feature_keys=["a", "b", "f"]))

    # Expected columns: dense "a", sparse "b", and transformed "f" (which
    # depends on base inputs "c" and "d").
    expected_feature_column_a = feature_column.DataFrameColumn(
        "a", learn.PredefinedSeries(
            "a", tf.FixedLenFeature(tensor_shape.unknown_shape(), tf.int32, 1)))
    expected_feature_column_b = feature_column.DataFrameColumn(
        "b", learn.PredefinedSeries("b", tf.VarLenFeature(tf.int32)))
    expected_feature_column_f = feature_column.DataFrameColumn(
        "f", learn.TransformedSeries([
            learn.PredefinedSeries("c", tf.FixedLenFeature(
                tensor_shape.unknown_shape(), tf.int32, 3)),
            learn.PredefinedSeries("d", tf.VarLenFeature(tf.int32))
        ], mocks.Mock2x2Transform("iue", "eui", "snt"), "out2"))

    expected_feature_columns = [expected_feature_column_a,
                                expected_feature_column_b,
                                expected_feature_column_f]
    self.assertEqual(sorted(expected_feature_columns), sorted(feature_columns))

    # input_fn must expose all four base inputs and the "g" target.
    base_features, targets = input_fn()
    expected_base_features = {
        "a": mocks.MockTensor("Tensor a", tf.int32),
        "b": mocks.MockSparseTensor("SparseTensor b", tf.int32),
        "c": mocks.MockTensor("Tensor c", tf.int32),
        "d": mocks.MockSparseTensor("SparseTensor d", tf.int32)
    }
    self.assertEqual(expected_base_features, base_features)

    expected_targets = mocks.MockTensor("Out iue", tf.int32)
    self.assertEqual(expected_targets, targets)

    self.assertEqual(3, len(feature_columns))

  def test_to_feature_columns_and_input_fn_no_targets(self):
    # Without target_keys, the input_fn should yield an empty targets dict.
    df = setup_test_df_3layer()
    feature_columns, input_fn = (
        estimator_utils.to_feature_columns_and_input_fn(
            df,
            base_input_keys_with_defaults={"a": 1,
                                           "b": 2,
                                           "c": 3,
                                           "d": 4},
            feature_keys=["a", "b", "f"]))

    base_features, targets = input_fn()
    expected_base_features = {
        "a": mocks.MockTensor("Tensor a", tf.int32),
        "b": mocks.MockSparseTensor("SparseTensor b", tf.int32),
        "c": mocks.MockTensor("Tensor c", tf.int32),
        "d": mocks.MockSparseTensor("SparseTensor d", tf.int32)
    }
    self.assertEqual(expected_base_features, base_features)

    expected_targets = {}
    self.assertEqual(expected_targets, targets)

    self.assertEqual(3, len(feature_columns))

  def test_to_estimator_not_disjoint(self):
    # "f" appearing in both feature_keys and target_keys must raise.
    df = setup_test_df_3layer()

    # pylint: disable=unused-variable
    def get_not_disjoint():
      feature_columns, input_fn = (
          estimator_utils.to_feature_columns_and_input_fn(
              df,
              base_input_keys_with_defaults={"a": 1,
                                             "b": 2,
                                             "c": 3,
                                             "d": 4},
              target_keys=["f"],
              feature_keys=["a", "b", "f"]))

    self.assertRaises(ValueError, get_not_disjoint)
| apache-2.0 |
kumar303/zamboni | mkt/commonplace/tests/test_views.py | 3 | 12875 | from gzip import GzipFile
import json
from StringIO import StringIO
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
import mock
from nose.tools import eq_, ok_
from pyquery import PyQuery as pq
import mkt.site.tests
from mkt.commonplace.models import DeployBuildId
class CommonplaceTestMixin(mkt.site.tests.TestCase):
    """Shared helper for Commonplace view tests."""

    @mock.patch('mkt.commonplace.views.fxa_auth_info')
    def _test_url(self, url, fxa_mock, url_kwargs=None):
        """Test that the given url can be requested, returns a 200, and returns
        a valid gzipped response when requested with Accept-Encoding over ssl.
        Return the result of a regular (non-gzipped) request."""
        # Stub FxA so templates get deterministic state/auth-url values.
        fxa_mock.return_value = ('fakestate', 'http://example.com/fakeauthurl')
        if not url_kwargs:
            url_kwargs = {}
        # First request: gzip-encoded over https.
        res = self.client.get(url, url_kwargs, HTTP_ACCEPT_ENCODING='gzip',
                              **{'wsgi.url_scheme': 'https'})
        eq_(res.status_code, 200)
        eq_(res['Content-Encoding'], 'gzip')
        eq_(sorted(res['Vary'].split(', ')),
            ['Accept-Encoding', 'Accept-Language', 'Cookie'])
        ungzipped_content = GzipFile('', 'r', 0, StringIO(res.content)).read()
        # Second request: plain; its body must equal the decompressed body.
        res = self.client.get(url, url_kwargs, **{'wsgi.url_scheme': 'https'})
        eq_(res.status_code, 200)
        eq_(sorted(res['Vary'].split(', ')),
            ['Accept-Encoding', 'Accept-Language', 'Cookie'])
        eq_(ungzipped_content, res.content)
        return res
class TestCommonplace(CommonplaceTestMixin):
    """Checks template selection, caching headers and region handling for the
    various Commonplace frontends (fireplace, commbadge, submission, ...)."""

    def test_fireplace_firefox_accounts(self):
        res = self._test_url('/server.html')
        self.assertTemplateUsed(res, 'commonplace/index.html')
        self.assertEquals(res.context['repo'], 'fireplace')
        self.assertContains(res, 'splash.css')
        self.assertNotContains(res, 'login.persona.org/include.js')
        eq_(res['Cache-Control'], 'max-age=180')
        # Values injected by the mocked fxa_auth_info in _test_url.
        self.assertContains(res, 'fakestate')
        self.assertContains(res, 'http://example.com/fakeauthurl')

    def test_commbadge(self):
        res = self._test_url('/comm/')
        self.assertTemplateUsed(res, 'commonplace/index.html')
        self.assertEquals(res.context['repo'], 'commbadge')
        self.assertNotContains(res, 'splash.css')
        eq_(res['Cache-Control'], 'max-age=180')

    def test_submission(self):
        res = self._test_url('/submission/')
        self.assertTemplateUsed(res, 'commonplace/index_react.html')
        self.assertEquals(res.context['repo'], 'marketplace-submission')
        eq_(res['Cache-Control'], 'max-age=180')

    @mock.patch('mkt.commonplace.views.fxa_auth_info')
    def test_transonic(self, mock_fxa):
        mock_fxa.return_value = ('fakestate', 'http://example.com/fakeauthurl')
        res = self._test_url('/curate/')
        self.assertTemplateUsed(res, 'commonplace/index.html')
        self.assertEquals(res.context['repo'], 'transonic')
        self.assertNotContains(res, 'splash.css')
        eq_(res['Cache-Control'], 'max-age=180')

    @mock.patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_region_not_included_in_fireplace_if_sim_info(self, mock_region):
        # With complete SIM info in the query string, the geoip region must
        # not be exposed to the page.
        test_region = mock.Mock()
        test_region.slug = 'testoland'
        mock_region.return_value = test_region
        for url in ('/server.html?mccs=blah',
                    '/server.html?mcc=blah&mnc=blah'):
            res = self._test_url(url)
            ok_('geoip_region' not in res.context, url)
            self.assertNotContains(res, 'data-region')

    @mock.patch('mkt.regions.middleware.RegionMiddleware.region_from_request')
    def test_region_included_in_fireplace_if_sim_info(self, mock_region):
        # With no or incomplete SIM info, the geoip region is exposed.
        test_region = mock.Mock()
        test_region.slug = 'testoland'
        mock_region.return_value = test_region
        for url in ('/server.html?nativepersona=true',
                    '/server.html?mcc=blah',  # Incomplete info from SIM.
                    '/server.html',
                    '/server.html?'):
            res = self._test_url(url)
            self.assertEquals(res.context['geoip_region'], test_region)
            self.assertContains(res, 'data-region="testoland"')
class TestIFrames(CommonplaceTestMixin):
    """Checks the allowed postMessage origins exposed by the iframe-install
    and potatolytics views across production/stage/dev/debug domains."""

    def setUp(self):
        self.iframe_install_url = reverse('commonplace.iframe-install')
        self.potatolytics_url = reverse('commonplace.potatolytics')

    def _test_trailing_slashes(self, allowed_origins):
        """Utility method to test that no origin ends with a trailing slash."""
        eq_(filter(lambda v: v.endswith('/'), allowed_origins), [])

    @override_settings(DOMAIN='marketplace.firefox.com')
    def test_basic(self):
        # Production: iframe-install additionally allows the Loop/Hello
        # origins; potatolytics only allows the marketplace origins.
        res = self._test_url(self.iframe_install_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.marketplace.firefox.com',
             'app://marketplace.firefox.com',
             'https://marketplace.firefox.com',
             'app://tarako.marketplace.firefox.com',
             'https://hello.firefox.com',
             'https://call.firefox.com'])

        res = self._test_url(self.potatolytics_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.marketplace.firefox.com',
             'app://marketplace.firefox.com',
             'https://marketplace.firefox.com',
             'app://tarako.marketplace.firefox.com'])

    @override_settings(DOMAIN='marketplace.allizom.org')
    def test_basic_stage(self):
        res = self._test_url(self.iframe_install_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.marketplace.allizom.org',
             'app://marketplace.allizom.org',
             'https://marketplace.allizom.org',
             'app://tarako.marketplace.allizom.org',
             'https://hello.firefox.com',
             'https://call.firefox.com'])

        res = self._test_url(self.potatolytics_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.marketplace.allizom.org',
             'app://marketplace.allizom.org',
             'https://marketplace.allizom.org',
             'app://tarako.marketplace.allizom.org'])

    @override_settings(DOMAIN='marketplace-dev.allizom.org')
    def test_basic_dev(self):
        # Dev adds localhost/mp.dev origins; iframe-install also gets the
        # staging Loop origins.
        res = self._test_url(self.iframe_install_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.marketplace-dev.allizom.org',
             'app://marketplace-dev.allizom.org',
             'https://marketplace-dev.allizom.org',
             'app://tarako.marketplace-dev.allizom.org',
             'http://localhost:8675',
             'https://localhost:8675',
             'http://localhost',
             'https://localhost',
             'http://mp.dev',
             'https://mp.dev',
             'https://hello.firefox.com',
             'https://call.firefox.com',
             'https://loop-webapp-dev.stage.mozaws.net',
             'https://call.stage.mozaws.net'])

        res = self._test_url(self.potatolytics_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.marketplace-dev.allizom.org',
             'app://marketplace-dev.allizom.org',
             'https://marketplace-dev.allizom.org',
             'app://tarako.marketplace-dev.allizom.org',
             'http://localhost:8675',
             'https://localhost:8675',
             'http://localhost',
             'https://localhost',
             'http://mp.dev',
             'https://mp.dev'])

    @override_settings(DOMAIN='example.com', DEBUG=True)
    def test_basic_debug_true(self):
        # DEBUG=True gets the same extra development origins as dev.
        res = self._test_url(self.iframe_install_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.example.com',
             'app://example.com',
             'https://example.com',
             'app://tarako.example.com',
             'http://localhost:8675',
             'https://localhost:8675',
             'http://localhost',
             'https://localhost',
             'http://mp.dev',
             'https://mp.dev',
             'https://hello.firefox.com',
             'https://call.firefox.com',
             'https://loop-webapp-dev.stage.mozaws.net',
             'https://call.stage.mozaws.net'])

        res = self._test_url(self.potatolytics_url)
        allowed_origins = json.loads(res.context['allowed_origins'])
        self._test_trailing_slashes(allowed_origins)
        eq_(allowed_origins,
            ['app://packaged.example.com',
             'app://example.com',
             'https://example.com',
             'app://tarako.example.com',
             'http://localhost:8675',
             'https://localhost:8675',
             'http://localhost',
             'https://localhost',
             'http://mp.dev',
             'https://mp.dev'])
class TestOpenGraph(mkt.site.tests.TestCase):
    """Checks the OpenGraph meta tags rendered for the home and detail pages."""

    def _get_tags(self, res):
        """Returns title, image, description."""
        doc = pq(res.content)
        return (doc('[property="og:title"]').attr('content'),
                doc('[property="og:image"]').attr('content'),
                doc('[name="description"]').attr('content'))

    def test_basic(self):
        res = self.client.get(reverse('commonplace.fireplace'))
        title, image, description = self._get_tags(res)
        eq_(title, 'Firefox Marketplace')
        ok_(description.startswith('The Firefox Marketplace is'))

    def test_detail(self):
        # HTML in the app description must be stripped; quotes are escaped
        # in the attribute but unescaped by the pyquery accessor.
        app = mkt.site.tests.app_factory(
            description='Awesome <a href="/">Home</a> "helloareyouthere"')
        res = self.client.get(reverse('detail', args=[app.app_slug]))
        title, image, description = self._get_tags(res)
        eq_(title, app.name)
        eq_(image, app.get_icon_url(64))
        ok_('<meta name="description" '
            'content="Awesome Home &#34;helloareyouthere&#34;">'
            in res.content)
        eq_(description, 'Awesome Home "helloareyouthere"')

    def test_detail_dne(self):
        # Unknown slug falls back to the generic marketplace tags.
        res = self.client.get(reverse('detail', args=['DO NOT EXISTS']))
        title, image, description = self._get_tags(res)
        eq_(title, 'Firefox Marketplace')
        ok_(description.startswith('The Firefox Marketplace is'))
class TestBuildId(CommonplaceTestMixin):
    """Checks that fireplace script URLs carry the deploy build id as a
    cache-busting query parameter."""

    def test_build_id_from_db(self):
        # Preferred source: the DeployBuildId row for the repo.
        DeployBuildId.objects.create(repo='fireplace', build_id='0118999')
        res = self._test_url('/server.html')
        doc = pq(res.content)
        scripts = doc('script')
        for script in scripts:
            src = pq(script).attr('src')
            if 'fireplace' in src:
                ok_(src.endswith('?b=0118999'))

    @mock.patch('mkt.commonplace.views.storage')
    def test_fallback_to_build_id_txt(self, storage_mock):
        # Fallback source: a build_id text file read via storage.
        storage_mock.open = mock.mock_open(read_data='0118999')
        res = self._test_url('/server.html')
        doc = pq(res.content)
        scripts = doc('script')
        for script in scripts:
            src = pq(script).attr('src')
            if 'fireplace' in src:
                ok_(src.endswith('?b=0118999'))
class TestLangAttrs(CommonplaceTestMixin):
    """Checks the <html> lang/dir attributes for LTR and RTL locales."""

    def test_lang_en(self):
        res = self._test_url('/server.html')
        doc = pq(res.content)
        html = doc('html[lang][dir]')
        eq_(html.attr('lang'), 'en-US')
        eq_(html.attr('dir'), 'ltr')

    def test_lang_fr(self):
        res = self._test_url('/server.html?lang=fr')
        doc = pq(res.content)
        html = doc('html[lang][dir]')
        eq_(html.attr('lang'), 'fr')
        eq_(html.attr('dir'), 'ltr')

    @override_settings(LANGUAGE_URL_MAP={'ar': 'ar'})
    def test_lang_ar(self):
        # Arabic is a right-to-left locale.
        res = self._test_url('/server.html?lang=ar')
        doc = pq(res.content)
        html = doc('html[lang][dir]')
        eq_(html.attr('lang'), 'ar')
        eq_(html.attr('dir'), 'rtl')

    @override_settings(LANGUAGE_URL_MAP={'rtl': 'rtl'})
    def test_lang_rtl(self):
        # The pseudo-locale 'rtl' forces right-to-left rendering.
        res = self._test_url('/server.html?lang=rtl')
        doc = pq(res.content)
        html = doc('html[lang][dir]')
        eq_(html.attr('lang'), 'rtl')
        eq_(html.attr('dir'), 'rtl')
| bsd-3-clause |
vdmann/cse-360-image-hosting-website | lib/python2.7/site-packages/django/db/backends/mysql/base.py | 28 | 22109 | """
MySQL database backend for Django.
Requires MySQLdb: http://sourceforge.net/projects/mysql-python
"""
from __future__ import unicode_literals
import datetime
import re
import sys
import warnings
try:
import MySQLdb as Database
except ImportError as e:
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured("Error loading MySQLdb module: %s" % e)
from django.utils.functional import cached_property
# We want version (1, 2, 1, 'final', 2) or later. We can't just use
# lexicographic ordering in this check because then (1, 2, 1, 'gamma')
# inadvertently passes the version test.
version = Database.version_info
if (version < (1, 2, 1) or (version[:3] == (1, 2, 1) and
        (len(version) < 5 or version[3] != 'final' or version[4] < 2))):
    from django.core.exceptions import ImproperlyConfigured
    raise ImproperlyConfigured("MySQLdb-1.2.1p2 or newer is required; you have %s" % Database.__version__)
from MySQLdb.converters import conversions, Thing2Literal
from MySQLdb.constants import FIELD_TYPE, CLIENT
try:
import pytz
except ImportError:
pytz = None
from django.conf import settings
from django.db import utils
from django.db.backends import *
from django.db.backends.mysql.client import DatabaseClient
from django.db.backends.mysql.creation import DatabaseCreation
from django.db.backends.mysql.introspection import DatabaseIntrospection
from django.db.backends.mysql.validation import DatabaseValidation
from django.utils.encoding import force_str, force_text
from django.utils.safestring import SafeBytes, SafeText
from django.utils import six
from django.utils import timezone
# Raise exceptions for database warnings if DEBUG is on
if settings.DEBUG:
    warnings.filterwarnings("error", category=Database.Warning)

# Re-export the driver's exception classes under Django's expected names.
DatabaseError = Database.DatabaseError
IntegrityError = Database.IntegrityError

# It's impossible to import datetime_or_None directly from MySQLdb.times
parse_datetime = conversions[FIELD_TYPE.DATETIME]
def parse_datetime_with_timezone_support(value):
    """Convert a raw MySQL DATETIME value, attaching UTC when USE_TZ is on.

    MySQLdb always returns naive datetimes; when time zone support is
    active Django expects aware ones, so a naive result is stamped UTC.
    """
    result = parse_datetime(value)
    if result is None:
        return None
    # Only a naive datetime may safely have its tzinfo overwritten.
    if settings.USE_TZ and timezone.is_naive(result):
        result = result.replace(tzinfo=timezone.utc)
    return result
def adapt_datetime_with_timezone_support(value, conv):
    """Serialize a datetime literal for MySQL, converting aware values to naive UTC.

    Equivalent to DateTimeField.get_db_prep_value; only raw SQL goes through here.
    """
    if settings.USE_TZ:
        if timezone.is_naive(value):
            # A naive datetime under USE_TZ is almost certainly a caller bug:
            # interpret it in the default zone, but warn loudly.
            warnings.warn("MySQL received a naive datetime (%s)"
                          " while time zone support is active." % value,
                          RuntimeWarning)
            value = timezone.make_aware(value, timezone.get_default_timezone())
        # MySQL stores naive values, so normalize to UTC and drop tzinfo.
        value = value.astimezone(timezone.utc).replace(tzinfo=None)
    return Thing2Literal(value.strftime("%Y-%m-%d %H:%M:%S"), conv)
# MySQLdb-1.2.1 returns TIME columns as timedelta -- they are more like
# timedelta in terms of actual behavior as they are signed and include days --
# and Django expects time, so we still need to override that. We also need to
# add special handling for SafeText and SafeBytes as MySQLdb's type
# checking is too tight to catch those (see Django ticket #6052).
# Finally, MySQLdb always returns naive datetime objects. However, when
# timezone support is active, Django expects timezone-aware datetime objects.
django_conversions = conversions.copy()
django_conversions.update({
FIELD_TYPE.TIME: util.typecast_time,
FIELD_TYPE.DECIMAL: util.typecast_decimal,
FIELD_TYPE.NEWDECIMAL: util.typecast_decimal,
FIELD_TYPE.DATETIME: parse_datetime_with_timezone_support,
datetime.datetime: adapt_datetime_with_timezone_support,
})
# This should match the numerical portion of the version numbers (we can treat
# versions like 5.0.24 and 5.0.24a as the same). Based on the list of version
# at http://dev.mysql.com/doc/refman/4.1/en/news.html and
# http://dev.mysql.com/doc/refman/5.0/en/news.html .
server_version_re = re.compile(r'(\d{1,2})\.(\d{1,2})\.(\d{1,2})')
# MySQLdb-1.2.1 and newer automatically makes use of SHOW WARNINGS on
# MySQL-4.1 and newer, so the MysqlDebugWrapper is unnecessary. Since the
# point is to raise Warnings as exceptions, this can be done with the Python
# warning module, and this is setup when the connection is created, and the
# standard util.CursorDebugWrapper can be used. Also, using sql_mode
# TRADITIONAL will automatically cause most warnings to be treated as errors.
class CursorWrapper(object):
    """
    A thin wrapper around MySQLdb's normal cursor class so that we can catch
    particular exception instances and reraise them with the right types.
    Implemented as a wrapper, rather than a subclass, so that we aren't stuck
    to the particular underlying representation returned by Connection.cursor().
    """
    # MySQL error codes that are really integrity violations even though
    # MySQLdb raises them as OperationalError (1048: column cannot be null).
    codes_for_integrityerror = (1048,)
    def __init__(self, cursor):
        self.cursor = cursor
    def _translate_operational_error(self, exc):
        # Map selected error codes to IntegrityError, since MySQLdb seems to
        # misclassify them and Django prefers the more logical exception type.
        # Always raises: either the translated error or the original one.
        if exc.args[0] in self.codes_for_integrityerror:
            six.reraise(utils.IntegrityError,
                        utils.IntegrityError(*tuple(exc.args)),
                        sys.exc_info()[2])
        raise
    def execute(self, query, args=None):
        # args is None means no string interpolation by MySQLdb.
        try:
            return self.cursor.execute(query, args)
        except Database.OperationalError as e:
            self._translate_operational_error(e)
    def executemany(self, query, args):
        try:
            return self.cursor.executemany(query, args)
        except Database.OperationalError as e:
            self._translate_operational_error(e)
    def __getattr__(self, attr):
        # Only reached when normal attribute lookup fails; delegate any
        # unknown attribute to the underlying MySQLdb cursor.
        try:
            return self.__dict__[attr]
        except KeyError:
            return getattr(self.cursor, attr)
    def __iter__(self):
        return iter(self.cursor)
class DatabaseFeatures(BaseDatabaseFeatures):
    # Capability flags for the MySQL backend; consumed by the ORM and the
    # test suite to route around features MySQL lacks or handles differently.
    empty_fetchmany_value = ()
    update_can_self_select = False
    allows_group_by_pk = True
    related_fields_match_type = True
    allow_sliced_subqueries = False
    has_bulk_insert = True
    has_select_for_update = True
    has_select_for_update_nowait = False
    supports_forward_references = False
    supports_long_model_names = False
    supports_microsecond_precision = False
    supports_regex_backreferencing = False
    supports_date_lookup_using_string = False
    supports_timezones = False
    requires_explicit_null_ordering_when_grouping = True
    allows_primary_key_0 = False
    uses_savepoints = True
    atomic_transactions = False
    def __init__(self, connection):
        # No MySQL-specific setup beyond the base initialization.
        super(DatabaseFeatures, self).__init__(connection)
    @cached_property
    def _mysql_storage_engine(self):
        "Internal method used in Django tests. Don't rely on this from your code"
        # Creates a throwaway table and reads its storage engine back;
        # cached_property ensures the round-trip happens at most once.
        cursor = self.connection.cursor()
        cursor.execute('CREATE TABLE INTROSPECT_TEST (X INT)')
        # This command is MySQL specific; the second column
        # will tell you the default table type of the created
        # table. Since all Django's test tables will have the same
        # table type, that's enough to evaluate the feature.
        cursor.execute("SHOW TABLE STATUS WHERE Name='INTROSPECT_TEST'")
        result = cursor.fetchone()
        cursor.execute('DROP TABLE INTROSPECT_TEST')
        return result[1]
    @cached_property
    def can_introspect_foreign_keys(self):
        "Confirm support for introspected foreign keys"
        # MyISAM tables accept but silently ignore FOREIGN KEY clauses, so
        # nothing can be introspected from them.
        return self._mysql_storage_engine != 'MyISAM'
    @cached_property
    def has_zoneinfo_database(self):
        # MySQL accepts full time zones names (eg. Africa/Nairobi) but rejects
        # abbreviations (eg. EAT). When pytz isn't installed and the current
        # time zone is LocalTimezone (the only sensible value in this
        # context), the current time zone name will be an abbreviation. As a
        # consequence, MySQL cannot perform time zone conversions reliably.
        if pytz is None:
            return False
        # Test if the time zone definitions are installed.
        cursor = self.connection.cursor()
        cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
        return cursor.fetchone() is not None
class DatabaseOperations(BaseDatabaseOperations):
    """MySQL-specific SQL generation helpers."""
    compiler_module = "django.db.backends.mysql.compiler"
    def date_extract_sql(self, lookup_type, field_name):
        """Return SQL extracting `lookup_type` (year, month, week_day, ...)
        from the date column `field_name`."""
        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
        if lookup_type == 'week_day':
            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
            # Note: WEEKDAY() returns 0-6, Monday=0.
            return "DAYOFWEEK(%s)" % field_name
        else:
            return "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
    def _truncate_sql(self, lookup_type, field_name):
        """Shared helper for date_trunc_sql/datetime_trunc_sql: build SQL
        that truncates `field_name` to `lookup_type` precision by
        reformatting through DATE_FORMAT. Unknown lookup types fall back to
        the bare column expression."""
        fields = ['year', 'month', 'day', 'hour', 'minute', 'second']
        format = ('%%Y-', '%%m', '-%%d', ' %%H:', '%%i', ':%%s')  # Use double percents to escape.
        format_def = ('0000-', '01', '-01', ' 00:', '00', ':00')
        try:
            i = fields.index(lookup_type) + 1
        except ValueError:
            return field_name
        # Keep the real format up to the requested precision, pad the rest
        # with fixed default components.
        format_str = ''.join(format[:i] + format_def[i:])
        return "CAST(DATE_FORMAT(%s, '%s') AS DATETIME)" % (field_name, format_str)
    def date_trunc_sql(self, lookup_type, field_name):
        """Return SQL truncating the date column to `lookup_type` precision."""
        return self._truncate_sql(lookup_type, field_name)
    def datetime_extract_sql(self, lookup_type, field_name, tzname):
        """Return (sql, params) extracting `lookup_type` from a datetime
        column, converting from UTC to `tzname` first when USE_TZ is on."""
        if settings.USE_TZ:
            field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
            params = [tzname]
        else:
            params = []
        # http://dev.mysql.com/doc/mysql/en/date-and-time-functions.html
        if lookup_type == 'week_day':
            # DAYOFWEEK() returns an integer, 1-7, Sunday=1.
            # Note: WEEKDAY() returns 0-6, Monday=0.
            sql = "DAYOFWEEK(%s)" % field_name
        else:
            sql = "EXTRACT(%s FROM %s)" % (lookup_type.upper(), field_name)
        return sql, params
    def datetime_trunc_sql(self, lookup_type, field_name, tzname):
        """Return (sql, params) truncating a datetime column to
        `lookup_type` precision, converting time zone first if needed."""
        if settings.USE_TZ:
            field_name = "CONVERT_TZ(%s, 'UTC', %%s)" % field_name
            params = [tzname]
        else:
            params = []
        return self._truncate_sql(lookup_type, field_name), params
    def date_interval_sql(self, sql, connector, timedelta):
        """Render a timedelta as a MySQL DAY_MICROSECOND interval expression."""
        return "(%s %s INTERVAL '%d 0:0:%d:%d' DAY_MICROSECOND)" % (sql, connector,
                timedelta.days, timedelta.seconds, timedelta.microseconds)
    def drop_foreignkey_sql(self):
        return "DROP FOREIGN KEY"
    def force_no_ordering(self):
        """
        "ORDER BY NULL" prevents MySQL from implicitly ordering by grouped
        columns. If no ordering would otherwise be applied, we don't want any
        implicit sorting going on.
        """
        return ["NULL"]
    def fulltext_search_sql(self, field_name):
        return 'MATCH (%s) AGAINST (%%s IN BOOLEAN MODE)' % field_name
    def last_executed_query(self, cursor, sql, params):
        # With MySQLdb, cursor objects have an (undocumented) "_last_executed"
        # attribute where the exact query sent to the database is saved.
        # See MySQLdb/cursors.py in the source distribution.
        return force_text(getattr(cursor, '_last_executed', None), errors='replace')
    def no_limit_value(self):
        # 2**64 - 1, as recommended by the MySQL documentation
        return 18446744073709551615
    def quote_name(self, name):
        """Backtick-quote an identifier, unless it is already quoted."""
        if name.startswith("`") and name.endswith("`"):
            return name  # Quoting once is enough.
        return "`%s`" % name
    def random_function_sql(self):
        return 'RAND()'
    def sql_flush(self, style, tables, sequences, allow_cascade=False):
        """Return TRUNCATE statements for `tables`, bracketed by statements
        disabling/re-enabling FK checks so ordering doesn't matter."""
        # NB: The generated SQL below is specific to MySQL
        # 'TRUNCATE x;', 'TRUNCATE y;', 'TRUNCATE z;'... style SQL statements
        # to clear all tables of all data
        if tables:
            sql = ['SET FOREIGN_KEY_CHECKS = 0;']
            for table in tables:
                sql.append('%s %s;' % (
                    style.SQL_KEYWORD('TRUNCATE'),
                    style.SQL_FIELD(self.quote_name(table)),
                ))
            sql.append('SET FOREIGN_KEY_CHECKS = 1;')
            sql.extend(self.sequence_reset_by_name_sql(style, sequences))
            return sql
        else:
            return []
    def sequence_reset_by_name_sql(self, style, sequences):
        # Truncate already resets the AUTO_INCREMENT field from
        # MySQL version 5.0.13 onwards. Refs #16961.
        if self.connection.mysql_version < (5, 0, 13):
            return ["%s %s %s %s %s;" % \
                (style.SQL_KEYWORD('ALTER'),
                 style.SQL_KEYWORD('TABLE'),
                 style.SQL_TABLE(self.quote_name(sequence['table'])),
                 style.SQL_KEYWORD('AUTO_INCREMENT'),
                 style.SQL_FIELD('= 1'),
                ) for sequence in sequences]
        else:
            return []
    def validate_autopk_value(self, value):
        # MySQLism: zero in AUTO_INCREMENT field does not work. Refs #17653.
        if value == 0:
            raise ValueError('The database backend does not accept 0 as a '
                             'value for AutoField.')
        return value
    def value_to_db_datetime(self, value):
        """Convert a datetime to its MySQL string form (naive, no microseconds)."""
        if value is None:
            return None
        # MySQL doesn't support tz-aware datetimes
        if timezone.is_aware(value):
            if settings.USE_TZ:
                value = value.astimezone(timezone.utc).replace(tzinfo=None)
            else:
                raise ValueError("MySQL backend does not support timezone-aware datetimes when USE_TZ is False.")
        # MySQL doesn't support microseconds
        return six.text_type(value.replace(microsecond=0))
    def value_to_db_time(self, value):
        """Convert a time to its MySQL string form (naive, no microseconds)."""
        if value is None:
            return None
        # MySQL doesn't support tz-aware times
        if timezone.is_aware(value):
            raise ValueError("MySQL backend does not support timezone-aware times.")
        # MySQL doesn't support microseconds
        return six.text_type(value.replace(microsecond=0))
    def year_lookup_bounds_for_datetime_field(self, value):
        # Again, no microseconds
        first, second = super(DatabaseOperations, self).year_lookup_bounds_for_datetime_field(value)
        return [first.replace(microsecond=0), second.replace(microsecond=0)]
    def max_name_length(self):
        # MySQL limits identifiers to 64 characters.
        return 64
    def bulk_insert_sql(self, fields, num_values):
        """Build the VALUES clause for a multi-row INSERT."""
        items_sql = "(%s)" % ", ".join(["%s"] * len(fields))
        return "VALUES " + ", ".join([items_sql] * num_values)
class DatabaseWrapper(BaseDatabaseWrapper):
    # Backend entry point: owns the MySQLdb connection and the helper
    # components (features, ops, client, creation, introspection, validation).
    vendor = 'mysql'
    # Lookup operators; BINARY variants force case-sensitive matching on
    # MySQL's default case-insensitive collations.
    operators = {
        'exact': '= %s',
        'iexact': 'LIKE %s',
        'contains': 'LIKE BINARY %s',
        'icontains': 'LIKE %s',
        'regex': 'REGEXP BINARY %s',
        'iregex': 'REGEXP %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE BINARY %s',
        'endswith': 'LIKE BINARY %s',
        'istartswith': 'LIKE %s',
        'iendswith': 'LIKE %s',
    }
    # Expose the DB-API module on the wrapper (standard backend convention).
    Database = Database
    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.introspection = DatabaseIntrospection(self)
        self.validation = DatabaseValidation(self)
    def get_connection_params(self):
        """Build the kwargs dict passed to MySQLdb.connect() from settings."""
        kwargs = {
            'conv': django_conversions,
            'charset': 'utf8',
        }
        if six.PY2:
            kwargs['use_unicode'] = True
        settings_dict = self.settings_dict
        if settings_dict['USER']:
            kwargs['user'] = settings_dict['USER']
        if settings_dict['NAME']:
            kwargs['db'] = settings_dict['NAME']
        if settings_dict['PASSWORD']:
            kwargs['passwd'] = force_str(settings_dict['PASSWORD'])
        # A HOST beginning with a slash is a Unix socket path, not a hostname.
        if settings_dict['HOST'].startswith('/'):
            kwargs['unix_socket'] = settings_dict['HOST']
        elif settings_dict['HOST']:
            kwargs['host'] = settings_dict['HOST']
        if settings_dict['PORT']:
            kwargs['port'] = int(settings_dict['PORT'])
        # We need the number of potentially affected rows after an
        # "UPDATE", not the number of changed rows.
        kwargs['client_flag'] = CLIENT.FOUND_ROWS
        # OPTIONS may override any of the defaults computed above.
        kwargs.update(settings_dict['OPTIONS'])
        return kwargs
    def get_new_connection(self, conn_params):
        """Open a MySQLdb connection and teach it to encode safe strings."""
        conn = Database.connect(**conn_params)
        # MySQLdb's type checking doesn't recognize Django's Safe* string
        # subclasses; reuse the plain str/bytes encoders for them.
        conn.encoders[SafeText] = conn.encoders[six.text_type]
        conn.encoders[SafeBytes] = conn.encoders[bytes]
        return conn
    def init_connection_state(self):
        """Per-connection session setup, run right after connecting."""
        cursor = self.connection.cursor()
        # SQL_AUTO_IS_NULL in MySQL controls whether an AUTO_INCREMENT column
        # on a recently-inserted row will return when the field is tested for
        # NULL. Disabling this value brings this aspect of MySQL in line with
        # SQL standards.
        cursor.execute('SET SQL_AUTO_IS_NULL = 0')
        cursor.close()
    def create_cursor(self):
        # Wrap the raw cursor so OperationalErrors can be re-raised with
        # Django's exception types (see CursorWrapper).
        cursor = self.connection.cursor()
        return CursorWrapper(cursor)
    def _rollback(self):
        try:
            BaseDatabaseWrapper._rollback(self)
        except Database.NotSupportedError:
            # Non-transactional storage engines (e.g. MyISAM) can't roll back.
            pass
    def _set_autocommit(self, autocommit):
        self.connection.autocommit(autocommit)
    def disable_constraint_checking(self):
        """
        Disables foreign key checks, primarily for use in adding rows with forward references. Always returns True,
        to indicate constraint checks need to be re-enabled.
        """
        self.cursor().execute('SET foreign_key_checks=0')
        return True
    def enable_constraint_checking(self):
        """
        Re-enable foreign key checks after they have been disabled.
        """
        # Override needs_rollback in case constraint_checks_disabled is
        # nested inside transaction.atomic.
        self.needs_rollback, needs_rollback = False, self.needs_rollback
        try:
            self.cursor().execute('SET foreign_key_checks=1')
        finally:
            self.needs_rollback = needs_rollback
    def check_constraints(self, table_names=None):
        """
        Checks each table name in `table_names` for rows with invalid foreign key references. This method is
        intended to be used in conjunction with `disable_constraint_checking()` and `enable_constraint_checking()`, to
        determine if rows with invalid references were entered while constraint checks were off.
        Raises an IntegrityError on the first invalid foreign key reference encountered (if any) and provides
        detailed information about the invalid reference in the error message.
        Backends can override this method if they can more directly apply constraint checking (e.g. via "SET CONSTRAINTS
        ALL IMMEDIATE")
        """
        cursor = self.cursor()
        if table_names is None:
            table_names = self.introspection.table_names(cursor)
        for table_name in table_names:
            primary_key_column_name = self.introspection.get_primary_key_column(cursor, table_name)
            if not primary_key_column_name:
                continue
            key_columns = self.introspection.get_key_columns(cursor, table_name)
            for column_name, referenced_table_name, referenced_column_name in key_columns:
                # Identifiers can't be parameterized, hence the %-interpolated
                # query; names come from introspection, not user input.
                cursor.execute("""
                    SELECT REFERRING.`%s`, REFERRING.`%s` FROM `%s` as REFERRING
                    LEFT JOIN `%s` as REFERRED
                    ON (REFERRING.`%s` = REFERRED.`%s`)
                    WHERE REFERRING.`%s` IS NOT NULL AND REFERRED.`%s` IS NULL"""
                    % (primary_key_column_name, column_name, table_name, referenced_table_name,
                    column_name, referenced_column_name, column_name, referenced_column_name))
                for bad_row in cursor.fetchall():
                    raise utils.IntegrityError("The row in table '%s' with primary key '%s' has an invalid "
                        "foreign key: %s.%s contains a value '%s' that does not have a corresponding value in %s.%s."
                        % (table_name, bad_row[0],
                        table_name, column_name, bad_row[1],
                        referenced_table_name, referenced_column_name))
    def is_usable(self):
        # Cheap liveness probe; ping() raises if the server went away.
        try:
            self.connection.ping()
        except DatabaseError:
            return False
        else:
            return True
    @cached_property
    def mysql_version(self):
        # Parse the numeric (major, minor, patch) tuple out of the server's
        # version string; cached for the lifetime of the wrapper.
        with self.temporary_connection():
            server_info = self.connection.get_server_info()
        match = server_version_re.match(server_info)
        if not match:
            raise Exception('Unable to determine MySQL version from version string %r' % server_info)
        return tuple([int(x) for x in match.groups()])
| mit |
mccheung/kbengine | kbe/src/lib/python/Lib/idlelib/ParenMatch.py | 113 | 6713 | """ParenMatch -- An IDLE extension for parenthesis matching.
When you hit a right paren, the cursor should move briefly to the left
paren. Paren here is used generically; the matching applies to
parentheses, square brackets, and curly braces.
"""
from idlelib.HyperParser import HyperParser
from idlelib.configHandler import idleConf
_openers = {')':'(',']':'[','}':'{'}
CHECK_DELAY = 100 # miliseconds
class ParenMatch:
    """Highlight matching parentheses
    There are three supported styles of paren matching, based loosely
    on the Emacs options. The style is selected based on the
    HILITE_STYLE attribute; it can be changed using the set_style
    method.
    The supported styles are:
    default -- When a right paren is typed, highlight the matching
    left paren for 1/2 sec.
    expression -- When a right paren is typed, highlight the entire
    expression from the left paren to the right paren.
    TODO:
    - extend IDLE with configuration dialog to change options
    - implement rest of Emacs highlight styles (see below)
    - print mismatch warning in IDLE status window
    Note: In Emacs, there are several styles of highlight where the
    matching paren is highlighted whenever the cursor is immediately
    to the right of a right paren. I don't know how to do that in Tk,
    so I haven't bothered.
    """
    # Menu entry contributed to the editor's Edit menu.
    menudefs = [
        ('edit', [
            ("Show surrounding parens", "<<flash-paren>>"),
        ])
    ]
    # User-configurable options, read once from the IDLE config at class
    # definition time.
    STYLE = idleConf.GetOption('extensions','ParenMatch','style',
            default='expression')
    FLASH_DELAY = idleConf.GetOption('extensions','ParenMatch','flash-delay',
            type='int',default=500)
    HILITE_CONFIG = idleConf.GetHighlight(idleConf.CurrentTheme(),'hilite')
    BELL = idleConf.GetOption('extensions','ParenMatch','bell',
            type='bool',default=1)
    RESTORE_VIRTUAL_EVENT_NAME = "<<parenmatch-check-restore>>"
    # We want the restore event be called before the usual return and
    # backspace events.
    RESTORE_SEQUENCES = ("<KeyPress>", "<ButtonPress>",
                         "<Key-Return>", "<Key-BackSpace>")
    def __init__(self, editwin):
        self.editwin = editwin
        self.text = editwin.text
        # Bind the check-restore event to the function restore_event,
        # so that we can then use activate_restore (which calls event_add)
        # and deactivate_restore (which calls event_delete).
        editwin.text.bind(self.RESTORE_VIRTUAL_EVENT_NAME,
                          self.restore_event)
        # Monotonically increasing id used to invalidate stale timers.
        self.counter = 0
        self.is_restore_active = 0
        self.set_style(self.STYLE)
    def activate_restore(self):
        # Start listening for user input that should clear the highlight.
        if not self.is_restore_active:
            for seq in self.RESTORE_SEQUENCES:
                self.text.event_add(self.RESTORE_VIRTUAL_EVENT_NAME, seq)
            self.is_restore_active = True
    def deactivate_restore(self):
        # Stop listening once the highlight has been removed.
        if self.is_restore_active:
            for seq in self.RESTORE_SEQUENCES:
                self.text.event_delete(self.RESTORE_VIRTUAL_EVENT_NAME, seq)
            self.is_restore_active = False
    def set_style(self, style):
        # Select the tag-creation and timeout strategies for the chosen style.
        self.STYLE = style
        if style == "default":
            self.create_tag = self.create_tag_default
            self.set_timeout = self.set_timeout_last
        elif style == "expression":
            self.create_tag = self.create_tag_expression
            self.set_timeout = self.set_timeout_none
    def flash_paren_event(self, event):
        # Triggered by the <<flash-paren>> menu event: highlight the brackets
        # surrounding the insertion point.
        indices = (HyperParser(self.editwin, "insert")
                   .get_surrounding_brackets())
        if indices is None:
            self.warn_mismatched()
            return
        self.activate_restore()
        self.create_tag(indices)
        self.set_timeout_last()
    def paren_closed_event(self, event):
        # Triggered whenever a closing bracket character is typed.
        # If it was a shortcut and not really a closing paren, quit.
        closer = self.text.get("insert-1c")
        if closer not in _openers:
            return
        hp = HyperParser(self.editwin, "insert-1c")
        if not hp.is_in_code():
            # Ignore brackets inside strings and comments.
            return
        indices = hp.get_surrounding_brackets(_openers[closer], True)
        if indices is None:
            self.warn_mismatched()
            return
        self.activate_restore()
        self.create_tag(indices)
        self.set_timeout()
    def restore_event(self, event=None):
        # Remove the highlight and stop watching for input.
        self.text.tag_delete("paren")
        self.deactivate_restore()
        self.counter += 1   # disable the last timer, if there is one.
    def handle_restore_timer(self, timer_count):
        # Only act if no newer highlight superseded the one that set this timer.
        if timer_count == self.counter:
            self.restore_event()
    def warn_mismatched(self):
        if self.BELL:
            self.text.bell()
    # any one of the create_tag_XXX methods can be used depending on
    # the style
    def create_tag_default(self, indices):
        """Highlight the single paren that matches"""
        self.text.tag_add("paren", indices[0])
        self.text.tag_config("paren", self.HILITE_CONFIG)
    def create_tag_expression(self, indices):
        """Highlight the entire expression"""
        if self.text.get(indices[1]) in (')', ']', '}'):
            rightindex = indices[1]+"+1c"
        else:
            rightindex = indices[1]
        self.text.tag_add("paren", indices[0], rightindex)
        self.text.tag_config("paren", self.HILITE_CONFIG)
    # any one of the set_timeout_XXX methods can be used depending on
    # the style
    def set_timeout_none(self):
        """Highlight will remain until user input turns it off
        or the insert has moved"""
        # After CHECK_DELAY, call a function which disables the "paren" tag
        # if the event is for the most recent timer and the insert has changed,
        # or schedules another call for itself.
        self.counter += 1
        def callme(callme, self=self, c=self.counter,
                   index=self.text.index("insert")):
            if index != self.text.index("insert"):
                self.handle_restore_timer(c)
            else:
                self.editwin.text_frame.after(CHECK_DELAY, callme, callme)
        self.editwin.text_frame.after(CHECK_DELAY, callme, callme)
    def set_timeout_last(self):
        """The last highlight created will be removed after .5 sec"""
        # associate a counter with an event; only disable the "paren"
        # tag if the event is for the most recent timer.
        self.counter += 1
        self.editwin.text_frame.after(
            self.FLASH_DELAY,
            lambda self=self, c=self.counter: self.handle_restore_timer(c))
if __name__ == '__main__':
    # Run this extension's unit tests when the module is executed directly.
    import unittest
    unittest.main('idlelib.idle_test.test_parenmatch', verbosity=2)
| lgpl-3.0 |
rgeleta/odoo | openerp/addons/base/tests/test_uninstall.py | 200 | 2717 | # -*- coding: utf-8 -*-
# This assumes an existing but uninitialized database.
import unittest2
import openerp
from openerp import SUPERUSER_ID
import common
ADMIN_USER_ID = common.ADMIN_USER_ID
def registry(model):
    """Return the model proxy named `model` from the test database registry."""
    db_registry = openerp.modules.registry.RegistryManager.get(common.get_db_name())
    return db_registry[model]
def cursor():
    """Open and return a fresh cursor on the test database."""
    db_registry = openerp.modules.registry.RegistryManager.get(common.get_db_name())
    return db_registry.cursor()
def get_module(module_name):
    """Return the model named `module_name`, or None if it is not registered."""
    db_registry = openerp.modules.registry.RegistryManager.get(common.get_db_name())
    return db_registry.get(module_name)
def reload_registry():
    """Rebuild the registry so module install/uninstall changes take effect."""
    openerp.modules.registry.RegistryManager.new(common.get_db_name(),
                                                 update_module=True)
def search_registry(model_name, domain):
    """Search `model_name` with `domain` as the superuser and return the ids.

    The cursor is closed in a finally block so it is not leaked if the
    search itself raises.
    """
    cr = cursor()
    try:
        model = registry(model_name)
        return model.search(cr, SUPERUSER_ID, domain, {})
    finally:
        cr.close()
def install_module(module_name):
    """Install `module_name` through ir.module.module, then reload the registry.

    Raises AssertionError if the module is not listed exactly once. The
    cursor is closed in a finally block so a failed install (including the
    assertion) no longer leaks it.
    """
    ir_module_module = registry('ir.module.module')
    cr = cursor()
    try:
        module_ids = ir_module_module.search(cr, SUPERUSER_ID,
                                             [('name', '=', module_name)], {})
        assert len(module_ids) == 1
        ir_module_module.button_install(cr, SUPERUSER_ID, module_ids, {})
        cr.commit()
    finally:
        cr.close()
    reload_registry()
def uninstall_module(module_name):
    """Uninstall `module_name` through ir.module.module, then reload the registry.

    Mirrors install_module: asserts the module is listed exactly once and
    closes the cursor in a finally block so failures no longer leak it.
    """
    ir_module_module = registry('ir.module.module')
    cr = cursor()
    try:
        module_ids = ir_module_module.search(cr, SUPERUSER_ID,
                                             [('name', '=', module_name)], {})
        assert len(module_ids) == 1
        ir_module_module.button_uninstall(cr, SUPERUSER_ID, module_ids, {})
        cr.commit()
    finally:
        cr.close()
    reload_registry()
class test_uninstall(unittest2.TestCase):
    """
    Test the install/uninstall of a test module. The module is available in
    `openerp.tests` which should be present in the addons-path.
    """
    # NOTE: the 01/02 prefixes matter -- unittest runs methods in
    # alphabetical order, and test_02 depends on test_01 having installed
    # the module first.
    def test_01_install(self):
        """ Check a few things showing the module is installed. """
        install_module('test_uninstall')
        # The model, its ir.model.data records and its fields must all exist.
        assert get_module('test_uninstall.model')
        assert search_registry('ir.model.data',
                               [('module', '=', 'test_uninstall')])
        assert search_registry('ir.model.fields',
                               [('model', '=', 'test_uninstall.model')])
    def test_02_uninstall(self):
        """ Check a few things showing the module is uninstalled. """
        uninstall_module('test_uninstall')
        # Everything created at install time must be gone again.
        assert not get_module('test_uninstall.model')
        assert not search_registry('ir.model.data',
                                   [('module', '=', 'test_uninstall')])
        assert not search_registry('ir.model.fields',
                                   [('model', '=', 'test_uninstall.model')])
if __name__ == '__main__':
    # Allow running this file directly, outside the OpenERP test runner.
    unittest2.main()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
ABaldwinHunter/django-clone-classic | django/contrib/contenttypes/models.py | 80 | 7376 | from __future__ import unicode_literals
from django.apps import apps
from django.db import models
from django.db.utils import IntegrityError, OperationalError, ProgrammingError
from django.utils.encoding import force_text, python_2_unicode_compatible
from django.utils.translation import ugettext_lazy as _
class ContentTypeManager(models.Manager):
    use_in_migrations = True
    # Cache to avoid re-looking up ContentType objects all over the place.
    # This cache is shared by all the get_for_* methods.
    # NOTE: keyed first by database alias, then by (app_label, model) AND by
    # pk, so both name-based and id-based lookups hit the same cache.
    _cache = {}
    def get_by_natural_key(self, app_label, model):
        """Return the ContentType for (app_label, model), using the cache."""
        try:
            ct = self.__class__._cache[self.db][(app_label, model)]
        except KeyError:
            ct = self.get(app_label=app_label, model=model)
            self._add_to_cache(self.db, ct)
        return ct
    def _get_opts(self, model, for_concrete_model):
        """Return the Options (_meta) to key on, resolving proxies/deferred
        classes to their concrete model when requested."""
        if for_concrete_model:
            model = model._meta.concrete_model
        elif model._deferred:
            model = model._meta.proxy_for_model
        return model._meta
    def _get_from_cache(self, opts):
        # Raises KeyError when the ContentType is not cached yet.
        key = (opts.app_label, opts.model_name)
        return self.__class__._cache[self.db][key]
    def get_for_model(self, model, for_concrete_model=True):
        """
        Returns the ContentType object for a given model, creating the
        ContentType if necessary. Lookups are cached so that subsequent lookups
        for the same model don't hit the database.
        """
        opts = self._get_opts(model, for_concrete_model)
        try:
            return self._get_from_cache(opts)
        except KeyError:
            pass
        # The ContentType entry was not found in the cache, therefore we
        # proceed to load or create it.
        try:
            try:
                # We start with get() and not get_or_create() in order to use
                # the db_for_read (see #20401).
                ct = self.get(app_label=opts.app_label, model=opts.model_name)
            except self.model.DoesNotExist:
                # Not found in the database; we proceed to create it. This time we
                # use get_or_create to take care of any race conditions.
                ct, created = self.get_or_create(
                    app_label=opts.app_label,
                    model=opts.model_name,
                )
        except (OperationalError, ProgrammingError, IntegrityError):
            # It's possible to migrate a single app before contenttypes,
            # as it's not a required initial dependency (it's contrib!)
            # Have a nice error for this.
            raise RuntimeError(
                "Error creating new content types. Please make sure contenttypes "
                "is migrated before trying to migrate apps individually."
            )
        self._add_to_cache(self.db, ct)
        return ct
    def get_for_models(self, *models, **kwargs):
        """
        Given *models, returns a dictionary mapping {model: content_type}.
        """
        for_concrete_models = kwargs.pop('for_concrete_models', True)
        # Final results
        results = {}
        # models that aren't already in the cache
        needed_app_labels = set()
        needed_models = set()
        needed_opts = set()
        for model in models:
            opts = self._get_opts(model, for_concrete_models)
            try:
                ct = self._get_from_cache(opts)
            except KeyError:
                needed_app_labels.add(opts.app_label)
                needed_models.add(opts.model_name)
                needed_opts.add(opts)
            else:
                results[model] = ct
        if needed_opts:
            # One bulk query for all cache misses; may over-fetch (cartesian
            # of labels x names), so match each row back against needed_opts.
            cts = self.filter(
                app_label__in=needed_app_labels,
                model__in=needed_models
            )
            for ct in cts:
                model = ct.model_class()
                if model._meta in needed_opts:
                    results[model] = ct
                    needed_opts.remove(model._meta)
                self._add_to_cache(self.db, ct)
        for opts in needed_opts:
            # These weren't in the cache, or the DB, create them.
            ct = self.create(
                app_label=opts.app_label,
                model=opts.model_name,
            )
            self._add_to_cache(self.db, ct)
            results[ct.model_class()] = ct
        return results
    def get_for_id(self, id):
        """
        Lookup a ContentType by ID. Uses the same shared cache as get_for_model
        (though ContentTypes are obviously not created on-the-fly by get_by_id).
        """
        try:
            ct = self.__class__._cache[self.db][id]
        except KeyError:
            # This could raise a DoesNotExist; that's correct behavior and will
            # make sure that only correct ctypes get stored in the cache dict.
            ct = self.get(pk=id)
            self._add_to_cache(self.db, ct)
        return ct
    def clear_cache(self):
        """
        Clear out the content-type cache. This needs to happen during database
        flushes to prevent caching of "stale" content type IDs (see
        django.contrib.contenttypes.management.update_contenttypes for where
        this gets called).
        """
        self.__class__._cache.clear()
    def _add_to_cache(self, using, ct):
        """Insert a ContentType into the cache."""
        # Note it's possible for ContentType objects to be stale; model_class() will return None.
        # Hence, there is no reliance on model._meta.app_label here, just using the model fields instead.
        key = (ct.app_label, ct.model)
        self.__class__._cache.setdefault(using, {})[key] = ct
        self.__class__._cache.setdefault(using, {})[ct.id] = ct
@python_2_unicode_compatible
class ContentType(models.Model):
    # Identifies a single installed model by its (app_label, model-name) pair.
    app_label = models.CharField(max_length=100)
    model = models.CharField(_('python model class name'), max_length=100)
    objects = ContentTypeManager()
    class Meta:
        verbose_name = _('content type')
        verbose_name_plural = _('content types')
        db_table = 'django_content_type'
        unique_together = (('app_label', 'model'),)
    def __str__(self):
        # Delegates to `name`, which prefers the model's verbose_name.
        return self.name
    @property
    def name(self):
        # Falls back to the raw model name when the model class is no longer
        # importable (a stale ContentType row).
        model = self.model_class()
        if not model:
            return self.model
        return force_text(model._meta.verbose_name)
    def model_class(self):
        "Returns the Python model class for this type of content."
        try:
            return apps.get_model(self.app_label, self.model)
        except LookupError:
            # The app/model is no longer installed.
            return None
    def get_object_for_this_type(self, **kwargs):
        """
        Returns an object of this type for the keyword arguments given.
        Basically, this is a proxy around this object_type's get_object() model
        method. The ObjectNotExist exception, if thrown, will not be caught,
        so code that calls this method should catch it.
        """
        return self.model_class()._base_manager.using(self._state.db).get(**kwargs)
    def get_all_objects_for_this_type(self, **kwargs):
        """
        Returns all objects of this type for the keyword arguments given.
        """
        return self.model_class()._base_manager.using(self._state.db).filter(**kwargs)
    def natural_key(self):
        # Used by serialization to reference content types portably.
        return (self.app_label, self.model)
| bsd-3-clause |
Leila20/django | django/contrib/gis/geos/point.py | 89 | 5969 | import warnings
from ctypes import c_uint
from django.contrib.gis.geos import prototypes as capi
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.geometry import GEOSGeometry
from django.utils import six
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.six.moves import range
class Point(GEOSGeometry):
_minlength = 2
_maxlength = 3
has_cs = True
def __init__(self, x=None, y=None, z=None, srid=None):
"""
The Point object may be initialized with either a tuple, or individual
parameters.
For Example:
>>> p = Point((5, 23)) # 2D point, passed in as a tuple
>>> p = Point(5, 23, 8) # 3D point, passed in with individual parameters
"""
if x is None:
coords = []
elif isinstance(x, (tuple, list)):
# Here a tuple or list was passed in under the `x` parameter.
coords = x
elif isinstance(x, six.integer_types + (float,)) and isinstance(y, six.integer_types + (float,)):
# Here X, Y, and (optionally) Z were passed in individually, as parameters.
if isinstance(z, six.integer_types + (float,)):
coords = [x, y, z]
else:
coords = [x, y]
else:
raise TypeError('Invalid parameters given for Point initialization.')
point = self._create_point(len(coords), coords)
# Initializing using the address returned from the GEOS
# createPoint factory.
super(Point, self).__init__(point, srid=srid)
def _create_point(self, ndim, coords):
"""
Create a coordinate sequence, set X, Y, [Z], and create point
"""
if not ndim:
return capi.create_point(None)
if ndim < 2 or ndim > 3:
raise TypeError('Invalid point dimension: %s' % str(ndim))
cs = capi.create_cs(c_uint(1), c_uint(ndim))
i = iter(coords)
capi.cs_setx(cs, 0, next(i))
capi.cs_sety(cs, 0, next(i))
if ndim == 3:
capi.cs_setz(cs, 0, next(i))
return capi.create_point(cs)
def _set_list(self, length, items):
ptr = self._create_point(length, items)
if ptr:
capi.destroy_geom(self.ptr)
self._ptr = ptr
self._set_cs()
else:
# can this happen?
raise GEOSException('Geometry resulting from slice deletion was invalid.')
def _set_single(self, index, value):
self._cs.setOrdinate(index, 0, value)
def __iter__(self):
"Allows iteration over coordinates of this Point."
for i in range(len(self)):
yield self[i]
def __len__(self):
"Returns the number of dimensions for this Point (either 0, 2 or 3)."
if self.empty:
return 0
if self.hasz:
return 3
else:
return 2
def _get_single_external(self, index):
if index == 0:
return self.x
elif index == 1:
return self.y
elif index == 2:
return self.z
_get_single_internal = _get_single_external
@property
def x(self):
"Returns the X component of the Point."
return self._cs.getOrdinate(0, 0)
@x.setter
def x(self, value):
"Sets the X component of the Point."
self._cs.setOrdinate(0, 0, value)
@property
def y(self):
"Returns the Y component of the Point."
return self._cs.getOrdinate(1, 0)
@y.setter
def y(self, value):
"Sets the Y component of the Point."
self._cs.setOrdinate(1, 0, value)
@property
def z(self):
"Returns the Z component of the Point."
return self._cs.getOrdinate(2, 0) if self.hasz else None
@z.setter
def z(self, value):
"Sets the Z component of the Point."
if not self.hasz:
raise GEOSException('Cannot set Z on 2D Point.')
self._cs.setOrdinate(2, 0, value)
def get_x(self):
warnings.warn(
"`get_x()` is deprecated, use the `x` property instead.",
RemovedInDjango20Warning, 2
)
return self.x
def set_x(self, value):
warnings.warn(
"`set_x()` is deprecated, use the `x` property instead.",
RemovedInDjango20Warning, 2
)
self.x = value
def get_y(self):
warnings.warn(
"`get_y()` is deprecated, use the `y` property instead.",
RemovedInDjango20Warning, 2
)
return self.y
def set_y(self, value):
warnings.warn(
"`set_y()` is deprecated, use the `y` property instead.",
RemovedInDjango20Warning, 2
)
self.y = value
def get_z(self):
warnings.warn(
"`get_z()` is deprecated, use the `z` property instead.",
RemovedInDjango20Warning, 2
)
return self.z
def set_z(self, value):
warnings.warn(
"`set_z()` is deprecated, use the `z` property instead.",
RemovedInDjango20Warning, 2
)
self.z = value
# ### Tuple setting and retrieval routines. ###
@property
def tuple(self):
"Returns a tuple of the point."
return self._cs.tuple
@tuple.setter
def tuple(self, tup):
"Sets the coordinates of the point with the given tuple."
self._cs[0] = tup
def get_coords(self):
warnings.warn(
"`get_coords()` is deprecated, use the `tuple` property instead.",
RemovedInDjango20Warning, 2
)
return self.tuple
def set_coords(self, tup):
warnings.warn(
"`set_coords()` is deprecated, use the `tuple` property instead.",
RemovedInDjango20Warning, 2
)
self.tuple = tup
# The tuple and coords properties
coords = tuple
| bsd-3-clause |
pku9104038/edx-platform | common/djangoapps/track/middleware.py | 15 | 3327 | import json
import re
import logging
from django.conf import settings
from track import views
from track import contexts
from eventtracking import tracker
log = logging.getLogger(__name__)
CONTEXT_NAME = 'edx.request'
class TrackMiddleware(object):
"""
Tracks all requests made, as well as setting up context for other server
emitted events.
"""
def process_request(self, request):
try:
self.enter_request_context(request)
if not self.should_process_request(request):
return
# Removes passwords from the tracking logs
# WARNING: This list needs to be changed whenever we change
# password handling functionality.
#
# As of the time of this comment, only 'password' is used
# The rest are there for future extension.
#
# Passwords should never be sent as GET requests, but
# this can happen due to older browser bugs. We censor
# this too.
#
# We should manually confirm no passwords make it into log
# files when we change this.
censored_strings = ['password', 'newpassword', 'new_password',
'oldpassword', 'old_password']
post_dict = dict(request.POST)
get_dict = dict(request.GET)
for string in censored_strings:
if string in post_dict:
post_dict[string] = '*' * 8
if string in get_dict:
get_dict[string] = '*' * 8
event = {'GET': dict(get_dict),
'POST': dict(post_dict)}
# TODO: Confirm no large file uploads
event = json.dumps(event)
event = event[:512]
views.server_track(request, request.META['PATH_INFO'], event)
except:
pass
def should_process_request(self, request):
"""Don't track requests to the specified URL patterns"""
path = request.META['PATH_INFO']
ignored_url_patterns = getattr(settings, 'TRACKING_IGNORE_URL_PATTERNS', [])
for pattern in ignored_url_patterns:
# Note we are explicitly relying on python's internal caching of
# compiled regular expressions here.
if re.match(pattern, path):
return False
return True
def enter_request_context(self, request):
"""
Extract information from the request and add it to the tracking
context.
"""
context = {}
context.update(contexts.course_context_from_url(request.build_absolute_uri()))
try:
context['user_id'] = request.user.pk
except AttributeError:
context['user_id'] = ''
if settings.DEBUG:
log.error('Cannot determine primary key of logged in user.')
tracker.get_tracker().enter_context(
CONTEXT_NAME,
context
)
def process_response(self, request, response): # pylint: disable=unused-argument
"""Exit the context if it exists."""
try:
tracker.get_tracker().exit_context(CONTEXT_NAME)
except: # pylint: disable=bare-except
pass
return response
| agpl-3.0 |
smmribeiro/intellij-community | python/helpers/pydev/pydevd_attach_to_process/winappdbg/window.py | 102 | 24309 | #!~/.wine/drive_c/Python25/python.exe
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
Window instrumentation.
@group Instrumentation:
Window
"""
__revision__ = "$Id$"
__all__ = ['Window']
from winappdbg import win32
# delayed imports
Process = None
Thread = None
#==============================================================================
# Unlike Process, Thread and Module, there's no container for Window objects.
# That's because Window objects don't really store any data besides the handle.
# XXX TODO
# * implement sending fake user input (mouse and keyboard messages)
# * maybe implement low-level hooks? (they don't require a dll to be injected)
# XXX TODO
#
# Will it be possible to implement window hooks too? That requires a DLL to be
# injected in the target process. Perhaps with CPython it could be done easier,
# compiling a native extension is the safe bet, but both require having a non
# pure Python module, which is something I was trying to avoid so far.
#
# Another possibility would be to malloc some CC's in the target process and
# point the hook callback to it. We'd need to have the remote procedure call
# feature first as (I believe) the hook can't be set remotely in this case.
class Window (object):
"""
Interface to an open window in the current desktop.
@group Properties:
get_handle, get_pid, get_tid,
get_process, get_thread,
set_process, set_thread,
get_classname, get_style, get_extended_style,
get_text, set_text,
get_placement, set_placement,
get_screen_rect, get_client_rect,
screen_to_client, client_to_screen
@group State:
is_valid, is_visible, is_enabled, is_maximized, is_minimized, is_child,
is_zoomed, is_iconic
@group Navigation:
get_parent, get_children, get_root, get_tree,
get_child_at
@group Instrumentation:
enable, disable, show, hide, maximize, minimize, restore, move, kill
@group Low-level access:
send, post
@type hWnd: int
@ivar hWnd: Window handle.
@type dwProcessId: int
@ivar dwProcessId: Global ID of the process that owns this window.
@type dwThreadId: int
@ivar dwThreadId: Global ID of the thread that owns this window.
@type process: L{Process}
@ivar process: Process that owns this window.
Use the L{get_process} method instead.
@type thread: L{Thread}
@ivar thread: Thread that owns this window.
Use the L{get_thread} method instead.
@type classname: str
@ivar classname: Window class name.
@type text: str
@ivar text: Window text (caption).
@type placement: L{win32.WindowPlacement}
@ivar placement: Window placement in the desktop.
"""
def __init__(self, hWnd = None, process = None, thread = None):
"""
@type hWnd: int or L{win32.HWND}
@param hWnd: Window handle.
@type process: L{Process}
@param process: (Optional) Process that owns this window.
@type thread: L{Thread}
@param thread: (Optional) Thread that owns this window.
"""
self.hWnd = hWnd
self.dwProcessId = None
self.dwThreadId = None
self.set_process(process)
self.set_thread(thread)
@property
def _as_parameter_(self):
"""
Compatibility with ctypes.
Allows passing transparently a Window object to an API call.
"""
return self.get_handle()
def get_handle(self):
"""
@rtype: int
@return: Window handle.
@raise ValueError: No window handle set.
"""
if self.hWnd is None:
raise ValueError("No window handle set!")
return self.hWnd
def get_pid(self):
"""
@rtype: int
@return: Global ID of the process that owns this window.
"""
if self.dwProcessId is not None:
return self.dwProcessId
self.__get_pid_and_tid()
return self.dwProcessId
def get_tid(self):
"""
@rtype: int
@return: Global ID of the thread that owns this window.
"""
if self.dwThreadId is not None:
return self.dwThreadId
self.__get_pid_and_tid()
return self.dwThreadId
def __get_pid_and_tid(self):
"Internally used by get_pid() and get_tid()."
self.dwThreadId, self.dwProcessId = \
win32.GetWindowThreadProcessId(self.get_handle())
def __load_Process_class(self):
global Process # delayed import
if Process is None:
from winappdbg.process import Process
def __load_Thread_class(self):
global Thread # delayed import
if Thread is None:
from winappdbg.thread import Thread
def get_process(self):
"""
@rtype: L{Process}
@return: Parent Process object.
"""
if self.__process is not None:
return self.__process
self.__load_Process_class()
self.__process = Process(self.get_pid())
return self.__process
def set_process(self, process = None):
"""
Manually set the parent process. Use with care!
@type process: L{Process}
@param process: (Optional) Process object. Use C{None} to autodetect.
"""
if process is None:
self.__process = None
else:
self.__load_Process_class()
if not isinstance(process, Process):
msg = "Parent process must be a Process instance, "
msg += "got %s instead" % type(process)
raise TypeError(msg)
self.dwProcessId = process.get_pid()
self.__process = process
def get_thread(self):
"""
@rtype: L{Thread}
@return: Parent Thread object.
"""
if self.__thread is not None:
return self.__thread
self.__load_Thread_class()
self.__thread = Thread(self.get_tid())
return self.__thread
def set_thread(self, thread = None):
"""
Manually set the thread process. Use with care!
@type thread: L{Thread}
@param thread: (Optional) Thread object. Use C{None} to autodetect.
"""
if thread is None:
self.__thread = None
else:
self.__load_Thread_class()
if not isinstance(thread, Thread):
msg = "Parent thread must be a Thread instance, "
msg += "got %s instead" % type(thread)
raise TypeError(msg)
self.dwThreadId = thread.get_tid()
self.__thread = thread
def __get_window(self, hWnd):
"""
User internally to get another Window from this one.
It'll try to copy the parent Process and Thread references if possible.
"""
window = Window(hWnd)
if window.get_pid() == self.get_pid():
window.set_process( self.get_process() )
if window.get_tid() == self.get_tid():
window.set_thread( self.get_thread() )
return window
#------------------------------------------------------------------------------
def get_classname(self):
"""
@rtype: str
@return: Window class name.
@raise WindowsError: An error occured while processing this request.
"""
return win32.GetClassName( self.get_handle() )
def get_style(self):
"""
@rtype: int
@return: Window style mask.
@raise WindowsError: An error occured while processing this request.
"""
return win32.GetWindowLongPtr( self.get_handle(), win32.GWL_STYLE )
def get_extended_style(self):
"""
@rtype: int
@return: Window extended style mask.
@raise WindowsError: An error occured while processing this request.
"""
return win32.GetWindowLongPtr( self.get_handle(), win32.GWL_EXSTYLE )
def get_text(self):
"""
@see: L{set_text}
@rtype: str
@return: Window text (caption) on success, C{None} on error.
"""
try:
return win32.GetWindowText( self.get_handle() )
except WindowsError:
return None
def set_text(self, text):
"""
Set the window text (caption).
@see: L{get_text}
@type text: str
@param text: New window text.
@raise WindowsError: An error occured while processing this request.
"""
win32.SetWindowText( self.get_handle(), text )
def get_placement(self):
"""
Retrieve the window placement in the desktop.
@see: L{set_placement}
@rtype: L{win32.WindowPlacement}
@return: Window placement in the desktop.
@raise WindowsError: An error occured while processing this request.
"""
return win32.GetWindowPlacement( self.get_handle() )
def set_placement(self, placement):
"""
Set the window placement in the desktop.
@see: L{get_placement}
@type placement: L{win32.WindowPlacement}
@param placement: Window placement in the desktop.
@raise WindowsError: An error occured while processing this request.
"""
win32.SetWindowPlacement( self.get_handle(), placement )
def get_screen_rect(self):
"""
Get the window coordinates in the desktop.
@rtype: L{win32.Rect}
@return: Rectangle occupied by the window in the desktop.
@raise WindowsError: An error occured while processing this request.
"""
return win32.GetWindowRect( self.get_handle() )
def get_client_rect(self):
"""
Get the window's client area coordinates in the desktop.
@rtype: L{win32.Rect}
@return: Rectangle occupied by the window's client area in the desktop.
@raise WindowsError: An error occured while processing this request.
"""
cr = win32.GetClientRect( self.get_handle() )
cr.left, cr.top = self.client_to_screen(cr.left, cr.top)
cr.right, cr.bottom = self.client_to_screen(cr.right, cr.bottom)
return cr
# XXX TODO
# * properties x, y, width, height
# * properties left, top, right, bottom
process = property(get_process, set_process, doc="")
thread = property(get_thread, set_thread, doc="")
classname = property(get_classname, doc="")
style = property(get_style, doc="")
exstyle = property(get_extended_style, doc="")
text = property(get_text, set_text, doc="")
placement = property(get_placement, set_placement, doc="")
#------------------------------------------------------------------------------
def client_to_screen(self, x, y):
"""
Translates window client coordinates to screen coordinates.
@note: This is a simplified interface to some of the functionality of
the L{win32.Point} class.
@see: {win32.Point.client_to_screen}
@type x: int
@param x: Horizontal coordinate.
@type y: int
@param y: Vertical coordinate.
@rtype: tuple( int, int )
@return: Translated coordinates in a tuple (x, y).
@raise WindowsError: An error occured while processing this request.
"""
return tuple( win32.ClientToScreen( self.get_handle(), (x, y) ) )
def screen_to_client(self, x, y):
"""
Translates window screen coordinates to client coordinates.
@note: This is a simplified interface to some of the functionality of
the L{win32.Point} class.
@see: {win32.Point.screen_to_client}
@type x: int
@param x: Horizontal coordinate.
@type y: int
@param y: Vertical coordinate.
@rtype: tuple( int, int )
@return: Translated coordinates in a tuple (x, y).
@raise WindowsError: An error occured while processing this request.
"""
return tuple( win32.ScreenToClient( self.get_handle(), (x, y) ) )
#------------------------------------------------------------------------------
def get_parent(self):
"""
@see: L{get_children}
@rtype: L{Window} or None
@return: Parent window. Returns C{None} if the window has no parent.
@raise WindowsError: An error occured while processing this request.
"""
hWnd = win32.GetParent( self.get_handle() )
if hWnd:
return self.__get_window(hWnd)
def get_children(self):
"""
@see: L{get_parent}
@rtype: list( L{Window} )
@return: List of child windows.
@raise WindowsError: An error occured while processing this request.
"""
return [
self.__get_window(hWnd) \
for hWnd in win32.EnumChildWindows( self.get_handle() )
]
def get_tree(self):
"""
@see: L{get_root}
@rtype: dict( L{Window} S{->} dict( ... ) )
@return: Dictionary of dictionaries forming a tree of child windows.
@raise WindowsError: An error occured while processing this request.
"""
subtree = dict()
for aWindow in self.get_children():
subtree[ aWindow ] = aWindow.get_tree()
return subtree
def get_root(self):
"""
@see: L{get_tree}
@rtype: L{Window}
@return: If this is a child window, return the top-level window it
belongs to.
If this window is already a top-level window, returns itself.
@raise WindowsError: An error occured while processing this request.
"""
hWnd = self.get_handle()
history = set()
hPrevWnd = hWnd
while hWnd and hWnd not in history:
history.add(hWnd)
hPrevWnd = hWnd
hWnd = win32.GetParent(hWnd)
if hWnd in history:
# See: https://docs.google.com/View?id=dfqd62nk_228h28szgz
return self
if hPrevWnd != self.get_handle():
return self.__get_window(hPrevWnd)
return self
def get_child_at(self, x, y, bAllowTransparency = True):
"""
Get the child window located at the given coordinates. If no such
window exists an exception is raised.
@see: L{get_children}
@type x: int
@param x: Horizontal coordinate.
@type y: int
@param y: Vertical coordinate.
@type bAllowTransparency: bool
@param bAllowTransparency: If C{True} transparent areas in windows are
ignored, returning the window behind them. If C{False} transparent
areas are treated just like any other area.
@rtype: L{Window}
@return: Child window at the requested position, or C{None} if there
is no window at those coordinates.
"""
try:
if bAllowTransparency:
hWnd = win32.RealChildWindowFromPoint( self.get_handle(), (x, y) )
else:
hWnd = win32.ChildWindowFromPoint( self.get_handle(), (x, y) )
if hWnd:
return self.__get_window(hWnd)
except WindowsError:
pass
return None
#------------------------------------------------------------------------------
def is_valid(self):
"""
@rtype: bool
@return: C{True} if the window handle is still valid.
"""
return win32.IsWindow( self.get_handle() )
def is_visible(self):
"""
@see: {show}, {hide}
@rtype: bool
@return: C{True} if the window is in a visible state.
"""
return win32.IsWindowVisible( self.get_handle() )
def is_enabled(self):
"""
@see: {enable}, {disable}
@rtype: bool
@return: C{True} if the window is in an enabled state.
"""
return win32.IsWindowEnabled( self.get_handle() )
def is_maximized(self):
"""
@see: L{maximize}
@rtype: bool
@return: C{True} if the window is maximized.
"""
return win32.IsZoomed( self.get_handle() )
def is_minimized(self):
"""
@see: L{minimize}
@rtype: bool
@return: C{True} if the window is minimized.
"""
return win32.IsIconic( self.get_handle() )
def is_child(self):
"""
@see: L{get_parent}
@rtype: bool
@return: C{True} if the window is a child window.
"""
return win32.IsChild( self.get_handle() )
is_zoomed = is_maximized
is_iconic = is_minimized
#------------------------------------------------------------------------------
def enable(self):
"""
Enable the user input for the window.
@see: L{disable}
@raise WindowsError: An error occured while processing this request.
"""
win32.EnableWindow( self.get_handle(), True )
def disable(self):
"""
Disable the user input for the window.
@see: L{enable}
@raise WindowsError: An error occured while processing this request.
"""
win32.EnableWindow( self.get_handle(), False )
def show(self, bAsync = True):
"""
Make the window visible.
@see: L{hide}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request.
"""
if bAsync:
win32.ShowWindowAsync( self.get_handle(), win32.SW_SHOW )
else:
win32.ShowWindow( self.get_handle(), win32.SW_SHOW )
def hide(self, bAsync = True):
"""
Make the window invisible.
@see: L{show}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request.
"""
if bAsync:
win32.ShowWindowAsync( self.get_handle(), win32.SW_HIDE )
else:
win32.ShowWindow( self.get_handle(), win32.SW_HIDE )
def maximize(self, bAsync = True):
"""
Maximize the window.
@see: L{minimize}, L{restore}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request.
"""
if bAsync:
win32.ShowWindowAsync( self.get_handle(), win32.SW_MAXIMIZE )
else:
win32.ShowWindow( self.get_handle(), win32.SW_MAXIMIZE )
def minimize(self, bAsync = True):
"""
Minimize the window.
@see: L{maximize}, L{restore}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request.
"""
if bAsync:
win32.ShowWindowAsync( self.get_handle(), win32.SW_MINIMIZE )
else:
win32.ShowWindow( self.get_handle(), win32.SW_MINIMIZE )
def restore(self, bAsync = True):
"""
Unmaximize and unminimize the window.
@see: L{maximize}, L{minimize}
@type bAsync: bool
@param bAsync: Perform the request asynchronously.
@raise WindowsError: An error occured while processing this request.
"""
if bAsync:
win32.ShowWindowAsync( self.get_handle(), win32.SW_RESTORE )
else:
win32.ShowWindow( self.get_handle(), win32.SW_RESTORE )
def move(self, x = None, y = None, width = None, height = None,
bRepaint = True):
"""
Moves and/or resizes the window.
@note: This is request is performed syncronously.
@type x: int
@param x: (Optional) New horizontal coordinate.
@type y: int
@param y: (Optional) New vertical coordinate.
@type width: int
@param width: (Optional) Desired window width.
@type height: int
@param height: (Optional) Desired window height.
@type bRepaint: bool
@param bRepaint:
(Optional) C{True} if the window should be redrawn afterwards.
@raise WindowsError: An error occured while processing this request.
"""
if None in (x, y, width, height):
rect = self.get_screen_rect()
if x is None:
x = rect.left
if y is None:
y = rect.top
if width is None:
width = rect.right - rect.left
if height is None:
height = rect.bottom - rect.top
win32.MoveWindow(self.get_handle(), x, y, width, height, bRepaint)
def kill(self):
"""
Signals the program to quit.
@note: This is an asyncronous request.
@raise WindowsError: An error occured while processing this request.
"""
self.post(win32.WM_QUIT)
def send(self, uMsg, wParam = None, lParam = None, dwTimeout = None):
"""
Send a low-level window message syncronically.
@type uMsg: int
@param uMsg: Message code.
@param wParam:
The type and meaning of this parameter depends on the message.
@param lParam:
The type and meaning of this parameter depends on the message.
@param dwTimeout: Optional timeout for the operation.
Use C{None} to wait indefinitely.
@rtype: int
@return: The meaning of the return value depends on the window message.
Typically a value of C{0} means an error occured. You can get the
error code by calling L{win32.GetLastError}.
"""
if dwTimeout is None:
return win32.SendMessage(self.get_handle(), uMsg, wParam, lParam)
return win32.SendMessageTimeout(
self.get_handle(), uMsg, wParam, lParam,
win32.SMTO_ABORTIFHUNG | win32.SMTO_ERRORONEXIT, dwTimeout)
def post(self, uMsg, wParam = None, lParam = None):
"""
Post a low-level window message asyncronically.
@type uMsg: int
@param uMsg: Message code.
@param wParam:
The type and meaning of this parameter depends on the message.
@param lParam:
The type and meaning of this parameter depends on the message.
@raise WindowsError: An error occured while sending the message.
"""
win32.PostMessage(self.get_handle(), uMsg, wParam, lParam)
| apache-2.0 |
TobiasLohner/SkyLines | skylines/frontend/views/users.py | 1 | 4343 | from email.mime.text import MIMEText
from email.utils import formatdate
import smtplib
from flask import Blueprint, request, render_template, redirect, url_for, abort, current_app, flash, g
from flask.ext.babel import _
from werkzeug.exceptions import ServiceUnavailable
from sqlalchemy import func
from sqlalchemy.orm import joinedload
from skylines.database import db
from skylines.model import User
from skylines.model.event import create_new_user_event
from skylines.frontend.forms import CreatePilotForm, RecoverStep1Form, RecoverStep2Form
users_blueprint = Blueprint('users', 'skylines')
@users_blueprint.route('/')
def index():
users = User.query() \
.options(joinedload(User.club)) \
.order_by(func.lower(User.name))
return render_template('users/list.jinja',
active_page='settings',
users=users)
@users_blueprint.route('/new', methods=['GET', 'POST'])
def new():
form = CreatePilotForm()
if form.validate_on_submit():
return new_post(form)
return render_template('users/new.jinja', form=form)
def new_post(form):
user = User(
first_name=form.first_name.data,
last_name=form.last_name.data,
email_address=form.email_address.data,
password=form.password.data
)
user.created_ip = request.remote_addr
db.session.add(user)
create_new_user_event(user)
db.session.commit()
flash(_('Welcome to SkyLines, %(user)s! You can now log in and share your flights with the world!', user=user))
return redirect(url_for('index'))
def hex(value):
return int(value, 16)
@users_blueprint.route('/recover', methods=['GET', 'POST'])
def recover():
key = request.values.get('key', type=hex)
if key is None:
return recover_step1()
else:
return recover_step2(key)
def recover_step1():
form = RecoverStep1Form()
if form.validate_on_submit():
return recover_step1_post(form)
return render_template('users/recover_step1.jinja', form=form)
def recover_step1_post(form):
user = User.by_email_address(form.email_address.data)
if not user:
abort(404)
user.generate_recover_key(request.remote_addr)
send_recover_mail(user)
flash('Check your email, we have sent you a link to recover your password.')
db.session.commit()
return redirect(url_for('index'))
def send_recover_mail(user):
text = u"""Hi %s,
you have asked to recover your password (from IP %s). To enter a new
password, click on the following link:
http://skylines.aero/users/recover?key=%x
The SkyLines Team
""" % (unicode(user), request.remote_addr, user.recover_key)
msg = MIMEText(text.encode('utf-8'), 'plain', 'utf-8')
msg['Subject'] = 'SkyLines password recovery'
msg['From'] = current_app.config['EMAIL_FROM']
msg['To'] = user.email_address.encode('ascii')
msg['Date'] = formatdate(localtime=1)
try:
smtp = smtplib.SMTP(current_app.config['SMTP_SERVER'])
smtp.ehlo()
smtp.sendmail(current_app.config['EMAIL_FROM'].encode('ascii'),
user.email_address.encode('ascii'), msg.as_string())
smtp.quit()
except:
raise ServiceUnavailable(description=_(
"The mail server is currently not reachable. "
"Please try again later or contact the developers."))
def recover_step2(key):
user = User.by_recover_key(key)
if not user:
abort(404)
form = RecoverStep2Form(key='%x' % key)
if form.validate_on_submit():
return recover_step2_post(key, form)
return render_template('users/recover_step2.jinja', form=form)
def recover_step2_post(key, form):
user = User.by_recover_key(key)
if not user:
abort(404)
user.password = form.password.data
user.recover_key = None
flash(_('Password changed.'))
db.session.commit()
return redirect(url_for('index'))
@users_blueprint.route('/generate_keys')
def generate_keys():
"""Hidden method that generates missing tracking keys."""
if not g.current_user or not g.current_user.is_manager():
abort(403)
for user in User.query():
if user.tracking_key is None:
user.generate_tracking_key()
db.session.commit()
return redirect(url_for('.index'))
| agpl-3.0 |
flurischt/jedi | test/completion/flow_analysis.py | 13 | 2934 | def foo(x):
if 1.0:
return 1
else:
return ''
#? int()
foo(1)
# Exceptions are not analyzed. So check both if branches
def try_except(x):
try:
if 0:
return 1
else:
return ''
except AttributeError:
return 1.0
#? float() str()
try_except(1)
# Exceptions are not analyzed. So check both if branches
def try_except(x):
try:
if 0:
return 1
else:
return ''
except AttributeError:
return 1.0
#? float() str()
try_except(1)
# -----------------
# elif
# -----------------
def elif_flows1(x):
if False:
return 1
elif True:
return 1.0
else:
return ''
#? float()
elif_flows1(1)
def elif_flows2(x):
try:
if False:
return 1
elif 0:
return 1.0
else:
return ''
except ValueError:
return set
#? str() set
elif_flows2(1)
def elif_flows3(x):
try:
if True:
return 1
elif 0:
return 1.0
else:
return ''
except ValueError:
return set
#? int() set
elif_flows3(1)
# -----------------
# mid-difficulty if statements
# -----------------
def check(a):
if a is None:
return 1
return ''
return set
#? int()
check(None)
#? str()
check('asb')
a = list
if 2 == True:
a = set
elif 1 == True:
a = 0
#? int()
a
if check != 1:
a = ''
#? str()
a
if check == check:
a = list
#? list
a
if check != check:
a = set
else:
a = dict
#? dict
a
if not (check is not check):
a = 1
#? int()
a
# -----------------
# name resolution
# -----------------
a = list
def elif_name(x):
try:
if True:
a = 1
elif 0:
a = 1.0
else:
return ''
except ValueError:
a = x
return a
#? int() set
elif_name(set)
if 0:
a = ''
else:
a = int
#? int
a
# -----------------
# isinstance
# -----------------
class A(): pass
def isinst(x):
if isinstance(x, A):
return dict
elif isinstance(x, int) and x == 1 or x is True:
return set
elif isinstance(x, (float, reversed)):
return list
elif not isinstance(x, str):
return tuple
return 1
#? dict
isinst(A())
#? set
isinst(True)
#? set
isinst(1)
#? tuple
isinst(2)
#? list
isinst(1.0)
#? tuple
isinst(False)
#? int()
isinst('')
# -----------------
# flows that are not reachable should be able to access parent scopes.
# -----------------
foobar = ''
if 0:
within_flow = 1.0
#? float()
within_flow
#? str()
foobar
if 0:
nested = 1
#? int()
nested
#? float()
within_flow
#? str()
foobar
#?
nested
# -----------------
# True objects like modules
# -----------------
class X():
pass
if X:
a = 1
else:
a = ''
#? int()
a
| mit |
Ramalus/kivy | kivy/input/__init__.py | 45 | 1148 | # pylint: disable=W0611
'''
Input management
================
Our input system is wide and simple at the same time. We are currently able to
natively support :
* Windows multitouch events (pencil and finger)
* MacOSX touchpads
* Linux multitouch events (kernel and mtdev)
* Linux wacom drivers (pencil and finger)
* TUIO
All the input management is configurable in the Kivy :mod:`~kivy.config`. You
can easily use many multitouch devices in one Kivy application.
When the events have been read from the devices, they are dispatched through
a post processing module before being sent to your application. We also have
several default modules for :
* Double tap detection
* Decreasing jittering
* Decreasing the inaccuracy of touch on "bad" DIY hardware
* Ignoring regions
'''
from kivy.input.motionevent import MotionEvent
from kivy.input.postproc import kivy_postproc_modules
from kivy.input.provider import MotionEventProvider
from kivy.input.factory import MotionEventFactory
import kivy.input.providers
# Public API of :mod:`kivy.input`.  The class entries reuse the classes'
# own names via ``__name__``; ``kivy_postproc_modules`` is a plain dict
# (not a class), so its name must be spelled out as a string literal.
__all__ = (
    MotionEvent.__name__,
    MotionEventProvider.__name__,
    MotionEventFactory.__name__,
    'kivy_postproc_modules')
| mit |
kornicameister/ansible-modules-extras | notification/grove.py | 68 | 3450 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: grove
version_added: 1.4
short_description: Sends a notification to a grove.io channel
description:
- The M(grove) module sends a message for a service to a Grove.io
channel.
options:
channel_token:
description:
- Token of the channel to post to.
required: true
service:
description:
- Name of the service (displayed as the "user" in the message)
required: false
default: ansible
message:
description:
- Message content
required: true
url:
description:
- Service URL for the web client
required: false
icon_url:
description:
- Icon for the service
required: false
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 1.5.1
author: "Jonas Pfenniger (@zimbatm)"
'''
EXAMPLES = '''
- grove: >
channel_token=6Ph62VBBJOccmtTPZbubiPzdrhipZXtg
service=my-app
message=deployed {{ target }}
'''
import urllib
# Grove.io "new notice" endpoint; %s is interpolated with the channel token.
BASE_URL = 'https://grove.io/api/notice/%s/'
# ==============================================================
# do_notify_grove
def do_notify_grove(module, channel_token, service, message, url=None, icon_url=None):
    """POST a notification to the Grove.io channel for *channel_token*.

    :param module: the AnsibleModule instance (used for fetch_url/fail_json)
    :param channel_token: token identifying the target channel
    :param service: name shown as the "user" of the message
    :param message: message body
    :param url: optional service URL for the web client
    :param icon_url: optional icon URL for the service

    Calls ``module.fail_json`` (which exits) unless Grove answers HTTP 200.
    """
    # BUG FIX: urllib.urlencode exists only on Python 2; on Python 3 it
    # moved to urllib.parse.urlencode.  Import whichever is available so the
    # module works under both interpreters.
    try:
        from urllib import urlencode  # Python 2
    except ImportError:
        from urllib.parse import urlencode  # Python 3

    my_url = BASE_URL % (channel_token,)

    my_data = dict(service=service, message=message)
    # Optional fields are only transmitted when the caller supplied them.
    if url is not None:
        my_data['url'] = url
    if icon_url is not None:
        my_data['icon_url'] = icon_url

    data = urlencode(my_data)
    # fetch_url comes from ansible.module_utils.urls (wildcard-imported at
    # the bottom of this file) and honors the validate_certs parameter.
    response, info = fetch_url(module, my_url, data=data)
    if info['status'] != 200:
        module.fail_json(msg="failed to send notification: %s" % info['msg'])
# ==============================================================
# main
def main():
    """Entry point: parse module arguments and relay the message to Grove.io."""
    module = AnsibleModule(
        argument_spec=dict(
            channel_token=dict(type='str', required=True),
            message=dict(type='str', required=True),
            service=dict(type='str', default='ansible'),
            url=dict(type='str', default=None),
            icon_url=dict(type='str', default=None),
            validate_certs=dict(default='yes', type='bool'),
        )
    )

    params = module.params
    do_notify_grove(
        module,
        params['channel_token'],
        params['service'],
        params['message'],
        params['url'],
        params['icon_url'],
    )

    # do_notify_grove exits via fail_json on error, so reaching this point
    # means the notification was accepted.
    module.exit_json(msg="OK")
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.