repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
fbradyirl/home-assistant | homeassistant/auth/providers/legacy_api_password.py | 4 | 3686 | """
Support Legacy API password auth provider.
It will be removed when auth system production ready
"""
import hmac
from typing import Any, Dict, Optional, cast
import voluptuous as vol
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
import homeassistant.helpers.config_validation as cv
from . import AuthProvider, AUTH_PROVIDER_SCHEMA, AUTH_PROVIDERS, LoginFlow
from .. import AuthManager
from ..models import Credentials, UserMeta, User
AUTH_PROVIDER_TYPE = "legacy_api_password"
CONF_API_PASSWORD = "api_password"
CONFIG_SCHEMA = AUTH_PROVIDER_SCHEMA.extend(
{vol.Required(CONF_API_PASSWORD): cv.string}, extra=vol.PREVENT_EXTRA
)
LEGACY_USER_NAME = "Legacy API password user"
# Raised by LegacyApiPasswordAuthProvider.async_validate_login when the
# supplied password does not match the configured api_password.
class InvalidAuthError(HomeAssistantError):
    """Raised when submitting invalid authentication."""
async def async_validate_password(hass: HomeAssistant, password: str) -> Optional[User]:
    """Return a user if password is valid. None if not.

    Looks up the configured legacy_api_password auth provider and delegates
    the actual password comparison to it.  On first successful login the
    shared legacy user is created; later logins return that same user.

    Raises ValueError when no legacy_api_password provider is configured.
    """
    auth = cast(AuthManager, hass.auth)  # type: ignore
    providers = auth.get_auth_providers(AUTH_PROVIDER_TYPE)
    if not providers:
        raise ValueError("Legacy API password provider not found")
    try:
        # Only a single legacy_api_password provider can be configured,
        # so the first (and only) entry is the one to use.
        provider = cast(LegacyApiPasswordAuthProvider, providers[0])
        provider.async_validate_login(password)
        return await auth.async_get_or_create_user(
            await provider.async_get_or_create_credentials({})
        )
    except InvalidAuthError:
        # Wrong password: signal failure with None rather than an exception.
        return None
@AUTH_PROVIDERS.register(AUTH_PROVIDER_TYPE)
class LegacyApiPasswordAuthProvider(AuthProvider):
    """An auth provider support legacy api_password."""

    DEFAULT_TITLE = "Legacy API Password"

    @property
    def api_password(self) -> str:
        """Return api_password."""
        return str(self.config[CONF_API_PASSWORD])

    async def async_login_flow(self, context: Optional[Dict]) -> LoginFlow:
        """Return a flow to login."""
        return LegacyLoginFlow(self)

    @callback
    def async_validate_login(self, password: str) -> None:
        """Validate password.

        Raises InvalidAuthError when the supplied password does not match
        the configured api_password.
        """
        api_password = str(self.config[CONF_API_PASSWORD])
        # hmac.compare_digest performs a constant-time comparison, which
        # avoids leaking password length/content through timing.
        if not hmac.compare_digest(
            api_password.encode("utf-8"), password.encode("utf-8")
        ):
            raise InvalidAuthError

    async def async_get_or_create_credentials(
        self, flow_result: Dict[str, str]
    ) -> Credentials:
        """Return credentials for this login.

        All legacy-API-password logins share a single credentials object,
        so an existing one is reused when present.
        """
        credentials = await self.async_credentials()
        if credentials:
            return credentials[0]
        return self.async_create_credentials({})

    async def async_user_meta_for_credentials(
        self, credentials: Credentials
    ) -> UserMeta:
        """
        Return info for the user.

        Will be used to populate info when creating a new user.
        """
        return UserMeta(name=LEGACY_USER_NAME, is_active=True)
class LegacyLoginFlow(LoginFlow):
    """Handler for the login flow."""

    async def async_step_init(
        self, user_input: Optional[Dict[str, str]] = None
    ) -> Dict[str, Any]:
        """Handle the step of the form.

        On the first call (user_input is None) only the password form is
        rendered.  On submission the password is validated; success finishes
        the flow, failure re-renders the form with an ``invalid_auth`` error.
        """
        errors = {}
        if user_input is not None:
            try:
                cast(
                    LegacyApiPasswordAuthProvider, self._auth_provider
                ).async_validate_login(user_input["password"])
            except InvalidAuthError:
                errors["base"] = "invalid_auth"
            if not errors:
                return await self.async_finish({})
        return self.async_show_form(
            step_id="init", data_schema=vol.Schema({"password": str}), errors=errors
        )
| apache-2.0 |
kwlzn/pants | src/python/pants/bin/exiter.py | 13 | 4940 | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import datetime
import logging
import os
import sys
import traceback
from pants.util.dirutil import safe_open
logger = logging.getLogger(__name__)
class Exiter(object):
    """A class that provides standard runtime exit and global exception handling behavior.

    The expected method call order of this class is as follows:

    1) Call Exiter.set_except_hook() to set sys.excepthook to the internal exception hook. This
       should happen as early as possible to ensure any/all exceptions are handled by the hook.
    2) Call Exiter.apply_options() to set traceback printing behavior via an Options object.
    3) Perform other operations as normal.
    4) Call Exiter.exit(), Exiter.exit_and_fail() or exiter_inst() when you wish to exit the runtime.
    """

    def __init__(self, exiter=sys.exit, formatter=traceback.format_tb, print_backtraces=True):
        """
        :param func exiter: A function to be called to conduct the final exit of the runtime. (Optional)
        :param func formatter: A function to be called to format any encountered tracebacks. (Optional)
        :param bool print_backtraces: Whether or not to print backtraces by default. Can be
                                      overridden by Exiter.apply_options(). (Optional)
        """
        # Since we have some exit paths that run via the sys.excepthook,
        # symbols we use can become garbage collected before we use them; ie:
        # we can find `sys` and `traceback` are `None`.  As a result we capture
        # all symbols we need here to ensure we function in excepthook context.
        # See: http://stackoverflow.com/questions/2572172/referencing-other-modules-in-atexit
        self._exit = exiter
        self._format_tb = formatter
        self._should_print_backtrace = print_backtraces
        self._workdir = None

    def __call__(self, *args, **kwargs):
        """Map class calls to self.exit() to support sys.exit() fungibility."""
        return self.exit(*args, **kwargs)

    def apply_options(self, options):
        """Applies global configuration options to internal behavior.

        :param Options options: An instance of an Options object to fetch global options from.
        """
        self._should_print_backtrace = options.for_global_scope().print_exception_stacktrace
        self._workdir = options.for_global_scope().pants_workdir

    def exit(self, result=0, msg=None, out=None):
        """Exits the runtime.

        :param result: The exit status. Typically a 0 indicating success or a 1 indicating failure, but
                       can be a string as well. (Optional)
        :param msg: A string message to print to stderr or another custom file descriptor before
                    exiting. (Optional)
        :param out: The file descriptor to emit `msg` to. (Optional)
        """
        if msg:
            print(msg, file=out or sys.stderr)
        self._exit(result)

    def exit_and_fail(self, msg=None):
        """Exits the runtime with an exit code of 1, indicating failure.

        :param str msg: A string message to print to stderr before exiting. (Optional)
        """
        self.exit(result=1, msg=msg)

    def handle_unhandled_exception(self, exc_class=None, exc=None, tb=None, add_newline=False):
        """Default sys.excepthook implementation for unhandled exceptions.

        Any of exc_class/exc/tb not supplied explicitly is taken from the
        exception currently being handled via sys.exc_info().

        NOTE: the previous implementation read the sys.exc_type / sys.exc_value /
        sys.exc_traceback globals, which are deprecated in Python 2 and removed
        in Python 3; sys.exc_info() is the supported spelling in both.
        """
        if exc_class is None or exc is None or tb is None:
            cur_exc_class, cur_exc, cur_tb = sys.exc_info()
            exc_class = exc_class or cur_exc_class
            exc = exc or cur_exc
            tb = tb or cur_tb

        def format_msg(print_backtrace=True):
            msg = 'Exception caught: ({})\n'.format(type(exc))
            msg += '{}\n'.format(''.join(self._format_tb(tb))) if print_backtrace else '\n'
            msg += 'Exception message: {}\n'.format(exc if str(exc) else 'none')
            msg += '\n' if add_newline else ''
            return msg

        # Always output the unhandled exception details into a log file, with the backtrace.
        self._log_exception(format_msg())
        self.exit_and_fail(format_msg(self._should_print_backtrace))

    def _log_exception(self, msg):
        """Best-effort append of the exception details to <workdir>/logs/exceptions.log."""
        if self._workdir:
            try:
                output_path = os.path.join(self._workdir, 'logs', 'exceptions.log')
                with safe_open(output_path, 'a') as exception_log:
                    exception_log.write('timestamp: {}\n'.format(datetime.datetime.now().isoformat()))
                    exception_log.write('args: {}\n'.format(sys.argv))
                    exception_log.write('pid: {}\n'.format(os.getpid()))
                    exception_log.write(msg)
                    exception_log.write('\n')
            except Exception as e:
                # This is all error recovery logic so we catch all exceptions from the logic above
                # because we don't want to hide the original error.
                logger.error('Problem logging original exception: {}'.format(e))

    def set_except_hook(self):
        """Sets the global exception hook."""
        sys.excepthook = self.handle_unhandled_exception
| apache-2.0 |
VPAC/pbs_python | examples/resmom_info.py | 3 | 1057 | #!/usr/bin/env python
#
# Author: Bas van der Vlies <basv@sara.nl>
# Date : 27 Feb 2002
# Desc. : This will query the pbs_mom daemon about its resources
#
# $Id$
#
import pbs
import sys
import time
import socket
def main():
    """Connect to the default PBS server and, for every node it knows, print
    the node attributes held by the server followed by the resource values
    reported live by that node's pbs_mom daemon.

    NOTE: this is Python 2 code (print statements).
    """
    server = pbs.pbs_default()
    c = pbs.pbs_connect(server)

    nodes = pbs.pbs_statnode(c, '', 'NULL', 'NULL')
    for node in nodes:
        print node.name, ' :'
        attrs = node.attribs
        for attr in attrs:
            print '\t%s = %s' %(attr.name, attr.value)

        # Look up the pbs_mom resource-monitor port; fall back to the
        # compiled-in default when it is not listed in /etc/services.
        try:
            mom_port = socket.getservbyname('pbs_resmon', 'tcp')
        except socket.error:
            mom_port = pbs.PBS_MANAGER_SERVICE_PORT

        mom_id = pbs.openrm(node.name, mom_port)

        # Without a keyword list, get_mom_values returns every known resource.
        mom_keys = pbs.get_mom_values(mom_id)
        for key in mom_keys.keys():
            print '\t%s = %s' %(key, mom_keys[key])

        print '\nTesting list with user supplied keywords'
        # 'bas' looks like a deliberately unknown keyword used to exercise
        # the error path -- TODO confirm.
        l = [ 'bas', 'ncpus', 'loadave' ]
        mom_keys = pbs.get_mom_values(mom_id, l)
        for key in mom_keys.keys():
            print '\t%s = %s' %(key, mom_keys[key])
        print ''

        # Release the resource-monitor connection for this node.
        pbs.closerm(mom_id)
main()
| gpl-3.0 |
mavit/ansible | test/units/module_utils/test_text.py | 111 | 2172 | # -*- coding: utf-8 -*-
# (c) 2016 Toshio Kuratomi <tkuratomi@ansible.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import itertools
import pytest
from ansible.module_utils.six import PY3
# Internal API while this is still being developed. Eventually move to
# module_utils.common.text
from ansible.module_utils._text import to_text, to_bytes, to_native
# Format: byte representation, text representation, encoding of byte representation
VALID_STRINGS = (
(b'abcde', u'abcde', 'ascii'),
(b'caf\xc3\xa9', u'caf\xe9', 'utf-8'),
(b'caf\xe9', u'caf\xe9', 'latin-1'),
# u'くらとみ'
(b'\xe3\x81\x8f\xe3\x82\x89\xe3\x81\xa8\xe3\x81\xbf', u'\u304f\u3089\u3068\u307f', 'utf-8'),
(b'\x82\xad\x82\xe7\x82\xc6\x82\xdd', u'\u304f\u3089\u3068\u307f', 'shift-jis'),
)
# Each VALID_STRINGS entry is (bytes, text, codec).  The first chain decodes
# the byte form; the second verifies text input passes through unchanged.
@pytest.mark.parametrize('in_string, encoding, expected',
                         itertools.chain(((d[0], d[2], d[1]) for d in VALID_STRINGS),
                                         ((d[1], d[2], d[1]) for d in VALID_STRINGS)))
def test_to_text(in_string, encoding, expected):
    """test happy path of decoding to text"""
    assert to_text(in_string, encoding) == expected
# Mirror of test_to_text: both byte and text inputs must encode to the
# byte representation under the entry's codec.
@pytest.mark.parametrize('in_string, encoding, expected',
                         itertools.chain(((d[0], d[2], d[0]) for d in VALID_STRINGS),
                                         ((d[1], d[2], d[1]) for d in VALID_STRINGS)))
def test_to_bytes(in_string, encoding, expected):
    """test happy path of encoding to bytes"""
    assert to_bytes(in_string, encoding) == expected
# The native str type is text on Python 3 and bytes on Python 2, hence the
# PY3-conditional expected value.
@pytest.mark.parametrize('in_string, encoding, expected',
                         itertools.chain(((d[0], d[2], d[1] if PY3 else d[0]) for d in VALID_STRINGS),
                                         ((d[1], d[2], d[1] if PY3 else d[0]) for d in VALID_STRINGS)))
def test_to_native(in_string, encoding, expected):
    """test happy path of encoding to native strings"""
    assert to_native(in_string, encoding) == expected
| gpl-3.0 |
rhgong/itk-with-dom | Utilities/Maintenance/BuildHeaderTest.py | 1 | 5017 | #!/usr/bin/env python
#==========================================================================
#
# Copyright Insight Software Consortium
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#==========================================================================*/
usage = """usage: BuildHeaderTest.py <module_name> <module_source_path> <module_binary_path> <maximum_number_of_headers>
This script generates a a source file designed to check the headers in each
module. The generated HeaderTest can be found in the module binary 'test'
directory in a file itk<module_name>HeaderTest#.cxx. This contains a null
main(), but includes all the classes in the module. The primary purpose of this
test is to make sure there are not missing module dependencies. It also tests
for syntax and missing #include's.
"""
# Headers to not test because of dependecy issues, etc.
BANNED_HEADERS = set(('itkExceptionObject.h', # There is a pre-processor check so people use itkMacro.h instead.
'itkMINC2ImageIO.h', # In case minc2.h is not available.
'itkFFTWForwardFFTImageFilter.h',
'itkFFTWInverseFFTImageFilter.h',
'itkFFTWRealToHalfHermitianForwardFFTImageFilter.h',
'itkFFTWHalfHermitianToRealInverseFFTImageFilter.h',
'itkFFTWComplexToComplexImageFilter.h',
'itkFFTWCommon.h',
'itkPyBuffer.h', # needs Python.h, etc
'itkVanHerkGilWermanErodeDilateImageFilter.h', # circular include's
'itkBSplineDeformableTransform.h', # deprecated
'itkBSplineDeformableTransformInitializer.h'))
HEADER = """/*=========================================================================
*
* Copyright Insight Software Consortium
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0.txt
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*=========================================================================*/
// This file has been generated by BuildHeaderTest.py
// To regenerate, build the ITKHeaderTests target.
// This is a test to include each header file for Insight.
"""
TRAILER = """
#include <cstdlib> // needed for EXIT_SUCCESS macro
int main ( int , char* [] )
{
return EXIT_SUCCESS;
}
"""
import glob
import os
import sys
# Bail out with the usage text before main() runs if the five required
# positional arguments are missing.
if len(sys.argv) < 6:
    print(usage)
    sys.exit(1)
def main():
    """Generate one itk<module_name>HeaderTest<N>.cxx source file.

    Reads five positional arguments from sys.argv (see `usage`): the module
    name, its source path, its binary path, the maximum number of headers per
    generated test file, and the 1-based index of the test file to generate.
    The generated file #includes one slice of the module's headers (preferring
    the .hxx where one exists, skipping BANNED_HEADERS) between HEADER and
    TRAILER boilerplate.

    Returns 0 on success, for use as the process exit status.
    """
    module_name = sys.argv[1]
    module_source_path = sys.argv[2]
    module_binary_path = sys.argv[3]
    maximum_number_of_headers = int(sys.argv[4])
    test_num = int(sys.argv[5])

    # Get all the header files.
    include_dir = os.path.join(module_source_path, 'include')
    h_files = glob.glob(os.path.join(include_dir, '*.h'))
    h_files = [os.path.basename(h) for h in h_files]

    # Index of the first header that belongs in this test file.
    added_header_idx = maximum_number_of_headers * (test_num - 1)

    test_source_path = os.path.join(module_binary_path, 'test')
    if not os.path.exists(test_source_path):
        os.makedirs(test_source_path)
    test_source_file = os.path.join(test_source_path,
                                    str(module_name) + 'HeaderTest' + str(test_num) + '.cxx')

    # The final test file may contain fewer than maximum_number_of_headers.
    if added_header_idx + maximum_number_of_headers > len(h_files):
        max_idx = added_header_idx + len(h_files) % maximum_number_of_headers
    else:
        max_idx = added_header_idx + maximum_number_of_headers

    # `with` closes the file even if a write fails (replaces the previous
    # explicit try/finally around the writes).
    with open(test_source_file, 'w') as test_src:
        test_src.write(HEADER)
        for i in range(added_header_idx, max_idx):
            # Use the .hxx if possible.
            hxx_file = h_files[i][:-1] + 'hxx'
            if h_files[i] in BANNED_HEADERS:
                to_include = '// #include "' + h_files[i] + '" // Banned in BuildHeaderTest.py\n'
            elif os.path.exists(os.path.join(module_source_path, 'include',
                                             hxx_file)):
                to_include = '#include "' + hxx_file + '"\n'
            else:
                to_include = '#include "' + h_files[i] + '"\n'
            test_src.write(to_include)
        test_src.write(TRAILER)
    return 0
if __name__ == "__main__":
ret = main()
sys.exit(ret)
| apache-2.0 |
stacywsmith/ansible | lib/ansible/modules/cloud/amazon/ec2_tag.py | 71 | 5973 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: ec2_tag
short_description: create and remove tag(s) to ec2 resources.
description:
- Creates, removes and lists tags from any EC2 resource. The resource is referenced by its resource id (e.g. an instance being i-XXXXXXX).
It is designed to be used with complex args (tags), see the examples. This module has a dependency on python-boto.
version_added: "1.3"
options:
resource:
description:
- The EC2 resource id.
required: true
default: null
aliases: []
state:
description:
- Whether the tags should be present or absent on the resource. Use list to interrogate the tags of an instance.
required: false
default: present
choices: ['present', 'absent', 'list']
aliases: []
tags:
description:
- a hash/dictionary of tags to add to the resource; '{"key":"value"}' and '{"key":"value","key":"value"}'
required: true
default: null
aliases: []
author: "Lester Wade (@lwade)"
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
- name: Ensure tags are present on a resource
ec2_tag:
region: eu-west-1
resource: vol-XXXXXX
state: present
tags:
Name: ubervol
env: prod
- name: Ensure one dbserver is running
ec2:
count_tags:
Name: dbserver
Env: production
exact_count: 1
group: '{{ security_group }}'
keypair: '{{ keypair }}'
image: '{{ image_id }}'
instance_tags:
Name: dbserver
Env: production
instance_type: '{{ instance_type }}'
region: eu-west-1
volumes:
- device_name: /dev/xvdb
device_type: standard
volume_size: 10
delete_on_termination: True
wait: True
register: ec2
- name: Retrieve all volumes for a queried instance
ec2_vol:
instance: '{{ item.id }}'
region: eu-west-1
state: list
with_items: '{{ ec2.tagged_instances }}'
register: ec2_vol
- name: Ensure all volumes are tagged
ec2_tag:
region: eu-west-1
resource: '{{ item.id }}'
state: present
tags:
Name: dbserver
Env: production
with_items:
- ec2_vol.volumes
- name: Get EC2 facts
action: ec2_facts
- name: Retrieve all tags on an instance
ec2_tag:
region: '{{ ansible_ec2_placement_region }}'
resource: '{{ ansible_ec2_instance_id }}'
state: list
register: ec2_tags
- name: List tags, such as Name and env
debug:
msg: '{{ ec2_tags.tags.Name }} {{ ec2_tags.tags.env }}'
'''
# boto is an optional dependency; record availability so main() can emit a
# clean fail_json message instead of an ImportError traceback.
try:
    import boto.ec2
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
def main():
    """Entry point: create, remove, or list EC2 tags on one resource id."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
            resource = dict(required=True),
            tags = dict(type='dict'),
            state = dict(default='present', choices=['present', 'absent', 'list']),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    resource = module.params.get('resource')
    tags = module.params.get('tags')
    state = module.params.get('state')

    ec2 = ec2_connect(module)

    # We need a comparison here so that we can accurately report back changed status.
    # Need to expand the gettags return format and compare with "tags" and then tag or detag as appropriate.
    filters = {'resource-id' : resource}
    gettags = ec2.get_all_tags(filters=filters)

    dictadd = {}     # tags that need to be created (state=present)
    dictremove = {}  # tags that need to be deleted (state=absent)
    baddict = {}     # requested tags that are already absent (state=absent)
    tagdict = {}     # current tags on the resource, flattened to a plain dict
    for tag in gettags:
        tagdict[tag.name] = tag.value

    if state == 'present':
        if not tags:
            module.fail_json(msg="tags argument is required when state is present")
        # Every requested (key, value) already present -> report unchanged.
        if set(tags.items()).issubset(set(tagdict.items())):
            module.exit_json(msg="Tags already exists in %s." %resource, changed=False)
        else:
            for (key, value) in set(tags.items()):
                if (key, value) not in set(tagdict.items()):
                    dictadd[key] = value
        if not module.check_mode:
            ec2.create_tags(resource, dictadd)
        module.exit_json(msg="Tags %s created for resource %s." % (dictadd,resource), changed=True)

    if state == 'absent':
        if not tags:
            module.fail_json(msg="tags argument is required when state is absent")
        for (key, value) in set(tags.items()):
            if (key, value) not in set(tagdict.items()):
                baddict[key] = value
                # If none of the requested tags exist there is nothing to do.
                if set(baddict) == set(tags):
                    module.exit_json(msg="Nothing to remove here. Move along.", changed=False)
        for (key, value) in set(tags.items()):
            if (key, value) in set(tagdict.items()):
                dictremove[key] = value
        if not module.check_mode:
            ec2.delete_tags(resource, dictremove)
        module.exit_json(msg="Tags %s removed for resource %s." % (dictremove,resource), changed=True)

    if state == 'list':
        module.exit_json(changed=False, tags=tagdict)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
| gpl-3.0 |
swails/chemlab | chemlab/db/base.py | 6 | 1192 | class EntryNotFound(Exception):
pass
class AbstractDB(object):
    """Base interface shared by all database backends.

    A concrete database is queried for entries through :meth:`get`::

        water = db.get("molecule", "example.water")

    Backends are free to add their own storage or search helpers as extra
    methods; see the concrete implementations for examples.
    """

    def get(self, feature, key, *args, **kwargs):
        '''Fetch one entry from the database.

        Concrete subclasses must override this method; the base class only
        defines the calling convention.

        **Parameters**

        - feature: str
             Identifier for the kind of data requested, e.g. "system",
             "molecule" or "data".
        - key: str
             Key naming the entry.  Dotted names are conventionally used to
             express nesting.
        - args, kwargs:
             Backend-specific extra arguments.
        '''
        raise NotImplementedError()
| gpl-3.0 |
timy/dm_spec | ana/mpi_spec_1d/plot_orien.py | 1 | 1634 | from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy as np
from itertools import product, combinations
n_esmb = 100000
fig = plt.figure()
ax = fig.gca(projection='3d')
ax.set_aspect("equal")
data = np.loadtxt("res/euler.dat")
#draw cube
# r = [-1, 1]
# for s, e in combinations(np.array(list(product(r, r, r))), 2):
# if np.sum(np.abs(s - e)) == (r[1] - r[0]):
# ax.plot3D(*zip(s, e), color="b")
#draw sphere
u, v = np.mgrid[0:2*np.pi:20j, 0:np.pi:10j]
x = np.cos(u) * np.sin(v)
y = np.sin(u) * np.sin(v)
z = np.cos(v)
ax.plot_wireframe(x, y, z, color="r")
#draw a point
ax.scatter([0],[0],[0],color="g",s=100)
#draw a vector
from matplotlib.patches import FancyArrowPatch
from mpl_toolkits.mplot3d import proj3d
class Arrow3D(FancyArrowPatch):
    """A FancyArrowPatch whose endpoints are given in 3D data coordinates.

    The endpoints are re-projected to 2D on every draw, so the arrow tracks
    interactive rotation of the 3D axes.
    """

    def __init__(self, xs, ys, zs, *args, **kwargs):
        # Start as a dummy 2D arrow; real positions are computed in draw().
        FancyArrowPatch.__init__(self, (0,0), (0,0), *args, **kwargs)
        self._verts3d = xs, ys, zs

    def draw(self, renderer):
        xs3d, ys3d, zs3d = self._verts3d
        # Project the stored 3D endpoints through the current view matrix.
        xs, ys, zs = proj3d.proj_transform(xs3d, ys3d, zs3d, renderer.M)
        self.set_positions((xs[0],ys[0]),(xs[1],ys[1]))
        FancyArrowPatch.draw(self, renderer)
# Columns of res/euler.dat appear to be Euler angles (phi, theta, psi);
# phi is read but unused below -- TODO confirm column meaning.
phi = data[:,0]
theta = data[:,1]
psi = data[:,2]

# Convert (theta, psi) to a unit vector on the sphere for each ensemble member.
x = np.sin(theta) * np.sin(psi);
y = np.sin(theta) * np.cos(psi);
z = np.cos(theta);

# Draw one arrow from the origin per ensemble member, colored by index.
for i_esmb in range(n_esmb):
    a = Arrow3D( [0, x[i_esmb]], [0, y[i_esmb]], [0, z[i_esmb]],
                 mutation_scale=20, lw=1, arrowstyle="-|>",
                 color=plt.cm.RdYlBu(i_esmb) )
    ax.add_artist(a)

ax.set_xlabel('x')
ax.set_ylabel('y')
ax.set_zlabel('z')

plt.show()
| mit |
timm/timmnix | pypy3-v5.5.0-linux64/lib-python/3/xml/sax/_exceptions.py | 359 | 4785 | """Different kinds of SAX Exceptions"""
import sys
if sys.platform[:4] == "java":
from java.lang import Exception
del sys
# ===== SAXEXCEPTION =====
class SAXException(Exception):
    """Encapsulate an XML error or warning.

    Carries basic error or warning information from either the XML parser
    or the application.  Subclass it to add functionality or localization.
    Note that although handlers in the ErrorHandler interface receive a
    SAXException, they are not required to raise it; they may simply read
    the information it carries.
    """

    def __init__(self, msg, exception=None):
        """Create the exception; *msg* is required, *exception* is an
        optional embedded (wrapped) exception."""
        Exception.__init__(self, msg)
        self._msg = msg
        self._exception = exception

    def getMessage(self):
        "Return a message for this exception."
        return self._msg

    def getException(self):
        "Return the embedded exception, or None if there was none."
        return self._exception

    def __str__(self):
        "Create a string representation of the exception."
        return self._msg

    def __getitem__(self, ix):
        """Refuse subscripting so a mistaken exception[ix] raises a clear
        AttributeError instead of silently using Exception.__getitem__."""
        raise AttributeError("__getitem__")
# ===== SAXPARSEEXCEPTION =====
class SAXParseException(SAXException):
    """Encapsulate an XML parse error or warning.

    Adds to SAXException the information needed to locate the error in the
    original XML document.  As with the base class, ErrorHandler methods
    receive this exception but are free to just inspect it rather than
    raise it.
    """

    def __init__(self, msg, exception, locator):
        "Creates the exception. The exception parameter is allowed to be None."
        SAXException.__init__(self, msg, exception)
        self._locator = locator

        # Snapshot the location eagerly: by the time this exception is
        # caught, the parser objects behind the locator may be gone.
        self._systemId = locator.getSystemId()
        self._colnum = locator.getColumnNumber()
        self._linenum = locator.getLineNumber()

    def getColumnNumber(self):
        """The column number of the end of the text where the exception
        occurred."""
        return self._colnum

    def getLineNumber(self):
        "The line number of the end of the text where the exception occurred."
        return self._linenum

    def getPublicId(self):
        "Get the public identifier of the entity where the exception occurred."
        return self._locator.getPublicId()

    def getSystemId(self):
        "Get the system identifier of the entity where the exception occurred."
        return self._systemId

    def __str__(self):
        "Create a string representation of the exception."
        sysid = self.getSystemId()
        linenum = self.getLineNumber()
        colnum = self.getColumnNumber()
        return "%s:%s:%s: %s" % (
            "<unknown>" if sysid is None else sysid,
            "?" if linenum is None else linenum,
            "?" if colnum is None else colnum,
            self._msg)
# ===== SAXNOTRECOGNIZEDEXCEPTION =====
class SAXNotRecognizedException(SAXException):
    """Exception class for an unrecognized identifier.

    An XMLReader will raise this exception when it is confronted with an
    unrecognized feature or property. SAX applications and extensions may
    use this class for similar purposes."""

# ===== SAXNOTSUPPORTEDEXCEPTION =====
class SAXNotSupportedException(SAXException):
    """Exception class for an unsupported operation.

    An XMLReader will raise this exception when a service it cannot
    perform is requested (specifically setting a state or value). SAX
    applications and extensions may use this class for similar
    purposes."""

# ===== SAXREADERNOTAVAILABLE =====
class SAXReaderNotAvailable(SAXNotSupportedException):
    """Exception class for a missing driver.

    An XMLReader module (driver) should raise this exception when it
    is first imported, e.g. when a support module cannot be imported.
    It also may be raised during parsing, e.g. if executing an external
    program is not permitted."""
| mit |
CodeMath/jinrockets | BluePrint/lib/flask/sessions.py | 348 | 12882 | # -*- coding: utf-8 -*-
"""
flask.sessions
~~~~~~~~~~~~~~
Implements cookie based sessions based on itsdangerous.
:copyright: (c) 2012 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import uuid
import hashlib
from datetime import datetime
from werkzeug.http import http_date, parse_date
from werkzeug.datastructures import CallbackDict
from . import Markup, json
from ._compat import iteritems, text_type
from itsdangerous import URLSafeTimedSerializer, BadSignature
def total_seconds(td):
    """Return the length of timedelta *td* in whole seconds (microseconds
    are ignored)."""
    return td.seconds + td.days * 24 * 3600
class SessionMixin(object):
    """Mixin adding the accessors that Flask extensions and users expect a
    session object to provide, on top of a basic mapping interface.
    """

    @property
    def permanent(self):
        """Reflects the ``'_permanent'`` key in the dict."""
        return self.get('_permanent', False)

    @permanent.setter
    def permanent(self, value):
        self['_permanent'] = bool(value)

    #: some session backends can tell you if a session is new, but that is
    #: not necessarily guaranteed.  Use with caution.  The default mixin
    #: implementation just hardcodes `False` in.
    new = False

    #: for some backends this will always be `True`, but some backends will
    #: default this to false and detect changes in the dictionary for as
    #: long as changes do not happen on mutable structures in the session.
    #: The default mixin implementation just hardcodes `True` in.
    modified = True
class TaggedJSONSerializer(object):
    """A customized JSON serializer that supports a few extra types that
    we take for granted when serializing (tuples, markup objects, datetime).

    Extra types are encoded as single-key dicts whose key carries a
    leading-space tag (' t', ' u', ' m', ' d') that plain data cannot
    normally collide with.
    """

    def dumps(self, value):
        """Serialize *value* to a compact JSON string, tagging tuples,
        UUIDs, __html__ objects and datetimes."""
        def _tag(value):
            if isinstance(value, tuple):
                return {' t': [_tag(x) for x in value]}
            elif isinstance(value, uuid.UUID):
                return {' u': value.hex}
            elif callable(getattr(value, '__html__', None)):
                return {' m': text_type(value.__html__())}
            elif isinstance(value, list):
                return [_tag(x) for x in value]
            elif isinstance(value, datetime):
                return {' d': http_date(value)}
            elif isinstance(value, dict):
                return dict((k, _tag(v)) for k, v in iteritems(value))
            elif isinstance(value, str):
                try:
                    return text_type(value)
                except UnicodeError:
                    # NOTE(review): UnexpectedUnicodeError is not among this
                    # module's visible imports, so raising it here would
                    # itself fail with a NameError -- confirm where it is
                    # defined/imported.
                    raise UnexpectedUnicodeError(u'A byte string with '
                        u'non-ASCII data was passed to the session system '
                        u'which can only store unicode strings. Consider '
                        u'base64 encoding your string (String was %r)' % value)
            return value
        return json.dumps(_tag(value), separators=(',', ':'))

    def loads(self, value):
        """Deserialize a string produced by dumps(), converting the tagged
        single-key objects back to tuples, UUIDs, Markup and datetimes."""
        def object_hook(obj):
            # Only single-key dicts can be tag wrappers.
            if len(obj) != 1:
                return obj
            the_key, the_value = next(iteritems(obj))
            if the_key == ' t':
                return tuple(the_value)
            elif the_key == ' u':
                return uuid.UUID(the_value)
            elif the_key == ' m':
                return Markup(the_value)
            elif the_key == ' d':
                return parse_date(the_value)
            return obj
        return json.loads(value, object_hook=object_hook)
session_json_serializer = TaggedJSONSerializer()
class SecureCookieSession(CallbackDict, SessionMixin):
    """Baseclass for sessions based on signed cookies."""

    def __init__(self, initial=None):
        # CallbackDict invokes on_update on every mutation; flipping
        # `modified` lets save_session know the cookie must be re-emitted.
        def on_update(self):
            self.modified = True
        CallbackDict.__init__(self, initial, on_update)
        self.modified = False
class NullSession(SecureCookieSession):
    """Class used to generate nicer error messages if sessions are not
    available.  Will still allow read-only access to the empty session
    but fail on setting.
    """

    def _fail(self, *args, **kwargs):
        raise RuntimeError('the session is unavailable because no secret '
                           'key was set. Set the secret_key on the '
                           'application to something unique and secret.')

    # Route every mutating dict method to _fail so reads keep working while
    # any write raises the helpful RuntimeError above.
    __setitem__ = __delitem__ = clear = pop = popitem = \
        update = setdefault = _fail
    del _fail
class SessionInterface(object):
    """The basic interface you have to implement in order to replace the
    default session interface which uses werkzeug's securecookie
    implementation. The only methods you have to implement are
    :meth:`open_session` and :meth:`save_session`, the others have
    useful defaults which you don't need to change.
    The session object returned by the :meth:`open_session` method has to
    provide a dictionary like interface plus the properties and methods
    from the :class:`SessionMixin`. We recommend just subclassing a dict
    and adding that mixin::
        class Session(dict, SessionMixin):
            pass
    If :meth:`open_session` returns `None` Flask will call into
    :meth:`make_null_session` to create a session that acts as replacement
    if the session support cannot work because some requirement is not
    fulfilled. The default :class:`NullSession` class that is created
    will complain that the secret key was not set.
    To replace the session interface on an application all you have to do
    is to assign :attr:`flask.Flask.session_interface`::
        app = Flask(__name__)
        app.session_interface = MySessionInterface()
    .. versionadded:: 0.8
    """
    #: :meth:`make_null_session` will look here for the class that should
    #: be created when a null session is requested. Likewise the
    #: :meth:`is_null_session` method will perform a typecheck against
    #: this type.
    null_session_class = NullSession
    #: A flag that indicates if the session interface is pickle based.
    #: This can be used by flask extensions to make a decision in regards
    #: to how to deal with the session object.
    #:
    #: .. versionadded:: 0.10
    pickle_based = False
    def make_null_session(self, app):
        """Creates a null session which acts as a replacement object if the
        real session support could not be loaded due to a configuration
        error. This mainly aids the user experience because the job of the
        null session is to still support lookup without complaining but
        modifications are answered with a helpful error message of what
        failed.
        This creates an instance of :attr:`null_session_class` by default.
        """
        return self.null_session_class()
    def is_null_session(self, obj):
        """Checks if a given object is a null session. Null sessions are
        not asked to be saved.
        This checks if the object is an instance of :attr:`null_session_class`
        by default.
        """
        return isinstance(obj, self.null_session_class)
    def get_cookie_domain(self, app):
        """Helpful helper method that returns the cookie domain that should
        be used for the session cookie if session cookies are used.
        """
        # An explicitly configured cookie domain always wins.
        if app.config['SESSION_COOKIE_DOMAIN'] is not None:
            return app.config['SESSION_COOKIE_DOMAIN']
        if app.config['SERVER_NAME'] is not None:
            # chop of the port which is usually not supported by browsers
            rv = '.' + app.config['SERVER_NAME'].rsplit(':', 1)[0]
            # Google chrome does not like cookies set to .localhost, so
            # we just go with no domain then. Flask documents anyways that
            # cross domain cookies need a fully qualified domain name
            if rv == '.localhost':
                rv = None
            # If we infer the cookie domain from the server name we need
            # to check if we are in a subpath. In that case we can't
            # set a cross domain cookie.
            if rv is not None:
                path = self.get_cookie_path(app)
                if path != '/':
                    rv = rv.lstrip('.')
            return rv
        # Neither SESSION_COOKIE_DOMAIN nor SERVER_NAME is set:
        # implicitly returns None (browser-default domain).
    def get_cookie_path(self, app):
        """Returns the path for which the cookie should be valid. The
        default implementation uses the value from the ``SESSION_COOKIE_PATH``
        config var if it's set, and falls back to ``APPLICATION_ROOT`` or
        uses ``/`` if it's `None`.
        """
        return app.config['SESSION_COOKIE_PATH'] or \
            app.config['APPLICATION_ROOT'] or '/'
    def get_cookie_httponly(self, app):
        """Returns True if the session cookie should be httponly. This
        currently just returns the value of the ``SESSION_COOKIE_HTTPONLY``
        config var.
        """
        return app.config['SESSION_COOKIE_HTTPONLY']
    def get_cookie_secure(self, app):
        """Returns True if the cookie should be secure. This currently
        just returns the value of the ``SESSION_COOKIE_SECURE`` setting.
        """
        return app.config['SESSION_COOKIE_SECURE']
    def get_expiration_time(self, app, session):
        """A helper method that returns an expiration date for the session
        or `None` if the session is linked to the browser session. The
        default implementation returns now + the permanent session
        lifetime configured on the application.
        """
        if session.permanent:
            return datetime.utcnow() + app.permanent_session_lifetime
    def open_session(self, app, request):
        """This method has to be implemented and must either return `None`
        in case the loading failed because of a configuration error or an
        instance of a session object which implements a dictionary like
        interface + the methods and attributes on :class:`SessionMixin`.
        """
        raise NotImplementedError()
    def save_session(self, app, session, response):
        """This is called for actual sessions returned by :meth:`open_session`
        at the end of the request. This is still called during a request
        context so if you absolutely need access to the request you can do
        that.
        """
        raise NotImplementedError()
class SecureCookieSessionInterface(SessionInterface):
    """The default session interface that stores sessions in signed cookies
    through the :mod:`itsdangerous` module.
    """
    #: the salt that should be applied on top of the secret key for the
    #: signing of cookie based sessions.
    salt = 'cookie-session'
    #: the hash function to use for the signature. The default is sha1
    digest_method = staticmethod(hashlib.sha1)
    #: the name of the itsdangerous supported key derivation. The default
    #: is hmac.
    key_derivation = 'hmac'
    #: A python serializer for the payload. The default is a compact
    #: JSON derived serializer with support for some extra Python types
    #: such as datetime objects or tuples.
    serializer = session_json_serializer
    session_class = SecureCookieSession
    def get_signing_serializer(self, app):
        """Return a signing serializer, or `None` if no secret key is set."""
        if not app.secret_key:
            return None
        signer_kwargs = dict(
            key_derivation=self.key_derivation,
            digest_method=self.digest_method
        )
        return URLSafeTimedSerializer(app.secret_key, salt=self.salt,
                                      serializer=self.serializer,
                                      signer_kwargs=signer_kwargs)
    def open_session(self, app, request):
        s = self.get_signing_serializer(app)
        if s is None:
            # No secret key: returning None makes Flask fall back to
            # make_null_session().
            return None
        val = request.cookies.get(app.session_cookie_name)
        if not val:
            return self.session_class()
        max_age = total_seconds(app.permanent_session_lifetime)
        try:
            data = s.loads(val, max_age=max_age)
            return self.session_class(data)
        except BadSignature:
            # Tampered or expired cookie -> start with a fresh session.
            return self.session_class()
    def save_session(self, app, session, response):
        domain = self.get_cookie_domain(app)
        path = self.get_cookie_path(app)
        if not session:
            # Empty session: delete the cookie only if the session was
            # modified (i.e. it previously had content).
            if session.modified:
                response.delete_cookie(app.session_cookie_name,
                                       domain=domain, path=path)
            return
        httponly = self.get_cookie_httponly(app)
        secure = self.get_cookie_secure(app)
        expires = self.get_expiration_time(app, session)
        val = self.get_signing_serializer(app).dumps(dict(session))
        response.set_cookie(app.session_cookie_name, val,
                            expires=expires, httponly=httponly,
                            domain=domain, path=path, secure=secure)
from flask.debughelpers import UnexpectedUnicodeError
| mit |
gtmanfred/livestreamer | examples/gst-player.py | 22 | 3897 | #!/usr/bin/env python
from __future__ import print_function
import sys
import gi
from gi.repository import GObject as gobject, Gst as gst
from livestreamer import Livestreamer, StreamError, PluginError, NoPluginError
def exit(msg):
    """Write *msg* to stderr and terminate the process."""
    sys.stderr.write('{0}\n'.format(msg))
    sys.exit()
class LivestreamerPlayer(object):
    """Feeds a livestreamer stream into a GStreamer ``playbin`` pipeline
    through an ``appsrc`` element and plays it until end-of-stream.
    """
    def __init__(self):
        # Stream file object; set by play(), closed by stop().
        self.fd = None
        self.mainloop = gobject.MainLoop()
        # This creates a playbin pipeline and using the appsrc source
        # we can feed it our stream data
        self.pipeline = gst.ElementFactory.make("playbin", None)
        self.pipeline.set_property("uri", "appsrc://")
        # When the playbin creates the appsrc source it will call
        # this callback and allow us to configure it
        self.pipeline.connect("source-setup", self.on_source_setup)
        # Creates a bus and set callbacks to receive errors
        self.bus = self.pipeline.get_bus()
        self.bus.add_signal_watch()
        self.bus.connect("message::eos", self.on_eos)
        self.bus.connect("message::error", self.on_error)
    def exit(self, msg):
        """Stop playback and terminate the process with *msg*."""
        self.stop()
        exit(msg)
    def stop(self):
        """Halt the pipeline, quit the main loop and close the stream."""
        # Stop playback and exit mainloop
        self.pipeline.set_state(gst.State.NULL)
        self.mainloop.quit()
        # Close the stream
        if self.fd:
            self.fd.close()
    def play(self, stream):
        """Open *stream* and block in the main loop until playback ends."""
        # Attempt to open the stream
        try:
            self.fd = stream.open()
        except StreamError as err:
            self.exit("Failed to open stream: {0}".format(err))
        # Start playback
        self.pipeline.set_state(gst.State.PLAYING)
        self.mainloop.run()
    def on_source_setup(self, element, source):
        """Configure the appsrc element once playbin has created it."""
        # When this callback is called the appsrc expects
        # us to feed it more data
        source.connect("need-data", self.on_source_need_data)
    def on_source_need_data(self, source, length):
        """appsrc callback: push up to *length* bytes from the stream."""
        # Attempt to read data from the stream
        try:
            data = self.fd.read(length)
        except IOError as err:
            self.exit("Failed to read data from stream: {0}".format(err))
        # If data is empty it's the end of stream
        if not data:
            source.emit("end-of-stream")
            return
        # Convert the Python bytes into a GStreamer Buffer
        # and then push it to the appsrc
        buf = gst.Buffer.new_wrapped(data)
        source.emit("push-buffer", buf)
    def on_eos(self, bus, msg):
        """Bus callback: stop playback on end of stream."""
        self.stop()
    def on_error(self, bus, msg):
        """Bus callback: print the error message and exit."""
        error = msg.parse_error()[1]
        self.exit(error)
def main():
    """Entry point: parse argv, resolve the requested stream and play it."""
    if len(sys.argv) < 3:
        exit("Usage: {0} <url> <quality>".format(sys.argv[0]))
    # Initialize and check GStreamer version
    # NOTE(review): gi.require_version() normally must run *before*
    # ``from gi.repository import Gst`` (which happens at module import
    # time above) -- confirm this ordering is intentional.
    gi.require_version("Gst", "1.0")
    gobject.threads_init()
    gst.init(None)
    # Collect arguments
    url = sys.argv[1]
    quality = sys.argv[2]
    # Create the Livestreamer session
    livestreamer = Livestreamer()
    # Enable logging
    livestreamer.set_loglevel("info")
    livestreamer.set_logoutput(sys.stdout)
    # Attempt to fetch streams
    try:
        streams = livestreamer.streams(url)
    except NoPluginError:
        exit("Livestreamer is unable to handle the URL '{0}'".format(url))
    except PluginError as err:
        exit("Plugin error: {0}".format(err))
    if not streams:
        exit("No streams found on URL '{0}'".format(url))
    # Look for specified stream
    if quality not in streams:
        exit("Unable to find '{0}' stream on URL '{1}'".format(quality, url))
    # We found the stream
    stream = streams[quality]
    # Create the player and start playback
    player = LivestreamerPlayer()
    # Blocks until playback is done
    player.play(stream)
# Script entry point.
if __name__ == "__main__":
    main()
| bsd-2-clause |
heeraj123/oh-mainline | vendor/packages/kombu/kombu/transport/redis.py | 15 | 33278 | """
kombu.transport.redis
=====================
Redis transport.
"""
from __future__ import absolute_import
import numbers
import socket
from bisect import bisect
from collections import namedtuple
from contextlib import contextmanager
from time import time
from amqp import promise
from anyjson import loads, dumps
from kombu.exceptions import InconsistencyError, VersionMismatch
from kombu.five import Empty, values, string_t
from kombu.log import get_logger
from kombu.utils import cached_property, uuid
from kombu.utils.eventio import poll, READ, ERR
from kombu.utils.encoding import bytes_to_str
from kombu.utils.url import _parse_url
NO_ROUTE_ERROR = """
Cannot route message for exchange {0!r}: Table empty or key no longer exists.
Probably the key ({1!r}) has been removed from the Redis database.
"""
try:
from billiard.util import register_after_fork
except ImportError: # pragma: no cover
try:
from multiprocessing.util import register_after_fork # noqa
except ImportError:
def register_after_fork(*args, **kwargs): # noqa
pass
try:
import redis
except ImportError: # pragma: no cover
redis = None # noqa
from . import virtual
logger = get_logger('kombu.transport.redis')
crit, warn = logger.critical, logger.warn
DEFAULT_PORT = 6379
DEFAULT_DB = 0
PRIORITY_STEPS = [0, 3, 6, 9]
error_classes_t = namedtuple('error_classes_t', (
'connection_errors', 'channel_errors',
))
# This implementation may seem overly complex, but I assure you there is
# a good reason for doing it this way.
#
# Consuming from several connections enables us to emulate channels,
# which means we can have different service guarantees for individual
# channels.
#
# So we need to consume messages from multiple connections simultaneously,
# and using epoll means we don't have to do so using multiple threads.
#
# Also it means we can easily use PUBLISH/SUBSCRIBE to do fanout
# exchanges (broadcast), as an alternative to pushing messages to fanout-bound
# queues manually.
def get_redis_error_classes():
    """Return an ``error_classes_t`` tuple of (connection errors,
    channel errors), combining kombu's virtual-transport error classes
    with the exceptions raised by redis-py.
    """
    from redis import exceptions
    # This exception suddenly changed name between redis-py versions
    if hasattr(exceptions, 'InvalidData'):
        DataError = exceptions.InvalidData
    else:
        DataError = exceptions.DataError
    return error_classes_t(
        (virtual.Transport.connection_errors + (
            InconsistencyError,
            socket.error,
            IOError,
            OSError,
            exceptions.ConnectionError,
            exceptions.AuthenticationError)),
        (virtual.Transport.channel_errors + (
            DataError,
            exceptions.InvalidResponse,
            exceptions.ResponseError)),
    )
class MutexHeld(Exception):
    """Raised by :func:`Mutex` when another client already holds the lock."""
    pass
@contextmanager
def Mutex(client, name, expire):
    """Best-effort distributed mutex over a Redis key.

    Acquires the key *name* with ``SETNX`` and an expiry of *expire*
    seconds; raises :exc:`MutexHeld` if another client owns it.
    NOTE(review): SETNX followed by EXPIRE is not atomic -- a client
    dying between the two calls leaves a key with no TTL (partially
    mitigated by the ttl() repair below) -- confirm this is acceptable.
    """
    lock_id = uuid()
    i_won = client.setnx(name, lock_id)
    try:
        if i_won:
            client.expire(name, expire)
            yield
        else:
            # Repair a lock that was acquired but never given a TTL.
            if not client.ttl(name):
                client.expire(name, expire)
            raise MutexHeld()
    finally:
        if i_won:
            # Release only if we still own the lock: WATCH/MULTI guards
            # against deleting a key that expired and was re-acquired
            # by someone else in the meantime.
            pipe = client.pipeline(True)
            try:
                pipe.watch(name)
                if pipe.get(name) == lock_id:
                    pipe.multi()
                    pipe.delete(name)
                    pipe.execute()
                pipe.unwatch()
            except redis.WatchError:
                pass
class QoS(virtual.QoS):
    """Redis-aware QoS that emulates acknowledgements by mirroring
    unacked messages into a hash (``unacked_key``) plus a time-sorted
    index (``unacked_index_key``).
    """
    restore_at_shutdown = True
    def __init__(self, *args, **kwargs):
        super(QoS, self).__init__(*args, **kwargs)
        # Counts restore_visible() calls so real work only happens
        # every ``interval`` invocations.
        self._vrestore_count = 0
    def append(self, message, delivery_tag):
        """Record *message* as delivered-but-unacked in Redis."""
        delivery = message.delivery_info
        EX, RK = delivery['exchange'], delivery['routing_key']
        with self.pipe_or_acquire() as pipe:
            pipe.zadd(self.unacked_index_key, delivery_tag, time()) \
                .hset(self.unacked_key, delivery_tag,
                      dumps([message._raw, EX, RK])) \
                .execute()
        super(QoS, self).append(message, delivery_tag)
    def restore_unacked(self):
        """Requeue every delivered-but-unacked message."""
        for tag in self._delivered:
            self.restore_by_tag(tag)
        self._delivered.clear()
    def ack(self, delivery_tag):
        self._remove_from_indices(delivery_tag).execute()
        super(QoS, self).ack(delivery_tag)
    def reject(self, delivery_tag, requeue=False):
        if requeue:
            self.restore_by_tag(delivery_tag, leftmost=True)
        self.ack(delivery_tag)
    @contextmanager
    def pipe_or_acquire(self, pipe=None):
        # Reuse the given pipeline, or acquire a connection and build one.
        if pipe:
            yield pipe
        else:
            with self.channel.conn_or_acquire() as client:
                yield client.pipeline()
    def _remove_from_indices(self, delivery_tag, pipe=None):
        # Returns the (unexecuted) pipeline so callers may chain commands.
        with self.pipe_or_acquire(pipe) as pipe:
            return pipe.zrem(self.unacked_index_key, delivery_tag) \
                .hdel(self.unacked_key, delivery_tag)
    def restore_visible(self, start=0, num=10, interval=10):
        """Restore messages whose visibility timeout has passed.

        Only does actual work every *interval* calls, and only while
        holding the shared unacked mutex.
        """
        self._vrestore_count += 1
        if (self._vrestore_count - 1) % interval:
            return
        with self.channel.conn_or_acquire() as client:
            ceil = time() - self.visibility_timeout
            try:
                with Mutex(client, self.unacked_mutex_key,
                           self.unacked_mutex_expire):
                    visible = client.zrevrangebyscore(
                        self.unacked_index_key, ceil, 0,
                        start=num and start, num=num, withscores=True)
                    for tag, score in visible or []:
                        self.restore_by_tag(tag, client)
            except MutexHeld:
                # Another client is already restoring; nothing to do.
                pass
    def restore_by_tag(self, tag, client=None, leftmost=False):
        with self.channel.conn_or_acquire(client) as client:
            p, _, _ = self._remove_from_indices(
                tag, client.pipeline().hget(self.unacked_key, tag)).execute()
            if p:
                M, EX, RK = loads(bytes_to_str(p))  # json is unicode
                self.channel._do_restore_message(M, EX, RK, client, leftmost)
    @cached_property
    def unacked_key(self):
        return self.channel.unacked_key
    @cached_property
    def unacked_index_key(self):
        return self.channel.unacked_index_key
    @cached_property
    def unacked_mutex_key(self):
        return self.channel.unacked_mutex_key
    @cached_property
    def unacked_mutex_expire(self):
        return self.channel.unacked_mutex_expire
    @cached_property
    def visibility_timeout(self):
        return self.channel.visibility_timeout
class MultiChannelPoller(object):
    """Polls the sockets of several channels with a single poller,
    multiplexing BRPOP (queue) and LISTEN (pub/sub fanout) connections.
    """
    eventflags = READ | ERR
    #: Set by :meth:`get` while reading from the socket.
    _in_protected_read = False
    #: Set of one-shot callbacks to call after reading from socket.
    after_read = None
    def __init__(self):
        # active channels
        self._channels = set()
        # file descriptor -> channel map.
        self._fd_to_chan = {}
        # channel -> socket map
        self._chan_to_sock = {}
        # poll implementation (epoll/kqueue/select)
        self.poller = poll()
        # one-shot callbacks called after reading from socket.
        self.after_read = set()
    def close(self):
        """Unregister all sockets and clear the bookkeeping maps."""
        for fd in values(self._chan_to_sock):
            try:
                self.poller.unregister(fd)
            except (KeyError, ValueError):
                pass
        self._channels.clear()
        self._fd_to_chan.clear()
        self._chan_to_sock.clear()
    def add(self, channel):
        self._channels.add(channel)
    def discard(self, channel):
        self._channels.discard(channel)
    def _on_connection_disconnect(self, connection):
        sock = getattr(connection, '_sock', None)
        if sock is not None:
            self.poller.unregister(sock)
    def _register(self, channel, client, type):
        # (Re-)register the socket of (channel, client) for ``type``
        # ('BRPOP' or 'LISTEN') events.
        if (channel, client, type) in self._chan_to_sock:
            self._unregister(channel, client, type)
        if client.connection._sock is None:  # not connected yet.
            client.connection.connect()
        sock = client.connection._sock
        self._fd_to_chan[sock.fileno()] = (channel, type)
        self._chan_to_sock[(channel, client, type)] = sock
        self.poller.register(sock, self.eventflags)
    def _unregister(self, channel, client, type):
        self.poller.unregister(self._chan_to_sock[(channel, client, type)])
    def _register_BRPOP(self, channel):
        """enable BRPOP mode for channel."""
        ident = channel, channel.client, 'BRPOP'
        if channel.client.connection._sock is None or \
                ident not in self._chan_to_sock:
            channel._in_poll = False
            self._register(*ident)
        if not channel._in_poll:  # send BRPOP
            channel._brpop_start()
    def _register_LISTEN(self, channel):
        """enable LISTEN mode for channel."""
        if channel.subclient.connection._sock is None:
            channel._in_listen = False
            self._register(channel, channel.subclient, 'LISTEN')
        if not channel._in_listen:
            channel._subscribe()  # send SUBSCRIBE
    def on_poll_start(self):
        for channel in self._channels:
            if channel.active_queues:           # BRPOP mode?
                if channel.qos.can_consume():
                    self._register_BRPOP(channel)
            if channel.active_fanout_queues:    # LISTEN mode?
                self._register_LISTEN(channel)
    def on_poll_init(self, poller):
        self.poller = poller
        # Intentionally returns on the first iteration: the unacked
        # state is shared, so one channel's restore covers them all.
        for channel in self._channels:
            return channel.qos.restore_visible(
                num=channel.unacked_restore_limit,
            )
    def maybe_restore_messages(self):
        for channel in self._channels:
            if channel.active_queues:
                # only need to do this once, as they are not local to channel.
                return channel.qos.restore_visible(
                    num=channel.unacked_restore_limit,
                )
    def on_readable(self, fileno):
        chan, type = self._fd_to_chan[fileno]
        if chan.qos.can_consume():
            return chan.handlers[type]()
    def handle_event(self, fileno, event):
        if event & READ:
            return self.on_readable(fileno), self
        elif event & ERR:
            chan, type = self._fd_to_chan[fileno]
            chan._poll_error(type)
    def get(self, timeout=None):
        """Poll all registered channels once; return a message or
        raise :exc:`Empty`.
        """
        self._in_protected_read = True
        try:
            for channel in self._channels:
                if channel.active_queues:          # BRPOP mode?
                    if channel.qos.can_consume():
                        self._register_BRPOP(channel)
                if channel.active_fanout_queues:   # LISTEN mode?
                    self._register_LISTEN(channel)
            events = self.poller.poll(timeout)
            for fileno, event in events or []:
                ret = self.handle_event(fileno, event)
                if ret:
                    return ret
            # - no new data, so try to restore messages.
            # - reset active redis commands.
            self.maybe_restore_messages()
            raise Empty()
        finally:
            self._in_protected_read = False
            # Run (and drain) any callbacks deferred while reading,
            # e.g. delayed basic_cancel operations.
            while self.after_read:
                try:
                    fun = self.after_read.pop()
                except KeyError:
                    break
                else:
                    fun()
    @property
    def fds(self):
        return self._fd_to_chan
class Channel(virtual.Channel):
QoS = QoS
_client = None
_subclient = None
supports_fanout = True
keyprefix_queue = '_kombu.binding.%s'
keyprefix_fanout = '/{db}.'
sep = '\x06\x16'
_in_poll = False
_in_listen = False
_fanout_queues = {}
ack_emulation = True
unacked_key = 'unacked'
unacked_index_key = 'unacked_index'
unacked_mutex_key = 'unacked_mutex'
unacked_mutex_expire = 300 # 5 minutes
unacked_restore_limit = None
visibility_timeout = 3600 # 1 hour
priority_steps = PRIORITY_STEPS
socket_timeout = None
max_connections = 10
#: Transport option to enable disable fanout keyprefix.
#: Should be enabled by default, but that is not
#: backwards compatible. Can also be string, in which
#: case it changes the default prefix ('/{db}.') into to something
#: else. The prefix must include a leading slash and a trailing dot.
fanout_prefix = False
#: If enabled the fanout exchange will support patterns in routing
#: and binding keys (like a topic exchange but using PUB/SUB).
#: This will be enabled by default in a future version.
fanout_patterns = False
_pool = None
from_transport_options = (
virtual.Channel.from_transport_options +
('ack_emulation',
'unacked_key',
'unacked_index_key',
'unacked_mutex_key',
'unacked_mutex_expire',
'visibility_timeout',
'unacked_restore_limit',
'fanout_prefix',
'fanout_patterns',
'socket_timeout',
'max_connections',
'priority_steps') # <-- do not add comma here!
)
def __init__(self, *args, **kwargs):
super_ = super(Channel, self)
super_.__init__(*args, **kwargs)
if not self.ack_emulation: # disable visibility timeout
self.QoS = virtual.QoS
self._queue_cycle = []
self.Client = self._get_client()
self.ResponseError = self._get_response_error()
self.active_fanout_queues = set()
self.auto_delete_queues = set()
self._fanout_to_queue = {}
self.handlers = {'BRPOP': self._brpop_read, 'LISTEN': self._receive}
if self.fanout_prefix:
if isinstance(self.fanout_prefix, string_t):
self.keyprefix_fanout = self.fanout_prefix
else:
# previous versions did not set a fanout, so cannot enable
# by default.
self.keyprefix_fanout = ''
# Evaluate connection.
try:
self.client.info()
except Exception:
if self._pool:
self._pool.disconnect()
raise
self.connection.cycle.add(self) # add to channel poller.
# copy errors, in case channel closed but threads still
# are still waiting for data.
self.connection_errors = self.connection.connection_errors
register_after_fork(self, self._after_fork)
def _after_fork(self):
if self._pool is not None:
self._pool.disconnect()
def _on_connection_disconnect(self, connection):
if self.connection and self.connection.cycle:
self.connection.cycle._on_connection_disconnect(connection)
def _do_restore_message(self, payload, exchange, routing_key,
client=None, leftmost=False):
with self.conn_or_acquire(client) as client:
try:
try:
payload['headers']['redelivered'] = True
except KeyError:
pass
for queue in self._lookup(exchange, routing_key):
(client.lpush if leftmost else client.rpush)(
queue, dumps(payload),
)
except Exception:
crit('Could not restore message: %r', payload, exc_info=True)
def _restore(self, message, leftmost=False):
if not self.ack_emulation:
return super(Channel, self)._restore(message)
tag = message.delivery_tag
with self.conn_or_acquire() as client:
P, _ = client.pipeline() \
.hget(self.unacked_key, tag) \
.hdel(self.unacked_key, tag) \
.execute()
if P:
M, EX, RK = loads(bytes_to_str(P)) # json is unicode
self._do_restore_message(M, EX, RK, client, leftmost)
def _restore_at_beginning(self, message):
return self._restore(message, leftmost=True)
def basic_consume(self, queue, *args, **kwargs):
if queue in self._fanout_queues:
exchange, _ = self._fanout_queues[queue]
self.active_fanout_queues.add(queue)
self._fanout_to_queue[exchange] = queue
ret = super(Channel, self).basic_consume(queue, *args, **kwargs)
self._update_cycle()
return ret
def basic_cancel(self, consumer_tag):
# If we are busy reading messages we may experience
# a race condition where a message is consumed after
# cancelling, so we must delay this operation until reading
# is complete (Issue celery/celery#1773).
connection = self.connection
if connection:
if connection.cycle._in_protected_read:
return connection.cycle.after_read.add(
promise(self._basic_cancel, (consumer_tag, )),
)
return self._basic_cancel(consumer_tag)
def _basic_cancel(self, consumer_tag):
try:
queue = self._tag_to_queue[consumer_tag]
except KeyError:
return
try:
self.active_fanout_queues.remove(queue)
except KeyError:
pass
else:
self._unsubscribe_from(queue)
try:
exchange, _ = self._fanout_queues[queue]
self._fanout_to_queue.pop(exchange)
except KeyError:
pass
ret = super(Channel, self).basic_cancel(consumer_tag)
self._update_cycle()
return ret
def _get_publish_topic(self, exchange, routing_key):
if routing_key and self.fanout_patterns:
return ''.join([self.keyprefix_fanout, exchange, '/', routing_key])
return ''.join([self.keyprefix_fanout, exchange])
def _get_subscribe_topic(self, queue):
exchange, routing_key = self._fanout_queues[queue]
return self._get_publish_topic(exchange, routing_key)
def _subscribe(self):
keys = [self._get_subscribe_topic(queue)
for queue in self.active_fanout_queues]
if not keys:
return
c = self.subclient
if c.connection._sock is None:
c.connection.connect()
self._in_listen = True
c.psubscribe(keys)
def _unsubscribe_from(self, queue):
topic = self._get_subscribe_topic(queue)
c = self.subclient
should_disconnect = False
if c.connection._sock is None:
c.connection.connect()
should_disconnect = True
try:
c.unsubscribe([topic])
finally:
if should_disconnect and c.connection:
c.connection.disconnect()
def _handle_message(self, client, r):
if bytes_to_str(r[0]) == 'unsubscribe' and r[2] == 0:
client.subscribed = False
elif bytes_to_str(r[0]) == 'pmessage':
return {'type': r[0], 'pattern': r[1],
'channel': r[2], 'data': r[3]}
else:
return {'type': r[0], 'pattern': None,
'channel': r[1], 'data': r[2]}
def _receive(self):
c = self.subclient
response = None
try:
response = c.parse_response()
except self.connection_errors:
self._in_listen = False
raise Empty()
if response is not None:
payload = self._handle_message(c, response)
if bytes_to_str(payload['type']).endswith('message'):
channel = bytes_to_str(payload['channel'])
if payload['data']:
if channel[0] == '/':
_, _, channel = channel.partition('.')
try:
message = loads(bytes_to_str(payload['data']))
except (TypeError, ValueError):
warn('Cannot process event on channel %r: %s',
channel, repr(payload)[:4096], exc_info=1)
raise Empty()
exchange = channel.split('/', 1)[0]
return message, self._fanout_to_queue[exchange]
raise Empty()
def _brpop_start(self, timeout=1):
queues = self._consume_cycle()
if not queues:
return
keys = [self._q_for_pri(queue, pri) for pri in PRIORITY_STEPS
for queue in queues] + [timeout or 0]
self._in_poll = True
self.client.connection.send_command('BRPOP', *keys)
def _brpop_read(self, **options):
try:
try:
dest__item = self.client.parse_response(self.client.connection,
'BRPOP',
**options)
except self.connection_errors:
# if there's a ConnectionError, disconnect so the next
# iteration will reconnect automatically.
self.client.connection.disconnect()
raise Empty()
if dest__item:
dest, item = dest__item
dest = bytes_to_str(dest).rsplit(self.sep, 1)[0]
self._rotate_cycle(dest)
return loads(bytes_to_str(item)), dest
else:
raise Empty()
finally:
self._in_poll = False
def _poll_error(self, type, **options):
if type == 'LISTEN':
self.subclient.parse_response()
else:
self.client.parse_response(self.client.connection, type)
def _get(self, queue):
with self.conn_or_acquire() as client:
for pri in PRIORITY_STEPS:
item = client.rpop(self._q_for_pri(queue, pri))
if item:
return loads(bytes_to_str(item))
raise Empty()
def _size(self, queue):
with self.conn_or_acquire() as client:
cmds = client.pipeline()
for pri in PRIORITY_STEPS:
cmds = cmds.llen(self._q_for_pri(queue, pri))
sizes = cmds.execute()
return sum(size for size in sizes
if isinstance(size, numbers.Integral))
def _q_for_pri(self, queue, pri):
pri = self.priority(pri)
return '%s%s%s' % ((queue, self.sep, pri) if pri else (queue, '', ''))
def priority(self, n):
steps = self.priority_steps
return steps[bisect(steps, n) - 1]
def _put(self, queue, message, **kwargs):
"""Deliver message."""
try:
pri = max(min(int(
message['properties']['delivery_info']['priority']), 9), 0)
except (TypeError, ValueError, KeyError):
pri = 0
with self.conn_or_acquire() as client:
client.lpush(self._q_for_pri(queue, pri), dumps(message))
def _put_fanout(self, exchange, message, routing_key, **kwargs):
"""Deliver fanout message."""
with self.conn_or_acquire() as client:
client.publish(
self._get_publish_topic(exchange, routing_key),
dumps(message),
)
def _new_queue(self, queue, auto_delete=False, **kwargs):
if auto_delete:
self.auto_delete_queues.add(queue)
def _queue_bind(self, exchange, routing_key, pattern, queue):
if self.typeof(exchange).type == 'fanout':
# Mark exchange as fanout.
self._fanout_queues[queue] = (
exchange, routing_key.replace('#', '*'),
)
with self.conn_or_acquire() as client:
client.sadd(self.keyprefix_queue % (exchange, ),
self.sep.join([routing_key or '',
pattern or '',
queue or '']))
def _delete(self, queue, exchange, routing_key, pattern, *args):
self.auto_delete_queues.discard(queue)
with self.conn_or_acquire() as client:
client.srem(self.keyprefix_queue % (exchange, ),
self.sep.join([routing_key or '',
pattern or '',
queue or '']))
cmds = client.pipeline()
for pri in PRIORITY_STEPS:
cmds = cmds.delete(self._q_for_pri(queue, pri))
cmds.execute()
def _has_queue(self, queue, **kwargs):
with self.conn_or_acquire() as client:
cmds = client.pipeline()
for pri in PRIORITY_STEPS:
cmds = cmds.exists(self._q_for_pri(queue, pri))
return any(cmds.execute())
def get_table(self, exchange):
key = self.keyprefix_queue % exchange
with self.conn_or_acquire() as client:
values = client.smembers(key)
if not values:
raise InconsistencyError(NO_ROUTE_ERROR.format(exchange, key))
return [tuple(bytes_to_str(val).split(self.sep)) for val in values]
def _purge(self, queue):
with self.conn_or_acquire() as client:
cmds = client.pipeline()
for pri in PRIORITY_STEPS:
priq = self._q_for_pri(queue, pri)
cmds = cmds.llen(priq).delete(priq)
sizes = cmds.execute()
return sum(sizes[::2])
def close(self):
if self._pool:
self._pool.disconnect()
if not self.closed:
# remove from channel poller.
self.connection.cycle.discard(self)
# delete fanout bindings
for queue in self._fanout_queues:
if queue in self.auto_delete_queues:
self.queue_delete(queue)
self._close_clients()
super(Channel, self).close()
def _close_clients(self):
# Close connections
for attr in 'client', 'subclient':
try:
self.__dict__[attr].connection.disconnect()
except (KeyError, AttributeError, self.ResponseError):
pass
def _prepare_virtual_host(self, vhost):
if not isinstance(vhost, numbers.Integral):
if not vhost or vhost == '/':
vhost = DEFAULT_DB
elif vhost.startswith('/'):
vhost = vhost[1:]
try:
vhost = int(vhost)
except ValueError:
raise ValueError(
'Database is int between 0 and limit - 1, not {0}'.format(
vhost,
))
return vhost
def _connparams(self):
conninfo = self.connection.client
connparams = {'host': conninfo.hostname or '127.0.0.1',
'port': conninfo.port or DEFAULT_PORT,
'virtual_host': conninfo.virtual_host,
'password': conninfo.password,
'max_connections': self.max_connections,
'socket_timeout': self.socket_timeout}
host = connparams['host']
if '://' in host:
scheme, _, _, _, _, path, query = _parse_url(host)
if scheme == 'socket':
connparams.update({
'connection_class': redis.UnixDomainSocketConnection,
'path': '/' + path}, **query)
connparams.pop('host', None)
connparams.pop('port', None)
connparams['db'] = self._prepare_virtual_host(
connparams.pop('virtual_host', None))
channel = self
connection_cls = (
connparams.get('connection_class') or
redis.Connection
)
class Connection(connection_cls):
def disconnect(self):
channel._on_connection_disconnect(self)
super(Connection, self).disconnect()
connparams['connection_class'] = Connection
return connparams
def _create_client(self):
return self.Client(connection_pool=self.pool)
def _get_pool(self):
params = self._connparams()
self.keyprefix_fanout = self.keyprefix_fanout.format(db=params['db'])
return redis.ConnectionPool(**params)
def _get_client(self):
if redis.VERSION < (2, 4, 4):
raise VersionMismatch(
'Redis transport requires redis-py versions 2.4.4 or later. '
'You have {0.__version__}'.format(redis))
# KombuRedis maintains a connection attribute on it's instance and
# uses that when executing commands
# This was added after redis-py was changed.
class KombuRedis(redis.Redis): # pragma: no cover
def __init__(self, *args, **kwargs):
super(KombuRedis, self).__init__(*args, **kwargs)
self.connection = self.connection_pool.get_connection('_')
return KombuRedis
@contextmanager
def conn_or_acquire(self, client=None):
if client:
yield client
else:
if self._in_poll:
client = self._create_client()
try:
yield client
finally:
self.pool.release(client.connection)
else:
yield self.client
    @property
    def pool(self):
        # Lazily-created connection pool shared by all of this
        # channel's clients.
        if self._pool is None:
            self._pool = self._get_pool()
        return self._pool
    @cached_property
    def client(self):
        """Client used to publish messages, BRPOP etc.

        Cached: created once per channel on first access.
        """
        return self._create_client()
    @cached_property
    def subclient(self):
        """Pub/Sub connection used to consume fanout queues."""
        client = self._create_client()
        pubsub = client.pubsub()
        pool = pubsub.connection_pool
        # Bind a dedicated connection up front so the pub/sub socket can
        # be registered with the event loop.
        pubsub.connection = pool.get_connection('pubsub', pubsub.shard_hint)
        return pubsub
    def _update_cycle(self):
        """Update fair cycle between queues.
        We cycle between queues fairly to make sure that
        each queue is equally likely to be consumed from,
        so that a very busy queue will not block others.
        This works by using Redis's `BRPOP` command and
        by rotating the most recently used queue to the
        end of the list.  See Kombu github issue #166 for
        more discussion of this method.
        """
        self._queue_cycle = list(self.active_queues)
def _consume_cycle(self):
"""Get a fresh list of queues from the queue cycle."""
active = len(self.active_queues)
return self._queue_cycle[0:active]
def _rotate_cycle(self, used):
"""Move most recently used queue to end of list."""
cycle = self._queue_cycle
try:
cycle.append(cycle.pop(cycle.index(used)))
except ValueError:
pass
    def _get_response_error(self):
        # Imported lazily so this module can be imported without the
        # redis library installed.
        from redis import exceptions
        return exceptions.ResponseError
@property
def active_queues(self):
"""Set of queues being consumed from (excluding fanout queues)."""
return set(queue for queue in self._active_queues
if queue not in self.active_fanout_queues)
class Transport(virtual.Transport):
    """Redis transport driving all channels through one shared poller."""
    Channel = Channel

    polling_interval = None  # disable sleep between unsuccessful polls.
    default_port = DEFAULT_PORT
    supports_ev = True
    driver_type = 'redis'
    driver_name = 'redis'

    def __init__(self, *args, **kwargs):
        if redis is None:
            raise ImportError('Missing redis library (pip install redis)')
        super(Transport, self).__init__(*args, **kwargs)
        # Get redis-py exceptions.
        self.connection_errors, self.channel_errors = self._get_errors()
        # All channels share the same poller.
        self.cycle = MultiChannelPoller()

    def driver_version(self):
        """Return the version string of the underlying redis-py library."""
        return redis.__version__

    def register_with_event_loop(self, connection, loop):
        """Register the shared poller's file descriptors with ``loop``."""
        cycle = self.cycle
        cycle.on_poll_init(loop.poller)
        cycle_poll_start = cycle.on_poll_start
        add_reader = loop.add_reader
        on_readable = self.on_readable

        def _on_disconnect(connection):
            # Remove the socket from the event loop when it goes away.
            if connection._sock:
                loop.remove(connection._sock)
        cycle._on_connection_disconnect = _on_disconnect

        def on_poll_start():
            cycle_poll_start()
            # Plain for loop: this previously used a list comprehension
            # purely for its side effects, building a throwaway list.
            for fd in cycle.fds:
                add_reader(fd, on_readable, fd)
        loop.on_tick.add(on_poll_start)
        loop.call_repeatedly(10, cycle.maybe_restore_messages)

    def on_readable(self, fileno):
        """Handle AIO event for one of our file descriptors."""
        item = self.cycle.on_readable(fileno)
        if item:
            message, queue = item
            if not queue or queue not in self._callbacks:
                raise KeyError(
                    'Message for queue {0!r} without consumers: {1}'.format(
                        queue, message))
            self._callbacks[queue](message)

    def _get_errors(self):
        """Utility to import redis-py's exceptions at runtime."""
        return get_redis_error_classes()
| agpl-3.0 |
jbenden/ansible | lib/ansible/modules/network/nxos/nxos_static_route.py | 40 | 8421 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {
'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community',
}
DOCUMENTATION = '''
---
module: nxos_static_route
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages static route configuration
description:
- Manages static route configuration
author: Gabriele Gerbino (@GGabriele)
notes:
- If no vrf is supplied, vrf is set to default.
- If C(state=absent), the route will be removed, regardless of the
non-required parameters.
options:
prefix:
description:
- Destination prefix of static route.
required: true
next_hop:
description:
- Next hop address or interface of static route.
If interface, it must be the fully-qualified interface name.
required: true
vrf:
description:
- VRF for static route.
required: false
default: default
tag:
description:
- Route tag value (numeric).
required: false
default: null
route_name:
description:
- Name of the route. Used with the name parameter on the CLI.
required: false
default: null
pref:
description:
- Preference or administrative difference of route (range 1-255).
required: false
default: null
state:
description:
- Manage the state of the resource.
required: true
choices: ['present','absent']
'''
EXAMPLES = '''
- nxos_static_route:
prefix: "192.168.20.64/24"
next_hop: "3.3.3.3"
route_name: testing
pref: 100
'''
RETURN = '''
commands:
description: commands sent to the device
returned: always
type: list
sample: ["ip route 192.168.20.0/24 3.3.3.3 name testing 100"]
'''
import re
from ansible.module_utils.nxos import get_config, load_config
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig
def reconcile_candidate(module, candidate, prefix):
    """Append to ``candidate`` the commands needed to reach the desired state.

    Compares the device's running config against the requested static
    route and adds either the ``ip route`` or ``no ip route`` command,
    nested under a ``vrf context`` parent when a non-default VRF is used.
    """
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    state = module.params['state']
    set_command = set_route_command(module, prefix)
    remove_command = remove_route_command(module, prefix)
    parents = []
    commands = []
    if module.params['vrf'] == 'default':
        config = netcfg.get_section(set_command)
        if config and state == 'absent':
            commands = [remove_command]
        elif not config and state == 'present':
            commands = [set_command]
    else:
        parents = ['vrf context {0}'.format(module.params['vrf'])]
        config = netcfg.get_section(parents)
        # get_section may return a string or a list; normalize to a list
        # of stripped lines before testing membership.
        if not isinstance(config, list):
            config = config.split('\n')
        config = [line.strip() for line in config]
        if set_command in config and state == 'absent':
            commands = [remove_command]
        elif set_command not in config and state == 'present':
            commands = [set_command]
    if commands:
        candidate.add(commands, parents=parents)
def fix_prefix_to_regex(prefix):
    """Escape the ``.`` and ``/`` characters so *prefix* is regex-safe."""
    escaped = []
    for char in prefix:
        if char in './':
            escaped.append('\\')
        escaped.append(char)
    return ''.join(escaped)
def get_existing(module, prefix, warnings):
    """Return a dict describing the currently configured route for *prefix*.

    Parses the running config (the requested VRF section, or the whole
    config for the default VRF) with a regex.  Returns ``{}`` when the
    route or the VRF does not exist; a missing VRF also appends a
    message to ``warnings``.
    """
    key_map = ['tag', 'pref', 'route_name', 'next_hop']
    netcfg = CustomNetworkConfig(indent=2, contents=get_config(module))
    parents = 'vrf context {0}'.format(module.params['vrf'])
    prefix_to_regex = fix_prefix_to_regex(prefix)
    route_regex = r'.*ip\sroute\s{0}\s(?P<next_hop>\S+)(\sname\s(?P<route_name>\S+))?(\stag\s(?P<tag>\d+))?(\s(?P<pref>\d+))?.*'.format(prefix_to_regex)
    if module.params['vrf'] == 'default':
        config = str(netcfg)
    else:
        config = netcfg.get_section(parents)
    if config:
        try:
            match_route = re.match(route_regex, config, re.DOTALL)
            group_route = match_route.groupdict()
            for key in key_map:
                if key not in group_route:
                    group_route[key] = ''
            group_route['prefix'] = prefix
            group_route['vrf'] = module.params['vrf']
        except (AttributeError, TypeError):
            # No regex match (match_route is None): no existing route.
            group_route = {}
    else:
        group_route = {}
        msg = ("VRF {0} didn't exist.".format(module.params['vrf']))
        if msg not in warnings:
            warnings.append(msg)
    return group_route
def remove_route_command(module, prefix):
    """Return the CLI command that removes the static route."""
    next_hop = module.params['next_hop']
    return 'no ip route %s %s' % (prefix, next_hop)
def set_route_command(module, prefix):
    """Return the CLI command that configures the static route.

    Optional name/tag/preference parts are appended in the same order
    NX-OS expects them on the command line.
    """
    params = module.params
    parts = ['ip route %s %s' % (prefix, params['next_hop'])]
    if params['route_name']:
        parts.append('name %s' % params['route_name'])
    if params['tag']:
        parts.append('tag %s' % params['tag'])
    if params['pref']:
        parts.append(str(params['pref']))
    return ' '.join(parts)
def get_dotted_mask(mask):
    """Convert a prefix length (0-32) to a dotted-decimal netmask.

    For example ``24`` -> ``'255.255.255.0'``.
    """
    # Set the top ``mask`` bits of a 32-bit value; host bits stay zero.
    bits = 0
    for i in range(32 - mask, 32):
        bits |= (1 << i)
    # NOTE: previously the result string was rebound to the ``mask``
    # parameter name; use a direct return instead of shadowing it.
    return '%d.%d.%d.%d' % ((bits & 0xff000000) >> 24,
                            (bits & 0xff0000) >> 16,
                            (bits & 0xff00) >> 8,
                            (bits & 0xff))
def get_network_start(address, netmask):
    """Return the four network-address octets of *address* under *netmask*."""
    addr_octets = address.split('.')
    mask_octets = netmask.split('.')
    return [str(int(addr_octets[i]) & int(mask_octets[i]))
            for i in range(4)]
def network_from_string(address, mask, module):
    """Validate *address*/*mask* and return the network address string.

    ``address`` must be a dotted quad and ``mask`` a prefix length
    (0-32); any violation is reported through ``module.fail_json``.
    """
    octets = address.split('.')
    if len(octets) != 4:
        # BUGFIX: previously only ``> 4`` was rejected, so an address
        # with fewer than four octets slipped through and crashed later
        # with an IndexError inside get_network_start().
        module.fail_json(msg='Incorrect address format.', address=address)
    for octet in octets:
        try:
            if int(octet) < 0 or int(octet) > 255:
                module.fail_json(msg='Address may contain invalid values.',
                                 address=address)
        except ValueError:
            module.fail_json(msg='Address may contain non-integer values.',
                             address=address)
    try:
        if int(mask) < 0 or int(mask) > 32:
            module.fail_json(msg='Incorrect mask value.', mask=mask)
    except ValueError:
        module.fail_json(msg='Mask may contain non-integer values.', mask=mask)
    netmask = get_dotted_mask(int(mask))
    return '.'.join(get_network_start(address, netmask))
def normalize_prefix(module, prefix):
    """Return *prefix* normalized to ``network/mask`` form.

    A bare host address gets an implicit ``/32``; when a mask is
    supplied, the address part is reduced to its network address.
    """
    splitted_prefix = prefix.split('/')
    address = splitted_prefix[0]
    if len(splitted_prefix) > 2:
        module.fail_json(msg='Incorrect address format.', address=address)
    elif len(splitted_prefix) == 2:
        mask = splitted_prefix[1]
        network = network_from_string(address, mask, module)
        normalized_prefix = str(network) + '/' + str(mask)
    else:
        # No mask supplied: treat the address as a host route.
        normalized_prefix = prefix + '/' + str(32)
    return normalized_prefix
def main():
    """Entry point for the nxos_static_route Ansible module."""
    argument_spec = dict(
        prefix=dict(required=True, type='str'),
        next_hop=dict(required=True, type='str'),
        vrf=dict(type='str', default='default'),
        tag=dict(type='str'),
        route_name=dict(type='str'),
        pref=dict(type='str'),
        state=dict(choices=['absent', 'present'], default='present'),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )
    warnings = list()
    check_args(module, warnings)
    result = dict(changed=False, warnings=warnings)
    prefix = normalize_prefix(module, module.params['prefix'])
    candidate = CustomNetworkConfig(indent=3)
    reconcile_candidate(module, candidate, prefix)
    if candidate:
        # Candidate holds pending commands: push them to the device.
        candidate = candidate.items_text()
        load_config(module, candidate)
        result['commands'] = candidate
        result['changed'] = True
    else:
        result['commands'] = []
    module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
imjonsnooow/vivisect | vstruct/bitfield.py | 8 | 3569 | import envi.bits as e_bits
from vstruct import VStruct
from vstruct.primitives import *
from binascii import unhexlify
class v_bits(v_number):
    # A numeric primitive occupying an arbitrary number of bits; used
    # as a child field of VBitField below.
    def __init__(self, width):
        v_number.__init__(self)
        # Number of bits this field occupies inside the bitfield.
        self._vs_bitwidth = width
    def vsSetValue(self, value):
        '''
        override base because we are not using the min or max val.
        '''
        # NOTE(review): ``long`` makes this module Python 2 only.
        self._vs_value = long(value)
    def vsSetBitWidth(self, width):
        # A width of 0 disables the field entirely (see VBitField).
        self._vs_bitwidth = width
class VBitField(VStruct):
    '''
    A VStruct *like* field which may contain v_bits
    children. To all VStruct related operations, we are
    a primitive, but we do have internal structure.
    Example:
    see vstruct/defs/swf.py
    NOTE: this object will pad itself to byte aligned bounds
    '''
    def __init__(self):
        VStruct.__init__(self)
    def vsIsPrim(self):
        # Appear as a primitive to the containing VStruct.
        return True
    def vsAddField(self, name, value):
        # Only bit-width-aware children make sense inside a bitfield.
        if not isinstance(value, v_bits):
            raise Exception('VBitField *must* use v_bits() kids!')
        return VStruct.vsAddField(self, name, value)
    def vsGetPrintInfo(self, offset=0, indent=0, top=True):
        """Return (offset, indent, name, field) rows for pretty printing."""
        ret = []
        if top:
            ret.append((offset, indent, self._vs_name, self))
        indent += 1
        bitoff = 0
        for fname,field in self.vsGetFields():
            # use vsSetBitWidth(0) to disable fields
            if field._vs_bitwidth == 0:
                continue
            # Annotate each name with its bit span, e.g. flags[0:3].
            bitname = '%s[%d:%d]' % (fname,bitoff,bitoff + field._vs_bitwidth)
            ret.append( (offset, indent, bitname, field) )
            bitoff += field._vs_bitwidth
        return ret
    def __len__(self):
        # Total width in bytes, rounded up to the next byte boundary.
        bits = sum([ f._vs_bitwidth for (n,f) in self.vsGetFields() ])
        bittobyte,bitoff = divmod(bits,8)
        if bitoff:
            bittobyte += 1
        return bittobyte
    def vsParse(self, bytez, offset=0):
        """Parse all child fields from ``bytez`` starting at ``offset``.

        Returns the byte offset just past this bitfield (padded up to a
        byte boundary).  NOTE(review): uses ``str.encode('hex')`` and is
        therefore Python 2 only (vsEmit below already uses unhexlify).
        """
        bitoff = 0
        for fname,field in self.vsGetFields():
            # use vsSetBitWidth(0) to disable fields
            if field._vs_bitwidth == 0:
                continue
            # adjust forward from last fields bits % 8
            startbyte,startbit = divmod(bitoff,8)
            endbyte,endbit = divmod(bitoff + field._vs_bitwidth,8)
            # if we have an endbit remainder, we need to grab
            # an additional byte...
            endround = 0
            endshift = 0
            if endbit:
                endshift = (8-endbit)
                endround = 1
            fieldbytes = bytez[offset + startbyte:offset+endbyte+endround]
            rawint = int( fieldbytes.encode('hex'), 16)
            if endshift:
                # Drop the trailing bits that belong to later fields.
                rawint >>= endshift
            # Mask off leading bits that belong to earlier fields.
            rawint &= (2**field._vs_bitwidth)-1
            field.vsSetValue(rawint)
            bitoff += field._vs_bitwidth
            self._vsFireCallbacks(fname)
        offbytes,offbits = divmod(bitoff,8)
        offset += offbytes
        # mop up any remaining bits into a byte boundary
        if offbits:
            offset += 1
        return offset
    def vsEmit(self):
        """Serialize all child fields back into a byte-aligned string."""
        valu = 0
        width = 0
        for name,field in self.vsGetFields():
            width += field._vs_bitwidth
            valu = ( valu << field._vs_bitwidth ) | field._vs_value
        bytelen,bitrem = divmod(width,8)
        if bitrem:
            bytelen += 1
            # Left-justify the final partial byte (pad with zero bits).
            valu <<= ( 8 - bitrem )
        return unhexlify(('%.' + str(bytelen*2) + 'x') % valu)
| apache-2.0 |
raildo/nova | nova/version.py | 61 | 2298 | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from nova.i18n import _LE
NOVA_VENDOR = "OpenStack Foundation"
NOVA_PRODUCT = "OpenStack Nova"
NOVA_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr.version.VersionInfo('nova')
version_string = version_info.version_string
def _load_config():
    """Load vendor/product/package overrides from an optional release file.

    The file is located through oslo.config's ``find_file`` and read at
    most once per process; a missing file is silently ignored, a parse
    failure is logged.
    """
    # Don't load in global context, since we can't assume
    # these modules are accessible when distutils uses
    # this module
    from six.moves import configparser

    from oslo_config import cfg

    import logging

    global loaded, NOVA_VENDOR, NOVA_PRODUCT, NOVA_PACKAGE
    if loaded:
        return
    loaded = True
    cfgfile = cfg.CONF.find_file("release")
    if cfgfile is None:
        return
    try:
        # NOTE: previously this parser was bound to the name ``cfg``,
        # shadowing the oslo_config module imported above.
        parser = configparser.RawConfigParser()
        parser.read(cfgfile)
        if parser.has_option("Nova", "vendor"):
            NOVA_VENDOR = parser.get("Nova", "vendor")
        if parser.has_option("Nova", "product"):
            NOVA_PRODUCT = parser.get("Nova", "product")
        if parser.has_option("Nova", "package"):
            NOVA_PACKAGE = parser.get("Nova", "package")
    except Exception as ex:
        LOG = logging.getLogger(__name__)
        LOG.error(_LE("Failed to load %(cfgfile)s: %(ex)s"),
                  {'cfgfile': cfgfile, 'ex': ex})
def vendor_string():
    """Return the vendor name, honoring any release-file override."""
    _load_config()
    return NOVA_VENDOR
def product_string():
    """Return the product name, honoring any release-file override."""
    _load_config()
    return NOVA_PRODUCT
def package_string():
    """Return the distro package suffix, or None when not packaged."""
    _load_config()
    return NOVA_PACKAGE
def version_string_with_package():
    """Return the version string, with the distro package suffix if any."""
    if package_string() is None:
        return version_info.version_string()
    else:
        return "%s-%s" % (version_info.version_string(), package_string())
| apache-2.0 |
mezz64/home-assistant | tests/components/imap_email_content/test_sensor.py | 9 | 6960 | """The tests for the IMAP email content sensor platform."""
from collections import deque
import datetime
import email
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from homeassistant.components.imap_email_content import sensor as imap_email_content
from homeassistant.helpers.event import async_track_state_change
from homeassistant.helpers.template import Template
class FakeEMailReader:
    """A fake email reader that serves messages from a queue."""

    def __init__(self, messages):
        """Store the deque of messages to serve."""
        self._messages = messages

    def connect(self):
        """Pretend the connection always succeeds."""
        return True

    def read_next(self):
        """Pop and return the oldest queued message, or None when empty."""
        if not self._messages:
            return None
        return self._messages.popleft()
async def test_allowed_sender(hass):
    """Test emails from allowed sender."""
    test_message = email.message.Message()
    test_message["From"] = "sender@test.com"
    test_message["Subject"] = "Test"
    test_message["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
    test_message.set_payload("Test Message")
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([test_message])),
        "test_emails_sensor",
        ["sender@test.com"],
        None,
    )
    sensor.entity_id = "sensor.emailtest"
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    # State is the subject; attributes carry body/from/subject/date.
    assert "Test" == sensor.state
    assert "Test Message" == sensor.device_state_attributes["body"]
    assert "sender@test.com" == sensor.device_state_attributes["from"]
    assert "Test" == sensor.device_state_attributes["subject"]
    assert (
        datetime.datetime(2016, 1, 1, 12, 44, 57)
        == sensor.device_state_attributes["date"]
    )
async def test_multi_part_with_text(hass):
    """Test multi part emails."""
    msg = MIMEMultipart("alternative")
    msg["Subject"] = "Link"
    msg["From"] = "sender@test.com"
    text = "Test Message"
    html = "<html><head></head><body>Test Message</body></html>"
    textPart = MIMEText(text, "plain")
    htmlPart = MIMEText(html, "html")
    msg.attach(textPart)
    msg.attach(htmlPart)
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([msg])),
        "test_emails_sensor",
        ["sender@test.com"],
        None,
    )
    sensor.entity_id = "sensor.emailtest"
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    # The plain-text part is preferred over the HTML alternative.
    assert "Link" == sensor.state
    assert "Test Message" == sensor.device_state_attributes["body"]
async def test_multi_part_only_html(hass):
    """Test multi part emails with only HTML."""
    msg = MIMEMultipart("alternative")
    msg["Subject"] = "Link"
    msg["From"] = "sender@test.com"
    html = "<html><head></head><body>Test Message</body></html>"
    htmlPart = MIMEText(html, "html")
    msg.attach(htmlPart)
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([msg])),
        "test_emails_sensor",
        ["sender@test.com"],
        None,
    )
    sensor.entity_id = "sensor.emailtest"
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    # Without a plain-text part, the raw HTML is used as the body.
    assert "Link" == sensor.state
    assert (
        "<html><head></head><body>Test Message</body></html>"
        == sensor.device_state_attributes["body"]
    )
async def test_multi_part_only_other_text(hass):
    """Test multi part emails with only other text."""
    msg = MIMEMultipart("alternative")
    msg["Subject"] = "Link"
    msg["From"] = "sender@test.com"
    other = "Test Message"
    htmlPart = MIMEText(other, "other")
    msg.attach(htmlPart)
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([msg])),
        "test_emails_sensor",
        ["sender@test.com"],
        None,
    )
    sensor.entity_id = "sensor.emailtest"
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    # A non-plain, non-HTML text part is still used as the body.
    assert "Link" == sensor.state
    assert "Test Message" == sensor.device_state_attributes["body"]
async def test_multiple_emails(hass):
    """Test multiple emails."""
    states = []
    test_message1 = email.message.Message()
    test_message1["From"] = "sender@test.com"
    test_message1["Subject"] = "Test"
    test_message1["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
    test_message1.set_payload("Test Message")
    test_message2 = email.message.Message()
    test_message2["From"] = "sender@test.com"
    test_message2["Subject"] = "Test 2"
    test_message2["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
    test_message2.set_payload("Test Message 2")
    def state_changed_listener(entity_id, from_s, to_s):
        # Record every state transition of the sensor.
        states.append(to_s)
    async_track_state_change(hass, ["sensor.emailtest"], state_changed_listener)
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([test_message1, test_message2])),
        "test_emails_sensor",
        ["sender@test.com"],
        None,
    )
    sensor.entity_id = "sensor.emailtest"
    # Each update consumes one queued message, producing one state change.
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    assert "Test" == states[0].state
    assert "Test 2" == states[1].state
    assert "Test Message 2" == sensor.device_state_attributes["body"]
async def test_sender_not_allowed(hass):
    """Test not whitelisted emails."""
    test_message = email.message.Message()
    test_message["From"] = "sender@test.com"
    test_message["Subject"] = "Test"
    test_message["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
    test_message.set_payload("Test Message")
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([test_message])),
        "test_emails_sensor",
        ["other@test.com"],
        None,
    )
    sensor.entity_id = "sensor.emailtest"
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    # Sender is not in the allowed list, so no state is set.
    assert sensor.state is None
async def test_template(hass):
    """Test value template."""
    test_message = email.message.Message()
    test_message["From"] = "sender@test.com"
    test_message["Subject"] = "Test"
    test_message["Date"] = datetime.datetime(2016, 1, 1, 12, 44, 57)
    test_message.set_payload("Test Message")
    sensor = imap_email_content.EmailContentSensor(
        hass,
        FakeEMailReader(deque([test_message])),
        "test_emails_sensor",
        ["sender@test.com"],
        Template("{{ subject }} from {{ from }} with message {{ body }}", hass),
    )
    sensor.entity_id = "sensor.emailtest"
    sensor.async_schedule_update_ha_state(True)
    await hass.async_block_till_done()
    # The template renders subject, sender and body into the state.
    assert "Test from sender@test.com with message Test Message" == sensor.state
| apache-2.0 |
Tangcuyu/perfectinfo | lib/tsd/node_modules/node-gyp/gyp/pylib/gyp/MSVSUserFile.py | 2710 | 5094 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Visual Studio user preferences file writer."""
import os
import re
import socket # for gethostname
import gyp.common
import gyp.easy_xml as easy_xml
#------------------------------------------------------------------------------
def _FindCommandInPath(command):
"""If there are no slashes in the command given, this function
searches the PATH env to find the given command, and converts it
to an absolute path. We have to do this because MSVS is looking
for an actual file to launch a debugger on, not just a command
line. Note that this happens at GYP time, so anything needing to
be built needs to have a full path."""
if '/' in command or '\\' in command:
# If the command already has path elements (either relative or
# absolute), then assume it is constructed properly.
return command
else:
# Search through the path list and find an existing file that
# we can access.
paths = os.environ.get('PATH','').split(os.pathsep)
for path in paths:
item = os.path.join(path, command)
if os.path.isfile(item) and os.access(item, os.X_OK):
return item
return command
def _QuoteWin32CommandLineArgs(args):
new_args = []
for arg in args:
# Replace all double-quotes with double-double-quotes to escape
# them for cmd shell, and then quote the whole thing if there
# are any.
if arg.find('"') != -1:
arg = '""'.join(arg.split('"'))
arg = '"%s"' % arg
# Otherwise, if there are any spaces, quote the whole arg.
elif re.search(r'[ \t\n]', arg):
arg = '"%s"' % arg
new_args.append(arg)
return new_args
class Writer(object):
  """Visual Studio XML user file writer.

  NOTE(review): uses dict.iteritems(), so this module is Python 2 only.
  """

  def __init__(self, user_file_path, version, name):
    """Initializes the user file.

    Args:
      user_file_path: Path to the user file.
      version: Version info.
      name: Name of the user file.
    """
    self.user_file_path = user_file_path
    self.version = version
    self.name = name
    # Maps configuration name -> easy_xml specification list.
    self.configurations = {}

  def AddConfig(self, name):
    """Adds a configuration to the project.

    Args:
      name: Configuration name.
    """
    self.configurations[name] = ['Configuration', {'Name': name}]

  def AddDebugSettings(self, config_name, command, environment=None,
                       working_directory=""):
    """Adds a DebugSettings node to the user file for a particular config.

    Args:
      config_name: Name of the configuration to attach the settings to.
      command: command line to run.  First element in the list is the
        executable.  All elements of the command will be quoted if
        necessary.
      environment: dict of environment variables for the debuggee. (optional)
      working_directory: working directory for the debuggee. (optional)
    """
    # BUGFIX(latent): the default was previously a mutable ``{}``;
    # ``None`` behaves identically (both are falsy below) without the
    # shared-mutable-default trap.
    command = _QuoteWin32CommandLineArgs(command)
    abs_command = _FindCommandInPath(command[0])
    if environment and isinstance(environment, dict):
      env_list = ['%s="%s"' % (key, val)
                  for (key, val) in environment.iteritems()]
      environment = ' '.join(env_list)
    else:
      environment = ''
    n_cmd = ['DebugSettings',
             {'Command': abs_command,
              'WorkingDirectory': working_directory,
              'CommandArguments': " ".join(command[1:]),
              'RemoteMachine': socket.gethostname(),
              'Environment': environment,
              'EnvironmentMerge': 'true',
              # Currently these are all "dummy" values that we're just setting
              # in the default manner that MSVS does it.  We could use some of
              # these to add additional capabilities, I suppose, but they might
              # not have parity with other platforms then.
              'Attach': 'false',
              'DebuggerType': '3',   # 'auto' debugger
              'Remote': '1',
              'RemoteCommand': '',
              'HttpUrl': '',
              'PDBPath': '',
              'SQLDebugging': '',
              'DebuggerFlavor': '0',
              'MPIRunCommand': '',
              'MPIRunArguments': '',
              'MPIRunWorkingDirectory': '',
              'ApplicationCommand': '',
              'ApplicationArguments': '',
              'ShimCommand': '',
              'MPIAcceptMode': '',
              'MPIAcceptFilter': ''
             }]
    # Find the config, and add it if it doesn't exist.
    if config_name not in self.configurations:
      self.AddConfig(config_name)
    # Add the DebugSettings onto the appropriate config.
    self.configurations[config_name].append(n_cmd)

  def WriteIfChanged(self):
    """Writes the user file."""
    configs = ['Configurations']
    for config, spec in sorted(self.configurations.iteritems()):
      configs.append(spec)
    content = ['VisualStudioUserFile',
               {'Version': self.version.ProjectVersion(),
                'Name': self.name
               },
               configs]
    easy_xml.WriteXmlIfChanged(content, self.user_file_path,
                               encoding="Windows-1252")
| mit |
franramirez688/common | test/utils/decorators_test.py | 5 | 1225 | import unittest
from mock import Mock, patch
from mock import call
from biicode.common.utils.decorators import os_constraint
from biicode.common.settings.osinfo import OSInfo
class DecoratorsTest(unittest.TestCase):
    """Tests for the ``os_constraint`` decorator."""
    @patch('biicode.common.utils.decorators.OSInfo')
    def test_constraint_os_same_os(self, os_info):
        """Decorated methods only run on the matching OS; otherwise an
        error is reported through the instance's ``user_io``."""
        class Aux(object):
            def __init__(self, os_name_mock):
                # Make OSInfo.capture() report the desired OS name.
                os_info.capture = Mock(return_value=OSInfo(os_name_mock))
                self.user_io = Mock()
            @os_constraint("Linux")
            def linux_call(self):
                self.linux = 1
            @os_constraint("Windows")
            def windows_call(self):
                self.windows = 2
        aux_instance = Aux("Linux")
        aux_instance.linux_call()
        aux_instance.windows_call()
        # On Linux only linux_call ran; windows_call reported an error.
        self.assertEquals(aux_instance.linux, 1)
        aux_instance.user_io.assert_has_calls([call.out.error('You need to use a Windows OS')])
        aux_instance = Aux("Windows")
        aux_instance.linux_call()
        aux_instance.windows_call()
        # On Windows only windows_call ran; linux_call reported an error.
        self.assertEquals(aux_instance.windows, 2)
        aux_instance.user_io.assert_has_calls([call.out.error('You need to use a Linux OS')])
| mit |
yephper/django | tests/urlpatterns_reverse/middleware.py | 1 | 1075 | from django.http import HttpResponse, StreamingHttpResponse
from django.urls import reverse
from . import urlconf_inner
class ChangeURLconfMiddleware(object):
    """Test middleware switching the request to the inner URLconf."""
    def process_request(self, request):
        request.urlconf = urlconf_inner.__name__
class NullChangeURLconfMiddleware(object):
    """Test middleware setting the request URLconf to None."""
    def process_request(self, request):
        request.urlconf = None
class ReverseInnerInResponseMiddleware(object):
    """Responds with the reversed 'inner' URL during the response phase."""
    def process_response(self, *args, **kwargs):
        return HttpResponse(reverse('inner'))
class ReverseOuterInResponseMiddleware(object):
    """Responds with the reversed 'outer' URL during the response phase."""
    def process_response(self, *args, **kwargs):
        return HttpResponse(reverse('outer'))
class ReverseInnerInStreaming(object):
    """Streams the reversed 'inner' URL during the view phase."""
    def process_view(self, *args, **kwargs):
        def stream():
            # Reverse lazily, when the stream is consumed.
            yield reverse('inner')
        return StreamingHttpResponse(stream())
class ReverseOuterInStreaming(object):
    """Streams the reversed 'outer' URL during the view phase."""
    def process_view(self, *args, **kwargs):
        def stream():
            # Reverse lazily, when the stream is consumed.
            yield reverse('outer')
        return StreamingHttpResponse(stream())
| bsd-3-clause |
darmaa/odoo | addons/hr_timesheet_invoice/wizard/hr_timesheet_final_invoice_create.py | 44 | 3003 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
#
# Create an final invoice based on selected timesheet lines
#
#
# TODO: check unit of measure !!!
#
class final_invoice_create(osv.osv_memory):
    # Transient wizard model: creates a final invoice from the selected
    # analytic accounts' uninvoiced timesheet lines.
    _name = 'hr.timesheet.invoice.create.final'
    _description = 'Create invoice from timesheet final'
    _columns = {
        'date': fields.boolean('Date', help='Display date in the history of works'),
        'time': fields.boolean('Time Spent', help='Display time in the history of works'),
        'name': fields.boolean('Log of Activity', help='Display detail of work in the invoice line.'),
        'price': fields.boolean('Cost', help='Display cost of the item you reinvoice'),
        'product': fields.many2one('product.product', 'Product', help='The product that will be used to invoice the remaining amount'),
    }
    def do_create(self, cr, uid, ids, context=None):
        """Create the invoices and return the invoice-list window action
        filtered to the newly created customer invoices."""
        data = self.read(cr, uid, ids, [], context=context)[0]
        # hack for fixing small issue (context should not propagate implicitly between actions)
        if 'default_type' in context:
            del context['default_type']
        # Uninvoiced, invoiceable analytic lines of the selected accounts.
        ids = self.pool.get('account.analytic.line').search(cr, uid, [('invoice_id','=',False),('to_invoice','<>', False), ('account_id', 'in', context['active_ids'])], context=context)
        invs = self.pool.get('account.analytic.line').invoice_cost_create(cr, uid, ids, data, context=context)
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        mod_ids = mod_obj.search(cr, uid, [('name', '=', 'action_invoice_tree1')], context=context)[0]
        res_id = mod_obj.read(cr, uid, mod_ids, ['res_id'], context=context)['res_id']
        act_win = act_obj.read(cr, uid, res_id, [], context=context)
        # Restrict the standard invoice action to the invoices just created.
        act_win['domain'] = [('id','in',invs),('type','=','out_invoice')]
        act_win['name'] = _('Invoices')
        return act_win
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nandhp/youtube-dl | youtube_dl/extractor/livestream.py | 6 | 13785 | from __future__ import unicode_literals
import re
import itertools
from .common import InfoExtractor
from ..compat import (
compat_str,
compat_urlparse,
)
from ..utils import (
find_xpath_attr,
xpath_attr,
xpath_with_ns,
xpath_text,
orderedSet,
update_url_query,
int_or_none,
float_or_none,
parse_iso8601,
determine_ext,
)
class LivestreamIE(InfoExtractor):
    """Extractor for livestream.com accounts, events and individual videos.

    A URL can address a single video (``/videos/<id>``), or an account/event
    pair (by numeric id or by name).  Events resolve either to a live stream
    (when ``stream_info`` is present) or to a playlist of the event's videos.
    """
    IE_NAME = 'livestream'
    _VALID_URL = r'https?://(?:new\.)?livestream\.com/(?:accounts/(?P<account_id>\d+)|(?P<account_name>[^/]+))/(?:events/(?P<event_id>\d+)|(?P<event_name>[^/]+))(?:/videos/(?P<id>\d+))?'
    _TESTS = [{
        'url': 'http://new.livestream.com/CoheedandCambria/WebsterHall/videos/4719370',
        'md5': '53274c76ba7754fb0e8d072716f2292b',
        'info_dict': {
            'id': '4719370',
            'ext': 'mp4',
            'title': 'Live from Webster Hall NYC',
            'timestamp': 1350008072,
            'upload_date': '20121012',
            'duration': 5968.0,
            'like_count': int,
            'view_count': int,
            'thumbnail': 're:^http://.*\.jpg$'
        }
    }, {
        'url': 'http://new.livestream.com/tedx/cityenglish',
        'info_dict': {
            'title': 'TEDCity2.0 (English)',
            'id': '2245590',
        },
        'playlist_mincount': 4,
    }, {
        'url': 'http://new.livestream.com/chess24/tatasteelchess',
        'info_dict': {
            'title': 'Tata Steel Chess',
            'id': '3705884',
        },
        'playlist_mincount': 60,
    }, {
        'url': 'https://new.livestream.com/accounts/362/events/3557232/videos/67864563/player?autoPlay=false&height=360&mute=false&width=640',
        'only_matching': True,
    }, {
        'url': 'http://livestream.com/bsww/concacafbeachsoccercampeonato2015',
        'only_matching': True,
    }]
    # Filled in with (account, event); '/videos/<id>' or '/feed.json' is
    # appended by the callers below.
    _API_URL_TEMPLATE = 'http://livestream.com/api/accounts/%s/events/%s'

    def _parse_smil_formats(self, smil, smil_url, video_id, namespace=None, f4m_params=None, transform_rtmp_url=None):
        """Build flv format dicts from the <video> nodes of a SMIL document."""
        base_ele = find_xpath_attr(
            smil, self._xpath_ns('.//meta', namespace), 'name', 'httpBase')
        base = base_ele.get('content') if base_ele is not None else 'http://livestreamvod-f.akamaihd.net/'

        formats = []
        video_nodes = smil.findall(self._xpath_ns('.//video', namespace))

        for vn in video_nodes:
            # system-bitrate is in bits/s; store tbr in kbit/s.
            tbr = int_or_none(vn.attrib.get('system-bitrate'), 1000)
            furl = (
                update_url_query(compat_urlparse.urljoin(base, vn.attrib['src']), {
                    'v': '3.0.3',
                    'fp': 'WIN% 14,0,0,145',
                }))
            if 'clipBegin' in vn.attrib:
                furl += '&ssek=' + vn.attrib['clipBegin']
            formats.append({
                'url': furl,
                'format_id': 'smil_%d' % tbr,
                'ext': 'flv',
                'tbr': tbr,
                # Deprioritize flv below the progressive/HLS formats.
                'preference': -1000,
            })
        return formats

    def _extract_video_info(self, video_data):
        """Turn an API video object into an info dict (formats, comments, metadata)."""
        video_id = compat_str(video_data['id'])

        FORMAT_KEYS = (
            ('sd', 'progressive_url'),
            ('hd', 'progressive_url_hd'),
        )

        formats = []
        for format_id, key in FORMAT_KEYS:
            video_url = video_data.get(key)
            if video_url:
                ext = determine_ext(video_url)
                # m3u8 playlists are handled separately below via m3u8_url.
                if ext == 'm3u8':
                    continue
                # Bitrate is encoded in the filename, e.g. '..._1500.mp4'.
                bitrate = int_or_none(self._search_regex(
                    r'(\d+)\.%s' % ext, video_url, 'bitrate', default=None))
                formats.append({
                    'url': video_url,
                    'format_id': format_id,
                    'tbr': bitrate,
                    'ext': ext,
                })

        smil_url = video_data.get('smil_url')
        if smil_url:
            formats.extend(self._extract_smil_formats(smil_url, video_id))

        m3u8_url = video_data.get('m3u8_url')
        if m3u8_url:
            formats.extend(self._extract_m3u8_formats(
                m3u8_url, video_id, 'mp4', 'm3u8_native', m3u8_id='hls', fatal=False))

        f4m_url = video_data.get('f4m_url')
        if f4m_url:
            formats.extend(self._extract_f4m_formats(
                f4m_url, video_id, f4m_id='hds', fatal=False))
        self._sort_formats(formats)

        comments = [{
            'author_id': comment.get('author_id'),
            'author': comment.get('author', {}).get('full_name'),
            'id': comment.get('id'),
            'text': comment['text'],
            'timestamp': parse_iso8601(comment.get('created_at')),
        } for comment in video_data.get('comments', {}).get('data', [])]

        return {
            'id': video_id,
            'formats': formats,
            'title': video_data['caption'],
            'description': video_data.get('description'),
            'thumbnail': video_data.get('thumbnail_url'),
            # API reports duration in milliseconds.
            'duration': float_or_none(video_data.get('duration'), 1000),
            'timestamp': parse_iso8601(video_data.get('publish_at')),
            'like_count': video_data.get('likes', {}).get('total'),
            'comment_count': video_data.get('comments', {}).get('total'),
            'view_count': video_data.get('views'),
            'comments': comments,
        }

    def _extract_stream_info(self, stream_info):
        """Turn an API stream_info object into a (possibly live) info dict."""
        broadcast_id = stream_info['broadcast_id']
        is_live = stream_info.get('is_live')

        formats = []
        smil_url = stream_info.get('play_url')
        if smil_url:
            formats.extend(self._extract_smil_formats(smil_url, broadcast_id))

        # Live streams must not use the native downloader's VOD handling.
        entry_protocol = 'm3u8' if is_live else 'm3u8_native'
        m3u8_url = stream_info.get('m3u8_url')
        if m3u8_url:
            formats.extend(self._extract_m3u8_formats(
                m3u8_url, broadcast_id, 'mp4', entry_protocol, m3u8_id='hls', fatal=False))

        rtsp_url = stream_info.get('rtsp_url')
        if rtsp_url:
            formats.append({
                'url': rtsp_url,
                'format_id': 'rtsp',
            })
        self._sort_formats(formats)

        return {
            'id': broadcast_id,
            'formats': formats,
            'title': self._live_title(stream_info['stream_title']) if is_live else stream_info['stream_title'],
            'thumbnail': stream_info.get('thumbnail_url'),
            'is_live': is_live,
        }

    def _extract_event(self, event_data):
        """Extract an event: its live stream if one exists, otherwise a
        playlist of all of its videos, paged through the feed API."""
        event_id = compat_str(event_data['id'])
        account_id = compat_str(event_data['owner_account_id'])
        feed_root_url = self._API_URL_TEMPLATE % (account_id, event_id) + '/feed.json'

        stream_info = event_data.get('stream_info')
        if stream_info:
            return self._extract_stream_info(stream_info)

        # Page through the feed; each request continues after the last video
        # seen (newer=-1 walks backwards) until an empty page is returned.
        last_video = None
        entries = []
        for i in itertools.count(1):
            if last_video is None:
                info_url = feed_root_url
            else:
                info_url = '{root}?&id={id}&newer=-1&type=video'.format(
                    root=feed_root_url, id=last_video)
            videos_info = self._download_json(
                info_url, event_id, 'Downloading page {0}'.format(i))['data']
            videos_info = [v['data'] for v in videos_info if v['type'] == 'video']
            if not videos_info:
                break
            for v in videos_info:
                entries.append(self.url_result(
                    'http://livestream.com/accounts/%s/events/%s/videos/%s' % (account_id, event_id, v['id']),
                    'Livestream', v['id'], v['caption']))
            last_video = videos_info[-1]['id']
        return self.playlist_result(entries, event_id, event_data['full_name'])

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        # Either the numeric id or the name form may have matched.
        event = mobj.group('event_id') or mobj.group('event_name')
        account = mobj.group('account_id') or mobj.group('account_name')
        api_url = self._API_URL_TEMPLATE % (account, event)
        if video_id:
            video_data = self._download_json(
                api_url + '/videos/%s' % video_id, video_id)
            return self._extract_video_info(video_data)
        else:
            event_data = self._download_json(api_url, video_id)
            return self._extract_event(event_data)
# The original version of Livestream uses a different system
class LivestreamOriginalIE(InfoExtractor):
    """Extractor for the legacy original.livestream.com site.

    Handles single clips (``/video?clipId=...``), folders of clips, and the
    bare channel page (a live stream).  Metadata comes from the old
    channel.livestream.com XML/JSON APIs.
    """
    IE_NAME = 'livestream:original'
    _VALID_URL = r'''(?x)https?://original\.livestream\.com/
        (?P<user>[^/\?#]+)(?:/(?P<type>video|folder)
        (?:(?:\?.*?Id=|/)(?P<id>.*?)(&|$))?)?
        '''
    _TESTS = [{
        'url': 'http://original.livestream.com/dealbook/video?clipId=pla_8aa4a3f1-ba15-46a4-893b-902210e138fb',
        'info_dict': {
            'id': 'pla_8aa4a3f1-ba15-46a4-893b-902210e138fb',
            'ext': 'mp4',
            'title': 'Spark 1 (BitCoin) with Cameron Winklevoss & Tyler Winklevoss of Winklevoss Capital',
            'duration': 771.301,
            'view_count': int,
        },
    }, {
        'url': 'https://original.livestream.com/newplay/folder?dirId=a07bf706-d0e4-4e75-a747-b021d84f2fd3',
        'info_dict': {
            'id': 'a07bf706-d0e4-4e75-a747-b021d84f2fd3',
        },
        'playlist_mincount': 4,
    }, {
        # live stream
        'url': 'http://original.livestream.com/znsbahamas',
        'only_matching': True,
    }]

    def _extract_video_info(self, user, video_id):
        """Fetch clip metadata (title/thumbnail/duration/views) from the
        per-channel XML API."""
        api_url = 'http://x%sx.api.channel.livestream.com/2.0/clipdetails?extendedInfo=true&id=%s' % (user, video_id)
        info = self._download_xml(api_url, video_id)

        item = info.find('channel').find('item')
        title = xpath_text(item, 'title')
        media_ns = {'media': 'http://search.yahoo.com/mrss'}
        thumbnail_url = xpath_attr(
            item, xpath_with_ns('media:thumbnail', media_ns), 'url')
        duration = float_or_none(xpath_attr(
            item, xpath_with_ns('media:content', media_ns), 'duration'))
        ls_ns = {'ls': 'http://api.channel.livestream.com/2.0'}
        view_count = int_or_none(xpath_text(
            item, xpath_with_ns('ls:viewsCount', ls_ns)))

        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail_url,
            'duration': duration,
            'view_count': view_count,
        }

    def _extract_video_formats(self, video_data, video_id, entry_protocol):
        """Collect progressive/HLS/RTSP formats from a getstream.json blob."""
        formats = []

        progressive_url = video_data.get('progressiveUrl')
        if progressive_url:
            formats.append({
                'url': progressive_url,
                'format_id': 'http',
            })

        m3u8_url = video_data.get('httpUrl')
        if m3u8_url:
            formats.extend(self._extract_m3u8_formats(
                m3u8_url, video_id, 'mp4', entry_protocol, m3u8_id='hls', fatal=False))

        rtsp_url = video_data.get('rtspUrl')
        if rtsp_url:
            formats.append({
                'url': rtsp_url,
                'format_id': 'rtsp',
            })

        self._sort_formats(formats)
        return formats

    def _extract_folder(self, url, folder_id):
        """Scrape a folder page and return its clips as a playlist."""
        webpage = self._download_webpage(url, folder_id)
        paths = orderedSet(re.findall(
            r'''(?x)(?:
                <li\s+class="folder">\s*<a\s+href="|
                <a\s+href="(?=https?://livestre\.am/)
            )([^"]+)"''', webpage))

        entries = [{
            '_type': 'url',
            'url': compat_urlparse.urljoin(url, p),
        } for p in paths]

        return self.playlist_result(entries, folder_id)

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        user = mobj.group('user')
        url_type = mobj.group('type')
        content_id = mobj.group('id')
        if url_type == 'folder':
            return self._extract_folder(url, content_id)
        else:
            # this url is used on mobile devices
            stream_url = 'http://x%sx.api.channel.livestream.com/3.0/getstream.json' % user
            info = {}
            if content_id:
                stream_url += '?id=%s' % content_id
                info = self._extract_video_info(user, content_id)
            else:
                # No clip id: this is the channel's live stream; take the
                # metadata from the page itself.
                content_id = user
                webpage = self._download_webpage(url, content_id)
                info = {
                    'title': self._og_search_title(webpage),
                    'description': self._og_search_description(webpage),
                    'thumbnail': self._search_regex(r'channelLogo.src\s*=\s*"([^"]+)"', webpage, 'thumbnail', None),
                }
            video_data = self._download_json(stream_url, content_id)
            is_live = video_data.get('isLive')
            entry_protocol = 'm3u8' if is_live else 'm3u8_native'
            info.update({
                'id': content_id,
                'title': self._live_title(info['title']) if is_live else info['title'],
                'formats': self._extract_video_formats(video_data, content_id, entry_protocol),
                'is_live': is_live,
            })
            return info
# The server doesn't support HEAD request, the generic extractor can't detect
# the redirection
class LivestreamShortenerIE(InfoExtractor):
    """Resolve livestre.am short links.

    The short-link server rejects HEAD requests, so the generic extractor
    cannot follow the redirect; instead we download the page and hand the
    canonical og:url back to the framework as a url result.
    """
    IE_NAME = 'livestream:shortener'
    IE_DESC = False  # Do not list
    _VALID_URL = r'https?://livestre\.am/(?P<id>.+)'

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        # Named 'video_id' rather than 'id' to avoid shadowing the builtin.
        video_id = mobj.group('id')
        webpage = self._download_webpage(url, video_id)

        return {
            '_type': 'url',
            'url': self._og_search_url(webpage),
        }
| unlicense |
Zephor5/scrapy | tests/test_dupefilters.py | 110 | 2315 | import hashlib
import tempfile
import unittest
import shutil
from scrapy.dupefilters import RFPDupeFilter
from scrapy.http import Request
from scrapy.utils.python import to_bytes
class RFPDupeFilterTest(unittest.TestCase):
    """Tests for RFPDupeFilter: in-memory filtering, on-disk persistence,
    and request_fingerprint customization."""

    def test_filter(self):
        # Requests with the same fingerprint (r2/r3) are deduplicated.
        dupefilter = RFPDupeFilter()
        dupefilter.open()

        r1 = Request('http://scrapytest.org/1')
        r2 = Request('http://scrapytest.org/2')
        r3 = Request('http://scrapytest.org/2')

        assert not dupefilter.request_seen(r1)
        assert dupefilter.request_seen(r1)

        assert not dupefilter.request_seen(r2)
        assert dupefilter.request_seen(r3)

        dupefilter.close('finished')

    def test_dupefilter_path(self):
        # When constructed with a path, seen fingerprints survive a
        # close/reopen cycle via the on-disk requests.seen file.
        r1 = Request('http://scrapytest.org/1')
        r2 = Request('http://scrapytest.org/2')

        path = tempfile.mkdtemp()
        try:
            df = RFPDupeFilter(path)
            df.open()
            assert not df.request_seen(r1)
            assert df.request_seen(r1)
            df.close('finished')

            df2 = RFPDupeFilter(path)
            df2.open()
            assert df2.request_seen(r1)
            assert not df2.request_seen(r2)
            assert df2.request_seen(r2)
            df2.close('finished')
        finally:
            shutil.rmtree(path)

    def test_request_fingerprint(self):
        """Test if customization of request_fingerprint method will change
        output of request_seen.
        """
        r1 = Request('http://scrapytest.org/index.html')
        r2 = Request('http://scrapytest.org/INDEX.html')

        dupefilter = RFPDupeFilter()
        dupefilter.open()

        assert not dupefilter.request_seen(r1)
        assert not dupefilter.request_seen(r2)

        dupefilter.close('finished')

        class CaseInsensitiveRFPDupeFilter(RFPDupeFilter):

            def request_fingerprint(self, request):
                # Lowercase the URL so fingerprints ignore case.
                fp = hashlib.sha1()
                fp.update(to_bytes(request.url.lower()))
                return fp.hexdigest()

        case_insensitive_dupefilter = CaseInsensitiveRFPDupeFilter()
        case_insensitive_dupefilter.open()

        assert not case_insensitive_dupefilter.request_seen(r1)
        # Same URL modulo case is now considered a duplicate.
        assert case_insensitive_dupefilter.request_seen(r2)

        case_insensitive_dupefilter.close('finished')
| bsd-3-clause |
idumpling/robotx | robotx/lib/DebugLibrary.py | 4 | 2893 | """
RobotFramework Debug Library.
* Import this library, and type keyword "debug", then run case with pybot,
* such as: Library robotx.lib.DebugLibrary.Debug
then make your program stop on specified line.
* Directly enter into debug shell, and try to do sth.
Author: Xin Gao <fdumpling@gmail.com>
"""
import re
import sys
from cmd import Cmd
from robot.libraries.BuiltIn import BuiltIn
from robot.errors import HandlerExecutionFailed
class Debug:
    """RobotFramework debug library
    """
    def debug(self):
        '''Type this keyword to anywhere you want to stop and debug
        on your Robot Framework case.
        '''
        # Robot Framework captures stdout during a run; temporarily restore
        # the real terminal stream so the interactive shell is usable.
        old_stdout = sys.stdout
        sys.stdout = sys.__stdout__
        print '\n\nEnter into Robot Framework debug shell:'
        # Blocks here until the user leaves the shell ('exit' or Ctrl-D).
        debug_cmd = DebugCmd()
        debug_cmd.cmdloop()
        print '\nExit Robot Framework debug shell.'
        # Hand stdout back to Robot Framework's capture.
        sys.stdout = old_stdout
class DebugCmd(Cmd):
    """Interactive debug shell
    """
    use_rawinput = True
    prompt = '>>> '

    def __init__(self, completekey='tab', stdin=None, stdout=None):
        # Cmd is an old-style class here (Python 2), so call its
        # initializer directly instead of using super().
        Cmd.__init__(self, completekey, stdin, stdout)
        self.rf_bi = BuiltIn()

    def default(self, line):
        """Run RobotFramework keywords
        """
        # Keyword name and arguments are separated by runs of spaces/tabs.
        pattern = re.compile(' +|\t')
        command = line.strip()

        if not command:
            return
        try:
            keyword = pattern.split(command)
            result = self.rf_bi.run_keyword(*keyword)
            if result:
                print repr(result)
        except HandlerExecutionFailed, exc:
            # The keyword ran but failed; show Robot Framework's message.
            print 'keyword: ', command
            print '! ', exc.full_message
        except Exception, exc:
            # Anything else (e.g. unknown keyword): report and keep the
            # shell alive.
            print 'keyword: ', command
            print '! FAILED: ', repr(exc)

    def emptyline(self):
        """By default Cmd runs last command if an empty line is entered.
        Disable it.
        """
        pass

    def postcmd(self, stop, line):
        """run after a command"""
        return stop

    def do_exit(self, arg):
        """Exit
        """
        return True

    def do_web(self, arg):
        """Do web automation debug"""
        # Convenience: load Selenium2Library and open a browser, defaulting
        # to Google when no URL argument is given.
        print 'import library Selenium2Library'
        self.rf_bi.run_keyword('import library', 'Selenium2Library')
        if arg:
            url = arg
        else:
            url = 'http://www.google.com/'
        print 'open browser %s' % url
        self.rf_bi.run_keyword('Open Browser', url)

    def help_help(self):
        """Help of Help command
        """
        print 'Show help message.'

    def help_exit(self):
        """Help of Exit command
        """
        print 'Exit the interpreter.'
        print 'Use exit() or Ctrl-D (i.e. EOF) to exit'

    def help_web(self):
        '''Help of web command'''
        print 'Do some web automation with Selenium2Library'

    # EOF (Ctrl-D) behaves exactly like 'exit'.
    do_EOF = do_exit
    help_EOF = help_exit
| mit |
takeshineshiro/django | tests/update_only_fields/tests.py | 296 | 9780 | from __future__ import unicode_literals
from django.db.models.signals import post_save, pre_save
from django.test import TestCase
from .models import Account, Employee, Person, Profile, ProxyEmployee
class UpdateOnlyFieldsTests(TestCase):
    """Tests for Model.save(update_fields=...): interaction with deferred
    fields, inheritance, proxies, FKs, M2M fields, signals, and query counts."""

    def test_update_fields_basic(self):
        # Only the fields named in update_fields are written to the DB.
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')

        s.gender = 'M'
        s.name = 'Ian'
        s.save(update_fields=['name'])

        s = Person.objects.get(pk=s.pk)
        self.assertEqual(s.gender, 'F')
        self.assertEqual(s.name, 'Ian')

    def test_update_fields_deferred(self):
        # Saving a deferred instance only updates the loaded/assigned fields.
        s = Person.objects.create(name='Sara', gender='F', pid=22)
        self.assertEqual(s.gender, 'F')

        s1 = Person.objects.defer("gender", "pid").get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"

        with self.assertNumQueries(1):
            s1.save()

        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Emily")
        self.assertEqual(s2.gender, "M")

    def test_update_fields_only_1(self):
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')

        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"

        with self.assertNumQueries(1):
            s1.save()

        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Emily")
        self.assertEqual(s2.gender, "M")

    def test_update_fields_only_2(self):
        # An explicit update_fields wins over only(): name/gender changes
        # are discarded, and loading pid costs an extra query.
        s = Person.objects.create(name='Sara', gender='F', pid=22)
        self.assertEqual(s.gender, 'F')

        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.name = "Emily"
        s1.gender = "M"

        with self.assertNumQueries(2):
            s1.save(update_fields=['pid'])

        s2 = Person.objects.get(pk=s1.pk)
        self.assertEqual(s2.name, "Sara")
        self.assertEqual(s2.gender, "F")

    def test_update_fields_only_repeated(self):
        s = Person.objects.create(name='Sara', gender='F')
        self.assertEqual(s.gender, 'F')

        s1 = Person.objects.only('name').get(pk=s.pk)
        s1.gender = 'M'
        with self.assertNumQueries(1):
            s1.save()
        # Test that the deferred class does not remember that gender was
        # set, instead the instance should remember this.
        s1 = Person.objects.only('name').get(pk=s.pk)
        with self.assertNumQueries(1):
            s1.save()

    def test_update_fields_inheritance_defer(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('name').get(pk=e1.pk)
        e1.name = 'Linda'
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).name,
                         'Linda')

    def test_update_fields_fk_defer(self):
        # Assigning either the FK object or its raw *_id saves in one query.
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('profile').get(pk=e1.pk)
        e1.profile = profile_receptionist
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_receptionist)
        e1.profile_id = profile_boss.pk
        with self.assertNumQueries(1):
            e1.save()
        self.assertEqual(Employee.objects.get(pk=e1.pk).profile, profile_boss)

    def test_select_related_only_interaction(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        e1 = Employee.objects.only('profile__salary').select_related('profile').get(pk=e1.pk)
        profile_boss.name = 'Clerk'
        profile_boss.salary = 1000
        profile_boss.save()
        # The loaded salary of 3000 gets saved, the name of 'Clerk' isn't
        # overwritten.
        with self.assertNumQueries(1):
            e1.profile.save()
        reloaded_profile = Profile.objects.get(pk=profile_boss.pk)
        self.assertEqual(reloaded_profile.name, profile_boss.name)
        self.assertEqual(reloaded_profile.salary, 3000)

    def test_update_fields_m2m(self):
        # M2M fields cannot appear in update_fields.
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)
        a1 = Account.objects.create(num=1)
        a2 = Account.objects.create(num=2)
        e1.accounts = [a1, a2]

        with self.assertRaises(ValueError):
            e1.save(update_fields=['accounts'])

    def test_update_fields_inheritance(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)

        e1 = Employee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)

        e1.name = 'Ian'
        e1.gender = 'M'
        e1.save(update_fields=['name'])

        e2 = Employee.objects.get(pk=e1.pk)
        self.assertEqual(e2.name, 'Ian')
        self.assertEqual(e2.gender, 'F')
        self.assertEqual(e2.profile, profile_boss)

        e2.profile = profile_receptionist
        e2.name = 'Sara'
        e2.save(update_fields=['profile'])

        e3 = Employee.objects.get(pk=e1.pk)
        self.assertEqual(e3.name, 'Ian')
        self.assertEqual(e3.profile, profile_receptionist)

        with self.assertNumQueries(1):
            e3.profile = profile_boss
            e3.save(update_fields=['profile_id'])

        e4 = Employee.objects.get(pk=e3.pk)
        self.assertEqual(e4.profile, profile_boss)
        self.assertEqual(e4.profile_id, profile_boss.pk)

    def test_update_fields_inheritance_with_proxy_model(self):
        profile_boss = Profile.objects.create(name='Boss', salary=3000)
        profile_receptionist = Profile.objects.create(name='Receptionist', salary=1000)

        e1 = ProxyEmployee.objects.create(name='Sara', gender='F',
            employee_num=1, profile=profile_boss)

        e1.name = 'Ian'
        e1.gender = 'M'
        e1.save(update_fields=['name'])

        e2 = ProxyEmployee.objects.get(pk=e1.pk)
        self.assertEqual(e2.name, 'Ian')
        self.assertEqual(e2.gender, 'F')
        self.assertEqual(e2.profile, profile_boss)

        e2.profile = profile_receptionist
        e2.name = 'Sara'
        e2.save(update_fields=['profile'])

        e3 = ProxyEmployee.objects.get(pk=e1.pk)
        self.assertEqual(e3.name, 'Ian')
        self.assertEqual(e3.profile, profile_receptionist)

    def test_update_fields_signals(self):
        # update_fields is forwarded to pre_save/post_save receivers.
        p = Person.objects.create(name='Sara', gender='F')
        pre_save_data = []

        def pre_save_receiver(**kwargs):
            pre_save_data.append(kwargs['update_fields'])
        pre_save.connect(pre_save_receiver)
        post_save_data = []

        def post_save_receiver(**kwargs):
            post_save_data.append(kwargs['update_fields'])
        post_save.connect(post_save_receiver)
        p.save(update_fields=['name'])
        self.assertEqual(len(pre_save_data), 1)
        self.assertEqual(len(pre_save_data[0]), 1)
        self.assertIn('name', pre_save_data[0])
        self.assertEqual(len(post_save_data), 1)
        self.assertEqual(len(post_save_data[0]), 1)
        self.assertIn('name', post_save_data[0])

        pre_save.disconnect(pre_save_receiver)
        post_save.disconnect(post_save_receiver)

    def test_update_fields_incorrect_params(self):
        s = Person.objects.create(name='Sara', gender='F')

        with self.assertRaises(ValueError):
            s.save(update_fields=['first_name'])

        with self.assertRaises(ValueError):
            s.save(update_fields="name")

    def test_empty_update_fields(self):
        s = Person.objects.create(name='Sara', gender='F')
        pre_save_data = []

        def pre_save_receiver(**kwargs):
            pre_save_data.append(kwargs['update_fields'])
        pre_save.connect(pre_save_receiver)
        post_save_data = []

        def post_save_receiver(**kwargs):
            post_save_data.append(kwargs['update_fields'])
        post_save.connect(post_save_receiver)
        # Save is skipped.
        with self.assertNumQueries(0):
            s.save(update_fields=[])
        # Signals were skipped, too...
        self.assertEqual(len(pre_save_data), 0)
        self.assertEqual(len(post_save_data), 0)

        pre_save.disconnect(pre_save_receiver)
        post_save.disconnect(post_save_receiver)

    def test_num_queries_inheritance(self):
        # With multi-table inheritance, only the table owning the named
        # field is touched.
        s = Employee.objects.create(name='Sara', gender='F')
        s.employee_num = 1
        s.name = 'Emily'
        with self.assertNumQueries(1):
            s.save(update_fields=['employee_num'])
        s = Employee.objects.get(pk=s.pk)
        self.assertEqual(s.employee_num, 1)
        self.assertEqual(s.name, 'Sara')
        s.employee_num = 2
        s.name = 'Emily'
        with self.assertNumQueries(1):
            s.save(update_fields=['name'])
        s = Employee.objects.get(pk=s.pk)
        self.assertEqual(s.name, 'Emily')
        self.assertEqual(s.employee_num, 1)
        # A little sanity check that we actually did updates...
        self.assertEqual(Employee.objects.count(), 1)
        self.assertEqual(Person.objects.count(), 1)
        with self.assertNumQueries(2):
            s.save(update_fields=['name', 'employee_num'])
| bsd-3-clause |
Detailscool/YHSpider | BillboardAnalysis/bill/spiders/billtoprock.py | 1 | 1908 | from bill.items import BillItem
from scrapy import Spider, Request
class BillSpider(Spider):
    """Scrapes Billboard year-end Hot Rock Songs charts (2009-2016),
    yielding one BillItem per chart entry."""

    name = 'billtoprock_spider'
    # NOTE(review): attribute name looks like a typo ('ulrs' vs 'urls');
    # Scrapy itself only consults 'allowed_domains', so this is inert either
    # way.  Kept as-is for compatibility.
    allowed_ulrs = ['http://www.billboard.com/charts']
    start_urls = ['http://www.billboard.com/charts/year-end/' + str(i) + '/hot-rock-songs' for i in range(2009, 2017)]

    def parse(self, response):
        """Yield a BillItem for every entry on a year-end chart page."""
        year = response.xpath('.//div[@class="ye-chart__year-nav"]/text()').extract()[2].strip('\n')

        # The page markup wraps some entries and not others; process both
        # node types with the same extraction logic (previously duplicated
        # verbatim for each selector).
        for entry_selector in ('.//*[@class="ye-chart__item-wrapper"]',
                               './/*[@class="ye-chart__item"]'):
            for entry in response.xpath(entry_selector):
                yield self._build_item(entry, year)

    def _build_item(self, entry, year):
        """Build a BillItem from a single chart-entry node."""
        name = entry.xpath('.//h1[@class="ye-chart__item-title"]/text()').extract_first()
        ranking = entry.xpath('.//div[@class="ye-chart__item-rank"]/text()').extract_first()

        # The artist lives in a subtitle link when present, otherwise in the
        # <h2> following the title.
        artist = entry.xpath('.//*[@class="ye-chart__item-subtitle-link"]/text()').extract_first()
        if artist is None:
            artist = entry.xpath('.//h1[@class="ye-chart__item-title"]/following-sibling::h2/text()').extract_first()

        item = BillItem()
        item['ranking'] = ranking
        item['name'] = name.strip('\n')
        item['artists'] = artist.strip('\n')
        item['year'] = year
        return item
| mit |
vito16/express2 | node_modules/cordova/node_modules/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/msvs.py | 60 | 117280 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import copy
import ntpath
import os
import posixpath
import re
import subprocess
import sys
import gyp.common
import gyp.easy_xml as easy_xml
import gyp.MSVSNew as MSVSNew
import gyp.MSVSProject as MSVSProject
import gyp.MSVSSettings as MSVSSettings
import gyp.MSVSToolFile as MSVSToolFile
import gyp.MSVSUserFile as MSVSUserFile
import gyp.MSVSUtil as MSVSUtil
import gyp.MSVSVersion as MSVSVersion
from gyp.common import GypError
# Regular expression for validating Visual Studio GUIDs. If the GUID
# contains lowercase hex letters, MSVS will be fine. However,
# IncrediBuild BuildConsole will parse the solution file, but then
# silently skip building the target causing hard to track down errors.
# Note that this only happens with the BuildConsole, and does not occur
# if IncrediBuild is executed from inside Visual Studio. This regex
# validates that the string looks like a GUID with all uppercase hex
# letters.
VALID_MSVS_GUID_CHARS = re.compile('^[A-F0-9\-]+$')


# Mapping from gyp's generic build variables to their MSVS macro
# equivalents, consumed by the shared generator machinery.
generator_default_variables = {
    'EXECUTABLE_PREFIX': '',
    'EXECUTABLE_SUFFIX': '.exe',
    'STATIC_LIB_PREFIX': '',
    'SHARED_LIB_PREFIX': '',
    'STATIC_LIB_SUFFIX': '.lib',
    'SHARED_LIB_SUFFIX': '.dll',
    'INTERMEDIATE_DIR': '$(IntDir)',
    'SHARED_INTERMEDIATE_DIR': '$(OutDir)obj/global_intermediate',
    'OS': 'win',
    'PRODUCT_DIR': '$(OutDir)',
    'LIB_DIR': '$(OutDir)lib',
    'RULE_INPUT_ROOT': '$(InputName)',
    'RULE_INPUT_DIRNAME': '$(InputDir)',
    'RULE_INPUT_EXT': '$(InputExt)',
    'RULE_INPUT_NAME': '$(InputFileName)',
    'RULE_INPUT_PATH': '$(InputPath)',
    'CONFIGURATION_NAME': '$(ConfigurationName)',
}


# The msvs specific sections that hold paths
generator_additional_path_sections = [
    'msvs_cygwin_dirs',
    'msvs_props',
]


generator_additional_non_configuration_keys = [
    'msvs_cygwin_dirs',
    'msvs_cygwin_shell',
    'msvs_large_pdb',
    'msvs_shard',
]


# List of precompiled header related keys.
precomp_keys = [
    'msvs_precompiled_header',
    'msvs_precompiled_source',
]


# Cache for _GetDomainAndUserName(); populated lazily on first use.
cached_username = None


cached_domain = None


# TODO(gspencer): Switch the os.environ calls to be
# win32api.GetDomainName() and win32api.GetUserName() once the
# python version in depot_tools has been updated to work on Vista
# 64-bit.
def _GetDomainAndUserName():
if sys.platform not in ('win32', 'cygwin'):
return ('DOMAIN', 'USERNAME')
global cached_username
global cached_domain
if not cached_domain or not cached_username:
domain = os.environ.get('USERDOMAIN')
username = os.environ.get('USERNAME')
if not domain or not username:
call = subprocess.Popen(['net', 'config', 'Workstation'],
stdout=subprocess.PIPE)
config = call.communicate()[0]
username_re = re.compile('^User name\s+(\S+)', re.MULTILINE)
username_match = username_re.search(config)
if username_match:
username = username_match.group(1)
domain_re = re.compile('^Logon domain\s+(\S+)', re.MULTILINE)
domain_match = domain_re.search(config)
if domain_match:
domain = domain_match.group(1)
cached_domain = domain
cached_username = username
return (cached_domain, cached_username)
# Directory prefix prepended to relative paths by _FixPath when set
# (presumably assigned by the generator while emitting a project —
# the assignment site is outside this chunk; verify against callers).
fixpath_prefix = None
def _NormalizedSource(source):
"""Normalize the path.
But not if that gets rid of a variable, as this may expand to something
larger than one directory.
Arguments:
source: The path to be normalize.d
Returns:
The normalized path.
"""
normalized = os.path.normpath(source)
if source.count('$') == normalized.count('$'):
source = normalized
return source
def _FixPath(path):
  """Convert a path to the form expected inside a vcproj file.

  Relative paths get the module-level fixpath_prefix prepended (when set),
  forward slashes become backslashes, the result is normalized, and a
  trailing backslash is dropped.

  Arguments:
    path: The path to convert, may contain / etc.
  Returns:
    The path with all slashes made into backslashes.
  """
  # Absolute paths and $(...)-rooted macro paths are left unprefixed.
  if fixpath_prefix and path and not os.path.isabs(path) and not path[0] == '$':
    path = os.path.join(fixpath_prefix, path)
  path = _NormalizedSource(path.replace('/', '\\'))
  if path.endswith('\\'):
    path = path[:-1]
  return path
def _FixPaths(paths):
  """Apply _FixPath to every entry of the list."""
  return list(map(_FixPath, paths))
def _ConvertSourcesToFilterHierarchy(sources, prefix=None, excluded=None,
                                     list_excluded=True):
  """Converts a list split source file paths into a vcproj folder hierarchy.

  Arguments:
    sources: A list of source file paths split.
    prefix: A list of source file path layers meant to apply to each of sources.
    excluded: A set of excluded files; defaults to the empty set.
    list_excluded: When true, excluded files are collected into a synthetic
        '_excluded_files' folder instead of being dropped.
  Returns:
    A hierarchy of filenames and MSVSProject.Filter objects that matches the
    layout of the source tree.
  For example:
    _ConvertSourcesToFilterHierarchy([['a', 'bob1.c'], ['b', 'bob2.c']],
                                     prefix=['joe'])
    -->
    [MSVSProject.Filter('a', contents=['joe\\a\\bob1.c']),
     MSVSProject.Filter('b', contents=['joe\\b\\bob2.c'])]
  """
  if not prefix: prefix = []
  if excluded is None:
    # The membership test below needs a container; the documented default of
    # None would otherwise raise TypeError.
    excluded = set()
  result = []
  excluded_result = []
  folders = dict()
  # Gather files into the final result, excluded, or folders.
  for s in sources:
    if len(s) == 1:
      filename = _NormalizedSource('\\'.join(prefix + s))
      if filename in excluded:
        excluded_result.append(filename)
      else:
        result.append(filename)
    else:
      if not folders.get(s[0]):
        folders[s[0]] = []
      folders[s[0]].append(s[1:])
  # Add a folder for excluded files.
  if excluded_result and list_excluded:
    excluded_folder = MSVSProject.Filter('_excluded_files',
                                         contents=excluded_result)
    result.append(excluded_folder)
  # Populate all the folders.
  for f in folders:
    contents = _ConvertSourcesToFilterHierarchy(folders[f], prefix=prefix + [f],
                                                excluded=excluded,
                                                list_excluded=list_excluded)
    contents = MSVSProject.Filter(f, contents=contents)
    result.append(contents)
  return result
def _ToolAppend(tools, tool_name, setting, value, only_if_unset=False):
  """Append value to a tool setting; a falsy value is a no-op."""
  if value:
    _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset)
def _ToolSetOrAppend(tools, tool_name, setting, value, only_if_unset=False):
# TODO(bradnelson): ugly hack, fix this more generally!!!
if 'Directories' in setting or 'Dependencies' in setting:
if type(value) == str:
value = value.replace('/', '\\')
else:
value = [i.replace('/', '\\') for i in value]
if not tools.get(tool_name):
tools[tool_name] = dict()
tool = tools[tool_name]
if tool.get(setting):
if only_if_unset: return
if type(tool[setting]) == list:
tool[setting] += value
else:
raise TypeError(
'Appending "%s" to a non-list setting "%s" for tool "%s" is '
'not allowed, previous value: %s' % (
value, setting, tool_name, str(tool[setting])))
else:
tool[setting] = value
def _ConfigPlatform(config_data):
return config_data.get('msvs_configuration_platform', 'Win32')
def _ConfigBaseName(config_name, platform_name):
if config_name.endswith('_' + platform_name):
return config_name[0:-len(platform_name) - 1]
else:
return config_name
def _ConfigFullName(config_name, config_data):
  """Return the 'BaseName|Platform' form of a configuration name."""
  platform = _ConfigPlatform(config_data)
  base = _ConfigBaseName(config_name, platform)
  return '%s|%s' % (base, platform)
def _BuildCommandLineForRuleRaw(spec, cmd, cygwin_shell, has_input_path,
                                quote_cmd, do_setup_env):
  """Render a rule's command list into a single MSVS shell command string.

  Arguments:
    spec: the target dict (read for msvs_cygwin_dirs).
    cmd: the command as a list of arguments.
    cygwin_shell: when truthy, wrap the command so it runs under cygwin bash.
    has_input_path: whether $(InputPath) is meaningful for this command.
    quote_cmd: when truthy (cmd.exe path only), quote each argument.
    do_setup_env: when truthy (cygwin path only), call setup_env.bat first.
  Returns:
    A command string suitable for a vcproj custom build step.
  """
  # If the command references $(InputDir), emit a preamble that strips the
  # project-dir prefix and the trailing slash from it into %INPUTDIR%.
  if [x for x in cmd if '$(InputDir)' in x]:
    input_dir_preamble = (
      'set INPUTDIR=$(InputDir)\n'
      'set INPUTDIR=%INPUTDIR:$(ProjectDir)=%\n'
      'set INPUTDIR=%INPUTDIR:~0,-1%\n'
      )
  else:
    input_dir_preamble = ''

  if cygwin_shell:
    # Find path to cygwin.
    cygwin_dir = _FixPath(spec.get('msvs_cygwin_dirs', ['.'])[0])
    # Prepare command.
    # MSVS macros are rewritten to cygpath invocations so the bash side sees
    # POSIX-style paths derived from the environment set up below.
    direct_cmd = cmd
    direct_cmd = [i.replace('$(IntDir)',
                            '`cygpath -m "${INTDIR}"`') for i in direct_cmd]
    direct_cmd = [i.replace('$(OutDir)',
                            '`cygpath -m "${OUTDIR}"`') for i in direct_cmd]
    direct_cmd = [i.replace('$(InputDir)',
                            '`cygpath -m "${INPUTDIR}"`') for i in direct_cmd]
    if has_input_path:
      direct_cmd = [i.replace('$(InputPath)',
                              '`cygpath -m "${INPUTPATH}"`')
                    for i in direct_cmd]
    # Each argument is wrapped in escaped quotes so it survives both the
    # cmd.exe and the bash -c parsing passes.
    direct_cmd = ['\\"%s\\"' % i.replace('"', '\\\\\\"') for i in direct_cmd]
    # direct_cmd = gyp.common.EncodePOSIXShellList(direct_cmd)
    direct_cmd = ' '.join(direct_cmd)
    # TODO(quote): regularize quoting path names throughout the module
    cmd = ''
    if do_setup_env:
      cmd += 'call "$(ProjectDir)%(cygwin_dir)s\\setup_env.bat" && '
    cmd += 'set CYGWIN=nontsec&& '
    # Only export the environment variables the command actually references.
    if direct_cmd.find('NUMBER_OF_PROCESSORS') >= 0:
      cmd += 'set /a NUMBER_OF_PROCESSORS_PLUS_1=%%NUMBER_OF_PROCESSORS%%+1&& '
    if direct_cmd.find('INTDIR') >= 0:
      cmd += 'set INTDIR=$(IntDir)&& '
    if direct_cmd.find('OUTDIR') >= 0:
      cmd += 'set OUTDIR=$(OutDir)&& '
    if has_input_path and direct_cmd.find('INPUTPATH') >= 0:
      cmd += 'set INPUTPATH=$(InputPath) && '
    cmd += 'bash -c "%(cmd)s"'
    cmd = cmd % {'cygwin_dir': cygwin_dir,
                 'cmd': direct_cmd}
    return input_dir_preamble + cmd
  else:
    # Convert cat --> type to mimic unix.
    if cmd[0] == 'cat':
      command = ['type']
    else:
      command = [cmd[0].replace('/', '\\')]
    # Add call before command to ensure that commands can be tied together one
    # after the other without aborting in Incredibuild, since IB makes a bat
    # file out of the raw command string, and some commands (like python) are
    # actually batch files themselves.
    command.insert(0, 'call')
    # Fix the paths
    # TODO(quote): This is a really ugly heuristic, and will miss path fixing
    # for arguments like "--arg=path" or "/opt:path".
    # If the argument starts with a slash or dash, it's probably a command line
    # switch
    arguments = [i if (i[:1] in "/-") else _FixPath(i) for i in cmd[1:]]
    arguments = [i.replace('$(InputDir)', '%INPUTDIR%') for i in arguments]
    arguments = [MSVSSettings.FixVCMacroSlashes(i) for i in arguments]
    if quote_cmd:
      # Support a mode for using cmd directly.
      # Convert any paths to native form (first element is used directly).
      # TODO(quote): regularize quoting path names throughout the module
      arguments = ['"%s"' % i for i in arguments]
    # Collapse into a single command.
    return input_dir_preamble + ' '.join(command + arguments)
def _BuildCommandLineForRule(spec, rule, has_input_path, do_setup_env):
  """Build the command line for a rule, honoring cygwin/quoting settings.

  Currently this weird argument munging is used to duplicate the way a
  python script would need to be run as part of the chrome tree.
  Eventually we should add some sort of rule_default option to set this
  per project. For now the behavior chrome needs is the default.
  """
  # The rule's own msvs_cygwin_shell wins; otherwise fall back to the
  # spec-level setting (default on).
  cygwin_shell = rule.get('msvs_cygwin_shell')
  if cygwin_shell is None:
    cygwin_shell = int(spec.get('msvs_cygwin_shell', 1))
  elif isinstance(cygwin_shell, str):
    cygwin_shell = int(cygwin_shell)
  return _BuildCommandLineForRuleRaw(
      spec, rule['action'], cygwin_shell, has_input_path,
      int(rule.get('msvs_quote_cmd', 1)), do_setup_env=do_setup_env)
def _AddActionStep(actions_dict, inputs, outputs, description, command):
"""Merge action into an existing list of actions.
Care must be taken so that actions which have overlapping inputs either don't
get assigned to the same input, or get collapsed into one.
Arguments:
actions_dict: dictionary keyed on input name, which maps to a list of
dicts describing the actions attached to that input file.
inputs: list of inputs
outputs: list of outputs
description: description of the action
command: command line to execute
"""
# Require there to be at least one input (call sites will ensure this).
assert inputs
action = {
'inputs': inputs,
'outputs': outputs,
'description': description,
'command': command,
}
# Pick where to stick this action.
# While less than optimal in terms of build time, attach them to the first
# input for now.
chosen_input = inputs[0]
# Add it there.
if chosen_input not in actions_dict:
actions_dict[chosen_input] = []
actions_dict[chosen_input].append(action)
def _AddCustomBuildToolForMSVS(p, spec, primary_input,
                               inputs, outputs, description, cmd):
  """Add a custom build tool to execute something.

  Arguments:
    p: the target project
    spec: the target project dict
    primary_input: input file to attach the build tool to
    inputs: list of inputs
    outputs: list of outputs
    description: description of the action
    cmd: command line to execute
  """
  tool = MSVSProject.Tool(
      'VCCustomBuildTool',
      {'Description': description,
       'AdditionalDependencies': ';'.join(_FixPaths(inputs)),
       'Outputs': ';'.join(_FixPaths(outputs)),
       'CommandLine': cmd,
      })
  # Attach the tool to the primary input for every configuration.
  for config_name, c_data in spec['configurations'].iteritems():
    p.AddFileConfig(_FixPath(primary_input),
                    _ConfigFullName(config_name, c_data), tools=[tool])
def _AddAccumulatedActionsToMSVS(p, spec, actions_dict):
  """Add actions accumulated into an actions_dict, merging as needed.

  Arguments:
    p: the target project
    spec: the target project dict
    actions_dict: dictionary keyed on input name, which maps to a list of
      dicts describing the actions attached to that input file.
  """
  for primary_input in actions_dict:
    merged_inputs = set()
    merged_outputs = set()
    descriptions = []
    commands = []
    # Fold every action anchored on this input into one custom build step.
    for action in actions_dict[primary_input]:
      merged_inputs.update(action['inputs'])
      merged_outputs.update(action['outputs'])
      descriptions.append(action['description'])
      commands.append(action['command'])
    _AddCustomBuildToolForMSVS(p, spec,
                               primary_input=primary_input,
                               inputs=merged_inputs,
                               outputs=merged_outputs,
                               description=', and also '.join(descriptions),
                               cmd='\r\n'.join(commands))
def _RuleExpandPath(path, input_file):
"""Given the input file to which a rule applied, string substitute a path.
Arguments:
path: a path to string expand
input_file: the file to which the rule applied.
Returns:
The string substituted path.
"""
path = path.replace('$(InputName)',
os.path.splitext(os.path.split(input_file)[1])[0])
path = path.replace('$(InputDir)', os.path.dirname(input_file))
path = path.replace('$(InputExt)',
os.path.splitext(os.path.split(input_file)[1])[1])
path = path.replace('$(InputFileName)', os.path.split(input_file)[1])
path = path.replace('$(InputPath)', input_file)
return path
def _FindRuleTriggerFiles(rule, sources):
"""Find the list of files which a particular rule applies to.
Arguments:
rule: the rule in question
sources: the set of all known source files for this project
Returns:
The list of sources that trigger a particular rule.
"""
rule_ext = rule['extension']
return [s for s in sources if s.endswith('.' + rule_ext)]
def _RuleInputsAndOutputs(rule, trigger_file):
  """Find the inputs and outputs generated by a rule.

  Arguments:
    rule: the rule in question.
    trigger_file: the main trigger for this rule.
  Returns:
    The pair of (inputs, outputs) involved in this rule.
  """
  # The trigger file is always an input; the rest come from expanding the
  # rule's declared inputs/outputs against the trigger.
  inputs = set([trigger_file])
  for raw in _FixPaths(rule.get('inputs', [])):
    inputs.add(_RuleExpandPath(raw, trigger_file))
  outputs = set()
  for raw in _FixPaths(rule.get('outputs', [])):
    outputs.add(_RuleExpandPath(raw, trigger_file))
  return (inputs, outputs)
def _GenerateNativeRulesForMSVS(p, rules, output_dir, spec, options):
  """Generate a native rules file.

  Arguments:
    p: the target project
    rules: the set of rules to include
    output_dir: the directory in which the project/gyp resides
    spec: the project dict
    options: global generator options
  """
  rules_filename = '%s%s.rules' % (spec['target_name'], options.suffix)
  rules_file = MSVSToolFile.Writer(os.path.join(output_dir, rules_filename),
                                   spec['target_name'])
  for r in rules:
    rule_name = r['rule_name']
    # Skip a rule with no action and no inputs.
    if 'action' not in r and not r.get('rule_sources', []):
      continue
    cmd = _BuildCommandLineForRule(spec, r, has_input_path=True,
                                   do_setup_env=True)
    rules_file.AddCustomBuildRule(
        name=rule_name,
        description=r.get('message', rule_name),
        extensions=[r['extension']],
        additional_dependencies=_FixPaths(r.get('inputs', [])),
        outputs=_FixPaths(r.get('outputs', [])),
        cmd=cmd)
  # Write out rules file.
  rules_file.WriteIfChanged()
  # Add rules file to project.
  p.AddToolFile(rules_filename)
def _Cygwinify(path):
path = path.replace('$(OutDir)', '$(OutDirCygwin)')
path = path.replace('$(IntDir)', '$(IntDirCygwin)')
return path
def _GenerateExternalRules(rules, output_dir, spec,
                           sources, options, actions_to_add):
  """Generate an external makefile to do a set of rules.

  Writes a helper makefile next to the project and registers one action that
  runs it via make, instead of emitting native MSVS rules.

  Arguments:
    rules: the list of rules to include
    output_dir: path containing project and gyp files
    spec: project specification data
    sources: set of sources known
    options: global generator options
    actions_to_add: The list of actions we will add to.
  """
  # The helper makefile is named <target>_rules<suffix>.mk.
  filename = '%s_rules%s.mk' % (spec['target_name'], options.suffix)
  mk_file = gyp.common.WriteOnDiff(os.path.join(output_dir, filename))
  # Find cygwin style versions of some paths.
  mk_file.write('OutDirCygwin:=$(shell cygpath -u "$(OutDir)")\n')
  mk_file.write('IntDirCygwin:=$(shell cygpath -u "$(IntDir)")\n')
  # Gather stuff needed to emit all: target.
  all_inputs = set()
  all_outputs = set()
  all_output_dirs = set()
  first_outputs = []
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      all_inputs.update(set(inputs))
      all_outputs.update(set(outputs))
      # Only use one target from each rule as the dependency for
      # 'all' so we don't try to build each rule multiple times.
      first_outputs.append(list(outputs)[0])
      # Get the unique output directories for this rule.
      output_dirs = [os.path.split(i)[0] for i in outputs]
      for od in output_dirs:
        all_output_dirs.add(od)
  first_outputs_cyg = [_Cygwinify(i) for i in first_outputs]
  # Write out all: target, including mkdir for each output directory.
  mk_file.write('all: %s\n' % ' '.join(first_outputs_cyg))
  for od in all_output_dirs:
    if od:
      mk_file.write('\tmkdir -p `cygpath -u "%s"`\n' % od)
  mk_file.write('\n')
  # Define how each output is generated.
  for rule in rules:
    trigger_files = _FindRuleTriggerFiles(rule, sources)
    for tf in trigger_files:
      # Get all the inputs and outputs for this rule for this trigger file.
      inputs, outputs = _RuleInputsAndOutputs(rule, tf)
      inputs = [_Cygwinify(i) for i in inputs]
      outputs = [_Cygwinify(i) for i in outputs]
      # Prepare the command line for this rule.
      cmd = [_RuleExpandPath(c, tf) for c in rule['action']]
      cmd = ['"%s"' % i for i in cmd]
      cmd = ' '.join(cmd)
      # Add it to the makefile.
      mk_file.write('%s: %s\n' % (' '.join(outputs), ' '.join(inputs)))
      mk_file.write('\t%s\n\n' % cmd)
  # Close up the file.
  mk_file.close()
  # Add makefile to list of sources.
  sources.add(filename)
  # Add a build action to call makefile.
  cmd = ['make',
         'OutDir=$(OutDir)',
         'IntDir=$(IntDir)',
         '-j', '${NUMBER_OF_PROCESSORS_PLUS_1}',
         '-f', filename]
  # Positional args appear to be: cygwin_shell=True, has_input_path=False,
  # quote_cmd=True, do_setup_env=True (matches _BuildCommandLineForRule's
  # call) — confirm against _BuildCommandLineForRuleRaw's signature.
  cmd = _BuildCommandLineForRuleRaw(spec, cmd, True, False, True, True)
  # Insert makefile as 0'th input, so it gets the action attached there,
  # as this is easier to understand from in the IDE.
  all_inputs = list(all_inputs)
  all_inputs.insert(0, filename)
  _AddActionStep(actions_to_add,
                 inputs=_FixPaths(all_inputs),
                 outputs=_FixPaths(all_outputs),
                 description='Running external rules for %s' %
                             spec['target_name'],
                 command=cmd)
def _EscapeEnvironmentVariableExpansion(s):
"""Escapes % characters.
Escapes any % characters so that Windows-style environment variable
expansions will leave them alone.
See http://connect.microsoft.com/VisualStudio/feedback/details/106127/cl-d-name-text-containing-percentage-characters-doesnt-compile
to understand why we have to do this.
Args:
s: The string to be escaped.
Returns:
The escaped string.
"""
s = s.replace('%', '%%')
return s
quote_replacer_regex = re.compile(r'(\\*)"')
def _EscapeCommandLineArgumentForMSVS(s):
"""Escapes a Windows command-line argument.
So that the Win32 CommandLineToArgv function will turn the escaped result back
into the original string.
See http://msdn.microsoft.com/en-us/library/17w5ykft.aspx
("Parsing C++ Command-Line Arguments") to understand why we have to do
this.
Args:
s: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a literal quote, CommandLineToArgv requires an odd number of
# backslashes preceding it, and it produces half as many literal backslashes
# (rounded down). So we need to produce 2n+1 backslashes.
return 2 * match.group(1) + '\\"'
# Escape all quotes so that they are interpreted literally.
s = quote_replacer_regex.sub(_Replace, s)
# Now add unescaped quotes so that any whitespace is interpreted literally.
s = '"' + s + '"'
return s
delimiters_replacer_regex = re.compile(r'(\\*)([,;]+)')
def _EscapeVCProjCommandLineArgListItem(s):
"""Escapes command line arguments for MSVS.
The VCProj format stores string lists in a single string using commas and
semi-colons as separators, which must be quoted if they are to be
interpreted literally. However, command-line arguments may already have
quotes, and the VCProj parser is ignorant of the backslash escaping
convention used by CommandLineToArgv, so the command-line quotes and the
VCProj quotes may not be the same quotes. So to store a general
command-line argument in a VCProj list, we need to parse the existing
quoting according to VCProj's convention and quote any delimiters that are
not already quoted by that convention. The quotes that we add will also be
seen by CommandLineToArgv, so if backslashes precede them then we also have
to escape those backslashes according to the CommandLineToArgv
convention.
Args:
s: the string to be escaped.
Returns:
the escaped string.
"""
def _Replace(match):
# For a non-literal quote, CommandLineToArgv requires an even number of
# backslashes preceding it, and it produces half as many literal
# backslashes. So we need to produce 2n backslashes.
return 2 * match.group(1) + '"' + match.group(2) + '"'
segments = s.split('"')
# The unquoted segments are at the even-numbered indices.
for i in range(0, len(segments), 2):
segments[i] = delimiters_replacer_regex.sub(_Replace, segments[i])
# Concatenate back into a single string
s = '"'.join(segments)
if len(segments) % 2 == 0:
# String ends while still quoted according to VCProj's convention. This
# means the delimiter and the next list item that follow this one in the
# .vcproj file will be misinterpreted as part of this item. There is nothing
# we can do about this. Adding an extra quote would correct the problem in
# the VCProj but cause the same problem on the final command-line. Moving
# the item to the end of the list does works, but that's only possible if
# there's only one such item. Let's just warn the user.
print >> sys.stderr, ('Warning: MSVS may misinterpret the odd number of ' +
'quotes in ' + s)
return s
def _EscapeCppDefineForMSVS(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  # Apply the escaping layers in order: env-var expansion, command-line
  # quoting, then VCProj list-item quoting.
  for escape in (_EscapeEnvironmentVariableExpansion,
                 _EscapeCommandLineArgumentForMSVS,
                 _EscapeVCProjCommandLineArgListItem):
    s = escape(s)
  # cl.exe replaces literal # characters with = in preprocessor definitions
  # for some reason. Octal-encode to work around that.
  return s.replace('#', '\\%03o' % ord('#'))
quote_replacer_regex2 = re.compile(r'(\\+)"')
def _EscapeCommandLineArgumentForMSBuild(s):
"""Escapes a Windows command-line argument for use by MSBuild."""
def _Replace(match):
return (len(match.group(1)) / 2 * 4) * '\\' + '\\"'
# Escape all quotes so that they are interpreted literally.
s = quote_replacer_regex2.sub(_Replace, s)
return s
def _EscapeMSBuildSpecialCharacters(s):
escape_dictionary = {
'%': '%25',
'$': '%24',
'@': '%40',
"'": '%27',
';': '%3B',
'?': '%3F',
'*': '%2A'
}
result = ''.join([escape_dictionary.get(c, c) for c in s])
return result
def _EscapeCppDefineForMSBuild(s):
  """Escapes a CPP define so that it will reach the compiler unaltered."""
  # Apply the escaping layers in order: env-var expansion, command-line
  # quoting, then MSBuild special-character encoding.
  for escape in (_EscapeEnvironmentVariableExpansion,
                 _EscapeCommandLineArgumentForMSBuild,
                 _EscapeMSBuildSpecialCharacters):
    s = escape(s)
  # cl.exe replaces literal # characters with = in preprocessor definitions
  # for some reason. Octal-encode to work around that.
  return s.replace('#', '\\%03o' % ord('#'))
def _GenerateRulesForMSVS(p, output_dir, options, spec,
                          sources, excluded_sources,
                          actions_to_add):
  """Generate all the rules for a particular project.

  Arguments:
    p: the project
    output_dir: directory to emit rules to
    options: global options passed to the generator
    spec: the specification for this project
    sources: the set of all known source files in this project
    excluded_sources: the set of sources excluded from normal processing
    actions_to_add: deferred list of actions to add in
  """
  rules = spec.get('rules', [])
  # Split into native rules (emitted as a .rules file) and external rules
  # (run via a generated makefile).
  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]
  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
  if rules_native:
    _GenerateNativeRulesForMSVS(p, rules_native, output_dir, spec, options)
  if rules_external:
    _GenerateExternalRules(rules_external, output_dir, spec,
                           sources, options, actions_to_add)
  _AdjustSourcesForRules(rules, sources, excluded_sources)
def _AdjustSourcesForRules(rules, sources, excluded_sources):
  """Fold rule outputs back into the source lists where requested."""
  for rule in rules:
    # Only rules that ask for their outputs to be treated as sources need
    # any processing here.
    if not int(rule.get('process_outputs_as_sources', False)):
      continue
    for trigger_file in _FindRuleTriggerFiles(rule, sources):
      inputs, outputs = _RuleInputsAndOutputs(rule, trigger_file)
      inputs = set(_FixPaths(inputs))
      outputs = set(_FixPaths(outputs))
      # The trigger itself is already a source; the remaining inputs are
      # tracked but excluded from the normal build.
      inputs.remove(_FixPath(trigger_file))
      sources.update(inputs)
      excluded_sources.update(inputs)
      # Outputs become regular sources.
      sources.update(outputs)
def _FilterActionsFromExcluded(excluded_sources, actions_to_add):
  """Take inputs with actions attached out of the list of exclusions.

  Arguments:
    excluded_sources: list of source files not to be built.
    actions_to_add: dict of actions keyed on source file they're attached to.
  Returns:
    excluded_sources with files that have actions attached removed.
  """
  # Files anchoring an action must stay in the build or the action is lost.
  must_keep = set(_FixPaths(actions_to_add.keys()))
  return [source for source in excluded_sources if source not in must_keep]
def _GetDefaultConfiguration(spec):
return spec['configurations'][spec['default_configuration']]
def _GetGuidOfProject(proj_path, spec):
  """Get the guid for the project.

  Arguments:
    proj_path: Path of the vcproj or vcxproj file to generate.
    spec: The target dictionary containing the properties of the target.
  Returns:
    the guid.
  Raises:
    ValueError: if the specified GUID is invalid.
  """
  # Pluck out the default configuration.
  default_config = _GetDefaultConfiguration(spec)
  # Decide the guid of the project.
  guid = default_config.get('msvs_guid')
  if guid:
    # Validate and normalize an explicitly specified guid.
    if VALID_MSVS_GUID_CHARS.match(guid) is None:
      raise ValueError('Invalid MSVS guid: "%s". Must match regex: "%s".' %
                       (guid, VALID_MSVS_GUID_CHARS.pattern))
    return '{%s}' % guid
  # Otherwise derive a deterministic guid from the project path.
  return MSVSNew.MakeGuid(proj_path)
def _GetMsbuildToolsetOfProject(proj_path, spec, version):
  """Get the platform toolset for the project.

  Arguments:
    proj_path: Path of the vcproj or vcxproj file to generate.
    spec: The target dictionary containing the properties of the target.
    version: The MSVSVersion object.
  Returns:
    the platform toolset string or None.
  """
  # An explicit msbuild_toolset in the default configuration wins; fall
  # back to the version's default toolset when one exists.
  toolset = _GetDefaultConfiguration(spec).get('msbuild_toolset')
  if not toolset:
    toolset = version.DefaultToolset() or toolset
  return toolset
def _GenerateProject(project, options, version, generator_flags):
  """Generates a vcproj file.

  Arguments:
    project: the MSVSProject object.
    options: global generator options.
    version: the MSVSVersion object.
    generator_flags: dict of generator-specific flags.
  Returns:
    A list of source files that cannot be found on disk.
  """
  # Skip emitting anything if told to with msvs_existing_vcproj option.
  if _GetDefaultConfiguration(project.spec).get('msvs_existing_vcproj'):
    return []
  # Dispatch on project format: .vcxproj (MSBuild) vs classic .vcproj.
  if version.UsesVcxproj():
    return _GenerateMSBuildProject(project, options, version, generator_flags)
  return _GenerateMSVSProject(project, options, version, generator_flags)
def _GenerateMSVSProject(project, options, version, generator_flags):
  """Generates a .vcproj file. It may create .rules and .user files too.

  Arguments:
    project: The project object we will generate the file for.
    options: Global options passed to the generator.
    version: The VisualStudioVersion object.
    generator_flags: dict of generator-specific flags.
  Returns:
    A list of source files that cannot be found on disk
    (from _VerifySourcesExist).
  """
  spec = project.spec
  # Make sure the directory for the project file exists before writing.
  vcproj_dir = os.path.dirname(project.path)
  if vcproj_dir and not os.path.exists(vcproj_dir):
    os.makedirs(vcproj_dir)
  platforms = _GetUniquePlatforms(spec)
  p = MSVSProject.Writer(project.path, version, spec['target_name'],
                         project.guid, platforms)
  # Get directory project file is in.
  project_dir = os.path.split(project.path)[0]
  gyp_path = _NormalizedSource(project.build_file)
  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)
  config_type = _GetMSVSConfigurationType(spec, project.build_file)
  # Emit one configuration section per gyp configuration.
  for config_name, config in spec['configurations'].iteritems():
    _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config)
  # Prepare list of sources and excluded sources.
  gyp_file = os.path.split(project.build_file)[1]
  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
                                                    gyp_file)
  # Add rules.
  actions_to_add = {}
  _GenerateRulesForMSVS(p, project_dir, options, spec,
                        sources, excluded_sources,
                        actions_to_add)
  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
  sources, excluded_sources, excluded_idl = (
      _AdjustSourcesAndConvertToFilterHierarchy(
          spec, options, project_dir, sources, excluded_sources, list_excluded))
  # Add in files.
  missing_sources = _VerifySourcesExist(sources, project_dir)
  p.AddFiles(sources)
  _AddToolFilesToMSVS(p, spec)
  _HandlePreCompiledHeaders(p, sources, spec)
  # Accumulate custom-build actions (gyp actions, rules, copies) before the
  # exclusion filtering below.
  _AddActions(actions_to_add, spec, relative_path_of_gyp_file)
  _AddCopies(actions_to_add, spec)
  _WriteMSVSUserFile(project.path, version, spec)
  # NOTE: this stanza must appear after all actions have been decided.
  # Don't exclude sources with actions attached, or they won't run.
  excluded_sources = _FilterActionsFromExcluded(
      excluded_sources, actions_to_add)
  _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
                              list_excluded)
  _AddAccumulatedActionsToMSVS(p, spec, actions_to_add)
  # Write it out.
  p.WriteIfChanged()
  return missing_sources
def _GetUniquePlatforms(spec):
  """Returns the list of unique platforms for this spec, e.g ['win32', ...].

  Arguments:
    spec: The target dictionary containing the properties of the target.
  Returns:
    The list of unique platforms (no duplicates, arbitrary order).
  """
  configurations = spec['configurations']
  # De-duplicate the per-configuration platforms via a set.
  platforms = set(_ConfigPlatform(configurations[name])
                  for name in configurations)
  return list(platforms)
def _CreateMSVSUserFile(proj_path, version, spec):
  """Generates a .user file for the user running this Gyp program.

  Arguments:
    proj_path: The path of the project file being created.  The .user file
      shares the same path (with an appropriate suffix).
    version: The VisualStudioVersion object.
    spec: The target dictionary containing the properties of the target.
  Returns:
    The MSVSUserFile object created.
  """
  domain, username = _GetDomainAndUserName()
  # The .user file lives next to the project: <proj>.<domain>.<user>.user
  vcuser_filename = '.'.join([proj_path, domain, username, 'user'])
  return MSVSUserFile.Writer(vcuser_filename, version, spec['target_name'])
def _GetMSVSConfigurationType(spec, build_file):
"""Returns the configuration type for this project.
It's a number defined by Microsoft. May raise an exception.
Args:
spec: The target dictionary containing the properties of the target.
build_file: The path of the gyp file.
Returns:
An integer, the configuration type.
"""
try:
config_type = {
'executable': '1', # .exe
'shared_library': '2', # .dll
'loadable_module': '2', # .dll
'static_library': '4', # .lib
'none': '10', # Utility type
}[spec['type']]
except KeyError:
if spec.get('type'):
raise GypError('Target type %s is not a valid target type for '
'target %s in %s.' %
(spec['type'], spec['target_name'], build_file))
else:
raise GypError('Missing type field for target %s in %s.' %
(spec['target_name'], build_file))
return config_type
def _AddConfigurationToMSVSProject(p, spec, config_type, config_name, config):
  """Adds a configuration to the MSVS project.

  Many settings in a vcproj file are specific to a configuration.  This
  function the main part of the vcproj file that's configuration specific.

  Arguments:
    p: The target project being generated.
    spec: The target dictionary containing the properties of the target.
    config_type: The configuration type, a number as defined by Microsoft.
    config_name: The name of the configuration.
    config: The dictionary that defines the special processing to be done
      for this configuration.
  """
  # Get the information for this configuration
  include_dirs, resource_include_dirs = _GetIncludeDirs(config)
  libraries = _GetLibraries(spec)
  out_file, vc_tool, _ = _GetOutputFilePathAndTool(spec, msbuild=False)
  defines = _GetDefines(config)
  defines = [_EscapeCppDefineForMSVS(d) for d in defines]
  disabled_warnings = _GetDisabledWarnings(config)
  prebuild = config.get('msvs_prebuild')
  postbuild = config.get('msvs_postbuild')
  def_file = _GetModuleDefinition(spec)
  precompiled_header = config.get('msvs_precompiled_header')
  # Prepare the list of tools as a dictionary.
  tools = dict()
  # Add in user specified msvs_settings.
  msvs_settings = config.get('msvs_settings', {})
  MSVSSettings.ValidateMSVSSettings(msvs_settings)
  # Prevent default library inheritance from the environment.
  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', ['$(NOINHERIT)'])
  # Copy every user-specified setting into the tools dict first, so the
  # generated settings below merge with (rather than replace) them.
  for tool in msvs_settings:
    settings = config['msvs_settings'][tool]
    for setting in settings:
      _ToolAppend(tools, tool, setting, settings[setting])
  # Add the information to the appropriate tool
  _ToolAppend(tools, 'VCCLCompilerTool',
              'AdditionalIncludeDirectories', include_dirs)
  _ToolAppend(tools, 'VCResourceCompilerTool',
              'AdditionalIncludeDirectories', resource_include_dirs)
  # Add in libraries.
  _ToolAppend(tools, 'VCLinkerTool', 'AdditionalDependencies', libraries)
  if out_file:
    _ToolAppend(tools, vc_tool, 'OutputFile', out_file, only_if_unset=True)
  # Add defines.
  _ToolAppend(tools, 'VCCLCompilerTool', 'PreprocessorDefinitions', defines)
  _ToolAppend(tools, 'VCResourceCompilerTool', 'PreprocessorDefinitions',
              defines)
  # Change program database directory to prevent collisions.
  _ToolAppend(tools, 'VCCLCompilerTool', 'ProgramDataBaseFileName',
              '$(IntDir)$(ProjectName)\\vc80.pdb', only_if_unset=True)
  # Add disabled warnings.
  _ToolAppend(tools, 'VCCLCompilerTool',
              'DisableSpecificWarnings', disabled_warnings)
  # Add Pre-build.
  _ToolAppend(tools, 'VCPreBuildEventTool', 'CommandLine', prebuild)
  # Add Post-build.
  _ToolAppend(tools, 'VCPostBuildEventTool', 'CommandLine', postbuild)
  # Turn on precompiled headers if appropriate.
  if precompiled_header:
    precompiled_header = os.path.split(precompiled_header)[1]
    # '2' == use a precompiled header; force-include it in every TU.
    _ToolAppend(tools, 'VCCLCompilerTool', 'UsePrecompiledHeader', '2')
    _ToolAppend(tools, 'VCCLCompilerTool',
                'PrecompiledHeaderThrough', precompiled_header)
    _ToolAppend(tools, 'VCCLCompilerTool',
                'ForcedIncludeFiles', precompiled_header)
  # Loadable modules don't generate import libraries;
  # tell dependent projects to not expect one.
  if spec['type'] == 'loadable_module':
    _ToolAppend(tools, 'VCLinkerTool', 'IgnoreImportLibrary', 'true')
  # Set the module definition file if any.
  if def_file:
    _ToolAppend(tools, 'VCLinkerTool', 'ModuleDefinitionFile', def_file)
  _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name)
def _GetIncludeDirs(config):
  """Returns the list of directories to be used for #include directives.

  Arguments:
    config: The dictionary that defines the special processing to be done
      for this configuration.
  Returns:
    The pair (include_dirs, resource_include_dirs), both lists of paths.
  """
  # TODO(bradnelson): include_dirs should really be flexible enough not to
  # require this sort of thing.
  include_dirs = (config.get('include_dirs', []) +
                  config.get('msvs_system_include_dirs', []))
  # Resource compilation defaults to the same search path as the compiler.
  resource_include_dirs = config.get('resource_include_dirs', include_dirs)
  return _FixPaths(include_dirs), _FixPaths(resource_include_dirs)
def _GetLibraries(spec):
"""Returns the list of libraries for this configuration.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
The list of directory paths.
"""
libraries = spec.get('libraries', [])
# Strip out -l, as it is not used on windows (but is needed so we can pass
# in libraries that are assumed to be in the default library path).
# Also remove duplicate entries, leaving only the last duplicate, while
# preserving order.
found = set()
unique_libraries_list = []
for entry in reversed(libraries):
library = re.sub('^\-l', '', entry)
if not os.path.splitext(library)[1]:
library += '.lib'
if library not in found:
found.add(library)
unique_libraries_list.append(library)
unique_libraries_list.reverse()
return unique_libraries_list
def _GetOutputFilePathAndTool(spec, msbuild):
"""Returns the path and tool to use for this target.
Figures out the path of the file this spec will create and the name of
the VC tool that will create it.
Arguments:
spec: The target dictionary containing the properties of the target.
Returns:
A triple of (file path, name of the vc tool, name of the msbuild tool)
"""
# Select a name for the output file.
out_file = ''
vc_tool = ''
msbuild_tool = ''
output_file_map = {
'executable': ('VCLinkerTool', 'Link', '$(OutDir)', '.exe'),
'shared_library': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
'loadable_module': ('VCLinkerTool', 'Link', '$(OutDir)', '.dll'),
'static_library': ('VCLibrarianTool', 'Lib', '$(OutDir)lib\\', '.lib'),
}
output_file_props = output_file_map.get(spec['type'])
if output_file_props and int(spec.get('msvs_auto_output_file', 1)):
vc_tool, msbuild_tool, out_dir, suffix = output_file_props
if spec.get('standalone_static_library', 0):
out_dir = '$(OutDir)'
out_dir = spec.get('product_dir', out_dir)
product_extension = spec.get('product_extension')
if product_extension:
suffix = '.' + product_extension
elif msbuild:
suffix = '$(TargetExt)'
prefix = spec.get('product_prefix', '')
product_name = spec.get('product_name', '$(ProjectName)')
out_file = ntpath.join(out_dir, prefix + product_name + suffix)
return out_file, vc_tool, msbuild_tool
def _GetDefines(config):
"""Returns the list of preprocessor definitions for this configuation.
Arguments:
config: The dictionnary that defines the special processing to be done
for this configuration.
Returns:
The list of preprocessor definitions.
"""
defines = []
for d in config.get('defines', []):
if type(d) == list:
fd = '='.join([str(dpart) for dpart in d])
else:
fd = str(d)
defines.append(fd)
return defines
def _GetDisabledWarnings(config):
return [str(i) for i in config.get('msvs_disabled_warnings', [])]
def _GetModuleDefinition(spec):
def_file = ''
if spec['type'] in ['shared_library', 'loadable_module', 'executable']:
def_files = [s for s in spec.get('sources', []) if s.endswith('.def')]
if len(def_files) == 1:
def_file = _FixPath(def_files[0])
elif def_files:
raise ValueError(
'Multiple module definition files in one target, target %s lists '
'multiple .def files: %s' % (
spec['target_name'], ' '.join(def_files)))
return def_file
def _ConvertToolsToExpectedForm(tools):
  """Convert tools to a form expected by Visual Studio.

  Arguments:
    tools: A dictionary of settings; the tool name is the key.
  Returns:
    A list of Tool objects.
  """
  tool_list = []
  for tool, settings in tools.iteritems():
    settings_fixed = {}
    for setting, value in settings.iteritems():
      if type(value) == list:
        # List values collapse into one delimited string.  Linker
        # dependencies and AdditionalOptions are space-separated; everything
        # else uses the VCProj ';' list separator.
        space_joined = (setting == 'AdditionalOptions' or
                        (tool == 'VCLinkerTool' and
                         setting == 'AdditionalDependencies'))
        separator = ' ' if space_joined else ';'
        settings_fixed[setting] = separator.join(value)
      else:
        settings_fixed[setting] = value
    tool_list.append(MSVSProject.Tool(tool, settings_fixed))
  return tool_list
def _AddConfigurationToMSVS(p, spec, tools, config, config_type, config_name):
  """Add to the project file the configuration specified by config.

  Arguments:
    p: The target project being generated.
    spec: the target project dict.
    tools: A dictionary of settings; the tool name is the key.
    config: The dictionary that defines the special processing to be done
      for this configuration.
    config_type: The configuration type, a number as defined by Microsoft.
    config_name: The name of the configuration.
  """
  p.AddConfig(_ConfigFullName(config_name, config),
              attrs=_GetMSVSAttributes(spec, config, config_type),
              tools=_ConvertToolsToExpectedForm(tools))
def _GetMSVSAttributes(spec, config, config_type):
  """Assemble the configuration-attribute dict for a vcproj configuration."""
  # Start from a copy of any user-specified configuration attributes.
  prepared_attrs = dict(config.get('msvs_configuration_attributes', {}))
  # Add props files.
  vsprops_dirs = _FixPaths(config.get('msvs_props', []))
  if vsprops_dirs:
    prepared_attrs['InheritedPropertySheets'] = ';'.join(vsprops_dirs)
  # Set configuration type.
  prepared_attrs['ConfigurationType'] = config_type
  output_dir = prepared_attrs.get('OutputDirectory',
                                  '$(SolutionDir)$(ConfigurationName)')
  prepared_attrs['OutputDirectory'] = _FixPath(output_dir) + '\\'
  if 'IntermediateDirectory' in prepared_attrs:
    # User-supplied path: fix separators and normalize VC macro slashes.
    intermediate = _FixPath(prepared_attrs['IntermediateDirectory']) + '\\'
    prepared_attrs['IntermediateDirectory'] = (
        MSVSSettings.FixVCMacroSlashes(intermediate))
  else:
    prepared_attrs['IntermediateDirectory'] = (
        _FixPath('$(ConfigurationName)\\obj\\$(ProjectName)') + '\\')
  return prepared_attrs
def _AddNormalizedSources(sources_set, sources_array):
  """Normalize each path in sources_array and merge them into sources_set."""
  sources_set.update(_NormalizedSource(source) for source in sources_array)
def _PrepareListOfSources(spec, generator_flags, gyp_file):
  """Prepare list of sources and excluded sources.

  Besides the sources specified directly in the spec, adds the gyp file so
  that a change to it will cause a re-compile. Also adds appropriate sources
  for actions and copies. Assumes later stage will un-exclude files which
  have custom build steps attached.

  Arguments:
    spec: The target dictionary containing the properties of the target.
    generator_flags: Dict of generator-wide flags; 'standalone' is honored.
    gyp_file: The name of the gyp file.
  Returns:
    A pair of (set of sources, set of excluded sources).
    The sources will be relative to the gyp file.
  """
  sources = set()
  excluded_sources = set()
  _AddNormalizedSources(sources, spec.get('sources', []))

  # The gyp file itself is an input so that editing it triggers a
  # re-generation, unless a standalone build was requested.
  if not generator_flags.get('standalone'):
    sources.add(gyp_file)

  for action in spec.get('actions', []):
    # Action inputs start out both included and excluded; a later stage
    # un-excludes those with custom build steps attached.
    action_inputs = set(_NormalizedSource(i) for i in action['inputs'])
    sources |= action_inputs
    excluded_sources |= action_inputs
    if int(action.get('process_outputs_as_sources', False)):
      _AddNormalizedSources(sources, action.get('outputs', []))

  for copy_spec in spec.get('copies', []):
    _AddNormalizedSources(sources, copy_spec.get('files', []))

  return (sources, excluded_sources)
def _AdjustSourcesAndConvertToFilterHierarchy(
    spec, options, gyp_dir, sources, excluded_sources, list_excluded):
  """Adjusts the list of sources and excluded sources.

  Also converts the sets to lists.

  Arguments:
    spec: The target dictionary containing the properties of the target.
    options: Global generator options.
    gyp_dir: The path to the gyp file being processed.
    sources: A set of sources to be included for this project.
    excluded_sources: A set of sources to be excluded for this project.
    list_excluded: Whether excluded files are listed in the project.
  Returns:
    A trio of (list of sources, list of excluded sources,
               path of excluded IDL file)
  """
  # Fold in exclusions that arrived with the spec, then add the excluded
  # files into sources so they still show up in the project.
  excluded_sources.update(set(spec.get('sources_excluded', [])))
  sources.update(excluded_sources)

  # _FixPaths converts to proper windows form; both sets become lists here.
  sources = _FixPaths(sources)
  excluded_sources = _FixPaths(excluded_sources)

  excluded_idl = _IdlFilesHandledNonNatively(spec, sources)
  precompiled_related = _GetPrecompileRelatedFiles(spec)
  # Fully-excluded files are the excluded ones minus anything related to
  # precompiled headers.
  fully_excluded = [f for f in excluded_sources
                    if f not in precompiled_related]

  # Convert to folders and the right slashes.
  split_sources = [path.split('\\') for path in sources]
  sources = _ConvertSourcesToFilterHierarchy(split_sources,
                                             excluded=fully_excluded,
                                             list_excluded=list_excluded)
  return sources, excluded_sources, excluded_idl
def _IdlFilesHandledNonNatively(spec, sources):
# If any non-native rules use 'idl' as an extension exclude idl files.
# Gather a list here to use later.
using_idl = False
for rule in spec.get('rules', []):
if rule['extension'] == 'idl' and int(rule.get('msvs_external_rule', 0)):
using_idl = True
break
if using_idl:
excluded_idl = [i for i in sources if i.endswith('.idl')]
else:
excluded_idl = []
return excluded_idl
def _GetPrecompileRelatedFiles(spec):
  """Collect precompiled-header related files across all configurations."""
  related = []
  for config in spec['configurations'].itervalues():
    for key in precomp_keys:
      path = config.get(key)
      if path:
        related.append(_FixPath(path))
  return related
def _ExcludeFilesFromBeingBuilt(p, spec, excluded_sources, excluded_idl,
                                list_excluded):
  """Mark excluded files as ExcludedFromBuild in the relevant configs."""
  exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
  num_configs = len(spec['configurations'])
  for file_name, excluded_configs in exclusions.iteritems():
    # When excluded files are not listed and a file is excluded everywhere,
    # it will not appear in the project at all, so there is nothing to
    # configure for it.
    if not list_excluded and len(excluded_configs) == num_configs:
      continue
    for config_name, config in excluded_configs:
      p.AddFileConfig(file_name, _ConfigFullName(config_name, config),
                      {'ExcludedFromBuild': 'true'})
def _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl):
  """Map each excluded file to the (name, config) pairs it is excluded in."""
  exclusions = {}
  configurations = spec['configurations']

  for f in excluded_sources:
    excluded_configs = []
    for config_name, config in configurations.iteritems():
      # Precompiled-header related files must still build in the configs
      # that reference them, so skip those.
      precomped = [_FixPath(config.get(key, '')) for key in precomp_keys]
      if f not in precomped:
        excluded_configs.append((config_name, config))
    exclusions[f] = excluded_configs

  # idl files handled by a non-native (external) rule are excluded from
  # every configuration.
  for f in excluded_idl:
    exclusions[f] = list(configurations.iteritems())

  return exclusions
def _AddToolFilesToMSVS(p, spec):
  """Add the union of all configurations' msvs_tool_files to the project."""
  tool_files = set()
  for config in spec['configurations'].itervalues():
    tool_files.update(config.get('msvs_tool_files', []))
  for tool_file in tool_files:
    p.AddToolFile(tool_file)
def _HandlePreCompiledHeaders(p, sources, spec):
  """Configure precompiled-header flags on the project's file configs.

  Pre-compiled header source stubs need a different compiler flag
  (generate precompiled header) and any source file not of the same
  kind (i.e. C vs. C++) as the precompiled header source stub needs
  to have use of precompiled headers disabled.
  """
  extensions_excluded_from_precompile = []
  for config_name, config in spec['configurations'].iteritems():
    source = config.get('msvs_precompiled_source')
    if source:
      source = _FixPath(source)
      # UsePrecompiledHeader=1 for if using precompiled headers.
      tool = MSVSProject.Tool('VCCLCompilerTool',
                              {'UsePrecompiledHeader': '1'})
      p.AddFileConfig(source, _ConfigFullName(config_name, config),
                      {}, tools=[tool])
      basename, extension = os.path.splitext(source)
      # The PCH stub fixes the language kind; the opposite kind's
      # extensions must not use the precompiled header.
      if extension == '.c':
        extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
      else:
        extensions_excluded_from_precompile = ['.c']

  # NOTE(review): extensions_excluded_from_precompile keeps the value from
  # the LAST configuration with a precompiled source; presumably all
  # configurations use the same language kind — confirm with callers.
  def DisableForSourceTree(source_tree):
    # Walk the filter hierarchy, disabling PCH for mismatched-kind files.
    for source in source_tree:
      if isinstance(source, MSVSProject.Filter):
        DisableForSourceTree(source.contents)
      else:
        basename, extension = os.path.splitext(source)
        if extension in extensions_excluded_from_precompile:
          for config_name, config in spec['configurations'].iteritems():
            tool = MSVSProject.Tool('VCCLCompilerTool',
                                    {'UsePrecompiledHeader': '0',
                                     'ForcedIncludeFiles': '$(NOINHERIT)'})
            p.AddFileConfig(_FixPath(source),
                            _ConfigFullName(config_name, config),
                            {}, tools=[tool])
  # Do nothing if there was no precompiled source.
  if extensions_excluded_from_precompile:
    DisableForSourceTree(sources)
def _AddActions(actions_to_add, spec, relative_path_of_gyp_file):
  """Queue each of the spec's actions as an action step.

  setup_env is only emitted once per attachment point: when all actions
  run together in one VS batch file, repeating it would make PATH grow
  too long.
  """
  # Membership means the cygwin environment has already been set up for
  # that attachment point.
  have_setup_env = set()
  for action in spec.get('actions', []):
    # Actions with no inputs get attached to the gyp file itself.
    inputs = action.get('inputs') or [relative_path_of_gyp_file]
    attached_to = inputs[0]
    cmd = _BuildCommandLineForRule(
        spec, action, has_input_path=False,
        do_setup_env=attached_to not in have_setup_env)
    have_setup_env.add(attached_to)
    _AddActionStep(actions_to_add,
                   inputs=inputs,
                   outputs=action.get('outputs', []),
                   description=action.get('message', action['action_name']),
                   command=cmd)
def _WriteMSVSUserFile(project_path, version, spec):
  """Write the .user file holding debug settings for run_as/test targets."""
  if 'run_as' in spec:
    run_as = spec['run_as']
    settings = (run_as.get('action', []),
                run_as.get('environment', []),
                run_as.get('working_directory', '.'))
  elif int(spec.get('test', 0)):
    # Test targets default to a gtest invocation.
    settings = (['$(TargetPath)', '--gtest_print_time'], [], '.')
  else:
    return  # Neither run_as nor test: nothing to add.
  action, environment, working_directory = settings

  user_file = _CreateMSVSUserFile(project_path, version, spec)
  for config_name, c_data in spec['configurations'].iteritems():
    user_file.AddDebugSettings(_ConfigFullName(config_name, c_data),
                               action, environment, working_directory)
  user_file.WriteIfChanged()
def _AddCopies(actions_to_add, spec):
  """Turn each copy step from the spec into a queued action step."""
  for inputs, outputs, cmd, description in _GetCopies(spec):
    _AddActionStep(actions_to_add, inputs=inputs, outputs=outputs,
                   description=description, command=cmd)
def _GetCopies(spec):
  """Return (inputs, outputs, command, description) tuples for 'copies'."""
  copies = []
  for copy_spec in spec.get('copies', []):
    destination = copy_spec['destination']
    for src in copy_spec.get('files', []):
      dst = os.path.join(destination, os.path.basename(src))
      # _AddCustomBuildToolForMSVS() will call _FixPath() on the inputs and
      # outputs, so do the same for our generated command line.
      if src.endswith('/'):
        # Trailing slash: copy a whole directory tree with xcopy.
        src_bare = src[:-1]
        base_dir, outer_dir = posixpath.split(src_bare)
        cmd = 'cd "%s" && xcopy /e /f /y "%s" "%s\\%s\\"' % (
            _FixPath(base_dir), outer_dir, _FixPath(dst), outer_dir)
        copies.append(([src], ['dummy_copies', dst], cmd,
                       'Copying %s to %s' % (src, dst)))
      else:
        # Single file: ensure the destination dir exists, then copy.
        cmd = 'mkdir "%s" 2>nul & set ERRORLEVEL=0 & copy /Y "%s" "%s"' % (
            _FixPath(destination), _FixPath(src), _FixPath(dst))
        copies.append(([src], [dst], cmd, 'Copying %s to %s' % (src, dst)))
  return copies
def _GetPathDict(root, path):
# |path| will eventually be empty (in the recursive calls) if it was initially
# relative; otherwise it will eventually end up as '\', 'D:\', etc.
if not path or path.endswith(os.sep):
return root
parent, folder = os.path.split(path)
parent_dict = _GetPathDict(root, parent)
if folder not in parent_dict:
parent_dict[folder] = dict()
return parent_dict[folder]
def _DictsToFolders(base_path, bucket, flat):
  """Recursively convert a dict tree into MSVSFolder entries (or a flat list)."""
  children = []
  for folder, contents in bucket.iteritems():
    if type(contents) == dict:
      subtree = _DictsToFolders(os.path.join(base_path, folder),
                                contents, flat)
      if flat:
        # Flat solutions collapse folders into one level.
        children.extend(subtree)
      else:
        children.append(
            MSVSNew.MSVSFolder(os.path.join(base_path, folder),
                               name='(' + folder + ')',
                               entries=subtree))
    else:
      # A leaf is a project object; keep it as-is.
      children.append(contents)
  return children
def _CollapseSingles(parent, node):
# Recursively explorer the tree of dicts looking for projects which are
# the sole item in a folder which has the same name as the project. Bring
# such projects up one level.
if (type(node) == dict and
len(node) == 1 and
node.keys()[0] == parent + '.vcproj'):
return node[node.keys()[0]]
if type(node) != dict:
return node
for child in node:
node[child] = _CollapseSingles(child, node[child])
return node
def _GatherSolutionFolders(sln_projects, project_objects, flat):
  """Arrange the project objects into a solution folder hierarchy."""
  root = {}
  # Build a tree of dicts keyed on path components.
  for p in sln_projects:
    gyp_file, target = gyp.common.ParseQualifiedTarget(p)[0:2]
    path_dict = _GetPathDict(root, os.path.dirname(gyp_file))
    path_dict[target + '.vcproj'] = project_objects[p]
  # Walk down from the top until we hit a folder that has more than one
  # entry. In practice, this strips the top-level "src/" dir from the
  # hierarchy in the solution.
  while len(root) == 1 and type(root[root.keys()[0]]) == dict:
    root = root[root.keys()[0]]
  # Collapse folders containing a single same-named project.
  root = _CollapseSingles('', root)
  # Merge buckets until everything is a root entry.
  return _DictsToFolders('', root, flat)
def _GetPathOfProject(qualified_target, spec, options, msvs_version):
  """Compute the project file path and fixpath prefix for a target.

  Returns (proj_path, fix_prefix) where fix_prefix is None unless a
  generator_output dir relocates the project away from the gyp file.
  """
  proj_filename = _GetDefaultConfiguration(spec).get('msvs_existing_vcproj')
  if not proj_filename:
    proj_filename = (spec['target_name'] + options.suffix +
                     msvs_version.ProjectExtension())

  build_file = gyp.common.BuildFile(qualified_target)
  proj_path = os.path.join(os.path.dirname(build_file), proj_filename)
  if not options.generator_output:
    return proj_path, None

  # Relocated under generator_output: paths written into the project must
  # be rewritten relative to its new location.
  original_dir = os.path.dirname(os.path.abspath(proj_path))
  proj_path = os.path.join(options.generator_output, proj_path)
  fix_prefix = gyp.common.RelativePath(original_dir,
                                       os.path.dirname(proj_path))
  return proj_path, fix_prefix
def _GetPlatformOverridesOfProject(spec):
  """Map solution configuration names to the project configuration used.

  Indicates which project configurations are used for which solution
  configurations for this target.
  """
  overrides = {}
  for config_name, config in spec['configurations'].iteritems():
    platform = config.get('msvs_target_platform', _ConfigPlatform(config))
    fixed_config_fullname = '%s|%s' % (
        _ConfigBaseName(config_name, _ConfigPlatform(config)), platform)
    overrides[_ConfigFullName(config_name, config)] = fixed_config_fullname
  return overrides
def _CreateProjectObjects(target_list, target_dicts, options, msvs_version):
  """Create a MSVSProject object for the targets found in target list.

  Arguments:
    target_list: the list of targets to generate project objects for.
    target_dicts: the dictionary of specifications.
    options: global generator options.
    msvs_version: the MSVSVersion object.
  Returns:
    A set of created projects, keyed by target.
  """
  global fixpath_prefix
  projects = {}
  for qualified_target in target_list:
    spec = target_dicts[qualified_target]
    if spec['toolset'] != 'target':
      raise GypError(
          'Multiple toolsets not supported in msvs build (target %s)' %
          qualified_target)
    # _GetPathOfProject also yields the prefix used (via the module
    # global) to fix paths relative to the relocated project.
    proj_path, fixpath_prefix = _GetPathOfProject(qualified_target, spec,
                                                  options, msvs_version)
    obj = MSVSNew.MSVSProject(
        proj_path,
        name=spec['target_name'],
        guid=_GetGuidOfProject(proj_path, spec),
        spec=spec,
        build_file=gyp.common.BuildFile(qualified_target),
        config_platform_overrides=_GetPlatformOverridesOfProject(spec),
        fixpath_prefix=fixpath_prefix)
    if msvs_version.UsesVcxproj():
      # Set project toolset if any (MS build only).
      obj.set_msbuild_toolset(
          _GetMsbuildToolsetOfProject(proj_path, spec, msvs_version))
    projects[qualified_target] = obj
  # Wire up dependencies now that every project object exists.
  for project in projects.values():
    project.set_dependencies(
        [projects[dep] for dep in project.spec.get('dependencies', [])])
  return projects
def CalculateVariables(default_variables, params):
  """Generated variables that require params to be known."""
  generator_flags = params.get('generator_flags', {})

  # Select project file format version (if unset, default to auto detecting).
  msvs_version = MSVSVersion.SelectVisualStudioVersion(
      generator_flags.get('msvs_version', 'auto'))
  # Stash msvs_version for later (so we don't have to probe the system twice).
  params['msvs_version'] = msvs_version

  # Set a variable so conditions can be based on msvs_version.
  default_variables['MSVS_VERSION'] = msvs_version.ShortName()

  # To determine processor word size on Windows, in addition to checking
  # PROCESSOR_ARCHITECTURE (which reflects the word size of the current
  # process), it is also necessary to check PROCESSOR_ARCHITEW6432 (which
  # contains the actual word size of the system when running thru WOW64).
  arch_values = (os.environ.get('PROCESSOR_ARCHITECTURE', ''),
                 os.environ.get('PROCESSOR_ARCHITEW6432', ''))
  if any('64' in arch for arch in arch_values):
    default_variables['MSVS_OS_BITS'] = 64
  else:
    default_variables['MSVS_OS_BITS'] = 32
def PerformBuild(data, configurations, params):
options = params['options']
msvs_version = params['msvs_version']
devenv = os.path.join(msvs_version.path, 'Common7', 'IDE', 'devenv.com')
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
sln_path = build_file_root + options.suffix + '.sln'
if options.generator_output:
sln_path = os.path.join(options.generator_output, sln_path)
for config in configurations:
arguments = [devenv, sln_path, '/Build', config]
print 'Building [%s]: %s' % (config, arguments)
rtn = subprocess.check_call(arguments)
def GenerateOutput(target_list, target_dicts, data, params):
  """Generate .sln and .vcproj files.

  This is the entry point for this generator.
  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    data: Dictionary containing per .gyp data.
    params: Generator params dict ('options', 'msvs_version',
        'generator_flags').
  """
  global fixpath_prefix

  options = params['options']

  # Get the project file format version back out of where we stashed it in
  # GeneratorCalculatedVariables.
  msvs_version = params['msvs_version']

  generator_flags = params.get('generator_flags', {})

  # Optionally shard targets marked with 'msvs_shard': SHARD_COUNT.
  (target_list, target_dicts) = MSVSUtil.ShardTargets(target_list,
                                                      target_dicts)

  # Optionally use the large PDB workaround for targets marked with
  # 'msvs_large_pdb': 1.
  (target_list, target_dicts) = MSVSUtil.InsertLargePdbShims(
      target_list, target_dicts, generator_default_variables)

  # Prepare the set of configurations.
  configs = set()
  for qualified_target in target_list:
    spec = target_dicts[qualified_target]
    for config_name, config in spec['configurations'].iteritems():
      configs.add(_ConfigFullName(config_name, config))
  configs = list(configs)

  # Figure out all the projects that will be generated and their guids
  project_objects = _CreateProjectObjects(target_list, target_dicts, options,
                                          msvs_version)

  # Generate each project.
  missing_sources = []
  for project in project_objects.values():
    # fixpath_prefix is a module global; presumably consumed by _FixPath
    # while this project is generated — TODO confirm.
    fixpath_prefix = project.fixpath_prefix
    missing_sources.extend(_GenerateProject(project, options, msvs_version,
                                            generator_flags))
  fixpath_prefix = None

  for build_file in data:
    # Validate build_file extension
    if not build_file.endswith('.gyp'):
      continue
    sln_path = os.path.splitext(build_file)[0] + options.suffix + '.sln'
    if options.generator_output:
      sln_path = os.path.join(options.generator_output, sln_path)
    # Get projects in the solution, and their dependents.
    sln_projects = gyp.common.BuildFileTargets(target_list, build_file)
    sln_projects += gyp.common.DeepDependencyTargets(target_dicts,
                                                     sln_projects)
    # Create folder hierarchy.
    root_entries = _GatherSolutionFolders(
        sln_projects, project_objects, flat=msvs_version.FlatSolution())
    # Create solution.
    sln = MSVSNew.MSVSSolution(sln_path,
                               entries=root_entries,
                               variants=configs,
                               websiteProperties=False,
                               version=msvs_version)
    sln.Write()

  if missing_sources:
    error_message = "Missing input files:\n" + \
                    '\n'.join(set(missing_sources))
    if generator_flags.get('msvs_error_on_missing_sources', False):
      raise GypError(error_message)
    else:
      # Missing inputs are only a warning unless the flag says otherwise.
      print >> sys.stdout, "Warning: " + error_message
def _GenerateMSBuildFiltersFile(filters_path, source_files,
                                extension_to_rule_name):
  """Generate the filters file.

  This file is used by Visual Studio to organize the presentation of source
  files into folders.

  Arguments:
    filters_path: The path of the file to be created.
    source_files: The hierarchical structure of all the sources.
    extension_to_rule_name: A dictionary mapping file extensions to rules.
  """
  filter_group = []
  source_group = []
  _AppendFiltersForMSBuild('', source_files, extension_to_rule_name,
                           filter_group, source_group)
  if not filter_group:
    # No filters are needed; remove any stale filters file.
    if os.path.exists(filters_path):
      os.unlink(filters_path)
    return
  content = ['Project',
             {'ToolsVersion': '4.0',
              'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
             },
             ['ItemGroup'] + filter_group,
             ['ItemGroup'] + source_group
            ]
  easy_xml.WriteXmlIfChanged(content, filters_path, pretty=True, win32=True)
def _AppendFiltersForMSBuild(parent_filter_name, sources,
                             extension_to_rule_name,
                             filter_group, source_group):
  """Creates the list of filters and sources to be added in the filter file.

  Args:
    parent_filter_name: The name of the filter under which the sources are
        found.
    sources: The hierarchy of filters and sources to process.
    extension_to_rule_name: A dictionary mapping file extensions to rules.
    filter_group: The list to which filter entries will be appended.
    source_group: The list to which source entries will be appended.
  """
  for source in sources:
    if not isinstance(source, MSVSProject.Filter):
      # A plain source file: record it under its MSBuild element type.
      _, element = _MapFileToMsBuildSourceType(source, extension_to_rule_name)
      source_entry = [element, {'Include': source}]
      if parent_filter_name:
        # Specify which filter the source belongs to.
        source_entry.append(['Filter', parent_filter_name])
      source_group.append(source_entry)
      continue
    # A sub-filter: qualify its name with the parent's, emit it, recurse.
    if parent_filter_name:
      filter_name = '%s\\%s' % (parent_filter_name, source.name)
    else:
      filter_name = source.name
    filter_group.append(
        ['Filter', {'Include': filter_name},
         ['UniqueIdentifier', MSVSNew.MakeGuid(source.name)]])
    _AppendFiltersForMSBuild(filter_name, source.contents,
                             extension_to_rule_name,
                             filter_group, source_group)
def _MapFileToMsBuildSourceType(source, extension_to_rule_name):
"""Returns the group and element type of the source file.
Arguments:
source: The source file name.
extension_to_rule_name: A dictionary mapping file extensions to rules.
Returns:
A pair of (group this file should be part of, the label of element)
"""
_, ext = os.path.splitext(source)
if ext in extension_to_rule_name:
group = 'rule'
element = extension_to_rule_name[ext]
elif ext in ['.cc', '.cpp', '.c', '.cxx']:
group = 'compile'
element = 'ClCompile'
elif ext in ['.h', '.hxx']:
group = 'include'
element = 'ClInclude'
elif ext == '.rc':
group = 'resource'
element = 'ResourceCompile'
elif ext == '.idl':
group = 'midl'
element = 'Midl'
else:
group = 'none'
element = 'None'
return (group, element)
def _GenerateRulesForMSBuild(output_dir, options, spec,
                             sources, excluded_sources,
                             props_files_of_rules, targets_files_of_rules,
                             actions_to_add, extension_to_rule_name):
  """Emit the MSBuild files implementing this target's rules.

  MSBuild rules are implemented using three files: an XML file, a .targets
  file and a .props file.
  See http://blogs.msdn.com/b/vcblog/archive/2010/04/21/quick-help-on-vs2010-custom-build-rule.aspx
  for more details.
  """
  rules = spec.get('rules', [])
  rules_native = [r for r in rules if not int(r.get('msvs_external_rule', 0))]
  rules_external = [r for r in rules if int(r.get('msvs_external_rule', 0))]

  msbuild_rules = []
  for rule in rules_native:
    if 'action' not in rule and not rule.get('rule_sources', []):
      continue  # A rule with no action and no inputs produces nothing.
    msbuild_rule = MSBuildRule(rule, spec)
    msbuild_rules.append(msbuild_rule)
    extension_to_rule_name[msbuild_rule.extension] = msbuild_rule.rule_name

  if msbuild_rules:
    base = spec['target_name'] + options.suffix
    props_name = base + '.props'
    targets_name = base + '.targets'
    props_files_of_rules.add(props_name)
    targets_files_of_rules.add(targets_name)
    _GenerateMSBuildRulePropsFile(os.path.join(output_dir, props_name),
                                  msbuild_rules)
    _GenerateMSBuildRuleTargetsFile(os.path.join(output_dir, targets_name),
                                    msbuild_rules)
    _GenerateMSBuildRuleXmlFile(os.path.join(output_dir, base + '.xml'),
                                msbuild_rules)

  if rules_external:
    _GenerateExternalRules(rules_external, output_dir, spec,
                           sources, options, actions_to_add)
  _AdjustSourcesForRules(rules, sources, excluded_sources)
class MSBuildRule(object):
  """Used to store information used to generate an MSBuild rule.

  Attributes:
    rule_name: The rule name, sanitized to use in XML.
    target_name: The name of the target.
    after_targets: The name of the AfterTargets element.
    before_targets: The name of the BeforeTargets element.
    depends_on: The name of the DependsOn element.
    compute_output: The name of the ComputeOutput element.
    dirs_to_make: The name of the DirsToMake element.
    inputs: The name of the _inputs element.
    tlog: The name of the _tlog element.
    extension: The extension this rule applies to.
    description: The message displayed when this rule is invoked.
    additional_dependencies: A string listing additional dependencies.
    outputs: The outputs of this rule.
    command: The command used to run the rule.
  """

  def __init__(self, rule, spec):
    self.display_name = rule['rule_name']
    # Rule names appear as XML element names, so keep word chars only.
    self.rule_name = re.sub(r'\W', '_', self.display_name)
    name = self.rule_name
    # Derived element names follow the example set by the Visual Studio
    # 2008 to 2010 conversion; it is unknown whether VS2010 is sensitive
    # to the exact names.
    self.target_name = '_' + name
    self.after_targets = name + 'AfterTargets'
    self.before_targets = name + 'BeforeTargets'
    self.depends_on = name + 'DependsOn'
    self.compute_output = 'Compute%sOutput' % name
    self.dirs_to_make = name + 'DirsToMake'
    self.inputs = name + '_inputs'
    self.tlog = name + '_tlog'
    extension = rule['extension']
    if not extension.startswith('.'):
      extension = '.' + extension
    self.extension = extension
    self.description = MSVSSettings.ConvertVCMacrosToMSBuild(
        rule.get('message', name))
    self.additional_dependencies = ';'.join(
        MSVSSettings.ConvertVCMacrosToMSBuild(dep)
        for dep in _FixPaths(rule.get('inputs', [])))
    self.outputs = ';'.join(
        MSVSSettings.ConvertVCMacrosToMSBuild(out)
        for out in _FixPaths(rule.get('outputs', [])))
    self.command = MSVSSettings.ConvertVCMacrosToMSBuild(
        _BuildCommandLineForRule(spec, rule, has_input_path=True,
                                 do_setup_env=True))
def _GenerateMSBuildRulePropsFile(props_path, msbuild_rules):
  """Generate the .props file."""
  content = ['Project',
             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'}]
  for rule in msbuild_rules:
    # Default anchor points for the rule unless the project overrides them
    # (and the project is not Makefile-style).
    defaults_condition = ("'$(%s)' == '' and '$(%s)' == '' and "
                          "'$(ConfigurationType)' != 'Makefile'" %
                          (rule.before_targets, rule.after_targets))
    target_defaults = ['PropertyGroup',
                       {'Condition': defaults_condition},
                       [rule.before_targets, 'Midl'],
                       [rule.after_targets, 'CustomBuild'],
                      ]
    depends = ['PropertyGroup',
               [rule.depends_on,
                {'Condition': "'$(ConfigurationType)' != 'Makefile'"},
                '_SelectedFiles;$(%s)' % rule.depends_on
               ],
              ]
    # Per-item defaults: command, outputs, description, dependencies.
    rule_settings = ['ItemDefinitionGroup',
                     [rule.rule_name,
                      ['CommandLineTemplate', rule.command],
                      ['Outputs', rule.outputs],
                      ['ExecutionDescription', rule.description],
                      ['AdditionalDependencies',
                       rule.additional_dependencies],
                     ],
                    ]
    content.extend([target_defaults, depends, rule_settings])
  easy_xml.WriteXmlIfChanged(content, props_path, pretty=True, win32=True)
def _GenerateMSBuildRuleTargetsFile(targets_path, msbuild_rules):
  """Generate the .targets file."""
  content = ['Project',
             {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003'
             }
            ]
  # Expose the companion .xml schema and each rule's item name.
  item_group = [
      'ItemGroup',
      ['PropertyPageSchema',
       {'Include': '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'}
      ]
    ]
  for rule in msbuild_rules:
    item_group.append(
        ['AvailableItemName',
         {'Include': rule.rule_name},
         ['Targets', rule.target_name],
        ])
  content.append(item_group)
  # Each rule is executed via an XamlTaskFactory task defined in the .xml.
  for rule in msbuild_rules:
    content.append(
        ['UsingTask',
         {'TaskName': rule.rule_name,
          'TaskFactory': 'XamlTaskFactory',
          'AssemblyName': 'Microsoft.Build.Tasks.v4.0'
         },
         ['Task', '$(MSBuildThisFileDirectory)$(MSBuildThisFileName).xml'],
        ])
  # Emit the Target / ComputeOutput pair for every rule.
  for rule in msbuild_rules:
    rule_name = rule.rule_name
    target_outputs = '%%(%s.Outputs)' % rule_name
    target_inputs = ('%%(%s.Identity);%%(%s.AdditionalDependencies);'
                     '$(MSBuildProjectFile)') % (rule_name, rule_name)
    rule_inputs = '%%(%s.Identity)' % rule_name
    extension_condition = ("'%(Extension)'=='.obj' or "
                           "'%(Extension)'=='.res' or "
                           "'%(Extension)'=='.rsc' or "
                           "'%(Extension)'=='.lib'")
    # When building only selected files, drop the non-selected ones.
    remove_section = [
        'ItemGroup',
        {'Condition': "'@(SelectedFiles)' != ''"},
        [rule_name,
         {'Remove': '@(%s)' % rule_name,
          'Condition': "'%(Identity)' != '@(SelectedFiles)'"
         }
        ]
      ]
    inputs_section = [
        'ItemGroup',
        [rule.inputs, {'Include': '%%(%s.AdditionalDependencies)' % rule_name}]
      ]
    # Collect outputs/inputs for the tracking logs (.tlog) below.
    logging_section = [
        'ItemGroup',
        [rule.tlog,
         {'Include': '%%(%s.Outputs)' % rule_name,
          'Condition': ("'%%(%s.Outputs)' != '' and "
                        "'%%(%s.ExcludedFromBuild)' != 'true'" %
                        (rule_name, rule_name))
         },
         ['Source', "@(%s, '|')" % rule_name],
         ['Inputs', "@(%s -> '%%(Fullpath)', ';')" % rule.inputs],
        ],
      ]
    message_section = [
        'Message',
        {'Importance': 'High',
         'Text': '%%(%s.ExecutionDescription)' % rule_name
        }
      ]
    # The write/read .tlog files let incremental builds track this rule.
    write_tlog_section = [
        'WriteLinesToFile',
        {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
         "'true'" % (rule.tlog, rule.tlog),
         'File': '$(IntDir)$(ProjectName).write.1.tlog',
         'Lines': "^%%(%s.Source);@(%s->'%%(Fullpath)')" % (rule.tlog,
                                                            rule.tlog)
        }
      ]
    read_tlog_section = [
        'WriteLinesToFile',
        {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
         "'true'" % (rule.tlog, rule.tlog),
         'File': '$(IntDir)$(ProjectName).read.1.tlog',
         'Lines': "^%%(%s.Source);%%(%s.Inputs)" % (rule.tlog, rule.tlog)
        }
      ]
    # Invoke the rule's task with its command line and inputs.
    command_and_input_section = [
        rule_name,
        {'Condition': "'@(%s)' != '' and '%%(%s.ExcludedFromBuild)' != "
         "'true'" % (rule_name, rule_name),
         'CommandLineTemplate': '%%(%s.CommandLineTemplate)' % rule_name,
         'AdditionalOptions': '%%(%s.AdditionalOptions)' % rule_name,
         'Inputs': rule_inputs
        }
      ]
    content.extend([
        ['Target',
         {'Name': rule.target_name,
          'BeforeTargets': '$(%s)' % rule.before_targets,
          'AfterTargets': '$(%s)' % rule.after_targets,
          'Condition': "'@(%s)' != ''" % rule_name,
          'DependsOnTargets': '$(%s);%s' % (rule.depends_on,
                                            rule.compute_output),
          'Outputs': target_outputs,
          'Inputs': target_inputs
         },
         remove_section,
         inputs_section,
         logging_section,
         message_section,
         write_tlog_section,
         read_tlog_section,
         command_and_input_section,
        ],
        # Hook the ComputeOutput target into the link/lib input computation.
        ['PropertyGroup',
         ['ComputeLinkInputsTargets',
          '$(ComputeLinkInputsTargets);',
          '%s;' % rule.compute_output
         ],
         ['ComputeLibInputsTargets',
          '$(ComputeLibInputsTargets);',
          '%s;' % rule.compute_output
         ],
        ],
        # ComputeOutput: declare output dirs/files and create directories.
        ['Target',
         {'Name': rule.compute_output,
          'Condition': "'@(%s)' != ''" % rule_name
         },
         ['ItemGroup',
          [rule.dirs_to_make,
           {'Condition': "'@(%s)' != '' and "
            "'%%(%s.ExcludedFromBuild)' != 'true'" % (rule_name, rule_name),
            'Include': '%%(%s.Outputs)' % rule_name
           }
          ],
          ['Link',
           {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
            'Condition': extension_condition
           }
          ],
          ['Lib',
           {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
            'Condition': extension_condition
           }
          ],
          ['ImpLib',
           {'Include': '%%(%s.Identity)' % rule.dirs_to_make,
            'Condition': extension_condition
           }
          ],
         ],
         ['MakeDir',
          {'Directories': ("@(%s->'%%(RootDir)%%(Directory)')" %
                           rule.dirs_to_make)
          }
         ]
        ],
      ])
  easy_xml.WriteXmlIfChanged(content, targets_path, pretty=True, win32=True)
def _GenerateMSBuildRuleXmlFile(xml_path, msbuild_rules):
  """Generate the .xml property-page schema file for the rules."""
  # Generate the .xml file
  content = [
      'ProjectSchemaDefinitions',
      {'xmlns': ('clr-namespace:Microsoft.Build.Framework.XamlTypes;'
                 'assembly=Microsoft.Build.Framework'),
       'xmlns:x': 'http://schemas.microsoft.com/winfx/2006/xaml',
       'xmlns:sys': 'clr-namespace:System;assembly=mscorlib',
       'xmlns:transformCallback':
       'Microsoft.Cpp.Dev10.ConvertPropertyCallback'
      }
    ]
  for rule in msbuild_rules:
    content.extend([
        # Property page describing the rule and its settings.
        ['Rule',
         {'Name': rule.rule_name,
          'PageTemplate': 'tool',
          'DisplayName': rule.display_name,
          'Order': '200'
         },
         ['Rule.DataSource',
          ['DataSource',
           {'Persistence': 'ProjectFile',
            'ItemType': rule.rule_name
           }
          ]
         ],
         ['Rule.Categories',
          ['Category',
           {'Name': 'General'},
           ['Category.DisplayName',
            ['sys:String', 'General'],
           ],
          ],
          ['Category',
           {'Name': 'Command Line',
            'Subtype': 'CommandLine'
           },
           ['Category.DisplayName',
            ['sys:String', 'Command Line'],
           ],
          ],
         ],
         ['StringListProperty',
          {'Name': 'Inputs',
           'Category': 'Command Line',
           'IsRequired': 'true',
           'Switch': ' '
          },
          ['StringListProperty.DataSource',
           ['DataSource',
            {'Persistence': 'ProjectFile',
             'ItemType': rule.rule_name,
             'SourceType': 'Item'
            }
           ]
          ],
         ],
         ['StringProperty',
          {'Name': 'CommandLineTemplate',
           'DisplayName': 'Command Line',
           'Visible': 'False',
           'IncludeInCommandLine': 'False'
          }
         ],
         # Dropdown selecting which target this rule runs before.
         ['DynamicEnumProperty',
          {'Name': rule.before_targets,
           'Category': 'General',
           'EnumProvider': 'Targets',
           'IncludeInCommandLine': 'False'
          },
          ['DynamicEnumProperty.DisplayName',
           ['sys:String', 'Execute Before'],
          ],
          ['DynamicEnumProperty.Description',
           ['sys:String', 'Specifies the targets for the build customization'
            ' to run before.'
           ],
          ],
          ['DynamicEnumProperty.ProviderSettings',
           ['NameValuePair',
            {'Name': 'Exclude',
             'Value': '^%s|^Compute' % rule.before_targets
            }
           ]
          ],
          ['DynamicEnumProperty.DataSource',
           ['DataSource',
            {'Persistence': 'ProjectFile',
             'HasConfigurationCondition': 'true'
            }
           ]
          ],
         ],
         # Dropdown selecting which target this rule runs after.
         ['DynamicEnumProperty',
          {'Name': rule.after_targets,
           'Category': 'General',
           'EnumProvider': 'Targets',
           'IncludeInCommandLine': 'False'
          },
          ['DynamicEnumProperty.DisplayName',
           ['sys:String', 'Execute After'],
          ],
          ['DynamicEnumProperty.Description',
           ['sys:String', ('Specifies the targets for the build customization'
                           ' to run after.')
           ],
          ],
          ['DynamicEnumProperty.ProviderSettings',
           ['NameValuePair',
            {'Name': 'Exclude',
             'Value': '^%s|^Compute' % rule.after_targets
            }
           ]
          ],
          ['DynamicEnumProperty.DataSource',
           ['DataSource',
            {'Persistence': 'ProjectFile',
             'ItemType': '',
             'HasConfigurationCondition': 'true'
            }
           ]
          ],
         ],
         ['StringListProperty',
          {'Name': 'Outputs',
           'DisplayName': 'Outputs',
           'Visible': 'False',
           'IncludeInCommandLine': 'False'
          }
         ],
         ['StringProperty',
          {'Name': 'ExecutionDescription',
           'DisplayName': 'Execution Description',
           'Visible': 'False',
           'IncludeInCommandLine': 'False'
          }
         ],
         ['StringListProperty',
          {'Name': 'AdditionalDependencies',
           'DisplayName': 'Additional Dependencies',
           'IncludeInCommandLine': 'False',
           'Visible': 'false'
          }
         ],
         ['StringProperty',
          {'Subtype': 'AdditionalOptions',
           'Name': 'AdditionalOptions',
           'Category': 'Command Line'
          },
          ['StringProperty.DisplayName',
           ['sys:String', 'Additional Options'],
          ],
          ['StringProperty.Description',
           ['sys:String', 'Additional Options'],
          ],
         ],
        ],
        # Declare the rule's item type and bind its file extension to it.
        ['ItemType',
         {'Name': rule.rule_name,
          'DisplayName': rule.display_name
         }
        ],
        ['FileExtension',
         {'Name': '*' + rule.extension,
          'ContentType': rule.rule_name
         }
        ],
        ['ContentType',
         {'Name': rule.rule_name,
          'DisplayName': '',
          'ItemType': rule.rule_name
         }
        ]
      ])
  easy_xml.WriteXmlIfChanged(content, xml_path, pretty=True, win32=True)
def _GetConfigurationAndPlatform(name, settings):
configuration = name.rsplit('_', 1)[0]
platform = settings.get('msvs_configuration_platform', 'Win32')
return (configuration, platform)
def _GetConfigurationCondition(name, settings):
  """Return the MSBuild Condition string selecting this config|platform."""
  configuration, platform = _GetConfigurationAndPlatform(name, settings)
  return r"'$(Configuration)|$(Platform)'=='%s|%s'" % (configuration, platform)
def _GetMSBuildProjectConfigurations(configurations):
  """Return the ProjectConfigurations ItemGroup for an MSBuild project.

  One ProjectConfiguration element is emitted per configuration, in sorted
  order, carrying its 'Configuration|Platform' designation.
  """
  group = ['ItemGroup', {'Label': 'ProjectConfigurations'}]
  for config_name, config_settings in sorted(configurations.iteritems()):
    config, plat = _GetConfigurationAndPlatform(config_name, config_settings)
    group.append(
        ['ProjectConfiguration', {'Include': '%s|%s' % (config, plat)},
         ['Configuration', config],
         ['Platform', plat]])
  return [group]
def _GetMSBuildGlobalProperties(spec, guid, gyp_file_name):
namespace = os.path.splitext(gyp_file_name)[0]
return [
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', guid],
['Keyword', 'Win32Proj'],
['RootNamespace', namespace],
]
]
def _GetMSBuildConfigurationDetails(spec, build_file):
  """Build the per-configuration 'Configuration' PropertyGroup.

  Collects ConfigurationType (always) and CharacterSet (when present) for
  every configuration, each tied to that configuration's Condition, and
  folds them into a single labeled PropertyGroup.
  """
  properties = {}
  for name, settings in spec['configurations'].iteritems():
    msbuild_attributes = _GetMSBuildAttributes(spec, settings, build_file)
    condition = _GetConfigurationCondition(name, settings)
    character_set = msbuild_attributes.get('CharacterSet')
    _AddConditionalProperty(properties, condition, 'ConfigurationType',
                            msbuild_attributes['ConfigurationType'])
    # CharacterSet is optional; only emit it when the attributes define one.
    if character_set:
      _AddConditionalProperty(properties, condition, 'CharacterSet',
                              character_set)
  return _GetMSBuildPropertyGroup(spec, 'Configuration', properties)
def _GetMSBuildLocalProperties(msbuild_toolset):
# Currently the only local property we support is PlatformToolset
properties = {}
if msbuild_toolset:
properties = [
['PropertyGroup', {'Label': 'Locals'},
['PlatformToolset', msbuild_toolset],
]
]
return properties
def _GetMSBuildPropertySheets(configurations):
  """Return the PropertySheets ImportGroup(s) for the project.

  When no configuration specifies 'msbuild_props', a single unconditional
  group importing the per-user .props file is emitted.  Otherwise one
  conditional group per configuration is emitted, importing the user props
  file followed by that configuration's additional props files.
  """
  user_props = r'$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props'
  additional_props = {}
  props_specified = False
  for name, settings in sorted(configurations.iteritems()):
    configuration = _GetConfigurationCondition(name, settings)
    # dict.has_key() is deprecated (and removed in Python 3); 'in' is the
    # equivalent, portable membership test.
    if 'msbuild_props' in settings:
      additional_props[configuration] = _FixPaths(settings['msbuild_props'])
      props_specified = True
    else:
     additional_props[configuration] = ''

  if not props_specified:
    return [
        ['ImportGroup',
         {'Label': 'PropertySheets'},
         ['Import',
          {'Project': user_props,
           'Condition': "exists('%s')" % user_props,
           'Label': 'LocalAppDataPlatform'
          }
         ]
        ]
    ]
  else:
    sheets = []
    for condition, props in additional_props.iteritems():
      import_group = [
        'ImportGroup',
        {'Label': 'PropertySheets',
         'Condition': condition
        },
        ['Import',
         {'Project': user_props,
          'Condition': "exists('%s')" % user_props,
          'Label': 'LocalAppDataPlatform'
         }
        ]
      ]
      for props_file in props:
        import_group.append(['Import', {'Project':props_file}])
      sheets.append(import_group)
    return sheets
def _ConvertMSVSBuildAttributes(spec, config, build_file):
  """Convert the MSVS (VS2008-style) build attributes to MSBuild form.

  Directory attributes get VC macros converted and a trailing backslash
  enforced; CharacterSet and ConfigurationType are mapped from their numeric
  MSVS encodings.  Unknown attributes produce a warning and are dropped.
  """
  config_type = _GetMSVSConfigurationType(spec, build_file)
  msvs_attributes = _GetMSVSAttributes(spec, config, config_type)
  msbuild_attributes = {}
  for a in msvs_attributes:
    if a in ['IntermediateDirectory', 'OutputDirectory']:
      directory = MSVSSettings.ConvertVCMacrosToMSBuild(msvs_attributes[a])
      # MSBuild expects directory properties to end with a backslash.
      if not directory.endswith('\\'):
        directory += '\\'
      msbuild_attributes[a] = directory
    elif a == 'CharacterSet':
      msbuild_attributes[a] = _ConvertMSVSCharacterSet(msvs_attributes[a])
    elif a == 'ConfigurationType':
      msbuild_attributes[a] = _ConvertMSVSConfigurationType(msvs_attributes[a])
    else:
      # Unhandled attributes are intentionally dropped after warning.
      print 'Warning: Do not know how to convert MSVS attribute ' + a
  return msbuild_attributes
def _ConvertMSVSCharacterSet(char_set):
if char_set.isdigit():
char_set = {
'0': 'MultiByte',
'1': 'Unicode',
'2': 'MultiByte',
}[char_set]
return char_set
def _ConvertMSVSConfigurationType(config_type):
if config_type.isdigit():
config_type = {
'1': 'Application',
'2': 'DynamicLibrary',
'4': 'StaticLibrary',
'10': 'Utility'
}[config_type]
return config_type
def _GetMSBuildAttributes(spec, config, build_file):
  """Compute the MSBuild attribute dictionary for one configuration.

  Starts from either explicit 'msbuild_configuration_attributes' or the
  converted MSVS attributes, then fills in defaults for OutputDirectory,
  IntermediateDirectory, TargetName and TargetExt.  Also derives TargetPath
  from the tool's OutputFile so MSBuild does not emit the MSB8012 warning.
  """
  if 'msbuild_configuration_attributes' not in config:
    msbuild_attributes = _ConvertMSVSBuildAttributes(spec, config, build_file)

  else:
    config_type = _GetMSVSConfigurationType(spec, build_file)
    config_type = _ConvertMSVSConfigurationType(config_type)
    msbuild_attributes = config.get('msbuild_configuration_attributes', {})
    msbuild_attributes.setdefault('ConfigurationType', config_type)
  output_dir = msbuild_attributes.get('OutputDirectory',
                                      '$(SolutionDir)$(Configuration)')
  msbuild_attributes['OutputDirectory'] = _FixPath(output_dir) + '\\'
  if 'IntermediateDirectory' not in msbuild_attributes:
    intermediate = _FixPath('$(Configuration)') + '\\'
    msbuild_attributes['IntermediateDirectory'] = intermediate
  if 'CharacterSet' in msbuild_attributes:
    msbuild_attributes['CharacterSet'] = _ConvertMSVSCharacterSet(
        msbuild_attributes['CharacterSet'])
  if 'TargetName' not in msbuild_attributes:
    prefix = spec.get('product_prefix', '')
    product_name = spec.get('product_name', '$(ProjectName)')
    target_name = prefix + product_name
    msbuild_attributes['TargetName'] = target_name
  if 'TargetExt' not in msbuild_attributes and 'product_extension' in spec:
    ext = spec.get('product_extension')
    msbuild_attributes['TargetExt'] = '.' + ext
  # Make sure that 'TargetPath' matches 'Lib.OutputFile' or 'Link.OutputFile'
  # (depending on the tool used) to avoid MSB8012 warning.
  msbuild_tool_map = {
      'executable': 'Link',
      'shared_library': 'Link',
      'loadable_module': 'Link',
      'static_library': 'Lib',
  }
  # 'none'-type targets have no linker/librarian tool, so no TargetPath.
  msbuild_tool = msbuild_tool_map.get(spec['type'])
  if msbuild_tool:
    msbuild_settings = config['finalized_msbuild_settings']
    out_file = msbuild_settings[msbuild_tool].get('OutputFile')
    if out_file:
      msbuild_attributes['TargetPath'] = _FixPath(out_file)
  return msbuild_attributes
def _GetMSBuildConfigurationGlobalProperties(spec, configurations, build_file):
  """Build the unlabeled per-configuration PropertyGroup of global settings.

  Emits IntDir/OutDir/TargetName (plus TargetExt/TargetPath when present),
  an ExecutablePath extended with the cygwin and python directories, and all
  settings registered under the '' pseudo-tool.
  """
  # TODO(jeanluc) We could optimize out the following and do it only if
  # there are actions.
  # TODO(jeanluc) Handle the equivalent of setting 'CYGWIN=nontsec'.
  new_paths = []
  cygwin_dirs = spec.get('msvs_cygwin_dirs', ['.'])[0]
  if cygwin_dirs:
    cyg_path = '$(MSBuildProjectDirectory)\\%s\\bin\\' % _FixPath(cygwin_dirs)
    new_paths.append(cyg_path)
    # TODO(jeanluc) Change the convention to have both a cygwin_dir and a
    # python_dir.
    python_path = cyg_path.replace('cygwin\\bin', 'python_26')
    new_paths.append(python_path)
    if new_paths:
      new_paths = '$(ExecutablePath);' + ';'.join(new_paths)

  properties = {}
  for (name, configuration) in sorted(configurations.iteritems()):
    condition = _GetConfigurationCondition(name, configuration)
    attributes = _GetMSBuildAttributes(spec, configuration, build_file)
    msbuild_settings = configuration['finalized_msbuild_settings']
    _AddConditionalProperty(properties, condition, 'IntDir',
                            attributes['IntermediateDirectory'])
    _AddConditionalProperty(properties, condition, 'OutDir',
                            attributes['OutputDirectory'])
    _AddConditionalProperty(properties, condition, 'TargetName',
                            attributes['TargetName'])

    if 'TargetExt' in attributes:
      _AddConditionalProperty(properties, condition, 'TargetExt',
                              attributes['TargetExt'])

    if attributes.get('TargetPath'):
      _AddConditionalProperty(properties, condition, 'TargetPath',
                              attributes['TargetPath'])

    if new_paths:
      _AddConditionalProperty(properties, condition, 'ExecutablePath',
                              new_paths)
    # The '' tool holds global (non-tool-specific) settings; real tools are
    # handled by _GetMSBuildToolSettingsSections.
    tool_settings = msbuild_settings.get('', {})
    for name, value in sorted(tool_settings.iteritems()):
      formatted_value = _GetValueFormattedForMSBuild('', name, value)
      _AddConditionalProperty(properties, condition, name, formatted_value)
  return _GetMSBuildPropertyGroup(spec, None, properties)
def _AddConditionalProperty(properties, condition, name, value):
"""Adds a property / conditional value pair to a dictionary.
Arguments:
properties: The dictionary to be modified. The key is the name of the
property. The value is itself a dictionary; its key is the value and
the value a list of condition for which this value is true.
condition: The condition under which the named property has the value.
name: The name of the property.
value: The value of the property.
"""
if name not in properties:
properties[name] = {}
values = properties[name]
if value not in values:
values[value] = []
conditions = values[value]
conditions.append(condition)
# Regex for msvs variable references ( i.e. $(FOO) ).
# Raw string: '\$' and '\(' are invalid string-escape sequences in a
# non-raw literal (a DeprecationWarning/SyntaxWarning on modern Pythons).
MSVS_VARIABLE_REFERENCE = re.compile(r'\$\(([a-zA-Z_][a-zA-Z0-9_]*)\)')
def _GetMSBuildPropertyGroup(spec, label, properties):
  """Returns a PropertyGroup definition for the specified properties.

  Arguments:
    spec: The target project dict.
    label: An optional label for the PropertyGroup.
    properties: The dictionary to be converted.  The key is the name of the
        property.  The value is itself a dictionary; its key is the value and
        the value a list of condition for which this value is true.
  """
  group = ['PropertyGroup']
  if label:
    group.append({'Label': label})
  num_configurations = len(spec['configurations'])
  def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_varible.
    # This happens to be easier in this case, since a variable's
    # definition contains all variables it references in a single string.
    edges = set()
    for value in sorted(properties[node].keys()):
      # Add to edges all $(...) references to variables.
      #
      # Variable references that refer to names not in properties are excluded
      # These can exist for instance to refer built in definitions like
      # $(SolutionDir).
      #
      # Self references are ignored. Self reference is used in a few places to
      # append to the default value. I.e. PATH=$(PATH);other_path
      edges.update(set([v for v in MSVS_VARIABLE_REFERENCE.findall(value)
                        if v in properties and v != node]))
    return edges
  properties_ordered = gyp.common.TopologicallySorted(
      properties.keys(), GetEdges)
  # Walk properties in the reverse of a topological sort on
  # user_of_variable -> used_variable as this ensures variables are
  # defined before they are used.
  # NOTE: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
  for name in reversed(properties_ordered):
    values = properties[name]
    for value, conditions in sorted(values.iteritems()):
      if len(conditions) == num_configurations:
        # If the value is the same all configurations,
        # just add one unconditional entry.
        group.append([name, value])
      else:
        # Otherwise emit one conditional element per configuration.
        for condition in conditions:
          group.append([name, {'Condition': condition}, value])
  return [group]
def _GetMSBuildToolSettingsSections(spec, configurations):
  """Build one conditional ItemDefinitionGroup per configuration.

  Each group carries the formatted settings of every named tool; empty
  tools and the global '' pseudo-tool are skipped.
  """
  groups = []
  for (name, configuration) in sorted(configurations.iteritems()):
    msbuild_settings = configuration['finalized_msbuild_settings']
    group = ['ItemDefinitionGroup',
             {'Condition': _GetConfigurationCondition(name, configuration)}
            ]
    for tool_name, tool_settings in sorted(msbuild_settings.iteritems()):
      # Skip the tool named '' which is a holder of global settings handled
      # by _GetMSBuildConfigurationGlobalProperties.
      if tool_name:
        if tool_settings:
          tool = [tool_name]
          # NOTE(review): this inner 'name' shadows the configuration name
          # from the outer loop; harmless here since the outer 'name' is not
          # used again below, but worth keeping in mind.
          for name, value in sorted(tool_settings.iteritems()):
            formatted_value = _GetValueFormattedForMSBuild(tool_name, name,
                                                           value)
            tool.append([name, formatted_value])
          group.append(tool)
    groups.append(group)
  return groups
def _FinalizeMSBuildSettings(spec, configuration):
  """Merge gyp-level information into the configuration's MSBuild settings.

  Starts from explicit 'msbuild_settings' (validated) or settings converted
  from 'msvs_settings', then folds in include dirs, libraries, output file,
  defines, disabled warnings, precompiled-header options and the module
  definition file.  Stores the result in
  configuration['finalized_msbuild_settings'] (mutates *configuration*).
  """
  if 'msbuild_settings' in configuration:
    converted = False
    msbuild_settings = configuration['msbuild_settings']
    MSVSSettings.ValidateMSBuildSettings(msbuild_settings)
  else:
    converted = True
    msvs_settings = configuration.get('msvs_settings', {})
    msbuild_settings = MSVSSettings.ConvertToMSBuildSettings(msvs_settings)
  include_dirs, resource_include_dirs = _GetIncludeDirs(configuration)
  libraries = _GetLibraries(spec)
  out_file, _, msbuild_tool = _GetOutputFilePathAndTool(spec, msbuild=True)
  defines = _GetDefines(configuration)
  if converted:
    # Visual Studio 2010 has TR1
    defines = [d for d in defines if d != '_HAS_TR1=0']
    # Warn of ignored settings
    ignored_settings = ['msvs_prebuild', 'msvs_postbuild', 'msvs_tool_files']
    for ignored_setting in ignored_settings:
      value = configuration.get(ignored_setting)
      if value:
        print ('Warning: The automatic conversion to MSBuild does not handle '
               '%s. Ignoring setting of %s' % (ignored_setting, str(value)))
  defines = [_EscapeCppDefineForMSBuild(d) for d in defines]
  disabled_warnings = _GetDisabledWarnings(configuration)
  # TODO(jeanluc) Validate & warn that we don't translate
  # prebuild = configuration.get('msvs_prebuild')
  # postbuild = configuration.get('msvs_postbuild')
  def_file = _GetModuleDefinition(spec)
  precompiled_header = configuration.get('msvs_precompiled_header')
  # Add the information to the appropriate tool
  # TODO(jeanluc) We could optimize and generate these settings only if
  # the corresponding files are found, e.g. don't generate ResourceCompile
  # if you don't have any resources.
  _ToolAppend(msbuild_settings, 'ClCompile',
              'AdditionalIncludeDirectories', include_dirs)
  _ToolAppend(msbuild_settings, 'ResourceCompile',
              'AdditionalIncludeDirectories', resource_include_dirs)
  # Add in libraries, note that even for empty libraries, we want this
  # set, to prevent inheriting default libraries from the enviroment.
  _ToolSetOrAppend(msbuild_settings, 'Link', 'AdditionalDependencies',
                  libraries)
  if out_file:
    _ToolAppend(msbuild_settings, msbuild_tool, 'OutputFile', out_file,
                only_if_unset=True)
  # Add defines.
  _ToolAppend(msbuild_settings, 'ClCompile',
              'PreprocessorDefinitions', defines)
  _ToolAppend(msbuild_settings, 'ResourceCompile',
              'PreprocessorDefinitions', defines)
  # Add disabled warnings.
  _ToolAppend(msbuild_settings, 'ClCompile',
              'DisableSpecificWarnings', disabled_warnings)
  # Turn on precompiled headers if appropriate.
  if precompiled_header:
    # Only the file name is used; the header is located via include dirs.
    precompiled_header = os.path.split(precompiled_header)[1]
    _ToolAppend(msbuild_settings, 'ClCompile', 'PrecompiledHeader', 'Use')
    _ToolAppend(msbuild_settings, 'ClCompile',
                'PrecompiledHeaderFile', precompiled_header)
    _ToolAppend(msbuild_settings, 'ClCompile',
                'ForcedIncludeFiles', precompiled_header)
  # Loadable modules don't generate import libraries;
  # tell dependent projects to not expect one.
  if spec['type'] == 'loadable_module':
    _ToolAppend(msbuild_settings, '', 'IgnoreImportLibrary', 'true')
  # Set the module definition file if any.
  if def_file:
    _ToolAppend(msbuild_settings, 'Link', 'ModuleDefinitionFile', def_file)
  configuration['finalized_msbuild_settings'] = msbuild_settings
def _GetValueFormattedForMSBuild(tool_name, name, value):
  """Format a tool setting value for inclusion in the MSBuild project.

  List values are joined with the separator MSBuild expects for that setting
  (';' normally, ' ' for the AdditionalOptions settings).  List-extending
  settings additionally get an '%(name)' back-reference appended so the
  inherited value is preserved.  Scalars only get VC macro conversion.
  """
  # isinstance is the idiomatic (and subclass-friendly) type test; the old
  # `type(value) == list` comparison behaved the same for exact lists.
  if isinstance(value, list):
    # For some settings, VS2010 does not automatically extends the settings
    # TODO(jeanluc) Is this what we want?
    # NOTE(review): this appends to the caller's list in place — callers see
    # the '%(name)' entry after this returns.
    if name in ['AdditionalIncludeDirectories',
                'AdditionalLibraryDirectories',
                'AdditionalOptions',
                'DelayLoadDLLs',
                'DisableSpecificWarnings',
                'PreprocessorDefinitions']:
      value.append('%%(%s)' % name)
    # For most tools, entries in a list should be separated with ';' but some
    # settings use a space.  Check for those first.
    exceptions = {
        'ClCompile': ['AdditionalOptions'],
        'Link': ['AdditionalOptions'],
        'Lib': ['AdditionalOptions']}
    if tool_name in exceptions and name in exceptions[tool_name]:
      char = ' '
    else:
      char = ';'
    formatted_value = char.join(
        [MSVSSettings.ConvertVCMacrosToMSBuild(i) for i in value])
  else:
    formatted_value = MSVSSettings.ConvertVCMacrosToMSBuild(value)
  return formatted_value
def _VerifySourcesExist(sources, root_dir):
  """Verifies that all source files exist on disk.

  Checks that all regular source files, i.e. not created at run time,
  exist on disk.  Missing files cause needless recompilation but no otherwise
  visible errors.

  Arguments:
    sources: A recursive list of Filter/file names.
    root_dir: The root directory for the relative path names.
  Returns:
    A list of source files that cannot be found on disk.
  """
  missing_sources = []
  for source in sources:
    if isinstance(source, MSVSProject.Filter):
      # Recurse into filter (folder) nodes.
      missing_sources.extend(_VerifySourcesExist(source.contents, root_dir))
    else:
      # Paths containing '$' reference build variables and cannot be
      # checked statically.
      if '$' not in source:
        full_path = os.path.join(root_dir, source)
        if not os.path.exists(full_path):
          missing_sources.append(full_path)
  return missing_sources
def _GetMSBuildSources(spec, sources, exclusions, extension_to_rule_name,
                       actions_spec, sources_handled_by_action, list_excluded):
  """Partition sources into per-type ItemGroups, plus the actions ItemGroup.

  Group order is fixed so the generated project file is stable.
  """
  groups = ['none', 'midl', 'include', 'compile', 'resource', 'rule']
  grouped_sources = dict((g, []) for g in groups)
  _AddSources2(spec, sources, exclusions, grouped_sources,
               extension_to_rule_name, sources_handled_by_action,
               list_excluded)
  result = []
  for g in groups:
    if grouped_sources[g]:
      result.append(['ItemGroup'] + grouped_sources[g])
  if actions_spec:
    result.append(['ItemGroup'] + actions_spec)
  return result
def _AddSources2(spec, sources, exclusions, grouped_sources,
                 extension_to_rule_name, sources_handled_by_action,
                 list_excluded):
  """Recursively classify sources into grouped_sources, with per-file detail.

  For every plain source not already handled by an action, computes its
  ExcludedFromBuild and PrecompiledHeader child elements and appends the
  element to the appropriate group (mutates *grouped_sources*).
  """
  extensions_excluded_from_precompile = []
  for source in sources:
    if isinstance(source, MSVSProject.Filter):
      # Descend into filter (folder) nodes.
      _AddSources2(spec, source.contents, exclusions, grouped_sources,
                   extension_to_rule_name, sources_handled_by_action,
                   list_excluded)
    else:
      if not source in sources_handled_by_action:
        detail = []
        excluded_configurations = exclusions.get(source, [])
        # Excluded everywhere -> one unconditional exclusion; otherwise one
        # conditional exclusion per excluding configuration.
        if len(excluded_configurations) == len(spec['configurations']):
          detail.append(['ExcludedFromBuild', 'true'])
        else:
          for config_name, configuration in sorted(excluded_configurations):
            condition = _GetConfigurationCondition(config_name, configuration)
            detail.append(['ExcludedFromBuild',
                           {'Condition': condition},
                           'true'])
        # Add precompile if needed
        for config_name, configuration in spec['configurations'].iteritems():
          precompiled_source = configuration.get('msvs_precompiled_source', '')
          if precompiled_source != '':
            precompiled_source = _FixPath(precompiled_source)
            if not extensions_excluded_from_precompile:
              # If the precompiled header is generated by a C source, we must
              # not try to use it for C++ sources, and vice versa.
              basename, extension = os.path.splitext(precompiled_source)
              if extension == '.c':
                extensions_excluded_from_precompile = ['.cc', '.cpp', '.cxx']
              else:
                extensions_excluded_from_precompile = ['.c']
            if precompiled_source == source:
              condition = _GetConfigurationCondition(config_name, configuration)
              detail.append(['PrecompiledHeader',
                             {'Condition': condition},
                             'Create'
                            ])
            else:
              # Turn off precompiled header usage for source files of a
              # different type than the file that generated the
              # precompiled header.
              for extension in extensions_excluded_from_precompile:
                if source.endswith(extension):
                  detail.append(['PrecompiledHeader', ''])
                  detail.append(['ForcedIncludeFiles', ''])

        group, element = _MapFileToMsBuildSourceType(source,
                                                     extension_to_rule_name)
        grouped_sources[group].append([element, {'Include': source}] + detail)
def _GetMSBuildProjectReferences(project):
  """Build the ProjectReference ItemGroup for the project's dependencies.

  Each dependency is referenced by its path relative to this project, with
  ReferenceOutputAssembly disabled (these are native, not managed, refs).
  """
  references = []
  if project.dependencies:
    group = ['ItemGroup']
    for dependency in project.dependencies:
      guid = dependency.guid
      project_dir = os.path.split(project.path)[0]
      relative_path = gyp.common.RelativePath(dependency.path, project_dir)
      project_ref = ['ProjectReference',
          {'Include': relative_path},
          ['Project', guid],
          ['ReferenceOutputAssembly', 'false']
          ]
      for config in dependency.spec.get('configurations', {}).itervalues():
        # If it's disabled in any config, turn it off in the reference.
        if config.get('msvs_2010_disable_uldi_when_referenced', 0):
          project_ref.append(['UseLibraryDependencyInputs', 'false'])
          break
      group.append(project_ref)
    references.append(group)
  return references
def _GenerateMSBuildProject(project, options, version, generator_flags):
  """Generate the .vcxproj (and .filters) files for one project.

  Collects sources, rules, actions and copies, finalizes per-configuration
  MSBuild settings, assembles the project XML in the order MSBuild requires,
  and writes it only if it changed.

  Returns:
    The list of source files referenced by the project but missing on disk.
  """
  spec = project.spec
  configurations = spec['configurations']
  project_dir, project_file_name = os.path.split(project.path)
  msbuildproj_dir = os.path.dirname(project.path)
  if msbuildproj_dir and not os.path.exists(msbuildproj_dir):
    os.makedirs(msbuildproj_dir)
  # Prepare list of sources and excluded sources.
  gyp_path = _NormalizedSource(project.build_file)
  relative_path_of_gyp_file = gyp.common.RelativePath(gyp_path, project_dir)

  gyp_file = os.path.split(project.build_file)[1]
  sources, excluded_sources = _PrepareListOfSources(spec, generator_flags,
                                                    gyp_file)
  # Add rules.
  actions_to_add = {}
  props_files_of_rules = set()
  targets_files_of_rules = set()
  extension_to_rule_name = {}
  list_excluded = generator_flags.get('msvs_list_excluded_files', True)
  _GenerateRulesForMSBuild(project_dir, options, spec,
                           sources, excluded_sources,
                           props_files_of_rules, targets_files_of_rules,
                           actions_to_add, extension_to_rule_name)
  sources, excluded_sources, excluded_idl = (
      _AdjustSourcesAndConvertToFilterHierarchy(spec, options,
                                                project_dir, sources,
                                                excluded_sources,
                                                list_excluded))
  _AddActions(actions_to_add, spec, project.build_file)
  _AddCopies(actions_to_add, spec)

  # NOTE: this stanza must appear after all actions have been decided.
  # Don't excluded sources with actions attached, or they won't run.
  excluded_sources = _FilterActionsFromExcluded(
      excluded_sources, actions_to_add)
  exclusions = _GetExcludedFilesFromBuild(spec, excluded_sources, excluded_idl)
  actions_spec, sources_handled_by_action = _GenerateActionsForMSBuild(
      spec, actions_to_add)

  _GenerateMSBuildFiltersFile(project.path + '.filters', sources,
                              extension_to_rule_name)
  missing_sources = _VerifySourcesExist(sources, project_dir)

  for configuration in configurations.itervalues():
    _FinalizeMSBuildSettings(spec, configuration)

  # Add attributes to root element
  import_default_section = [
      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.Default.props'}]]
  import_cpp_props_section = [
      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.props'}]]
  import_cpp_targets_section = [
      ['Import', {'Project': r'$(VCTargetsPath)\Microsoft.Cpp.targets'}]]
  macro_section = [['PropertyGroup', {'Label': 'UserMacros'}]]

  # The order of the sections below mirrors what Visual Studio itself writes
  # and is significant to MSBuild's property evaluation.
  content = [
      'Project',
      {'xmlns': 'http://schemas.microsoft.com/developer/msbuild/2003',
       'ToolsVersion': version.ProjectVersion(),
       'DefaultTargets': 'Build'
      }]

  content += _GetMSBuildProjectConfigurations(configurations)
  content += _GetMSBuildGlobalProperties(spec, project.guid, project_file_name)
  content += import_default_section
  content += _GetMSBuildConfigurationDetails(spec, project.build_file)
  content += _GetMSBuildLocalProperties(project.msbuild_toolset)
  content += import_cpp_props_section
  content += _GetMSBuildExtensions(props_files_of_rules)
  content += _GetMSBuildPropertySheets(configurations)
  content += macro_section
  content += _GetMSBuildConfigurationGlobalProperties(spec, configurations,
                                                      project.build_file)
  content += _GetMSBuildToolSettingsSections(spec, configurations)
  content += _GetMSBuildSources(
      spec, sources, exclusions, extension_to_rule_name, actions_spec,
      sources_handled_by_action, list_excluded)
  content += _GetMSBuildProjectReferences(project)
  content += import_cpp_targets_section
  content += _GetMSBuildExtensionTargets(targets_files_of_rules)

  # TODO(jeanluc) File a bug to get rid of runas.  We had in MSVS:
  # has_run_as = _WriteMSVSUserFile(project.path, version, spec)

  easy_xml.WriteXmlIfChanged(content, project.path, pretty=True, win32=True)

  return missing_sources
def _GetMSBuildExtensions(props_files_of_rules):
extensions = ['ImportGroup', {'Label': 'ExtensionSettings'}]
for props_file in props_files_of_rules:
extensions.append(['Import', {'Project': props_file}])
return [extensions]
def _GetMSBuildExtensionTargets(targets_files_of_rules):
targets_node = ['ImportGroup', {'Label': 'ExtensionTargets'}]
for targets_file in sorted(targets_files_of_rules):
targets_node.append(['Import', {'Project': targets_file}])
return [targets_node]
def _GenerateActionsForMSBuild(spec, actions_to_add):
  """Add actions accumulated into an actions_to_add, merging as needed.

  Arguments:
    spec: the target project dict
    actions_to_add: dictionary keyed on input name, which maps to a list of
        dicts describing the actions attached to that input file.

  Returns:
    A pair of (action specification, the sources handled by this action).
  """
  sources_handled_by_action = set()
  actions_spec = []
  for primary_input, actions in actions_to_add.iteritems():
    # All actions sharing a primary input are merged into one CustomBuild:
    # their inputs/outputs are unioned, descriptions joined, commands chained.
    inputs = set()
    outputs = set()
    descriptions = []
    commands = []
    for action in actions:
      inputs.update(set(action['inputs']))
      outputs.update(set(action['outputs']))
      descriptions.append(action['description'])
      cmd = action['command']
      # For most actions, add 'call' so that actions that invoke batch files
      # return and continue executing.  msbuild_use_call provides a way to
      # disable this but I have not seen any adverse effect from doing that
      # for everything.
      if action.get('msbuild_use_call', True):
        cmd = 'call ' + cmd
      commands.append(cmd)
    # Add the custom build action for one input file.
    description = ', and also '.join(descriptions)

    # We can't join the commands simply with && because the command line will
    # get too long. See also _AddActions: cygwin's setup_env mustn't be called
    # for every invocation or the command that sets the PATH will grow too
    # long.
    command = (
        '\r\nif %errorlevel% neq 0 exit /b %errorlevel%\r\n'.join(commands))
    _AddMSBuildAction(spec,
                      primary_input,
                      inputs,
                      outputs,
                      command,
                      description,
                      sources_handled_by_action,
                      actions_spec)
  return actions_spec, sources_handled_by_action
def _AddMSBuildAction(spec, primary_input, inputs, outputs, cmd, description,
                      sources_handled_by_action, actions_spec):
  """Append one CustomBuild element for *primary_input* to actions_spec.

  Converts VC macros in the command, fixes up all paths, and records the
  primary input in *sources_handled_by_action* so it is not also emitted as
  a regular source (mutates both output arguments).
  """
  command = MSVSSettings.ConvertVCMacrosToMSBuild(cmd)
  primary_input = _FixPath(primary_input)
  inputs_array = _FixPaths(inputs)
  outputs_array = _FixPaths(outputs)
  # The primary input is implied by the Include attribute; only the other
  # inputs are listed as AdditionalInputs.
  additional_inputs = ';'.join([i for i in inputs_array
                                if i != primary_input])
  outputs = ';'.join(outputs_array)
  sources_handled_by_action.add(primary_input)
  action_spec = ['CustomBuild', {'Include': primary_input}]
  action_spec.extend(
      # TODO(jeanluc) 'Document' for all or just if as_sources?
      [['FileType', 'Document'],
       ['Command', command],
       ['Message', description],
       ['Outputs', outputs]
      ])
  if additional_inputs:
    action_spec.append(['AdditionalInputs', additional_inputs])
  actions_spec.append(action_spec)
| mit |
chenmoshushi/shogun | examples/undocumented/python_modular/structure_graphcuts.py | 14 | 8660 | #!/usr/bin/env python
import numpy as np
import itertools
from modshogun import Factor, TableFactorType, FactorGraph
from modshogun import FactorGraphObservation, FactorGraphLabels, FactorGraphFeatures
from modshogun import FactorGraphModel, GRAPH_CUT
from modshogun import GraphCut
from modshogun import StochasticSOSVM
def generate_data(num_train_samples, len_label, len_feat):
    """ Generate synthetic dataset

    Generate random data following [1]:
    Each example has exactly one label on.
    Each label has 40 related binary features.
    For an example, if label i is on, 4i randomly chosen features are set to 1

    [1] Finley, Thomas, and Thorsten Joachims.
    "Training structural SVMs when exact inference is intractable."
    Proceedings of the 25th international conference on Machine learning. ACM, 2008.

    Args:
        num_train_samples: number of samples
        len_label: label length (10)
        len_feat: feature length (40)

    Returns:
        feats: generated feature matrix
        labels: generated label matrix
    """
    labels = np.zeros((num_train_samples, len_label), np.int32)
    feats = np.zeros((num_train_samples, len_feat), np.int32)
    for sample_idx in range(num_train_samples):
        label_idx = sample_idx % len_label
        labels[sample_idx, label_idx] = 1
        # switch on 4*(i+1) distinct, randomly chosen features
        chosen = np.random.permutation(len_feat)[:4 * (label_idx + 1)]
        feats[sample_idx, chosen] = 1
    return (labels, feats)
def define_factor_types(num_vars, len_feat, edge_table):
    """ Define factor types

    Args:
        num_vars: number of variables in factor graph
        len_feat: length of the feature vector
        edge_table: edge table defines pair-wise node indeces

    Returns:
        v_factor_types: list of all unary and pair-wise factor types
    """
    n_stats = 2  # binary states per variable
    v_factor_types = {}
    # unary factor types: ids 0..num_vars-1, weights sized n_stats*len_feat
    unary_card = np.array([n_stats], np.int32)
    unary_w = np.zeros(n_stats*len_feat)
    for var_idx in range(num_vars):
        v_factor_types[var_idx] = TableFactorType(var_idx, unary_card, unary_w)
    # pair-wise factor types: ids num_vars.., weights sized n_stats*n_stats
    pair_card = np.array([n_stats, n_stats], np.int32)
    pair_w = np.zeros(n_stats*n_stats)
    for edge_idx in range(edge_table.shape[0]):
        type_id = edge_idx + num_vars
        v_factor_types[type_id] = TableFactorType(type_id, pair_card, pair_w)
    return v_factor_types
def build_factor_graph_model(labels, feats, factor_types, edge_table, infer_alg = GRAPH_CUT):
    """ Build factor graph model

    Args:
        labels: matrix of labels [num_train_samples*len_label]
        feats: maxtrix of feats [num_train_samples*len_feat]
        factor_types: vectors of all factor types
        edge_table: matrix of pairwised edges, each row is a pair of node indeces
        infer_alg: inference algorithm (GRAPH_CUT)

    Returns:
        labels_fg: matrix of labels in factor graph format
        feats_fg: matrix of features in factor graph format
    """
    labels = labels.astype(np.int32)
    num_train_samples = labels.shape[0]
    num_vars = labels.shape[1]
    num_edges = edge_table.shape[0]
    n_stats = 2

    feats_fg = FactorGraphFeatures(num_train_samples)
    labels_fg = FactorGraphLabels(num_train_samples)

    # Build one factor graph per training sample.
    for i in range(num_train_samples):
        cardinaities = np.array([n_stats]*num_vars, np.int32)
        fg = FactorGraph(cardinaities)

        # add unary factors: each variable sees the full feature vector
        for u in range(num_vars):
            data_u = np.array(feats[i,:], np.float64)
            inds_u = np.array([u], np.int32)
            factor_u = Factor(factor_types[u], inds_u, data_u)
            fg.add_factor(factor_u)

        # add pairwise factors with constant data [1.0]
        for v in range(num_edges):
            data_p = np.array([1.0])
            inds_p = np.array(edge_table[v, :], np.int32)
            factor_p = Factor(factor_types[v + num_vars], inds_p, data_p)
            fg.add_factor(factor_p)

        # add factor graph
        feats_fg.add_sample(fg)

        # add corresponding label, with uniform per-variable loss weights
        loss_weights = np.array([1.0/num_vars]*num_vars)
        fg_obs = FactorGraphObservation(labels[i,:], loss_weights)
        labels_fg.add_label(fg_obs)

    return (labels_fg, feats_fg)
def evaluation(labels_pr, labels_gt, model):
    """ Evaluation

    Args:
        labels_pr: predicted label
        labels_gt: ground truth label
        model: factor graph model

    Returns:
        ave_loss: average loss
    """
    num_samples = labels_pr.get_num_labels()
    if num_samples == 0:
        # no labels -> average loss is 0.0, matching the accumulator default
        return 0.0
    total_loss = 0.0
    for idx in range(num_samples):
        y_pred = labels_pr.get_label(idx)
        y_truth = labels_gt.get_label(idx)
        total_loss += model.delta_loss(y_truth, y_pred)
    return total_loss / num_samples
def graphcuts_sosvm(num_train_samples = 20, len_label = 10, len_feat = 40, num_test_samples = 10):
    """ Graph cuts as approximate inference in structured output SVM framework.

    Args:
        num_train_samples: number of training samples
        len_label: number of classes, i.e., size of label space
        len_feat: the dimention of the feature vector
        num_test_samples: number of testing samples
    """
    import time

    # generate synthetic dataset
    (labels_train, feats_train) = generate_data(num_train_samples, len_label, len_feat)

    # compute full-connected edge table
    full = np.vstack([x for x in itertools.combinations(range(len_label), 2)])

    # define factor types
    factor_types = define_factor_types(len_label, len_feat, full)

    # create features and labels for factor graph mode
    (labels_fg, feats_fg) = build_factor_graph_model(labels_train, feats_train, factor_types, full, GRAPH_CUT)

    # create model and register factor types
    model = FactorGraphModel(feats_fg, labels_fg, GRAPH_CUT)
    for i in range(len(factor_types)):
        model.add_factor_type(factor_types[i])

    # Training
    # the 3rd parameter is do_weighted_averaging, by turning this on,
    # a possibly faster convergence rate may be achieved.
    # the 4th parameter controls outputs of verbose training information
    sgd = StochasticSOSVM(model, labels_fg, True, True)
    sgd.set_num_iter(150)
    sgd.set_lambda(0.0001)

    # train
    t0 = time.time()
    sgd.train()
    t1 = time.time()
    w_sgd = sgd.get_w()
    #print "SGD took", t1 - t0, "seconds."

    # training error
    labels_pr = sgd.apply()
    ave_loss = evaluation(labels_pr, labels_fg, model)
    #print('SGD: Average training error is %.4f' % ave_loss)

    # testing error
    # generate synthetic testing dataset
    (labels_test, feats_test) = generate_data(num_test_samples, len_label, len_feat)
    # create features and labels for factor graph mode
    (labels_fg_test, feats_fg_test) = build_factor_graph_model(labels_test, feats_test, factor_types, full, GRAPH_CUT)

    # set features and labels to sgd
    sgd.set_features(feats_fg_test)
    sgd.set_labels(labels_fg_test)

    # test
    labels_pr = sgd.apply()
    ave_loss = evaluation(labels_pr, labels_fg_test, model)
    #print('SGD: Average testing error is %.4f' % ave_loss)
def graphcuts_general():
    """ Graph cuts for general s-t graph optimization.

    Builds a small hand-crafted s-t graph (5 nodes, 6 node-to-node edges),
    runs max-flow, and leaves the resulting flow/assignment available for
    inspection.
    """
    node_count = 5
    edge_count = 6
    graph = GraphCut(node_count, edge_count)
    # terminal-connected edges, i.e. SOURCE->node_i and node_i->SINK:
    # (node, capacity from SOURCE, capacity to SINK)
    terminal_edges = [
        (0, 4, 0),
        (1, 2, 0),
        (2, 8, 0),
        (2, 0, 4),
        (3, 0, 7),
        (4, 0, 5),
    ]
    for node, cap_source, cap_sink in terminal_edges:
        graph.add_tweights(node, cap_source, cap_sink)
    # node-to-node edges: (i, j, forward capacity, reverse capacity)
    pairwise_edges = [
        (0, 2, 5, 0),
        (0, 3, 2, 0),
        (1, 2, 6, 0),
        (1, 4, 9, 0),
        (2, 3, 1, 0),
        (2, 4, 3, 0),
    ]
    for tail, head, cap, rev_cap in pairwise_edges:
        graph.add_edge(tail, head, cap, rev_cap)
    # initialize and run the max-flow algorithm
    graph.init_maxflow()
    flow = graph.compute_maxflow()
    #print("Flow = %f" % flow)
    # node assignments (which side of the cut) are available via
    # graph.get_assignment(i) for i in range(node_count)
# Default toggles for the two demos; parameter_list is the conventional
# shogun-example entry-point argument set.
test_general, test_sosvm = True, True
parameter_list = [[test_general, test_sosvm]]
def structure_graphcuts(test_general=True, test_sosvm=True):
    """ Test graph cuts.

    Args:
        test_general: test graph cuts for general s-t graph optimization
        test_sosvm: test graph cuts for structured output svm
    """
    # Run each enabled demo in the original order.
    demos = [(test_general, graphcuts_general), (test_sosvm, graphcuts_sosvm)]
    for enabled, run_demo in demos:
        if enabled:
            run_demo()
# Script entry point: run the demos with the default parameter set above.
if __name__ == '__main__':
    print("Graph cuts")
    structure_graphcuts(*parameter_list[0])
| gpl-3.0 |
erjohnso/ansible | test/units/modules/network/netscaler/test_netscaler_cs_vserver.py | 32 | 28665 |
# Copyright (c) 2017 Citrix Systems
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from ansible.compat.tests.mock import patch, Mock, MagicMock, call
from .netscaler_module import TestModule, nitro_base_patcher, set_module_args
import sys
if sys.version_info[:2] != (2, 6):
import requests
class TestNetscalerCSVserverModule(TestModule):
    """Unit tests for the netscaler_cs_vserver Ansible module.

    The nitro python SDK is not installed in the test environment, so every
    nssrc.* module the Ansible module imports is replaced with a MagicMock
    (installed in setUpClass / setUp, removed in tearDown*).  Individual
    tests then patch the module's helper functions (cs_vserver_exists,
    cs_vserver_identical, ...) to drive each execution path.
    """

    @classmethod
    def setUpClass(cls):
        """Build the shared mock nitro SDK module tree and patchers."""
        class MockException(Exception):
            pass

        cls.MockException = MockException
        m = MagicMock()
        cls.cs_vserver_mock = MagicMock()
        cls.cs_vserver_mock.__class__ = MagicMock(add=Mock())

        # Every nitro SDK module path imported by netscaler_cs_vserver must
        # resolve to a mock for the import inside each test to succeed.
        nssrc_modules_mock = {
            'nssrc.com.citrix.netscaler.nitro.resource.config.cs': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.cs.csvserver': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.cs.csvserver.csvserver': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.cs.csvserver_cspolicy_binding': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.cs.csvserver_cspolicy_binding.csvserver_cspolicy_binding': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.ssl': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.ssl.sslvserver_sslcertkey_binding': m,
            'nssrc.com.citrix.netscaler.nitro.resource.config.ssl.sslvserver_sslcertkey_binding.sslvserver_sslcertkey_binding': m,
        }

        cls.nitro_specific_patcher = patch.dict(sys.modules, nssrc_modules_mock)
        cls.nitro_base_patcher = nitro_base_patcher

    @classmethod
    def tearDownClass(cls):
        """Stop the class-level patchers."""
        cls.nitro_base_patcher.stop()
        cls.nitro_specific_patcher.stop()

    def setUp(self):
        """(Re)start the nitro SDK patchers before each test."""
        self.nitro_base_patcher.start()
        self.nitro_specific_patcher.start()

        # Setup minimal required arguments to pass AnsibleModule argument parsing

    def tearDown(self):
        """Stop the nitro SDK patchers after each test."""
        self.nitro_base_patcher.stop()
        self.nitro_specific_patcher.stop()

    def test_graceful_nitro_api_import_error(self):
        """Missing nitro SDK must produce a clean failure message."""
        # Stop nitro api patching to cause ImportError
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        self.nitro_base_patcher.stop()
        self.nitro_specific_patcher.stop()
        from ansible.modules.network.netscaler import netscaler_cs_vserver
        self.module = netscaler_cs_vserver
        result = self.failed()
        self.assertEqual(result['msg'], 'Could not load nitro python sdk')

    def test_graceful_nitro_error_on_login(self):
        """A nitro exception raised during login must be caught and reported."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        class MockException(Exception):
            def __init__(self, *args, **kwargs):
                self.errorcode = 0
                self.message = ''

        client_mock = Mock()
        client_mock.login = Mock(side_effect=MockException)
        m = Mock(return_value=client_mock)
        with patch('ansible.modules.network.netscaler.netscaler_cs_vserver.get_nitro_client', m):
            with patch('ansible.modules.network.netscaler.netscaler_cs_vserver.nitro_exception', MockException):
                self.module = netscaler_cs_vserver
                result = self.failed()
                self.assertTrue(result['msg'].startswith('nitro exception'), msg='nitro exception during login not handled properly')

    def test_graceful_no_connection_error(self):
        """requests.ConnectionError during login must be handled gracefully."""
        if sys.version_info[:2] == (2, 6):
            self.skipTest('requests library not available under python2.6')
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        class MockException(Exception):
            pass
        client_mock = Mock()
        attrs = {'login.side_effect': requests.exceptions.ConnectionError}
        client_mock.configure_mock(**attrs)
        m = Mock(return_value=client_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            nitro_exception=MockException,
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertTrue(result['msg'].startswith('Connection error'), msg='Connection error was not handled gracefully')

    def test_graceful_login_error(self):
        """requests.SSLError during login must be handled gracefully."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        if sys.version_info[:2] == (2, 6):
            self.skipTest('requests library not available under python2.6')

        class MockException(Exception):
            pass
        client_mock = Mock()
        attrs = {'login.side_effect': requests.exceptions.SSLError}
        client_mock.configure_mock(**attrs)
        m = Mock(return_value=client_mock)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            nitro_exception=MockException,
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertTrue(result['msg'].startswith('SSL Error'), msg='SSL Error was not handled gracefully')

    def test_save_config_called_on_state_present(self):
        """save_config is invoked by default when state=present changes config."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        cs_vserver_proxy_mock = Mock()

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            cs_vserver_exists=Mock(side_effect=[False, True]),
            cs_vserver_identical=Mock(side_effect=[True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            diff_list=Mock(return_value={}),
            nitro_exception=self.MockException,
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=Mock(return_value=cs_vserver_proxy_mock),
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            self.assertIn(call.save_config(), client_mock.mock_calls)

    def test_save_config_called_on_state_absent(self):
        """save_config is invoked by default when state=absent deletes config."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        cs_vserver_proxy_mock = Mock()

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            cs_vserver_exists=Mock(side_effect=[True, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=Mock(return_value=cs_vserver_proxy_mock),
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            self.assertIn(call.save_config(), client_mock.mock_calls)

    def test_save_config_not_called_on_state_present(self):
        """save_config must be skipped when save_config=False (state=present)."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
            save_config=False,
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        cs_vserver_proxy_mock = Mock()

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            cs_vserver_exists=Mock(side_effect=[False, True]),
            cs_vserver_identical=Mock(side_effect=[True]),
            diff_list=Mock(return_value={}),
            ensure_feature_is_enabled=Mock(return_value=True),
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            nitro_exception=self.MockException,
            ConfigProxy=Mock(return_value=cs_vserver_proxy_mock),
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            self.assertNotIn(call.save_config(), client_mock.mock_calls)

    def test_save_config_not_called_on_state_absent(self):
        """save_config must be skipped when save_config=False (state=absent)."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
            save_config=False,
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        cs_vserver_proxy_mock = Mock()

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            cs_vserver_exists=Mock(side_effect=[True, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=Mock(return_value=cs_vserver_proxy_mock),
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            self.assertNotIn(call.save_config(), client_mock.mock_calls)

    def test_new_cs_vserver_execution_flow(self):
        """A vserver that does not exist yet must be created via add()."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            cs_vserver_exists=Mock(side_effect=[False, True]),
            cs_vserver_identical=Mock(side_effect=[True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=config_proxy_mock,
            nitro_exception=self.MockException,
            do_state_change=Mock(return_value=Mock(errorcode=0)),
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            cs_vserver_proxy_mock.assert_has_calls([call.add()])

    def test_modified_cs_vserver_execution_flow(self):
        """A vserver that exists but differs must be updated via update()."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[False, True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            nitro_exception=self.MockException,
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            cs_vserver_proxy_mock.assert_has_calls([call.update()])

    def test_absent_cs_vserver_execution_flow(self):
        """state=absent on an existing vserver must call delete()."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, False]),
            cs_vserver_identical=Mock(side_effect=[False, True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            cs_vserver_proxy_mock.assert_has_calls([call.delete()])

    def test_present_cs_vserver_identical_flow(self):
        """state=present on an identical vserver must be a no-op."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[True, True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            cs_vserver_proxy_mock.assert_not_called()

    def test_absent_cs_vserver_noop_flow(self):
        """state=absent on a missing vserver must be a no-op."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[False, False]),
            cs_vserver_identical=Mock(side_effect=[False, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            cs_vserver_proxy_mock.assert_not_called()

    def test_present_cs_vserver_failed_update(self):
        """If the vserver still differs after update, the module must fail."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[False, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertEqual(result['msg'], 'CS vserver differs from configured')
            self.assertTrue(result['failed'])

    def test_present_cs_vserver_failed_create(self):
        """If the vserver still does not exist after create, the module must fail."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[False, False]),
            cs_vserver_identical=Mock(side_effect=[False, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertEqual(result['msg'], 'CS vserver does not exist')
            self.assertTrue(result['failed'])

    def test_present_cs_vserver_update_immutable_attribute(self):
        """Attempting to change an immutable attribute must fail with a message."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=['domain']),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[False, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertEqual(result['msg'], 'Cannot update immutable attributes [\'domain\']')
            self.assertTrue(result['failed'])

    def test_absent_cs_vserver_failed_delete(self):
        """If the vserver still exists after delete, the module must fail."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)

        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[False, False]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertEqual(result['msg'], 'CS vserver still exists')
            self.assertTrue(result['failed'])

    def test_graceful_nitro_exception_state_present(self):
        """A nitro exception during state=present processing must be caught."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        class MockException(Exception):
            def __init__(self, *args, **kwargs):
                self.errorcode = 0
                self.message = ''

        m = Mock(side_effect=MockException)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            cs_vserver_exists=m,
            ensure_feature_is_enabled=Mock(return_value=True),
            nitro_exception=MockException
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertTrue(
                result['msg'].startswith('nitro exception'),
                msg='Nitro exception not caught on operation absent'
            )

    def test_graceful_nitro_exception_state_absent(self):
        """A nitro exception during state=absent processing must be caught."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='absent',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        class MockException(Exception):
            def __init__(self, *args, **kwargs):
                self.errorcode = 0
                self.message = ''

        m = Mock(side_effect=MockException)
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            cs_vserver_exists=m,
            ensure_feature_is_enabled=Mock(return_value=True),
            nitro_exception=MockException
        ):
            self.module = netscaler_cs_vserver
            result = self.failed()
            self.assertTrue(
                result['msg'].startswith('nitro exception'),
                msg='Nitro exception not caught on operation absent'
            )

    def test_disabled_state_change_called(self):
        """do_state_change must be invoked when state=present."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        cs_vserver_proxy_mock = Mock()

        do_state_change_mock = Mock(return_value=Mock(errorcode=0))
        client_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            get_nitro_client=Mock(return_value=client_mock),
            ConfigProxy=Mock(return_value=cs_vserver_proxy_mock),
            ensure_feature_is_enabled=Mock(return_value=True),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[True, True]),
            nitro_exception=self.MockException,
            do_state_change=do_state_change_mock,
        ):
            self.module = netscaler_cs_vserver
            self.exited()
            self.assertTrue(len(do_state_change_mock.mock_calls) > 0, msg='Did not call state change')

    def test_cs_vserver_ssl_called(self):
        """servicetype=SSL must trigger ssl certkey binding synchronization."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
            servicetype='SSL',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)
        ssl_certkey_bindings_sync_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[False, True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ssl_certkey_bindings_identical=Mock(side_effect=[False, True]),
            ssl_certkey_bindings_sync=ssl_certkey_bindings_sync_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            result = self.exited()
            self.assertTrue(result['changed'])
            self.assertTrue(ssl_certkey_bindings_sync_mock.called)

    def test_cs_vserver_ssl_not_called(self):
        """Without servicetype=SSL, certkey binding sync must not run."""
        set_module_args(dict(
            nitro_user='user',
            nitro_pass='pass',
            nsip='1.1.1.1',
            state='present',
        ))
        from ansible.modules.network.netscaler import netscaler_cs_vserver

        client_mock = Mock()

        m = Mock(return_value=client_mock)

        server_proxy_attrs = {
            'diff_object.return_value': {},
        }
        cs_vserver_proxy_mock = Mock()
        cs_vserver_proxy_mock.configure_mock(**server_proxy_attrs)
        config_proxy_mock = Mock(return_value=cs_vserver_proxy_mock)
        ssl_certkey_bindings_sync_mock = Mock()
        with patch.multiple(
            'ansible.modules.network.netscaler.netscaler_cs_vserver',
            nitro_exception=self.MockException,
            get_nitro_client=m,
            diff_list=Mock(return_value={}),
            get_immutables_intersection=Mock(return_value=[]),
            cs_vserver_exists=Mock(side_effect=[True, True]),
            cs_vserver_identical=Mock(side_effect=[False, True]),
            ensure_feature_is_enabled=Mock(return_value=True),
            ssl_certkey_bindings_identical=Mock(side_effect=[False, True]),
            ssl_certkey_bindings_sync=ssl_certkey_bindings_sync_mock,
            do_state_change=Mock(return_value=Mock(errorcode=0)),
            ConfigProxy=config_proxy_mock,
        ):
            self.module = netscaler_cs_vserver
            result = self.exited()
            self.assertTrue(result['changed'])
            self.assertFalse(ssl_certkey_bindings_sync_mock.called)
| gpl-3.0 |
ixiom/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/filesystem_mock.py | 122 | 16269 | # Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import StringIO
import errno
import hashlib
import os
import re
from webkitpy.common.system import path
class MockFileSystem(object):
    """In-memory stand-in for the real FileSystem object, for use in tests."""

    sep = '/'
    pardir = '..'

    def __init__(self, files=None, dirs=None, cwd='/'):
        """Initializes a "mock" filesystem that can be used to completely
        stub out a filesystem.

        Args:
            files: a dict of filenames -> file contents. A file contents
                value of None is used to indicate that the file should
                not exist.
            dirs: an iterable of directory paths that exist.
            cwd: the initial current working directory.
        """
        self.files = files or {}
        self.written_files = {}
        self.last_tmpdir = None
        self.current_tmpno = 0
        self.cwd = cwd
        self.dirs = set(dirs or [])
        self.dirs.add(cwd)
        # Every ancestor directory of a known file implicitly exists.
        for f in self.files:
            d = self.dirname(f)
            while not d in self.dirs:
                self.dirs.add(d)
                d = self.dirname(d)

    def clear_written_files(self):
        # This function can be used to track what is written between steps in a test.
        self.written_files = {}

    def _raise_not_found(self, path):
        # Mirror the errno/strerror shape of a real "file not found" IOError.
        raise IOError(errno.ENOENT, path, os.strerror(errno.ENOENT))

    def _split(self, path):
        # This is not quite a full implementation of os.path.split
        # http://docs.python.org/library/os.path.html#os.path.split
        if self.sep in path:
            return path.rsplit(self.sep, 1)
        return ('', path)

    def abspath(self, path):
        # Resolve relative paths against the mock's current working directory.
        if os.path.isabs(path):
            return self.normpath(path)
        return self.abspath(self.join(self.cwd, path))

    def realpath(self, path):
        # The mock has no symlink support, so realpath == abspath.
        return self.abspath(path)

    def basename(self, path):
        return self._split(path)[1]
def expanduser(self, path):
if path[0] != "~":
return path
parts = path.split(self.sep, 1)
home_directory = self.sep + "Users" + self.sep + "mock"
if len(parts) == 1:
return home_directory
return home_directory + self.sep + parts[1]
    def path_to_module(self, module_name):
        # Map a dotted module name to its mock checkout source path.
        return "/mock-checkout/Tools/Scripts/" + module_name.replace('.', '/') + ".py"

    def chdir(self, path):
        """Change the mock current working directory; raises OSError if path is not a directory."""
        path = self.normpath(path)
        if not self.isdir(path):
            raise OSError(errno.ENOENT, path, os.strerror(errno.ENOENT))
        self.cwd = path

    def copyfile(self, source, destination):
        """Copy source's contents to destination, mimicking shutil.copyfile error cases."""
        if not self.exists(source):
            self._raise_not_found(source)
        if self.isdir(source):
            raise IOError(errno.EISDIR, source, os.strerror(errno.EISDIR))
        if self.isdir(destination):
            raise IOError(errno.EISDIR, destination, os.strerror(errno.EISDIR))
        if not self.exists(self.dirname(destination)):
            raise IOError(errno.ENOENT, destination, os.strerror(errno.ENOENT))

        self.files[destination] = self.files[source]
        # Record the copy so tests can inspect what was written.
        self.written_files[destination] = self.files[source]

    def dirname(self, path):
        return self._split(path)[0]

    def exists(self, path):
        return self.isfile(path) or self.isdir(path)
def files_under(self, path, dirs_to_skip=[], file_filter=None):
def filter_all(fs, dirpath, basename):
return True
file_filter = file_filter or filter_all
files = []
if self.isfile(path):
if file_filter(self, self.dirname(path), self.basename(path)) and self.files[path] is not None:
files.append(path)
return files
if self.basename(path) in dirs_to_skip:
return []
if not path.endswith(self.sep):
path += self.sep
dir_substrings = [self.sep + d + self.sep for d in dirs_to_skip]
for filename in self.files:
if not filename.startswith(path):
continue
suffix = filename[len(path) - 1:]
if any(dir_substring in suffix for dir_substring in dir_substrings):
continue
dirpath, basename = self._split(filename)
if file_filter(self, dirpath, basename) and self.files[filename] is not None:
files.append(filename)
return files
    def getcwd(self):
        return self.cwd

    def glob(self, glob_string):
        """Return existing files and dirs matching glob_string.

        Note: this relies on Python 2 filter() returning lists.
        """
        # FIXME: This handles '*', but not '?', '[', or ']'.
        glob_string = re.escape(glob_string)
        glob_string = glob_string.replace('\\*', '[^\\/]*') + '$'
        glob_string = glob_string.replace('\\/', '/')
        path_filter = lambda path: re.match(glob_string, path)

        # We could use fnmatch.fnmatch, but that might not do the right thing on windows.
        existing_files = [path for path, contents in self.files.items() if contents is not None]
        return filter(path_filter, existing_files) + filter(path_filter, self.dirs)

    def isabs(self, path):
        return path.startswith(self.sep)

    def isfile(self, path):
        # A None entry means "explicitly does not exist".
        return path in self.files and self.files[path] is not None

    def isdir(self, path):
        return self.normpath(path) in self.dirs

    def _slow_but_correct_join(self, *comps):
        # Reference implementation used to validate the optimized join().
        return re.sub(re.escape(os.path.sep), self.sep, os.path.join(*comps))
    def join(self, *comps):
        # This function is called a lot, so we optimize it; there are
        # unittests to check that we match _slow_but_correct_join(), above.
        path = ''
        sep = self.sep
        for comp in comps:
            if not comp:
                continue
            # An absolute component resets the path, like os.path.join.
            if comp[0] == sep:
                path = comp
                continue
            if path:
                path += sep
            path += comp
        # A trailing empty component means the caller wants a trailing slash.
        if comps[-1] == '' and path:
            path += '/'
        path = path.replace(sep + sep, sep)
        return path
    def listdir(self, path):
        """Return the immediate children (dirs first, then files) of path."""
        sep = self.sep
        if not self.isdir(path):
            raise OSError("%s is not a directory" % path)

        if not path.endswith(sep):
            path += sep

        dirs = []
        files = []
        for f in self.files:
            if self.exists(f) and f.startswith(path):
                remaining = f[len(path):]
                if sep in remaining:
                    # f lives in a subdirectory; record just the first component.
                    dir = remaining[:remaining.index(sep)]
                    if not dir in dirs:
                        dirs.append(dir)
                else:
                    files.append(remaining)
        return dirs + files

    def mtime(self, path):
        # The mock keeps no timestamps; every existing path reports mtime 0.
        if self.exists(path):
            return 0
        self._raise_not_found(path)

    def _mktemp(self, suffix='', prefix='tmp', dir=None, **kwargs):
        """Generate (but do not create) a unique temp path; remembers it in last_tmpdir."""
        if dir is None:
            dir = self.sep + '__im_tmp'
        curno = self.current_tmpno
        self.current_tmpno += 1
        self.last_tmpdir = self.join(dir, '%s_%u_%s' % (prefix, curno, suffix))
        return self.last_tmpdir

    def mkdtemp(self, **kwargs):
        """Create a temp directory and return a context-manager wrapper for it."""
        class TemporaryDirectory(object):
            def __init__(self, fs, **kwargs):
                self._kwargs = kwargs
                self._filesystem = fs
                self._directory_path = fs._mktemp(**kwargs)
                fs.maybe_make_directory(self._directory_path)

            def __str__(self):
                return self._directory_path

            def __enter__(self):
                return self._directory_path

            def __exit__(self, type, value, traceback):
                # Only self-delete if necessary.

                # FIXME: Should we delete non-empty directories?
                if self._filesystem.exists(self._directory_path):
                    self._filesystem.rmtree(self._directory_path)

        return TemporaryDirectory(fs=self, **kwargs)

    def maybe_make_directory(self, *path):
        # Create the directory and all missing ancestors (like os.makedirs).
        norm_path = self.normpath(self.join(*path))
        while norm_path and not self.isdir(norm_path):
            self.dirs.add(norm_path)
            norm_path = self.dirname(norm_path)

    def move(self, source, destination):
        """Move source to destination, recording both changes in written_files."""
        if self.files[source] is None:
            self._raise_not_found(source)
        self.files[destination] = self.files[source]
        self.written_files[destination] = self.files[destination]
        # Mark the source as deleted rather than removing the key, so the
        # mock remembers the file explicitly no longer exists.
        self.files[source] = None
        self.written_files[source] = None
def _slow_but_correct_normpath(self, path):
return re.sub(re.escape(os.path.sep), self.sep, os.path.normpath(path))
def normpath(self, path):
# This function is called a lot, so we try to optimize the common cases
# instead of always calling _slow_but_correct_normpath(), above.
if '..' in path or '/./' in path:
# This doesn't happen very often; don't bother trying to optimize it.
return self._slow_but_correct_normpath(path)
if not path:
return '.'
if path == '/':
return path
if path == '/.':
return '/'
if path.endswith('/.'):
return path[:-2]
if path.endswith('/'):
return path[:-1]
return path
def open_binary_tempfile(self, suffix=''):
    """Open a fresh temp file for binary writing; returns (file_object, path)."""
    path = self._mktemp(suffix)
    return (WritableBinaryFileObject(self, path), path)
def open_binary_file_for_reading(self, path):
    """Return a readable binary file object for *path*.

    Deleted paths (value None) raise the mock's not-found error; unknown
    paths raise KeyError.
    """
    if self.files[path] is None:
        self._raise_not_found(path)
    return ReadableBinaryFileObject(self, path, self.files[path])
def read_binary_file(self, path):
    """Return the bytes stored at *path*.

    Unknown paths raise KeyError (intentional); paths that were deleted
    (value None) raise the mock's not-found error.
    """
    contents = self.files[path]
    if contents is None:
        self._raise_not_found(path)
    return contents
def write_binary_file(self, path, contents):
    """Store *contents* at *path*, creating the parent directory in the mock."""
    # FIXME: should this assert if dirname(path) doesn't exist?
    self.maybe_make_directory(self.dirname(path))
    self.written_files[path] = self.files[path] = contents
def open_text_file_for_reading(self, path):
    """Return a readable text file object (UTF-8 decoded view of the bytes).

    Deleted paths raise the mock's not-found error; unknown paths KeyError.
    """
    if self.files[path] is None:
        self._raise_not_found(path)
    return ReadableTextFileObject(self, path, self.files[path])
def open_text_file_for_writing(self, path):
    """Return a writable text file object; contents are stored UTF-8 encoded."""
    return WritableTextFileObject(self, path)
def read_text_file(self, path):
    """Return the file's contents decoded as UTF-8 text."""
    return self.read_binary_file(path).decode('utf-8')
def write_text_file(self, path, contents):
    """Encode *contents* as UTF-8 and store it at *path*."""
    return self.write_binary_file(path, contents.encode('utf-8'))
def sha1(self, path):
    """Return the hex SHA-1 digest of the file's binary contents."""
    return hashlib.sha1(self.read_binary_file(path)).hexdigest()
def relpath(self, path, start='.'):
    """Return *path* made relative to *start*, approximating os.path.relpath().

    Both arguments are run through abspath() first.  The prefix comparison
    is case-insensitive (presumably to mimic case-insensitive filesystems —
    TODO confirm), and when *start* only partially matches a component
    (e.g. path="/tmp/foobar", start="/tmp/foo") the result falls back to a
    root-based "../..." form instead of the optimal relative path.
    """
    # Since os.path.relpath() calls os.path.normpath()
    # (see http://docs.python.org/library/os.path.html#os.path.abspath )
    # it also removes trailing slashes and converts forward and backward
    # slashes to the preferred slash os.sep.
    start = self.abspath(start)
    path = self.abspath(path)
    if not path.lower().startswith(start.lower()):
        # path is outside the directory given by start; compute path from root
        return '../' * start.count('/') + path
    rel_path = path[len(start):]
    if not rel_path:
        # Then the paths are the same.
        pass
    elif rel_path[0] == self.sep:
        # It is probably sufficient to remove just the first character
        # since os.path.normpath() collapses separators, but we use
        # lstrip() just to be sure.
        rel_path = rel_path.lstrip(self.sep)
    else:
        # We are in the case typified by the following example:
        # path = "/tmp/foobar", start = "/tmp/foo" -> rel_path = "bar"
        # FIXME: We return a less-than-optimal result here.
        return '../' * start.count('/') + path
    return rel_path
def remove(self, path):
    """Delete *path*; deleted entries are recorded as None, not dropped."""
    contents = self.files[path]
    if contents is None:
        self._raise_not_found(path)
    self.files[path] = self.written_files[path] = None
def rmtree(self, path):
    """Mark every file under *path* as deleted and drop matching directories."""
    path = self.normpath(path)
    # NOTE(review): plain string prefix match, so "/foo" also matches
    # "/foobar"; callers presumably pass unambiguous directory paths —
    # confirm before relying on this for sibling-named paths.
    for f in self.files:
        if f.startswith(path):
            self.files[f] = None
    self.dirs = set(filter(lambda d: not d.startswith(path), self.dirs))
def copytree(self, source, destination):
    """Copy every file under *source* to the same relative path under *destination*.

    NOTE(review): like rmtree(), this uses a plain prefix match, so "/a"
    also copies "/ab/..." — callers presumably pass directory boundaries.
    """
    source = self.normpath(source)
    destination = self.normpath(destination)
    # Snapshot the keys first: new entries are inserted into self.files
    # inside the loop, and mutating a dict while iterating it raises
    # RuntimeError on Python 3.
    for source_file in list(self.files):
        if source_file.startswith(source):
            destination_path = self.join(destination, self.relpath(source_file, source))
            self.maybe_make_directory(self.dirname(destination_path))
            self.files[destination_path] = self.files[source_file]
def split(self, path):
    """Split *path* into (head, tail) around the last separator.

    Unlike os.path.split(), the head never keeps a trailing separator:
    split('/a') yields ('', 'a'), not ('/', 'a').
    """
    head, found_sep, tail = path.rpartition(self.sep)
    if not found_sep:
        return ('', path)
    return (head, tail)
def splitext(self, path):
    """Split *path* into (root, extension), mimicking os.path.splitext().

    Bug fix: the old code split at the last '.' anywhere in the path, so a
    dot in a directory component ("a.b/c") or the leading dot of a hidden
    file (".bashrc") was treated as the start of an extension.  Only a dot
    strictly after the last separator (and not leading the final
    component) now counts.
    """
    idx = path.rfind('.')
    # Ignore dots in directory components and a dot that merely leads the
    # final (hidden-file) component.
    if idx <= path.rfind(self.sep) + 1:
        idx = len(path)
    return (path[0:idx], path[idx:])
class WritableBinaryFileObject(object):
    """Write-only binary file object backed by the mock filesystem.

    Writes accumulate into fs.files[path] and are mirrored into
    fs.written_files[path] so tests can inspect what was written.
    """

    def __init__(self, fs, path):
        self.fs = fs
        self.path = path
        self.closed = False
        # Bug fix: seed with empty *bytes*, not str.  write() appends to
        # this value, and "" + b"..." raises TypeError on Python 3.
        self.fs.files[path] = b""

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def close(self):
        # The mock keeps the contents around; closing only flips the flag.
        self.closed = True

    def write(self, data):
        self.fs.files[self.path] += data
        self.fs.written_files[self.path] = self.fs.files[self.path]
class WritableTextFileObject(WritableBinaryFileObject):
    """Text-mode wrapper: encodes str writes to UTF-8 before storing."""

    def write(self, text):
        # 'text' rather than 'str' avoids shadowing the builtin; callers
        # pass the argument positionally, so the rename is safe.
        WritableBinaryFileObject.write(self, text.encode('utf-8'))
class ReadableBinaryFileObject(object):
    """Read-only binary file object over an in-memory byte string."""

    def __init__(self, fs, path, data):
        self.fs = fs
        self.path = path
        self.closed = False
        self.data = data
        self.offset = 0

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def close(self):
        self.closed = True

    def read(self, size=None):
        """Read *size* bytes from the current offset, or everything left
        if *size* is None.

        Bug fix: the old code tested the size for truthiness, so read(0)
        returned the whole remainder instead of b''.
        """
        if size is None:
            return self.data[self.offset:]
        start = self.offset
        self.offset += size
        return self.data[start:self.offset]
class ReadableTextFileObject(ReadableBinaryFileObject):
    """Read-only text file object: decodes the bytes once into a StringIO
    and delegates all reads to it.

    Bug fix: the original used the Python-2-only StringIO.StringIO module
    and the Python-2 .next() method; io.StringIO and the next() builtin
    work on both.
    """

    def __init__(self, fs, path, data):
        # Local import keeps this block self-contained; the surrounding
        # module predates io usage.
        from io import StringIO
        super(ReadableTextFileObject, self).__init__(fs, path, StringIO(data.decode("utf-8")))

    def close(self):
        self.data.close()
        super(ReadableTextFileObject, self).close()

    def read(self, size=-1):
        return self.data.read(size)

    def readline(self, length=None):
        return self.data.readline(length)

    def __iter__(self):
        return self.data.__iter__()

    def next(self):
        # Use the builtin: io.StringIO has no .next() method.
        return next(self.data)

    # Python 3 iteration protocol alias for callers that call next() on
    # the file object itself rather than on the iterator from __iter__().
    __next__ = next
| bsd-3-clause |
ABaldwinHunter/django-clone | tests/migrations/test_executor.py | 13 | 29432 | from django.apps.registry import apps as global_apps
from django.db import connection
from django.db.migrations.exceptions import InvalidMigrationPlan
from django.db.migrations.executor import MigrationExecutor
from django.db.migrations.graph import MigrationGraph
from django.db.migrations.recorder import MigrationRecorder
from django.db.utils import DatabaseError
from django.test import TestCase, modify_settings, override_settings
from .test_base import MigrationTestBase
@modify_settings(INSTALLED_APPS={'append': 'migrations2'})
class ExecutorTests(MigrationTestBase):
"""
Tests the migration executor (full end-to-end running).
Bear in mind that if these are failing you should fix the other
test failures first, as they may be propagating into here.
"""
available_apps = ["migrations", "migrations2", "django.contrib.auth", "django.contrib.contenttypes"]
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_run(self):
"""
Tests running a simple set of migrations.
"""
executor = MigrationExecutor(connection)
# Let's look at the plan first and make sure it's up to scratch
plan = executor.migration_plan([("migrations", "0002_second")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
],
)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
# Alright, let's try running it
executor.migrate([("migrations", "0002_second")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Alright, let's undo what we did
plan = executor.migration_plan([("migrations", None)])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0002_second"], True),
(executor.loader.graph.nodes["migrations", "0001_initial"], True),
],
)
executor.migrate([("migrations", None)])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_run_with_squashed(self):
"""
Tests running a squashed migration from zero (should ignore what it replaces)
"""
executor = MigrationExecutor(connection)
# Check our leaf node is the squashed one
leaves = [key for key in executor.loader.graph.leaf_nodes() if key[0] == "migrations"]
self.assertEqual(leaves, [("migrations", "0001_squashed_0002")])
# Check the plan
plan = executor.migration_plan([("migrations", "0001_squashed_0002")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_squashed_0002"], False),
],
)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
# Alright, let's try running it
executor.migrate([("migrations", "0001_squashed_0002")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_book")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Alright, let's undo what we did. Should also just use squashed.
plan = executor.migration_plan([("migrations", None)])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_squashed_0002"], True),
],
)
executor.migrate([("migrations", None)])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
@override_settings(MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2",
})
def test_empty_plan(self):
"""
Tests that re-planning a full migration of a fully-migrated set doesn't
perform spurious unmigrations and remigrations.
There was previously a bug where the executor just always performed the
backwards plan for applied migrations - which even for the most recent
migration in an app, might include other, dependent apps, and these
were being unmigrated.
"""
# Make the initial plan, check it
executor = MigrationExecutor(connection)
plan = executor.migration_plan([
("migrations", "0002_second"),
("migrations2", "0001_initial"),
])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
(executor.loader.graph.nodes["migrations2", "0001_initial"], False),
],
)
# Fake-apply all migrations
executor.migrate([
("migrations", "0002_second"),
("migrations2", "0001_initial")
], fake=True)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Now plan a second time and make sure it's empty
plan = executor.migration_plan([
("migrations", "0002_second"),
("migrations2", "0001_initial"),
])
self.assertEqual(plan, [])
# Erase all the fake records
executor.recorder.record_unapplied("migrations2", "0001_initial")
executor.recorder.record_unapplied("migrations", "0002_second")
executor.recorder.record_unapplied("migrations", "0001_initial")
@override_settings(MIGRATION_MODULES={
"migrations": "migrations.test_migrations",
"migrations2": "migrations2.test_migrations_2_no_deps",
})
def test_mixed_plan_not_supported(self):
"""
Although the MigrationExecutor interfaces allows for mixed migration
plans (combined forwards and backwards migrations) this is not
supported.
"""
# Prepare for mixed plan
executor = MigrationExecutor(connection)
plan = executor.migration_plan([("migrations", "0002_second")])
self.assertEqual(
plan,
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
(executor.loader.graph.nodes["migrations", "0002_second"], False),
],
)
executor.migrate(None, plan)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
self.assertIn(('migrations', '0001_initial'), executor.loader.applied_migrations)
self.assertIn(('migrations', '0002_second'), executor.loader.applied_migrations)
self.assertNotIn(('migrations2', '0001_initial'), executor.loader.applied_migrations)
# Generate mixed plan
plan = executor.migration_plan([
("migrations", None),
("migrations2", "0001_initial"),
])
msg = (
'Migration plans with both forwards and backwards migrations are '
'not supported. Please split your migration process into separate '
'plans of only forwards OR backwards migrations.'
)
with self.assertRaisesMessage(InvalidMigrationPlan, msg) as cm:
executor.migrate(None, plan)
self.assertEqual(
cm.exception.args[1],
[
(executor.loader.graph.nodes["migrations", "0002_second"], True),
(executor.loader.graph.nodes["migrations", "0001_initial"], True),
(executor.loader.graph.nodes["migrations2", "0001_initial"], False),
],
)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
executor.migrate([
("migrations", None),
("migrations2", None),
])
# Are the tables gone?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_book")
self.assertTableNotExists("migrations2_otherauthor")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_soft_apply(self):
"""
Tests detection of initial migrations already having been applied.
"""
state = {"faked": None}
def fake_storer(phase, migration=None, fake=None):
state["faked"] = fake
executor = MigrationExecutor(connection, progress_callback=fake_storer)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Run it normally
self.assertEqual(
executor.migration_plan([("migrations", "0001_initial")]),
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
],
)
executor.migrate([("migrations", "0001_initial")])
# Are the tables there now?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# We shouldn't have faked that one
self.assertEqual(state["faked"], False)
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Fake-reverse that
executor.migrate([("migrations", None)], fake=True)
# Are the tables still there?
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Make sure that was faked
self.assertEqual(state["faked"], True)
# Finally, migrate forwards; this should fake-apply our initial migration
executor.loader.build_graph()
self.assertEqual(
executor.migration_plan([("migrations", "0001_initial")]),
[
(executor.loader.graph.nodes["migrations", "0001_initial"], False),
],
)
# Applying the migration should raise a database level error
# because we haven't given the --fake-initial option
with self.assertRaises(DatabaseError):
executor.migrate([("migrations", "0001_initial")])
# Reset the faked state
state = {"faked": None}
# Allow faking of initial CreateModel operations
executor.migrate([("migrations", "0001_initial")], fake_initial=True)
self.assertEqual(state["faked"], True)
# And migrate back to clean up the database
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_migrations_custom_user",
"django.contrib.auth": "django.contrib.auth.migrations",
},
AUTH_USER_MODEL="migrations.Author",
)
def test_custom_user(self):
"""
Regression test for #22325 - references to a custom user model defined in the
same app are not resolved correctly.
"""
executor = MigrationExecutor(connection)
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
# Migrate forwards
executor.migrate([("migrations", "0001_initial")])
self.assertTableExists("migrations_author")
self.assertTableExists("migrations_tribble")
# Make sure the soft-application detection works (#23093)
# Change table_names to not return auth_user during this as
# it wouldn't be there in a normal run, and ensure migrations.Author
# exists in the global app registry temporarily.
old_table_names = connection.introspection.table_names
connection.introspection.table_names = lambda c: [x for x in old_table_names(c) if x != "auth_user"]
migrations_apps = executor.loader.project_state(("migrations", "0001_initial")).apps
global_apps.get_app_config("migrations").models["author"] = migrations_apps.get_model("migrations", "author")
try:
migration = executor.loader.get_migration("auth", "0001_initial")
self.assertEqual(executor.detect_soft_applied(None, migration)[0], True)
finally:
connection.introspection.table_names = old_table_names
del global_apps.get_app_config("migrations").models["author"]
# And migrate back to clean up the database
executor.loader.build_graph()
executor.migrate([("migrations", None)])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
@override_settings(
MIGRATION_MODULES={
"migrations": "migrations.test_add_many_to_many_field_initial",
},
)
def test_detect_soft_applied_add_field_manytomanyfield(self):
"""
executor.detect_soft_applied() detects ManyToManyField tables from an
AddField operation. This checks the case of AddField in a migration
with other operations (0001) and the case of AddField in its own
migration (0002).
"""
tables = [
# from 0001
"migrations_project",
"migrations_task",
"migrations_project_tasks",
# from 0002
"migrations_task_projects",
]
executor = MigrationExecutor(connection)
# Create the tables for 0001 but make it look like the migration hasn't
# been applied.
executor.migrate([("migrations", "0001_initial")])
executor.migrate([("migrations", None)], fake=True)
for table in tables[:3]:
self.assertTableExists(table)
# Table detection sees 0001 is applied but not 0002.
migration = executor.loader.get_migration("migrations", "0001_initial")
self.assertEqual(executor.detect_soft_applied(None, migration)[0], True)
migration = executor.loader.get_migration("migrations", "0002_initial")
self.assertEqual(executor.detect_soft_applied(None, migration)[0], False)
# Create the tables for both migrations but make it look like neither
# has been applied.
executor.loader.build_graph()
executor.migrate([("migrations", "0001_initial")], fake=True)
executor.migrate([("migrations", "0002_initial")])
executor.loader.build_graph()
executor.migrate([("migrations", None)], fake=True)
# Table detection sees 0002 is applied.
migration = executor.loader.get_migration("migrations", "0002_initial")
self.assertEqual(executor.detect_soft_applied(None, migration)[0], True)
# Leave the tables for 0001 except the many-to-many table. That missing
# table should cause detect_soft_applied() to return False.
with connection.schema_editor() as editor:
for table in tables[2:]:
editor.execute(editor.sql_delete_table % {"table": table})
migration = executor.loader.get_migration("migrations", "0001_initial")
self.assertEqual(executor.detect_soft_applied(None, migration)[0], False)
# Cleanup by removing the remaining tables.
with connection.schema_editor() as editor:
for table in tables[:2]:
editor.execute(editor.sql_delete_table % {"table": table})
for table in tables:
self.assertTableNotExists(table)
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.lookuperror_a",
"migrations.migrations_test_apps.lookuperror_b",
"migrations.migrations_test_apps.lookuperror_c"
]
)
def test_unrelated_model_lookups_forwards(self):
"""
#24123 - Tests that all models of apps already applied which are
unrelated to the first app being applied are part of the initial model
state.
"""
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
executor.migrate([("lookuperror_b", "0003_b3")])
self.assertTableExists("lookuperror_b_b3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Migrate forwards -- This led to a lookup LookupErrors because
# lookuperror_b.B2 is already applied
executor.migrate([
("lookuperror_a", "0004_a4"),
("lookuperror_c", "0003_c3"),
])
self.assertTableExists("lookuperror_a_a4")
self.assertTableExists("lookuperror_c_c3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# Cleanup
executor.migrate([
("lookuperror_a", None),
("lookuperror_b", None),
("lookuperror_c", None),
])
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
@override_settings(
INSTALLED_APPS=[
"migrations.migrations_test_apps.lookuperror_a",
"migrations.migrations_test_apps.lookuperror_b",
"migrations.migrations_test_apps.lookuperror_c"
]
)
def test_unrelated_model_lookups_backwards(self):
"""
#24123 - Tests that all models of apps being unapplied which are
unrelated to the first app being unapplied are part of the initial
model state.
"""
try:
executor = MigrationExecutor(connection)
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
executor.migrate([
("lookuperror_a", "0004_a4"),
("lookuperror_b", "0003_b3"),
("lookuperror_c", "0003_c3"),
])
self.assertTableExists("lookuperror_b_b3")
self.assertTableExists("lookuperror_a_a4")
self.assertTableExists("lookuperror_c_c3")
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
# Migrate backwards -- This led to a lookup LookupErrors because
# lookuperror_b.B2 is not in the initial state (unrelated to app c)
executor.migrate([("lookuperror_a", None)])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
finally:
# Cleanup
executor.migrate([
("lookuperror_b", None),
("lookuperror_c", None)
])
self.assertTableNotExists("lookuperror_a_a1")
self.assertTableNotExists("lookuperror_b_b1")
self.assertTableNotExists("lookuperror_c_c1")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations"})
def test_process_callback(self):
"""
#24129 - Tests callback process
"""
call_args_list = []
def callback(*args):
call_args_list.append(args)
executor = MigrationExecutor(connection, progress_callback=callback)
# Were the tables there before?
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
executor.migrate([
("migrations", "0001_initial"),
("migrations", "0002_second"),
])
# Rebuild the graph to reflect the new DB state
executor.loader.build_graph()
executor.migrate([
("migrations", None),
("migrations", None),
])
self.assertTableNotExists("migrations_author")
self.assertTableNotExists("migrations_tribble")
migrations = executor.loader.graph.nodes
expected = [
("render_start", ),
("render_success", ),
("apply_start", migrations['migrations', '0001_initial'], False),
("apply_success", migrations['migrations', '0001_initial'], False),
("apply_start", migrations['migrations', '0002_second'], False),
("apply_success", migrations['migrations', '0002_second'], False),
("render_start", ),
("render_success", ),
("unapply_start", migrations['migrations', '0002_second'], False),
("unapply_success", migrations['migrations', '0002_second'], False),
("unapply_start", migrations['migrations', '0001_initial'], False),
("unapply_success", migrations['migrations', '0001_initial'], False),
]
self.assertEqual(call_args_list, expected)
@override_settings(
    INSTALLED_APPS=[
        "migrations.migrations_test_apps.alter_fk.author_app",
        "migrations.migrations_test_apps.alter_fk.book_app",
    ]
)
def test_alter_id_type_with_fk(self):
    """Altering a model's PK type applies cleanly when another app's FK
    references that PK (author_app.0002 changes the id; book_app points
    at it)."""
    try:
        executor = MigrationExecutor(connection)
        self.assertTableNotExists("author_app_author")
        self.assertTableNotExists("book_app_book")
        # Apply initial migrations
        executor.migrate([
            ("author_app", "0001_initial"),
            ("book_app", "0001_initial"),
        ])
        self.assertTableExists("author_app_author")
        self.assertTableExists("book_app_book")
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
        # Apply PK type alteration
        executor.migrate([("author_app", "0002_alter_id")])
        # Rebuild the graph to reflect the new DB state
        executor.loader.build_graph()
    finally:
        # We can't simply unapply the migrations here because there is no
        # implicit cast from VARCHAR to INT on the database level.
        with connection.schema_editor() as editor:
            editor.execute(editor.sql_delete_table % {"table": "book_app_book"})
            editor.execute(editor.sql_delete_table % {"table": "author_app_author"})
        self.assertTableNotExists("author_app_author")
        self.assertTableNotExists("book_app_book")
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_apply_all_replaced_marks_replacement_as_applied(self):
"""
Applying all replaced migrations marks replacement as applied (#24628).
"""
recorder = MigrationRecorder(connection)
# Place the database in a state where the replaced migrations are
# partially applied: 0001 is applied, 0002 is not.
recorder.record_applied("migrations", "0001_initial")
executor = MigrationExecutor(connection)
# Use fake because we don't actually have the first migration
# applied, so the second will fail. And there's no need to actually
# create/modify tables here, we're just testing the
# MigrationRecord, which works the same with or without fake.
executor.migrate([("migrations", "0002_second")], fake=True)
# Because we've now applied 0001 and 0002 both, their squashed
# replacement should be marked as applied.
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations(),
)
@override_settings(MIGRATION_MODULES={"migrations": "migrations.test_migrations_squashed"})
def test_migrate_marks_replacement_applied_even_if_it_did_nothing(self):
"""
A new squash migration will be marked as applied even if all its
replaced migrations were previously already applied (#24628).
"""
recorder = MigrationRecorder(connection)
# Record all replaced migrations as applied
recorder.record_applied("migrations", "0001_initial")
recorder.record_applied("migrations", "0002_second")
executor = MigrationExecutor(connection)
executor.migrate([("migrations", "0001_squashed_0002")])
# Because 0001 and 0002 are both applied, even though this migrate run
# didn't apply anything new, their squashed replacement should be
# marked as applied.
self.assertIn(
("migrations", "0001_squashed_0002"),
recorder.applied_migrations(),
)
class FakeLoader(object):
    """Bare-bones stand-in for MigrationLoader: just a migration graph plus
    the set of already-applied migration keys."""

    def __init__(self, graph, applied):
        self.graph = graph
        self.applied_migrations = applied
class FakeMigration(object):
    """Really all we need is any object with a debug-useful repr."""

    def __init__(self, name):
        self.name = name

    def __repr__(self):
        return 'M<{}>'.format(self.name)
class ExecutorUnitTests(TestCase):
    """(More) isolated unit tests for executor methods."""
    def test_minimize_rollbacks(self):
        """
        Minimize unnecessary rollbacks in connected apps.

        When you say "./manage.py migrate appA 0001", rather than migrating to
        just after appA-0001 in the linearized migration plan (which could roll
        back migrations in other apps that depend on appA 0001, but don't need
        to be rolled back since we're not rolling back appA 0001), we migrate
        to just before appA-0002.
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        # b1 depends on a1 cross-app; a2 is a1's in-app child.
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, a2, a1)
        executor = MigrationExecutor(None)
        # Everything is applied; targeting a1 should roll back only a2,
        # not b1 (which depends on a1 but not on a2).
        executor.loader = FakeLoader(graph, {a1, b1, a2})
        plan = executor.migration_plan({a1})
        self.assertEqual(plan, [(a2_impl, True)])
    def test_minimize_rollbacks_branchy(self):
        """
        Minimize rollbacks when target has multiple in-app children.

        a: 1 <---- 3 <--\
            \       \- 2 <--- 4
             \           \
        b:    \- 1 <--- 2
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        a3_impl = FakeMigration('a3')
        a3 = ('a', '3')
        a4_impl = FakeMigration('a4')
        a4 = ('a', '4')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        b2_impl = FakeMigration('b2')
        b2 = ('b', '2')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(a3, a3_impl)
        graph.add_node(a4, a4_impl)
        graph.add_node(b1, b1_impl)
        graph.add_node(b2, b2_impl)
        graph.add_dependency(None, a2, a1)
        graph.add_dependency(None, a3, a1)
        graph.add_dependency(None, a4, a2)
        graph.add_dependency(None, a4, a3)
        graph.add_dependency(None, b2, b1)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, b2, a2)
        executor = MigrationExecutor(None)
        executor.loader = FakeLoader(graph, {a1, b1, a2, b2, a3, a4})
        plan = executor.migration_plan({a1})
        # Everything except a1 and b1 must be rolled back, in reverse
        # dependency order.
        should_be_rolled_back = [b2_impl, a4_impl, a2_impl, a3_impl]
        exp = [(m, True) for m in should_be_rolled_back]
        self.assertEqual(plan, exp)
    def test_backwards_nothing_to_do(self):
        """
        If the current state satisfies the given target, do nothing.

        a: 1 <--- 2
        b:  \- 1
        c:   \- 1

        If a1 is applied already and a2 is not, and we're asked to migrate to
        a1, don't apply or unapply b1 or c1, regardless of their current state.
        """
        a1_impl = FakeMigration('a1')
        a1 = ('a', '1')
        a2_impl = FakeMigration('a2')
        a2 = ('a', '2')
        b1_impl = FakeMigration('b1')
        b1 = ('b', '1')
        c1_impl = FakeMigration('c1')
        c1 = ('c', '1')
        graph = MigrationGraph()
        graph.add_node(a1, a1_impl)
        graph.add_node(a2, a2_impl)
        graph.add_node(b1, b1_impl)
        graph.add_node(c1, c1_impl)
        graph.add_dependency(None, a2, a1)
        graph.add_dependency(None, b1, a1)
        graph.add_dependency(None, c1, a1)
        executor = MigrationExecutor(None)
        # a1 and b1 applied, a2 and c1 not — the target {a1} is already
        # satisfied, so the plan must be empty.
        executor.loader = FakeLoader(graph, {a1, b1})
        plan = executor.migration_plan({a1})
        self.assertEqual(plan, [])
| bsd-3-clause |
EricMuller/mynotes-backend | requirements/twisted/Twisted-17.1.0/docs/core/howto/listings/pb/chatserver.py | 2 | 2109 | #!/usr/bin/env python
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
from zope.interface import implementer
from twisted.cred import portal, checkers
from twisted.spread import pb
from twisted.internet import reactor
class ChatServer:
    """Tracks chat groups by name and hands users into them."""

    def __init__(self):
        # Mapping of group name -> Group instance.
        self.groups = {}

    def joinGroup(self, groupname, user, allowMattress):
        """Add *user* to the named group, creating the group on first join.

        Note: allowMattress only takes effect when the group is created.
        """
        group = self.groups.get(groupname)
        if group is None:
            group = Group(groupname, allowMattress)
            self.groups[groupname] = group
        group.addUser(user)
        return group
@implementer(portal.IRealm)
class ChatRealm:
    """Realm that builds a User avatar for each authenticated avatarID.

    The .server attribute is attached externally after construction (see
    the module-level setup code).
    """
    def requestAvatar(self, avatarID, mind, *interfaces):
        # Only the Perspective Broker interface is supported here.
        assert pb.IPerspective in interfaces
        avatar = User(avatarID)
        avatar.server = self.server
        avatar.attached(mind)
        # The third element is the logout callback run on disconnect.
        return pb.IPerspective, avatar, lambda a=avatar:a.detached(mind)
class User(pb.Avatar):
    """Server-side avatar for one logged-in chat user."""
    def __init__(self, name):
        self.name = name
    def attached(self, mind):
        # 'mind' is the client-side remote reference we can call back into.
        self.remote = mind
    def detached(self, mind):
        self.remote = None
    def perspective_joinGroup(self, groupname, allowMattress=True):
        # Exposed to clients via PB's perspective_* naming convention.
        return self.server.joinGroup(groupname, self, allowMattress)
    def send(self, message):
        self.remote.callRemote("print", message)
class Group(pb.Viewable):
    """A chat room that relays each message to every member."""
    def __init__(self, groupname, allowMattress):
        self.name = groupname
        # When False, messages containing "mattress" are rejected.
        self.allowMattress = allowMattress
        self.users = []
    def addUser(self, user):
        self.users.append(user)
    def view_send(self, from_user, message):
        # PB view_* methods receive the calling user's avatar as from_user.
        if not self.allowMattress and "mattress" in message:
            raise ValueError("Don't say that word")
        for user in self.users:
            user.send("<%s> says: %s" % (from_user.name, message))
# Wire the realm to the server and start listening.
realm = ChatRealm()
realm.server = ChatServer()
# In-memory credential store; as its name warns, never use in production.
checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
checker.addUser("alice", "1234")
checker.addUser("bob", "secret")
checker.addUser("carol", "fido")
p = portal.Portal(realm, [checker])
# Serve Perspective Broker on TCP port 8800 and run the reactor forever.
reactor.listenTCP(8800, pb.PBServerFactory(p))
reactor.run()
| mit |
masika/heekscnc | nc/nc_read.py | 31 | 6367 | ################################################################################
# nc_read.py
#
# Base class for NC code parsing
################################################################################
import area
import math
count = 0
class Program:
    """Records where a program or subroutine starts and ends in the file."""

    def __init__(self):
        # Both remain None until the corresponding boundary line is seen.
        self.start_line = None
        self.end_line = None
class Parser:
def __init__(self, writer):
    """Create a parser that reports parsed NC moves to *writer*."""
    self.writer = writer
    # Last known tool position; None until a move establishes each axis.
    self.currentx = None
    self.currenty = None
    self.currentz = None
    # True = absolute coordinates, False = incremental
    # (toggled by absolute()/incremental()).
    self.absolute_flag = True
    # Depth used for drilling cycles; updated from the Z word while drilling.
    self.drillz = None
    # If True, a T word changes the tool only when an M6 is on the same block.
    self.need_m6_for_t_change = True
def __del__(self):
self.file_in.close()
############################################################################
## Internals
def readline(self):
    """Advance to the next line of the input; return False only at EOF.

    Bug fix: the old code returned False whenever the stripped line was
    empty, which silently stopped the Parse() loop at the first blank line
    of the file.  file.readline() returns '' only at end of file, so test
    the raw line before stripping.
    """
    raw = self.file_in.readline()
    if not raw:
        return False
    self.line = raw.rstrip()
    return True
def set_current_pos(self, x, y, z):
    """Update the tracked tool position for any axes that were given.

    In absolute mode (or when an axis is still unknown) the value replaces
    the current coordinate; in incremental mode it is added to it.
    """
    if x is not None:
        self.currentx = x if (self.absolute_flag or self.currentx is None) else self.currentx + x
    if y is not None:
        self.currenty = y if (self.absolute_flag or self.currenty is None) else self.currenty + y
    if z is not None:
        self.currentz = z if (self.absolute_flag or self.currentz is None) else self.currentz + z
def incremental(self):
    """Switch coordinate interpretation to incremental (relative) mode."""
    self.absolute_flag = False
def absolute(self):
    """Switch coordinate interpretation to absolute mode."""
    self.absolute_flag = True
def Parse(self, name):
self.file_in = open(name, 'r')
self.path_col = None
self.f = None
self.arc = 0
self.q = None
self.r = None
self.drilling = None
self.drilling_uses_clearance = False
self.drilling_clearance_height = None
while (self.readline()):
self.a = None
self.b = None
self.c = None
self.h = None
self.i = None
self.j = None
self.k = None
self.p = None
self.s = None
self.x = None
self.y = None
self.z = None
self.t = None
self.m6 = False
self.writer.begin_ncblock()
self.move = False
self.height_offset = False
self.drill = False
self.drill_off = False
self.no_move = False
words = self.pattern_main.findall(self.line)
for word in words:
self.col = None
self.cdata = False
self.ParseWord(word)
self.writer.add_text(word, self.col, self.cdata)
if self.t != None:
if (self.m6 == True) or (self.need_m6_for_t_change == False):
self.writer.tool_change( self.t )
if self.height_offset and (self.z != None):
self.drilling_clearance_height = self.z
if self.drill:
self.drilling = True
if self.drill_off:
self.drilling = False
if self.drilling:
rapid_z = self.r
if self.drilling_uses_clearance and (self.drilling_clearance_height != None):
rapid_z = self.drilling_clearance_height
if self.z != None: self.drillz = self.z
self.writer.rapid(self.x, self.y, rapid_z)
self.writer.feed(self.x, self.y, self.drillz)
self.writer.feed(self.x, self.y, rapid_z)
else:
if (self.move and not self.no_move):
if (self.arc==0):
if self.path_col == "feed":
self.writer.feed(self.x, self.y, self.z)
else:
self.writer.rapid(self.x, self.y, self.z, self.a, self.b, self.c)
else:
i = self.i
j = self.j
k = self.k
if self.arc_centre_absolute == True:
pass
else:
if (self.arc_centre_positive == True) and (self.oldx != None) and (self.oldy != None):
x = self.oldx
if self.x != None: x = self.x
if (self.x > self.oldx) != (self.arc > 0):
j = -j
y = self.oldy
if self.y != None: y = self.y
if (self.y > self.oldy) != (self.arc < 0):
i = -i
#fix centre point
r = math.sqrt(i*i + j*j)
p0 = area.Point(self.oldx, self.oldy)
p1 = area.Point(x, y)
v = p1 - p0
l = v.length()
h = l/2
d = math.sqrt(r*r - h*h)
n = area.Point(-v.y, v.x)
n.normalize()
if self.arc == -1: d = -d
c = p0 + (v * 0.5) + (n * d)
i = c.x
j = c.y
else:
i = i + self.oldx
j = j + self.oldy
if self.arc == -1:
self.writer.arc_cw(self.x, self.y, self.z, i, j, k)
else:
self.writer.arc_ccw(self.x, self.y, self.z, i, j, k)
if self.x != None: self.oldx = self.x
if self.y != None: self.oldy = self.y
if self.z != None: self.oldz = self.z
self.writer.end_ncblock()
| bsd-3-clause |
rooshilp/CMPUT410W15-project | testenv/lib/python2.7/site-packages/django/contrib/contenttypes/fields.py | 43 | 22826 | from __future__ import unicode_literals
from collections import defaultdict
from django.core import checks
from django.core.exceptions import ObjectDoesNotExist
from django.db import connection
from django.db import models, router, transaction, DEFAULT_DB_ALIAS
from django.db.models import signals, FieldDoesNotExist, DO_NOTHING
from django.db.models.base import ModelBase
from django.db.models.fields.related import ForeignObject, ForeignObjectRel
from django.db.models.related import PathInfo
from django.db.models.sql.datastructures import Col
from django.contrib.contenttypes.models import ContentType
from django.utils import six
from django.utils.deprecation import RenameMethodsBase, RemovedInDjango18Warning
from django.utils.encoding import smart_text, python_2_unicode_compatible
class RenameGenericForeignKeyMethods(RenameMethodsBase):
    # Metaclass hook: emits RemovedInDjango18Warning when the deprecated
    # get_prefetch_query_set() spelling is used instead of
    # get_prefetch_queryset().
    renamed_methods = (
        ('get_prefetch_query_set', 'get_prefetch_queryset', RemovedInDjango18Warning),
    )
@python_2_unicode_compatible
class GenericForeignKey(six.with_metaclass(RenameGenericForeignKeyMethods)):
"""
Provides a generic relation to any object through content-type/object-id
fields.
"""
def __init__(self, ct_field="content_type", fk_field="object_id", for_concrete_model=True):
self.ct_field = ct_field
self.fk_field = fk_field
self.for_concrete_model = for_concrete_model
self.editable = False
def contribute_to_class(self, cls, name):
self.name = name
self.model = cls
self.cache_attr = "_%s_cache" % name
cls._meta.add_virtual_field(self)
# Only run pre-initialization field assignment on non-abstract models
if not cls._meta.abstract:
signals.pre_init.connect(self.instance_pre_init, sender=cls)
setattr(cls, name, self)
def __str__(self):
model = self.model
app = model._meta.app_label
return '%s.%s.%s' % (app, model._meta.object_name, self.name)
def check(self, **kwargs):
errors = []
errors.extend(self._check_field_name())
errors.extend(self._check_object_id_field())
errors.extend(self._check_content_type_field())
return errors
def _check_field_name(self):
if self.name.endswith("_"):
return [
checks.Error(
'Field names must not end with an underscore.',
hint=None,
obj=self,
id='fields.E001',
)
]
else:
return []
def _check_object_id_field(self):
try:
self.model._meta.get_field(self.fk_field)
except FieldDoesNotExist:
return [
checks.Error(
"The GenericForeignKey object ID references the non-existent field '%s'." % self.fk_field,
hint=None,
obj=self,
id='contenttypes.E001',
)
]
else:
return []
def _check_content_type_field(self):
""" Check if field named `field_name` in model `model` exists and is
valid content_type field (is a ForeignKey to ContentType). """
try:
field = self.model._meta.get_field(self.ct_field)
except FieldDoesNotExist:
return [
checks.Error(
"The GenericForeignKey content type references the non-existent field '%s.%s'." % (
self.model._meta.object_name, self.ct_field
),
hint=None,
obj=self,
id='contenttypes.E002',
)
]
else:
if not isinstance(field, models.ForeignKey):
return [
checks.Error(
"'%s.%s' is not a ForeignKey." % (
self.model._meta.object_name, self.ct_field
),
hint="GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
obj=self,
id='contenttypes.E003',
)
]
elif field.rel.to != ContentType:
return [
checks.Error(
"'%s.%s' is not a ForeignKey to 'contenttypes.ContentType'." % (
self.model._meta.object_name, self.ct_field
),
hint="GenericForeignKeys must use a ForeignKey to 'contenttypes.ContentType' as the 'content_type' field.",
obj=self,
id='contenttypes.E004',
)
]
else:
return []
def instance_pre_init(self, signal, sender, args, kwargs, **_kwargs):
"""
Handles initializing an object with the generic FK instead of
content-type/object-id fields.
"""
if self.name in kwargs:
value = kwargs.pop(self.name)
if value is not None:
kwargs[self.ct_field] = self.get_content_type(obj=value)
kwargs[self.fk_field] = value._get_pk_val()
else:
kwargs[self.ct_field] = None
kwargs[self.fk_field] = None
def get_content_type(self, obj=None, id=None, using=None):
if obj is not None:
return ContentType.objects.db_manager(obj._state.db).get_for_model(
obj, for_concrete_model=self.for_concrete_model)
elif id is not None:
return ContentType.objects.db_manager(using).get_for_id(id)
else:
# This should never happen. I love comments like this, don't you?
raise Exception("Impossible arguments to GFK.get_content_type!")
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is not None:
raise ValueError("Custom queryset can't be used for this lookup.")
# For efficiency, group the instances by content type and then do one
# query per model
fk_dict = defaultdict(set)
# We need one instance for each group in order to get the right db:
instance_dict = {}
ct_attname = self.model._meta.get_field(self.ct_field).get_attname()
for instance in instances:
# We avoid looking for values if either ct_id or fkey value is None
ct_id = getattr(instance, ct_attname)
if ct_id is not None:
fk_val = getattr(instance, self.fk_field)
if fk_val is not None:
fk_dict[ct_id].add(fk_val)
instance_dict[ct_id] = instance
ret_val = []
for ct_id, fkeys in fk_dict.items():
instance = instance_dict[ct_id]
ct = self.get_content_type(id=ct_id, using=instance._state.db)
ret_val.extend(ct.get_all_objects_for_this_type(pk__in=fkeys))
# For doing the join in Python, we have to match both the FK val and the
# content type, so we use a callable that returns a (fk, class) pair.
def gfk_key(obj):
ct_id = getattr(obj, ct_attname)
if ct_id is None:
return None
else:
model = self.get_content_type(id=ct_id,
using=obj._state.db).model_class()
return (model._meta.pk.get_prep_value(getattr(obj, self.fk_field)),
model)
return (ret_val,
lambda obj: (obj._get_pk_val(), obj.__class__),
gfk_key,
True,
self.cache_attr)
def is_cached(self, instance):
return hasattr(instance, self.cache_attr)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
return getattr(instance, self.cache_attr)
except AttributeError:
rel_obj = None
# Make sure to use ContentType.objects.get_for_id() to ensure that
# lookups are cached (see ticket #5570). This takes more code than
# the naive ``getattr(instance, self.ct_field)``, but has better
# performance when dealing with GFKs in loops and such.
f = self.model._meta.get_field(self.ct_field)
ct_id = getattr(instance, f.get_attname(), None)
if ct_id is not None:
ct = self.get_content_type(id=ct_id, using=instance._state.db)
try:
rel_obj = ct.get_object_for_this_type(pk=getattr(instance, self.fk_field))
except ObjectDoesNotExist:
pass
setattr(instance, self.cache_attr, rel_obj)
return rel_obj
def __set__(self, instance, value):
ct = None
fk = None
if value is not None:
ct = self.get_content_type(obj=value)
fk = value._get_pk_val()
setattr(instance, self.ct_field, ct)
setattr(instance, self.fk_field, fk)
setattr(instance, self.cache_attr, value)
class GenericRelation(ForeignObject):
"""Provides an accessor to generic related objects (e.g. comments)"""
def __init__(self, to, **kwargs):
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = GenericRel(
self, to,
related_query_name=kwargs.pop('related_query_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
)
# Override content-type/object-id field names on the related class
self.object_id_field_name = kwargs.pop("object_id_field", "object_id")
self.content_type_field_name = kwargs.pop("content_type_field", "content_type")
self.for_concrete_model = kwargs.pop("for_concrete_model", True)
kwargs['blank'] = True
kwargs['editable'] = False
kwargs['serialize'] = False
# This construct is somewhat of an abuse of ForeignObject. This field
# represents a relation from pk to object_id field. But, this relation
# isn't direct, the join is generated reverse along foreign key. So,
# the from_field is object_id field, to_field is pk because of the
# reverse join.
super(GenericRelation, self).__init__(
to, to_fields=[],
from_fields=[self.object_id_field_name], **kwargs)
def check(self, **kwargs):
errors = super(GenericRelation, self).check(**kwargs)
errors.extend(self._check_generic_foreign_key_existence())
return errors
def _check_generic_foreign_key_existence(self):
target = self.rel.to
if isinstance(target, ModelBase):
# Using `vars` is very ugly approach, but there is no better one,
# because GenericForeignKeys are not considered as fields and,
# therefore, are not included in `target._meta.local_fields`.
fields = target._meta.virtual_fields
if any(isinstance(field, GenericForeignKey) and
field.ct_field == self.content_type_field_name and
field.fk_field == self.object_id_field_name
for field in fields):
return []
else:
return [
checks.Error(
("The GenericRelation defines a relation with the model "
"'%s.%s', but that model does not have a GenericForeignKey.") % (
target._meta.app_label, target._meta.object_name
),
hint=None,
obj=self,
id='contenttypes.E004',
)
]
else:
return []
def resolve_related_fields(self):
self.to_fields = [self.model._meta.pk.name]
return [(self.rel.to._meta.get_field_by_name(self.object_id_field_name)[0],
self.model._meta.pk)]
def get_path_info(self):
opts = self.rel.to._meta
target = opts.get_field_by_name(self.object_id_field_name)[0]
return [PathInfo(self.model._meta, opts, (target,), self.rel, True, False)]
def get_reverse_path_info(self):
opts = self.model._meta
from_opts = self.rel.to._meta
return [PathInfo(from_opts, opts, (opts.pk,), self, not self.unique, False)]
def get_choices_default(self):
return super(GenericRelation, self).get_choices(include_blank=False)
def value_to_string(self, obj):
qs = getattr(obj, self.name).all()
return smart_text([instance._get_pk_val() for instance in qs])
def contribute_to_class(self, cls, name):
super(GenericRelation, self).contribute_to_class(cls, name, virtual_only=True)
# Save a reference to which model this class is on for future use
self.model = cls
# Add the descriptor for the relation
setattr(cls, self.name, ReverseGenericRelatedObjectsDescriptor(self, self.for_concrete_model))
def set_attributes_from_rel(self):
pass
def get_internal_type(self):
return "ManyToManyField"
def get_content_type(self):
"""
Returns the content type associated with this field's model.
"""
return ContentType.objects.get_for_model(self.model,
for_concrete_model=self.for_concrete_model)
def get_extra_restriction(self, where_class, alias, remote_alias):
field = self.rel.to._meta.get_field_by_name(self.content_type_field_name)[0]
contenttype_pk = self.get_content_type().pk
cond = where_class()
lookup = field.get_lookup('exact')(Col(remote_alias, field, field), contenttype_pk)
cond.add(lookup, 'AND')
return cond
def bulk_related_objects(self, objs, using=DEFAULT_DB_ALIAS):
"""
Return all objects related to ``objs`` via this ``GenericRelation``.
"""
return self.rel.to._base_manager.db_manager(using).filter(**{
"%s__pk" % self.content_type_field_name: ContentType.objects.db_manager(using).get_for_model(
self.model, for_concrete_model=self.for_concrete_model).pk,
"%s__in" % self.object_id_field_name: [obj.pk for obj in objs]
})
class ReverseGenericRelatedObjectsDescriptor(object):
    """
    This class provides the functionality that makes the related-object
    managers available as attributes on a model class, for fields that have
    multiple "remote" values and have a GenericRelation defined in their model
    (rather than having another model pointed *at* them). In the example
    "article.publications", the publications attribute is a
    ReverseGenericRelatedObjectsDescriptor instance.
    """
    def __init__(self, field, for_concrete_model=True):
        self.field = field
        self.for_concrete_model = for_concrete_model
    def __get__(self, instance, instance_type=None):
        # Accessed on the class itself: return the descriptor, not a manager.
        if instance is None:
            return self
        # Dynamically create a class that subclasses the related model's
        # default manager.
        rel_model = self.field.rel.to
        superclass = rel_model._default_manager.__class__
        RelatedManager = create_generic_related_manager(superclass)
        qn = connection.ops.quote_name
        # content type of *instance*, resolved for the right database
        content_type = ContentType.objects.db_manager(instance._state.db).get_for_model(
            instance, for_concrete_model=self.for_concrete_model)
        join_cols = self.field.get_joining_columns(reverse_join=True)[0]
        manager = RelatedManager(
            model=rel_model,
            instance=instance,
            source_col_name=qn(join_cols[0]),
            target_col_name=qn(join_cols[1]),
            content_type=content_type,
            content_type_field_name=self.field.content_type_field_name,
            object_id_field_name=self.field.object_id_field_name,
            prefetch_cache_name=self.field.attname,
        )
        return manager
    def __set__(self, instance, value):
        # Assignment replaces the related set: clear it, then add each item.
        manager = self.__get__(instance)
        manager.clear()
        for obj in value:
            manager.add(obj)
def create_generic_related_manager(superclass):
"""
Factory function for a manager that subclasses 'superclass' (which is a
Manager) and adds behavior for generic related objects.
"""
class GenericRelatedObjectManager(superclass):
def __init__(self, model=None, instance=None, symmetrical=None,
source_col_name=None, target_col_name=None, content_type=None,
content_type_field_name=None, object_id_field_name=None,
prefetch_cache_name=None):
super(GenericRelatedObjectManager, self).__init__()
self.model = model
self.content_type = content_type
self.symmetrical = symmetrical
self.instance = instance
self.source_col_name = source_col_name
self.target_col_name = target_col_name
self.content_type_field_name = content_type_field_name
self.object_id_field_name = object_id_field_name
self.prefetch_cache_name = prefetch_cache_name
self.pk_val = self.instance._get_pk_val()
self.core_filters = {
'%s__pk' % content_type_field_name: content_type.id,
'%s' % object_id_field_name: instance._get_pk_val(),
}
def __call__(self, **kwargs):
# We use **kwargs rather than a kwarg argument to enforce the
# `manager='manager_name'` syntax.
manager = getattr(self.model, kwargs.pop('manager'))
manager_class = create_generic_related_manager(manager.__class__)
return manager_class(
model=self.model,
instance=self.instance,
symmetrical=self.symmetrical,
source_col_name=self.source_col_name,
target_col_name=self.target_col_name,
content_type=self.content_type,
content_type_field_name=self.content_type_field_name,
object_id_field_name=self.object_id_field_name,
prefetch_cache_name=self.prefetch_cache_name,
)
do_not_call_in_templates = True
def __str__(self):
return repr(self)
def get_queryset(self):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.model, instance=self.instance)
return super(GenericRelatedObjectManager, self).get_queryset().using(db).filter(**self.core_filters)
def get_prefetch_queryset(self, instances, queryset=None):
if queryset is None:
queryset = super(GenericRelatedObjectManager, self).get_queryset()
queryset._add_hints(instance=instances[0])
queryset = queryset.using(queryset._db or self._db)
query = {
'%s__pk' % self.content_type_field_name: self.content_type.id,
'%s__in' % self.object_id_field_name: set(obj._get_pk_val() for obj in instances)
}
# We (possibly) need to convert object IDs to the type of the
# instances' PK in order to match up instances:
object_id_converter = instances[0]._meta.pk.to_python
return (queryset.filter(**query),
lambda relobj: object_id_converter(getattr(relobj, self.object_id_field_name)),
lambda obj: obj._get_pk_val(),
False,
self.prefetch_cache_name)
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected" % self.model._meta.object_name)
setattr(obj, self.content_type_field_name, self.content_type)
setattr(obj, self.object_id_field_name, self.pk_val)
obj.save()
add.alters_data = True
def remove(self, *objs, **kwargs):
if not objs:
return
bulk = kwargs.pop('bulk', True)
self._clear(self.filter(pk__in=[o.pk for o in objs]), bulk)
remove.alters_data = True
def clear(self, **kwargs):
bulk = kwargs.pop('bulk', True)
self._clear(self, bulk)
clear.alters_data = True
def _clear(self, queryset, bulk):
db = router.db_for_write(self.model, instance=self.instance)
queryset = queryset.using(db)
if bulk:
queryset.delete()
else:
with transaction.commit_on_success_unless_managed(using=db, savepoint=False):
for obj in queryset:
obj.delete()
_clear.alters_data = True
def create(self, **kwargs):
kwargs[self.content_type_field_name] = self.content_type
kwargs[self.object_id_field_name] = self.pk_val
db = router.db_for_write(self.model, instance=self.instance)
return super(GenericRelatedObjectManager, self).using(db).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
kwargs[self.content_type_field_name] = self.content_type
kwargs[self.object_id_field_name] = self.pk_val
db = router.db_for_write(self.model, instance=self.instance)
return super(GenericRelatedObjectManager, self).using(db).get_or_create(**kwargs)
get_or_create.alters_data = True
def update_or_create(self, **kwargs):
kwargs[self.content_type_field_name] = self.content_type
kwargs[self.object_id_field_name] = self.pk_val
db = router.db_for_write(self.model, instance=self.instance)
return super(GenericRelatedObjectManager, self).using(db).update_or_create(**kwargs)
update_or_create.alters_data = True
return GenericRelatedObjectManager
class GenericRel(ForeignObjectRel):
    """Relation object used by GenericRelation fields (see GenericRelation.__init__)."""
    def __init__(self, field, to, related_name=None, limit_choices_to=None, related_query_name=None):
        # related_name is accepted for signature compatibility, but the reverse
        # name is forced to related_query_name or '+' (no named reverse accessor).
        # NOTE(review): on_delete=DO_NOTHING -- related rows are presumably
        # collected via GenericRelation.bulk_related_objects instead; confirm.
        super(GenericRel, self).__init__(field=field, to=to, related_name=related_query_name or '+',
            limit_choices_to=limit_choices_to, on_delete=DO_NOTHING,
            related_query_name=related_query_name)
| gpl-2.0 |
jessstrap/servotk | tests/wpt/css-tests/tools/py/py/_path/svnwc.py | 176 | 43848 | """
svn-Command based Implementation of a Subversion WorkingCopy Path.
SvnWCCommandPath is the main class.
"""
import os, sys, time, re, calendar
import py
import subprocess
from py._path import common
#-----------------------------------------------------------
# Caching latest repository revision and repo-paths
# (getting them is slow with the current implementations)
#
# XXX make mt-safe
#-----------------------------------------------------------
class cache:
    # per-process caches for svn metadata lookups (see the module header
    # note above: getting these values is slow and "XXX make mt-safe")
    proplist = {}
    info = {}
    entries = {}
    prop = {}
class RepoEntry:
    """One known repository root: its url, revision and discovery timestamp."""
    def __init__(self, url, rev, timestamp):
        self.url = url
        self.rev = rev
        self.timestamp = timestamp

    def __str__(self):
        return "repo: {};{} {}".format(self.url, self.rev, self.timestamp)
class RepoCache:
    """ The Repocache manages discovered repository paths
    and their revisions.  If inside a timeout the cache
    will even return the revision of the root.
    """
    timeout = 20  # seconds after which we forget that we know the last revision

    def __init__(self):
        self.repos = []

    def clear(self):
        self.repos = []

    def put(self, url, rev, timestamp=None):
        """Record (or refresh) the revision known for *url*."""
        if rev is None:
            return
        if timestamp is None:
            timestamp = time.time()
        for known in self.repos:
            if known.url == url:
                # already known: just refresh revision and timestamp
                known.timestamp = timestamp
                known.rev = rev
                break
        else:
            self.repos.append(RepoEntry(url, rev, timestamp))

    def get(self, url):
        """Return (repo_url, rev) for *url*; rev is -1 when stale or unknown."""
        now = time.time()
        for known in self.repos:
            if not url.startswith(known.url):
                continue
            if now < known.timestamp + self.timeout:
                return known.url, known.rev
            return known.url, -1
        return url, -1
# process-wide cache instance used by the svn path classes
repositories = RepoCache()

# svn support code

# characters permitted in svn paths/urls besides alphanumerics
# (checked by _check_for_bad_chars below)
ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
if sys.platform == "win32":
    ALLOWED_CHARS += ":"
# host parts may additionally contain '@' (user@host) and ':' (port)
ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
def _getsvnversion(ver=[]):
    """Return the installed svn client version as 'major.minor'.

    The mutable default argument is used deliberately as a per-process
    cache so the external 'svn' binary is only invoked once.
    """
    try:
        return ver[0]
    except IndexError:
        v = py.process.cmdexec("svn -q --version")
        # bugfix: str.strip() returns a new string -- the original call
        # discarded the result, leaving trailing whitespace/newline in v
        v = v.strip()
        v = '.'.join(v.split('.')[:2])
        ver.append(v)
        return v
def _escape_helper(text):
    """Return str(*text*) with '$' backslash-escaped on non-Windows platforms
    (it would otherwise be expanded by the shell in svn command lines)."""
    text = str(text)
    # consistency: use the directly-imported sys module (as the rest of this
    # file does) instead of the deprecated py.std indirection; also avoids
    # converting to str a second time
    if sys.platform != 'win32':
        text = text.replace('$', '\\$')
    return text
def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
    """Return True if str(*text*) contains a character that is neither
    alphanumeric nor listed in *allowed_chars*, else False."""
    return any(not ch.isalnum() and ch not in allowed_chars
               for ch in str(text))
def checkbadchars(url):
    # (hpk) not quite sure about the exact purpose, guido w.?
    proto, uri = url.split("://", 1)
    if proto == "file":
        return
    # only check for bad chars in the non-protocol parts
    host, uripath = uri.split('/', 1)
    bad_host = _check_for_bad_chars(host, ALLOWED_CHARS_HOST)
    bad_path = _check_for_bad_chars(uripath, ALLOWED_CHARS)
    if bad_host or bad_path:
        raise ValueError("bad char in %r" % (url, ))
#_______________________________________________________________
class SvnPathBase(common.PathBase):
    """ Base implementation for SvnPath implementations. """
    sep = '/'

    def _geturl(self):
        return self.strpath
    url = property(_geturl, None, None, "url of this svn-path.")

    def __str__(self):
        """ return a string representation (including rev-number) """
        return self.strpath

    def __hash__(self):
        return hash(self.strpath)

    def new(self, **kw):
        """ create a modified version of this path. A 'rev' argument
            indicates a new revision.

            the following keyword arguments modify various path parts::

              http://host.com/repo/path/file.ext
              |-----------------------|          dirname
                                        |------| basename
                                        |--|     purebasename
                                            |--| ext
        """
        obj = object.__new__(self.__class__)
        obj.rev = kw.get('rev', self.rev)
        obj.auth = kw.get('auth', self.auth)
        dirname, basename, purebasename, ext = self._getbyspec(
             "dirname,basename,purebasename,ext")
        if 'basename' in kw:
            # basename and purebasename/ext are mutually exclusive
            if 'purebasename' in kw or 'ext' in kw:
                raise ValueError("invalid specification %r" % kw)
        else:
            pb = kw.setdefault('purebasename', purebasename)
            ext = kw.setdefault('ext', ext)
            if ext and not ext.startswith('.'):
                ext = '.' + ext
            kw['basename'] = pb + ext
        kw.setdefault('dirname', dirname)
        kw.setdefault('sep', self.sep)
        if kw['basename']:
            obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
        else:
            obj.strpath = "%(dirname)s" % kw
        return obj

    def _getbyspec(self, spec):
        """ get specified parts of the path. 'spec' is a string
            with comma separated path parts. The parts are returned
            in exactly the order of the specification.

            you may specify the following parts:

            http://host.com/repo/path/file.ext
            |-----------------------|          dirname
                                      |------| basename
                                      |--|     purebasename
                                          |--| ext
        """
        res = []
        parts = self.strpath.split(self.sep)
        for name in spec.split(','):
            name = name.strip()
            if name == 'dirname':
                res.append(self.sep.join(parts[:-1]))
            elif name == 'basename':
                res.append(parts[-1])
            else:
                basename = parts[-1]
                i = basename.rfind('.')
                if i == -1:
                    purebasename, ext = basename, ''
                else:
                    purebasename, ext = basename[:i], basename[i:]
                if name == 'purebasename':
                    res.append(purebasename)
                elif name == 'ext':
                    res.append(ext)
                else:
                    raise NameError("Don't know part %r" % name)
        return res

    def __eq__(self, other):
        """ return true if path and rev attributes each match """
        # bugfix: the original compared `self.rev == other.rev` twice
        # ("a or a"); collapsed to a single comparison (same semantics).
        return str(self) == str(other) and self.rev == other.rev

    def __ne__(self, other):
        return not self == other

    def join(self, *args):
        """ return a new Path (with the same revision) which is composed
            of the self Path followed by 'args' path components.
        """
        if not args:
            return self

        args = tuple([arg.strip(self.sep) for arg in args])
        parts = (self.strpath, ) + args
        newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
        return newpath

    def propget(self, name):
        """ return the content of the given property. """
        return self._propget(name)

    def proplist(self):
        """ list all property names. """
        return self._proplist()

    def size(self):
        """ Return the size of the file content of the Path. """
        return self.info().size

    def mtime(self):
        """ Return the last modification time of the file. """
        return self.info().mtime

    # shared help methods

    def _escape(self, cmd):
        return _escape_helper(cmd)
class Checkers(common.Checkers):
    # svn flavour of the py.path check() helpers
    def dir(self):
        # the path is a directory if 'svn info' says so; when no info is
        # available at all, fall back to probing with a directory listing
        try:
            return self.path.info().kind == 'dir'
        except py.error.Error:
            return self._listdirworks()
    def _listdirworks(self):
        # probe: a listing succeeding implies the path is a listable directory
        try:
            self.path.listdir()
        except py.error.ENOENT:
            return False
        else:
            return True
    def file(self):
        try:
            return self.path.info().kind == 'file'
        except py.error.ENOENT:
            return False
    def exists(self):
        try:
            return self.path.info()
        except py.error.ENOENT:
            # no info entry -- the path may still exist as a listable dir
            return self._listdirworks()
def parse_apr_time(timestr):
    """Convert an apr-style timestamp ('YYYY-mm-ddTHH:MM:SS.micros...') into
    a unix epoch float.  The fractional part is discarded; the struct is
    interpreted in local time (time.mktime), as before."""
    dot = timestr.rfind('.')
    if dot == -1:
        raise ValueError("could not parse %s" % timestr)
    parsed = time.strptime(timestr[:dot], "%Y-%m-%dT%H:%M:%S")
    return time.mktime(parsed)
class PropListDict(dict):
    """ a Dictionary which fetches values (InfoSvnCommand instances) lazily"""
    def __init__(self, path, keynames):
        # start with every known property name mapped to None (= not fetched)
        dict.__init__(self, dict.fromkeys(keynames))
        self.path = path

    def __getitem__(self, key):
        cached = dict.__getitem__(self, key)
        if cached is None:
            # first access: fetch the property value and memoize it
            cached = self.path.propget(key)
            dict.__setitem__(self, key, cached)
        return cached
def fixlocale():
    """Return a command prefix forcing the C locale (so svn output is
    parseable); empty on Windows."""
    if sys.platform == 'win32':
        return ''
    return 'LC_ALL=C '
# some nasty chunk of code to solve path and url conversion and quoting issues
# characters that may never appear in a working-copy path
ILLEGAL_CHARS = '* | \ / : < > ? \t \n \x0b \x0c \r'.split(' ')
if os.sep in ILLEGAL_CHARS:
    ILLEGAL_CHARS.remove(os.sep)
ISWINDOWS = sys.platform == 'win32'
# matches paths whose only colon (if any) is part of a drive prefix like "c:\"
_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
def _check_path(path):
    """Raise ValueError if *path* contains characters svn cannot handle."""
    # bugfix: 'string' was referenced below but never imported in this
    # module, making every call raise NameError; import it locally.
    import string
    illegal = ILLEGAL_CHARS[:]
    sp = path.strpath
    if ISWINDOWS:
        # on Windows a colon is legal only as part of the drive prefix
        illegal.remove(':')
        if not _reg_allow_disk.match(sp):
            raise ValueError('path may not contain a colon (:)')
    for char in sp:
        if char not in string.printable or char in illegal:
            raise ValueError('illegal character %r in path' % (char,))
def path_to_fspath(path, addat=True):
    """Return the filesystem string for *path*; when *addat* is true an
    explicit peg revision ('@REV', or '@HEAD' for rev -1) is appended."""
    _check_path(path)
    fspath = path.strpath
    if addat:
        peg = 'HEAD' if path.rev == -1 else path.rev
        fspath = '%s@%s' % (fspath, peg)
    return fspath
def url_from_path(path):
    # build a file:// url for *path*: percent-quote the filesystem part and
    # always append an explicit peg revision ('@REV' or '@HEAD')
    fspath = path_to_fspath(path, False)
    quote = py.std.urllib.quote
    if ISWINDOWS:
        match = _reg_allow_disk.match(fspath)
        fspath = fspath.replace('\\', '/')
        if match.group(1):
            # keep the drive prefix ("c:\") unquoted; quote only the rest
            fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
                                quote(fspath[len(match.group(1)):]))
        else:
            fspath = quote(fspath)
    else:
        fspath = quote(fspath)
    if path.rev != -1:
        fspath = '%s@%s' % (fspath, path.rev)
    else:
        fspath = '%s@HEAD' % (fspath,)
    return 'file://%s' % (fspath,)
class SvnAuth(object):
    """ container for auth information for Subversion """
    def __init__(self, username, password, cache_auth=True, interactive=True):
        self.username = username
        self.password = password
        self.cache_auth = cache_auth
        self.interactive = interactive

    def makecmdoptions(self):
        """Render the stored credentials as svn command-line options."""
        options = []
        escaped_user = self.username.replace('"', '\\"')
        if escaped_user:
            options.append('--username="%s"' % (escaped_user,))
        escaped_pass = self.password.replace('"', '\\"')
        if escaped_pass:
            options.append('--password="%s"' % (escaped_pass,))
        if not self.cache_auth:
            options.append('--no-auth-cache')
        if not self.interactive:
            options.append('--non-interactive')
        return ' '.join(options)

    def __str__(self):
        return "<SvnAuth username=%s ...>" %(self.username,)
# one line of 'svn blame' output: revision, author, line text
# (presumably consumed by a blame() method further down -- confirm)
rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
class SvnWCCommandPath(common.PathBase):
""" path implementation offering access/modification to svn working copies.
It has methods similar to the functions in os.path and similar to the
commands of the svn client.
"""
sep = os.sep
def __new__(cls, wcpath=None, auth=None):
self = object.__new__(cls)
if isinstance(wcpath, cls):
if wcpath.__class__ == cls:
return wcpath
wcpath = wcpath.localpath
if _check_for_bad_chars(str(wcpath),
ALLOWED_CHARS):
raise ValueError("bad char in wcpath %s" % (wcpath, ))
self.localpath = py.path.local(wcpath)
self.auth = auth
return self
strpath = property(lambda x: str(x.localpath), None, None, "string path")
rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
def __eq__(self, other):
return self.localpath == getattr(other, 'localpath', None)
def _geturl(self):
if getattr(self, '_url', None) is None:
info = self.info()
self._url = info.url #SvnPath(info.url, info.rev)
assert isinstance(self._url, py.builtin._basestring)
return self._url
url = property(_geturl, None, None, "url of this WC item")
def _escape(self, cmd):
return _escape_helper(cmd)
def dump(self, obj):
""" pickle object into path location"""
return self.localpath.dump(obj)
def svnurl(self):
""" return current SvnPath for this WC-item. """
info = self.info()
return py.path.svnurl(info.url)
def __repr__(self):
return "svnwc(%r)" % (self.strpath) # , self._url)
def __str__(self):
return str(self.localpath)
def _makeauthoptions(self):
if self.auth is None:
return ''
return self.auth.makecmdoptions()
def _authsvn(self, cmd, args=None):
args = args and list(args) or []
args.append(self._makeauthoptions())
return self._svn(cmd, *args)
def _svn(self, cmd, *args):
l = ['svn %s' % cmd]
args = [self._escape(item) for item in args]
l.extend(args)
l.append('"%s"' % self._escape(self.strpath))
# try fixing the locale because we can't otherwise parse
string = fixlocale() + " ".join(l)
try:
try:
key = 'LC_MESSAGES'
hold = os.environ.get(key)
os.environ[key] = 'C'
out = py.process.cmdexec(string)
finally:
if hold:
os.environ[key] = hold
else:
del os.environ[key]
except py.process.cmdexec.Error:
e = sys.exc_info()[1]
strerr = e.err.lower()
if strerr.find('not found') != -1:
raise py.error.ENOENT(self)
elif strerr.find("E200009:") != -1:
raise py.error.ENOENT(self)
if (strerr.find('file exists') != -1 or
strerr.find('file already exists') != -1 or
strerr.find('w150002:') != -1 or
strerr.find("can't create directory") != -1):
raise py.error.EEXIST(strerr) #self)
raise
return out
    def switch(self, url):
        """ switch to given URL. """
        # 'svn switch' with authentication options appended
        self._authsvn('switch', [url])
    def checkout(self, url=None, rev=None):
        """ checkout from url to local wcpath. """
        args = []
        if url is None:
            url = self.url
        if rev is None or rev == -1:
            # HEAD checkout; svn 1.3 (non-windows) needs an explicit
            # @HEAD peg revision appended to the url
            if (py.std.sys.platform != 'win32' and
                    _getsvnversion() == '1.3'):
                url += "@HEAD"
        else:
            # explicit revision: peg syntax for svn 1.3, '-r' otherwise
            if _getsvnversion() == '1.3':
                url += "@%d" % rev
            else:
                args.append('-r' + str(rev))
        args.append(url)
        self._authsvn('co', args)
def update(self, rev='HEAD', interactive=True):
""" update working copy item to given revision. (None -> HEAD). """
opts = ['-r', rev]
if not interactive:
opts.append("--non-interactive")
self._authsvn('up', opts)
    def write(self, content, mode='w'):
        """ write content into local filesystem wc. """
        # plain file write; does not schedule the file for commit (see add())
        self.localpath.write(content, mode)
    def dirpath(self, *args):
        """ return the directory Path of the current Path. """
        # wrap the local dirpath in the same WC class, keeping credentials
        return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
    def _ensuredirs(self):
        """Recursively create missing parent directories (via svn mkdir),
        then this directory itself; returns self."""
        parent = self.dirpath()
        if parent.check(dir=0):
            # parent does not exist as a directory yet
            parent._ensuredirs()
        if self.check(dir=0):
            self.mkdir()
        return self
    def ensure(self, *args, **kwargs):
        """ ensure that an args-joined path exists (by default as
        a file). if you specify a keyword argument 'directory=True'
        then the path is forced to be a directory path.
        """
        p = self.join(*args)
        if p.check():
            if p.check(versioned=False):
                # exists on disk but is untracked: schedule for addition
                p.add()
            return p
        if kwargs.get('dir', 0):
            return p._ensuredirs()
        # create intermediate directories, then an empty versioned file
        parent = p.dirpath()
        parent._ensuredirs()
        p.write("")
        p.add()
        return p
    def mkdir(self, *args):
        """ create & return the directory joined with args. """
        if args:
            return self.join(*args).mkdir()
        else:
            # 'svn mkdir' creates and schedules the directory for commit
            self._svn('mkdir')
            return self
    def add(self):
        """ add ourself to svn """
        # schedules the path for addition at the next commit
        self._svn('add')
    def remove(self, rec=1, force=1):
        """ remove a file or a directory tree. 'rec'ursive is
        ignored and considered always true (because of
        underlying svn semantics.
        """
        assert rec, "svn cannot remove non-recursively"
        if not self.check(versioned=True):
            # not added to svn (anymore?), just remove
            py.path.local(self).remove()
            return
        flags = []
        if force:
            # pass --force through to 'svn remove'
            flags.append('--force')
        self._svn('remove', *flags)
    def copy(self, target):
        """ copy path to target."""
        # NOTE(review): both paths are interpolated unquoted/unescaped here
        # (unlike _svn()); paths containing spaces or shell metacharacters
        # would break - confirm whether callers guarantee safe paths
        py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
    def rename(self, target):
        """ rename this path to target. """
        # NOTE(review): paths are interpolated unquoted; paths with spaces
        # or shell metacharacters would break - confirm callers' inputs
        py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
    def lock(self):
        """ set a lock (exclusive) on the resource """
        out = self._authsvn('lock').strip()
        if not out:
            # warning or error, raise exception
            # (empty stdout is taken to mean svn reported a problem)
            raise ValueError("unknown error in svn lock command")
    def unlock(self):
        """ unset a previously set lock """
        out = self._authsvn('unlock').strip()
        if out.startswith('svn:'):
            # warning or error, raise exception
            # (message text follows the 'svn:' prefix)
            raise Exception(out[4:])
def cleanup(self):
""" remove any locks from the resource """
# XXX should be fixed properly!!!
try:
self.unlock()
except:
pass
    def status(self, updates=0, rec=0, externals=0):
        """ return (collective) Status object for this file.

        updates: also query the repository for pending updates (-u)
        rec: recurse into subdirectories
        externals: unsupported, raises ValueError if true
        """
        # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
        # 2201 2192 jum test
        # XXX
        if externals:
            raise ValueError("XXX cannot perform status() "
                             "on external items yet")
        else:
            #1.2 supports: externals = '--ignore-externals'
            externals = ''
        if rec:
            rec= ''
        else:
            rec = '--non-recursive'
        # XXX does not work on all subversion versions
        #if not externals:
        #    externals = '--ignore-externals'
        if updates:
            updates = '-u'
        else:
            updates = ''
        # prefer the XML status format; fall back to plain-text parsing
        # for old svn clients that do not support --xml
        try:
            cmd = 'status -v --xml --no-ignore %s %s %s' % (
                updates, rec, externals)
            out = self._authsvn(cmd)
        except py.process.cmdexec.Error:
            cmd = 'status -v --no-ignore %s %s %s' % (
                updates, rec, externals)
            out = self._authsvn(cmd)
            rootstatus = WCStatus(self).fromstring(out, self)
        else:
            rootstatus = XMLWCStatus(self).fromstring(out, self)
        return rootstatus
    def diff(self, rev=None):
        """ return a diff of the current path against revision rev (defaulting
        to the last one).
        """
        args = []
        if rev is not None:
            # note: rev must be an integer (formatted with %d)
            args.append("-r %d" % rev)
        out = self._authsvn('diff', args)
        return out
def blame(self):
""" return a list of tuples of three elements:
(revision, commiter, line)
"""
out = self._svn('blame')
result = []
blamelines = out.splitlines()
reallines = py.path.svnurl(self.url).readlines()
for i, (blameline, line) in enumerate(
zip(blamelines, reallines)):
m = rex_blame.match(blameline)
if not m:
raise ValueError("output line %r of svn blame does not match "
"expected format" % (line, ))
rev, name, _ = m.groups()
result.append((int(rev), name, line))
return result
    # matches the final 'Committed revision N.' line of svn commit output
    _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
    def commit(self, msg='', rec=1):
        """ commit with support for non-recursive commits """
        # XXX i guess escaping should be done better here?!?
        cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
        if not rec:
            cmd += ' -N'
        out = self._authsvn(cmd)
        try:
            # invalidate the cached 'svn info' for this path
            del cache.info[self]
        except KeyError:
            pass
        if out:
            m = self._rex_commit.match(out)
            # NOTE(review): assumes non-empty output always ends with
            # 'Committed revision N.'; m would be None otherwise - confirm
            return int(m.group(1))
    def propset(self, name, value, *args):
        """ set property name to value on this path. """
        # write the value to a temp file and use --file to avoid
        # command-line quoting issues with arbitrary property values
        d = py.path.local.mkdtemp()
        try:
            p = d.join('value')
            p.write(value)
            self._svn('propset', name, '--file', str(p), *args)
        finally:
            d.remove()
    def propget(self, name):
        """ return the value of property *name* on this path. """
        res = self._svn('propget', name)
        return res[:-1] # strip trailing newline
    def propdel(self, name):
        """ delete property *name* on this path; returns svn's output. """
        res = self._svn('propdel', name)
        return res[:-1] # strip trailing newline
    def proplist(self, rec=0):
        """ return a mapping of property names to property values.
        If rec is True, then return a dictionary mapping sub-paths to such mappings.
        """
        if rec:
            res = self._svn('proplist -R')
            return make_recursive_propdict(self, res)
        else:
            res = self._svn('proplist')
            lines = res.split('\n')
            # first line is the "Properties on ..." header; the remaining
            # lines are indented property names
            lines = [x.strip() for x in lines[1:]]
            return PropListDict(self, lines)
def revert(self, rec=0):
""" revert the local changes of this path. if rec is True, do so
recursively. """
if rec:
result = self._svn('revert -R')
else:
result = self._svn('revert')
return result
    def new(self, **kw):
        """ create a modified version of this path. A 'rev' argument
        indicates a new revision.
        the following keyword arguments modify various path parts:

          http://host.com/repo/path/file.ext
          |-----------------------|          dirname
                                    |------| basename
                                    |--|     purebasename
                                        |--| ext
        """
        if kw:
            localpath = self.localpath.new(**kw)
        else:
            localpath = self.localpath
        # same class and credentials, possibly modified local path
        return self.__class__(localpath, auth=self.auth)
    def join(self, *args, **kwargs):
        """ return a new Path (with the same revision) which is composed
        of the self Path followed by 'args' path components.
        """
        if not args:
            # joining nothing returns self unchanged
            return self
        localpath = self.localpath.join(*args, **kwargs)
        return self.__class__(localpath, auth=self.auth)
    def info(self, usecache=1):
        """ return an Info structure with svn-provided information.

        With usecache true (default) a previously parsed result from the
        module-level cache is reused; raises py.error.ENOENT when the
        path is not a versioned resource.
        """
        info = usecache and cache.info.get(self)
        if not info:
            try:
                output = self._svn('info')
            except py.process.cmdexec.Error:
                e = sys.exc_info()[1]
                if e.err.find('Path is not a working copy directory') != -1:
                    raise py.error.ENOENT(self, e.err)
                elif e.err.find("is not under version control") != -1:
                    raise py.error.ENOENT(self, e.err)
                raise
            # XXX SVN 1.3 has output on stderr instead of stdout (while it does
            # return 0!), so a bit nasty, but we assume no output is output
            # to stderr...
            if (output.strip() == '' or
                    output.lower().find('not a versioned resource') != -1):
                raise py.error.ENOENT(self, output)
            info = InfoSvnWCCommand(output)
            # Can't reliably compare on Windows without access to win32api
            if py.std.sys.platform != 'win32':
                if info.path != self.localpath:
                    raise py.error.ENOENT(self, "not a versioned resource:" +
                            " %s != %s" % (info.path, self.localpath))
            cache.info[self] = info
        return info
    def listdir(self, fil=None, sort=None):
        """ return a sequence of Paths.

        listdir will return either a tuple or a list of paths
        depending on implementation choices.
        """
        if isinstance(fil, str):
            fil = common.FNMatcher(fil)
        # XXX unify argument naming with LocalPath.listdir
        def notsvn(path):
            # skip svn's administrative '.svn' directories
            return path.basename != '.svn'
        paths = []
        for localpath in self.localpath.listdir(notsvn):
            p = self.__class__(localpath, auth=self.auth)
            if notsvn(p) and (not fil or fil(p)):
                paths.append(p)
        self._sortlist(paths, sort)
        return paths
    def open(self, mode='r'):
        """ return an opened file with the given mode. """
        # opens the underlying local filesystem file directly
        return open(self.strpath, mode)
    def _getbyspec(self, spec):
        """Delegate spec-based part retrieval to the wrapped local path."""
        return self.localpath._getbyspec(spec)
    class Checkers(py.path.local.Checkers):
        """check() helpers; adds 'versioned' on top of the local-path checkers."""
        def __init__(self, path):
            self.svnwcpath = path
            self.path = path.localpath
        def versioned(self):
            # a path counts as versioned when 'svn info' succeeds on it
            try:
                s = self.svnwcpath.info()
            except (py.error.ENOENT, py.error.EEXIST):
                return False
            except py.process.cmdexec.Error:
                e = sys.exc_info()[1]
                if e.err.find('is not a working copy')!=-1:
                    return False
                if e.err.lower().find('not a versioned resource') != -1:
                    return False
                raise
            else:
                return True
    def log(self, rev_start=None, rev_end=1, verbose=False):
        """ return a list of LogEntry instances for this path.
        rev_start is the starting revision (defaulting to the first one).
        rev_end is the last revision (defaulting to HEAD).
        if verbose is True, then the LogEntry instances also know which files changed.
        """
        assert self.check() # make it simpler for the pipe
        rev_start = rev_start is None and "HEAD" or rev_start
        rev_end = rev_end is None and "HEAD" or rev_end
        if rev_start == "HEAD" and rev_end == 1:
            # default range: omit -r entirely
            rev_opt = ""
        else:
            rev_opt = "-r %s:%s" % (rev_start, rev_end)
        verbose_opt = verbose and "-v" or ""
        locale_env = fixlocale()
        # some blather on stderr
        auth_opt = self._makeauthoptions()
        #stdin, stdout, stderr = os.popen3(locale_env +
        # 'svn log --xml %s %s %s "%s"' % (
        # rev_opt, verbose_opt, auth_opt,
        # self.strpath))
        cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
            rev_opt, verbose_opt, auth_opt, self.strpath)
        popen = subprocess.Popen(cmd,
                                 stdout=subprocess.PIPE,
                                 stderr=subprocess.PIPE,
                                 shell=True,
                                 )
        stdout, stderr = popen.communicate()
        stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
        minidom,ExpatError = importxml()
        try:
            tree = minidom.parseString(stdout)
        except ExpatError:
            # unparseable output typically means svn reported an error
            raise ValueError('no such revision')
        result = []
        for logentry in filter(None, tree.firstChild.childNodes):
            if logentry.nodeType == logentry.ELEMENT_NODE:
                result.append(LogEntry(logentry))
        return result
    def size(self):
        """ Return the size of the file content of the Path. """
        # as reported by 'svn info' (see InfoSvnWCCommand)
        return self.info().size
    def mtime(self):
        """ Return the last modification time of the file. """
        # last-changed time as reported by 'svn info'
        return self.info().mtime
def __hash__(self):
return hash((self.strpath, self.__class__, self.auth))
class WCStatus:
    """Bucketed result of parsing plain-text 'svn status' output: each
    name in 'attrnames' is a list attribute holding the paths that are
    in that state."""
    attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
                 'deleted', 'prop_modified', 'unknown', 'update_available',
                 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
                 )
    def __init__(self, wcpath, rev=None, modrev=None, author=None):
        self.wcpath = wcpath
        self.rev = rev
        self.modrev = modrev
        self.author = author
        # one empty bucket per status category
        for name in self.attrnames:
            setattr(self, name, [])
    def allpath(self, sort=True, **kw):
        """Return all paths from the selected buckets (all by default);
        pass e.g. modified=False to exclude a bucket."""
        d = {}
        for name in self.attrnames:
            if name not in kw or kw[name]:
                for path in getattr(self, name):
                    d[path] = 1
        l = d.keys()  # py2: keys() returns a list
        if sort:
            l.sort()
        return l
    # XXX a bit scary to assume there's always 2 spaces between username and
    # path, however with win32 allowing spaces in user names there doesn't
    # seem to be a more solid approach :(
    _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
    def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
        """ return a new WCStatus object from data 's'
        """
        rootstatus = WCStatus(rootwcpath, rev, modrev, author)
        update_rev = None
        for line in data.split('\n'):
            if not line.strip():
                continue
            #print "processing %r" % line
            flags, rest = line[:8], line[8:]
            # first column
            # c0..c7 are the 8 status columns; x6 is deliberately unused
            c0,c1,c2,c3,c4,c5,x6,c7 = flags
            #if '*' in line:
            #    print "flags", repr(flags), "rest", repr(rest)
            if c0 in '?XI':
                # unversioned / external / ignored lines carry no rev info
                fn = line.split(None, 1)[1]
                if c0 == '?':
                    wcpath = rootwcpath.join(fn, abs=1)
                    rootstatus.unknown.append(wcpath)
                elif c0 == 'X':
                    wcpath = rootwcpath.__class__(
                        rootwcpath.localpath.join(fn, abs=1),
                        auth=rootwcpath.auth)
                    rootstatus.external.append(wcpath)
                elif c0 == 'I':
                    wcpath = rootwcpath.join(fn, abs=1)
                    rootstatus.ignored.append(wcpath)
                continue
            #elif c0 in '~!' or c4 == 'S':
            #    raise NotImplementedError("received flag %r" % c0)
            m = WCStatus._rex_status.match(rest)
            if not m:
                if c7 == '*':
                    fn = rest.strip()
                    wcpath = rootwcpath.join(fn, abs=1)
                    rootstatus.update_available.append(wcpath)
                    continue
                if line.lower().find('against revision:')!=-1:
                    update_rev = int(rest.split(':')[1].strip())
                    continue
                if line.lower().find('status on external') > -1:
                    # XXX not sure what to do here... perhaps we want to
                    # store some state instead of just continuing, as right
                    # now it makes the top-level external get added twice
                    # (once as external, once as 'normal' unchanged item)
                    # because of the way SVN presents external items
                    continue
                # keep trying
                raise ValueError("could not parse line %r" % line)
            else:
                rev, modrev, author, fn = m.groups()
                wcpath = rootwcpath.join(fn, abs=1)
                #assert wcpath.check()
                if c0 == 'M':
                    assert wcpath.check(file=1), "didn't expect a directory with changed content here"
                    rootstatus.modified.append(wcpath)
                elif c0 == 'A' or c3 == '+' :
                    rootstatus.added.append(wcpath)
                elif c0 == 'D':
                    rootstatus.deleted.append(wcpath)
                elif c0 == 'C':
                    rootstatus.conflict.append(wcpath)
                elif c0 == '~':
                    rootstatus.kindmismatch.append(wcpath)
                elif c0 == '!':
                    rootstatus.incomplete.append(wcpath)
                elif c0 == 'R':
                    rootstatus.replaced.append(wcpath)
                elif not c0.strip():
                    rootstatus.unchanged.append(wcpath)
                else:
                    raise NotImplementedError("received flag %r" % c0)
                if c1 == 'M':
                    rootstatus.prop_modified.append(wcpath)
                # XXX do we cover all client versions here?
                if c2 == 'L' or c5 == 'K':
                    rootstatus.locked.append(wcpath)
                if c7 == '*':
                    rootstatus.update_available.append(wcpath)
                # the root path's own line fills in the summary attributes
                if wcpath == rootwcpath:
                    rootstatus.rev = rev
                    rootstatus.modrev = modrev
                    rootstatus.author = author
                    if update_rev:
                        rootstatus.update_rev = update_rev
                    continue
        return rootstatus
    fromstring = staticmethod(fromstring)
class XMLWCStatus(WCStatus):
    """WCStatus variant that parses the XML output of 'svn status --xml'."""
    def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
        """ parse 'data' (XML string as outputted by svn st) into a status obj
        """
        # XXX for externals, the path is shown twice: once
        # with external information, and once with full info as if
        # the item was a normal non-external... the current way of
        # dealing with this issue is by ignoring it - this does make
        # externals appear as external items as well as 'normal',
        # unchanged ones in the status object so this is far from ideal
        rootstatus = WCStatus(rootwcpath, rev, modrev, author)
        update_rev = None
        minidom, ExpatError = importxml()
        try:
            doc = minidom.parseString(data)
        except ExpatError:
            e = sys.exc_info()[1]
            raise ValueError(str(e))
        urevels = doc.getElementsByTagName('against')
        if urevels:
            # remote revision the status was compared against (-u runs)
            rootstatus.update_rev = urevels[-1].getAttribute('revision')
        for entryel in doc.getElementsByTagName('entry'):
            path = entryel.getAttribute('path')
            statusel = entryel.getElementsByTagName('wc-status')[0]
            itemstatus = statusel.getAttribute('item')
            # states without commit information are bucketed directly
            if itemstatus == 'unversioned':
                wcpath = rootwcpath.join(path, abs=1)
                rootstatus.unknown.append(wcpath)
                continue
            elif itemstatus == 'external':
                wcpath = rootwcpath.__class__(
                    rootwcpath.localpath.join(path, abs=1),
                    auth=rootwcpath.auth)
                rootstatus.external.append(wcpath)
                continue
            elif itemstatus == 'ignored':
                wcpath = rootwcpath.join(path, abs=1)
                rootstatus.ignored.append(wcpath)
                continue
            elif itemstatus == 'incomplete':
                wcpath = rootwcpath.join(path, abs=1)
                rootstatus.incomplete.append(wcpath)
                continue
            rev = statusel.getAttribute('revision')
            if itemstatus == 'added' or itemstatus == 'none':
                rev = '0'
                modrev = '?'
                author = '?'
                date = ''
            elif itemstatus == "replaced":
                # NOTE(review): this branch leaves modrev/author/date at
                # whatever a previous iteration set (or unbound on the
                # very first entry) - confirm this is intended
                pass
            else:
                #print entryel.toxml()
                commitel = entryel.getElementsByTagName('commit')[0]
                if commitel:
                    modrev = commitel.getAttribute('revision')
                    author = ''
                    author_els = commitel.getElementsByTagName('author')
                    if author_els:
                        for c in author_els[0].childNodes:
                            author += c.nodeValue
                    date = ''
                    for c in commitel.getElementsByTagName('date')[0]\
                            .childNodes:
                        date += c.nodeValue
            wcpath = rootwcpath.join(path, abs=1)
            assert itemstatus != 'modified' or wcpath.check(file=1), (
                'did\'t expect a directory with changed content here')
            # map svn's XML item names onto WCStatus bucket names
            itemattrname = {
                'normal': 'unchanged',
                'unversioned': 'unknown',
                'conflicted': 'conflict',
                'none': 'added',
            }.get(itemstatus, itemstatus)
            attr = getattr(rootstatus, itemattrname)
            attr.append(wcpath)
            propsstatus = statusel.getAttribute('props')
            if propsstatus not in ('none', 'normal'):
                rootstatus.prop_modified.append(wcpath)
            # the root path's own entry fills in the summary attributes
            if wcpath == rootwcpath:
                rootstatus.rev = rev
                rootstatus.modrev = modrev
                rootstatus.author = author
                rootstatus.date = date
            # handle repos-status element (remote info)
            rstatusels = entryel.getElementsByTagName('repos-status')
            if rstatusels:
                rstatusel = rstatusels[0]
                ritemstatus = rstatusel.getAttribute('item')
                if ritemstatus in ('added', 'modified'):
                    rootstatus.update_available.append(wcpath)
            lockels = entryel.getElementsByTagName('lock')
            if len(lockels):
                rootstatus.locked.append(wcpath)
        return rootstatus
    fromstring = staticmethod(fromstring)
class InfoSvnWCCommand:
    """Parsed representation of 'svn info' output for a working-copy item."""
    def __init__(self, output):
        # Path: test
        # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
        # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
        # Revision: 2151
        # Node Kind: directory
        # Schedule: normal
        # Last Changed Author: hpk
        # Last Changed Rev: 2100
        # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
        # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
        # parse the 'Key: value' lines into a dict with normalized
        # (lowercased, space-stripped) keys
        d = {}
        for line in output.split('\n'):
            if not line.strip():
                continue
            key, value = line.split(':', 1)
            key = key.lower().replace(' ', '')
            value = value.strip()
            d[key] = value
        try:
            self.url = d['url']
        except KeyError:
            raise ValueError("Not a versioned resource")
            #raise ValueError, "Not a versioned resource %r" % path
        self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
        try:
            self.rev = int(d['revision'])
        except KeyError:
            self.rev = None
        self.path = py.path.local(d['path'])
        self.size = self.path.size()
        # the 'last changed' fields are optional in svn info output
        if 'lastchangedrev' in d:
            self.created_rev = int(d['lastchangedrev'])
        if 'lastchangedauthor' in d:
            self.last_author = d['lastchangedauthor']
        if 'lastchangeddate' in d:
            self.mtime = parse_wcinfotime(d['lastchangeddate'])
            self.time = self.mtime * 1000000
    def __eq__(self, other):
        # NOTE(review): no __ne__ or __hash__ defined alongside __eq__;
        # under py2 the default identity hash still applies - confirm ok
        return self.__dict__ == other.__dict__
def parse_wcinfotime(timestr):
    """ Returns seconds since epoch, UTC. """
    # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
    match = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
    if match is None:
        raise ValueError("timestring %r does not match" % timestr)
    datepart, timezone = match.group(1), match.group(2)
    # the offset is deliberately not applied; the wall-clock part is
    # interpreted as UTC
    parsed = time.strptime(datepart, "%Y-%m-%d %H:%M:%S")
    return calendar.timegm(parsed)
def make_recursive_propdict(wcroot,
                            output,
                            rex = re.compile("Properties on '(.*)':")):
    """ Return a dictionary of path->PropListDict mappings.

    'output' is the text of 'svn proplist -R': a "Properties on 'PATH':"
    header followed by an indented block of property names, repeated.
    """
    lines = [x for x in output.split('\n') if x]
    pdict = {}
    while lines:
        line = lines.pop(0)
        m = rex.match(line)
        if not m:
            raise ValueError("could not parse propget-line: %r" % line)
        path = m.groups()[0]
        wcpath = wcroot.join(path, abs=1)
        propnames = []
        # consume the indented property-name lines for this path
        while lines and lines[0].startswith(' '):
            propname = lines.pop(0).strip()
            propnames.append(propname)
        assert propnames, "must have found properties!"
        pdict[wcpath] = PropListDict(wcpath, propnames)
    return pdict
def importxml(cache=[]):
    """Import and memoize (minidom, ExpatError); the mutable default
    list deliberately acts as a call-to-call cache."""
    if not cache:
        from xml.dom import minidom
        from xml.parsers.expat import ExpatError
        cache.extend([minidom, ExpatError])
    return cache
class LogEntry:
    """One <logentry> element of 'svn log --xml' output."""
    def __init__(self, logentry):
        self.rev = int(logentry.getAttribute('revision'))
        for lpart in filter(None, logentry.childNodes):
            if lpart.nodeType == lpart.ELEMENT_NODE:
                if lpart.nodeName == 'author':
                    self.author = lpart.firstChild.nodeValue
                elif lpart.nodeName == 'msg':
                    if lpart.firstChild:
                        self.msg = lpart.firstChild.nodeValue
                    else:
                        # empty commit message
                        self.msg = ''
                elif lpart.nodeName == 'date':
                    #2003-07-29T20:05:11.598637Z
                    timestr = lpart.firstChild.nodeValue
                    self.date = parse_apr_time(timestr)
                elif lpart.nodeName == 'paths':
                    # only present for verbose (-v) logs
                    self.strpaths = []
                    for ppart in filter(None, lpart.childNodes):
                        if ppart.nodeType == ppart.ELEMENT_NODE:
                            self.strpaths.append(PathEntry(ppart))
    def __repr__(self):
        # NOTE(review): assumes 'author' and 'date' elements were present
        # in the logentry; __repr__ would fail otherwise - confirm
        return '<Logentry rev=%d author=%s date=%s>' % (
            self.rev, self.author, self.date)
| mpl-2.0 |
sunqb/oa_qian | flask/Lib/site-packages/openid/extensions/sreg.py | 143 | 17848 | """Simple registration request and response parsing and object representation
This module contains objects representing simple registration requests
and responses that can be used with both OpenID relying parties and
OpenID providers.
1. The relying party creates a request object and adds it to the
C{L{AuthRequest<openid.consumer.consumer.AuthRequest>}} object
before making the C{checkid_} request to the OpenID provider::
auth_request.addExtension(SRegRequest(required=['email']))
2. The OpenID provider extracts the simple registration request from
the OpenID request using C{L{SRegRequest.fromOpenIDRequest}},
gets the user's approval and data, creates a C{L{SRegResponse}}
object and adds it to the C{id_res} response::
sreg_req = SRegRequest.fromOpenIDRequest(checkid_request)
# [ get the user's approval and data, informing the user that
# the fields in sreg_response were requested ]
sreg_resp = SRegResponse.extractResponse(sreg_req, user_data)
sreg_resp.toMessage(openid_response.fields)
3. The relying party uses C{L{SRegResponse.fromSuccessResponse}} to
extract the data from the OpenID response::
sreg_resp = SRegResponse.fromSuccessResponse(success_response)
@since: 2.0
@var sreg_data_fields: The names of the data fields that are listed in
the sreg spec, and a description of them in English
@var sreg_uri: The preferred URI to use for the simple registration
namespace and XRD Type value
"""
from openid.message import registerNamespaceAlias, \
NamespaceAliasRegistrationError
from openid.extension import Extension
from openid import oidutil
try:
basestring #pylint:disable-msg=W0104
except NameError:
# For Python 2.2
basestring = (str, unicode) #pylint:disable-msg=W0622
__all__ = [
'SRegRequest',
'SRegResponse',
'data_fields',
'ns_uri',
'ns_uri_1_0',
'ns_uri_1_1',
'supportsSReg',
]
# The data fields that are listed in the sreg spec
# (unqualified field name -> human-readable English description)
data_fields = {
    'fullname':'Full Name',
    'nickname':'Nickname',
    'dob':'Date of Birth',
    'email':'E-mail Address',
    'gender':'Gender',
    'postcode':'Postal Code',
    'country':'Country',
    'language':'Language',
    'timezone':'Time Zone',
    }
def checkFieldName(field_name):
    """Validate that *field_name* names a defined simple registration
    data field.

    @raise ValueError: if the field name is not a valid simple
        registration data field name
    """
    if field_name in data_fields:
        return
    raise ValueError('%r is not a defined simple registration field' %
                     (field_name,))
# URI used in the wild for Yadis documents advertising simple
# registration support
ns_uri_1_0 = 'http://openid.net/sreg/1.0'
# URI in the draft specification for simple registration 1.1
# <http://openid.net/specs/openid-simple-registration-extension-1_1-01.html>
ns_uri_1_1 = 'http://openid.net/extensions/sreg/1.1'
# This attribute will always hold the preferred URI to use when adding
# sreg support to an XRDS file or in an OpenID namespace declaration.
ns_uri = ns_uri_1_1
# Try to claim the friendly alias 'sreg' for the 1.1 namespace; failure
# (another extension already owns the alias) is non-fatal and only logged.
try:
    registerNamespaceAlias(ns_uri_1_1, 'sreg')
except NamespaceAliasRegistrationError, e:
    oidutil.log('registerNamespaceAlias(%r, %r) failed: %s' % (ns_uri_1_1,
                                                               'sreg', str(e),))
def supportsSReg(endpoint):
    """Does the given endpoint advertise support for simple
    registration?

    @param endpoint: The endpoint object as returned by OpenID discovery
    @type endpoint: openid.consumer.discover.OpenIDEndpoint

    @returns: Whether an sreg type was advertised by the endpoint
    @rtype: bool
    """
    uses = endpoint.usesExtension
    return uses(ns_uri_1_1) or uses(ns_uri_1_0)
class SRegNamespaceError(ValueError):
    """The simple registration namespace was not found and could not
    be created using the expected name (there's another extension
    using the name 'sreg').  Raised by C{L{getSRegNS}}.

    This is not I{illegal}, for OpenID 2, although it probably
    indicates a problem, since it's not expected that other extensions
    will re-use the alias that is in use for OpenID 1.

    If this is an OpenID 1 request, then there is no recourse. This
    should not happen unless some code has modified the namespaces for
    the message that is being processed.
    """
def getSRegNS(message):
    """Extract the simple registration namespace URI from the given
    OpenID message. Handles OpenID 1 and 2, as well as both sreg
    namespace URIs found in the wild, as well as missing namespace
    definitions (for OpenID 1).

    @param message: The OpenID message from which to parse simple
        registration fields. This may be a request or response message.
    @type message: C{L{openid.message.Message}}

    @returns: the sreg namespace URI for the supplied message. The
        message may be modified to define a simple registration
        namespace.
    @rtype: C{str}

    @raise SRegNamespaceError: when using OpenID 1 if the message
        defines the 'sreg' alias to be something other than a simple
        registration type.
    """
    # See if there exists an alias for one of the two defined simple
    # registration types.
    for sreg_ns_uri in [ns_uri_1_1, ns_uri_1_0]:
        alias = message.namespaces.getAlias(sreg_ns_uri)
        if alias is not None:
            break
    else:
        # There is no alias for either of the types, so try to add
        # one. We default to using the modern value (1.1)
        sreg_ns_uri = ns_uri_1_1
        try:
            message.namespaces.addAlias(ns_uri_1_1, 'sreg')
        except KeyError, why:
            # An alias for the string 'sreg' already exists, but it's
            # defined for something other than simple registration
            raise SRegNamespaceError(why[0])
    # we know that sreg_ns_uri defined, because it's defined in the
    # else clause of the loop as well, so disable the warning
    return sreg_ns_uri #pylint:disable-msg=W0631
class SRegRequest(Extension):
    """An object to hold the state of a simple registration request.

    @ivar required: A list of the required fields in this simple
        registration request
    @type required: [str]

    @ivar optional: A list of the optional fields in this simple
        registration request
    @type optional: [str]

    @ivar policy_url: The policy URL that was provided with the request
    @type policy_url: str or NoneType

    @group Consumer: requestField, requestFields, getExtensionArgs, addToOpenIDRequest
    @group Server: fromOpenIDRequest, parseExtensionArgs
    """
    ns_alias = 'sreg'
    def __init__(self, required=None, optional=None, policy_url=None,
                 sreg_ns_uri=ns_uri):
        """Initialize an empty simple registration request"""
        Extension.__init__(self)
        self.required = []
        self.optional = []
        self.policy_url = policy_url
        self.ns_uri = sreg_ns_uri
        # strict=True so duplicate names in the constructor args raise
        if required:
            self.requestFields(required, required=True, strict=True)
        if optional:
            self.requestFields(optional, required=False, strict=True)
    # Assign getSRegNS to a static method so that it can be
    # overridden for testing.
    _getSRegNS = staticmethod(getSRegNS)
    def fromOpenIDRequest(cls, request):
        """Create a simple registration request that contains the
        fields that were requested in the OpenID request with the
        given arguments

        @param request: The OpenID request
        @type request: openid.server.CheckIDRequest

        @returns: The newly created simple registration request
        @rtype: C{L{SRegRequest}}
        """
        self = cls()
        # Since we're going to mess with namespace URI mapping, don't
        # mutate the object that was passed in.
        message = request.message.copy()
        self.ns_uri = self._getSRegNS(message)
        args = message.getArgs(self.ns_uri)
        self.parseExtensionArgs(args)
        return self
    fromOpenIDRequest = classmethod(fromOpenIDRequest)
    def parseExtensionArgs(self, args, strict=False):
        """Parse the unqualified simple registration request
        parameters and add them to this object.

        This method is essentially the inverse of
        C{L{getExtensionArgs}}. This method restores the serialized simple
        registration request fields.

        If you are extracting arguments from a standard OpenID
        checkid_* request, you probably want to use C{L{fromOpenIDRequest}},
        which will extract the sreg namespace and arguments from the
        OpenID request. This method is intended for cases where the
        OpenID server needs more control over how the arguments are
        parsed than that method provides.

        @param args: The unqualified simple registration arguments
        @type args: {str:str}

        @param strict: Whether requests with fields that are not
            defined in the simple registration specification should be
            tolerated (and ignored)
        @type strict: bool

        @returns: None; updates this object
        """
        for list_name in ['required', 'optional']:
            required = (list_name == 'required')
            items = args.get(list_name)
            if items:
                # field names arrive as a comma-separated string
                for field_name in items.split(','):
                    try:
                        self.requestField(field_name, required, strict)
                    except ValueError:
                        # in non-strict mode unknown fields are ignored
                        if strict:
                            raise
        self.policy_url = args.get('policy_url')
    def allRequestedFields(self):
        """A list of all of the simple registration fields that were
        requested, whether they were required or optional.

        @rtype: [str]
        """
        return self.required + self.optional
    def wereFieldsRequested(self):
        """Have any simple registration fields been requested?

        @rtype: bool
        """
        return bool(self.allRequestedFields())
    def __contains__(self, field_name):
        """Was this field in the request?"""
        return (field_name in self.required or
                field_name in self.optional)
    def requestField(self, field_name, required=False, strict=False):
        """Request the specified field from the OpenID user

        @param field_name: the unqualified simple registration field name
        @type field_name: str

        @param required: whether the given field should be presented
            to the user as being a required to successfully complete
            the request

        @param strict: whether to raise an exception when a field is
            added to a request more than once

        @raise ValueError: when the field requested is not a simple
            registration field or strict is set and the field was
            requested more than once
        """
        checkFieldName(field_name)
        if strict:
            if field_name in self.required or field_name in self.optional:
                raise ValueError('That field has already been requested')
        else:
            # silently tolerate duplicates; a re-request as 'required'
            # promotes a field from the optional list
            if field_name in self.required:
                return
            if field_name in self.optional:
                if required:
                    self.optional.remove(field_name)
                else:
                    return
        if required:
            self.required.append(field_name)
        else:
            self.optional.append(field_name)
    def requestFields(self, field_names, required=False, strict=False):
        """Add the given list of fields to the request

        @param field_names: The simple registration data fields to request
        @type field_names: [str]

        @param required: Whether these values should be presented to
            the user as required

        @param strict: whether to raise an exception when a field is
            added to a request more than once

        @raise ValueError: when a field requested is not a simple
            registration field or strict is set and a field was
            requested more than once
        """
        if isinstance(field_names, basestring):
            raise TypeError('Fields should be passed as a list of '
                            'strings (not %r)' % (type(field_names),))
        for field_name in field_names:
            self.requestField(field_name, required, strict=strict)
    def getExtensionArgs(self):
        """Get a dictionary of unqualified simple registration
        arguments representing this request.

        This method is essentially the inverse of
        C{L{parseExtensionArgs}}. This method serializes the simple
        registration request fields.

        @rtype: {str:str}
        """
        args = {}
        if self.required:
            args['required'] = ','.join(self.required)
        if self.optional:
            args['optional'] = ','.join(self.optional)
        if self.policy_url:
            args['policy_url'] = self.policy_url
        return args
class SRegResponse(Extension):
    """Represents the data returned in a simple registration response
    inside of an OpenID C{id_res} response. This object will be
    created by the OpenID server, added to the C{id_res} response
    object, and then extracted from the C{id_res} message by the
    Consumer.

    @ivar data: The simple registration data, keyed by the unqualified
        simple registration name of the field (i.e. nickname is keyed
        by C{'nickname'})

    @ivar ns_uri: The URI under which the simple registration data was
        stored in the response message.

    @group Server: extractResponse

    @group Consumer: fromSuccessResponse

    @group Read-only dictionary interface: keys, iterkeys, items, iteritems,
        __iter__, get, __getitem__, keys, has_key
    """

    ns_alias = 'sreg'

    def __init__(self, data=None, sreg_ns_uri=ns_uri):
        Extension.__init__(self)
        # Use a fresh dict per instance when no data is supplied
        # (avoids the shared-mutable-default pitfall).
        if data is None:
            self.data = {}
        else:
            self.data = data
        self.ns_uri = sreg_ns_uri

    def extractResponse(cls, request, data):
        """Take a C{L{SRegRequest}} and a dictionary of simple
        registration values and create a C{L{SRegResponse}}
        object containing that data.

        @param request: The simple registration request object
        @type request: SRegRequest

        @param data: The simple registration data for this
            response, as a dictionary from unqualified simple
            registration field name to string (unicode) value. For
            instance, the nickname should be stored under the key
            'nickname'.
        @type data: {str:str}

        @returns: a simple registration response object
        @rtype: SRegResponse
        """
        self = cls()
        self.ns_uri = request.ns_uri
        # Only copy values for fields that the request actually asked for.
        for field in request.allRequestedFields():
            value = data.get(field)
            if value is not None:
                self.data[field] = value
        return self

    # Pre-decorator classmethod wiring, kept for old-Python compatibility.
    extractResponse = classmethod(extractResponse)

    # Assign getSRegArgs to a static method so that it can be
    # overridden for testing
    _getSRegNS = staticmethod(getSRegNS)

    def fromSuccessResponse(cls, success_response, signed_only=True):
        """Create a C{L{SRegResponse}} object from a successful OpenID
        library response
        (C{L{openid.consumer.consumer.SuccessResponse}}) response
        message

        @param success_response: A SuccessResponse from consumer.complete()
        @type success_response: C{L{openid.consumer.consumer.SuccessResponse}}

        @param signed_only: Whether to process only data that was
            signed in the id_res message from the server.
        @type signed_only: bool

        @rtype: SRegResponse
        @returns: A simple registration response containing the data
            that was supplied with the C{id_res} response, or None if
            the response carried no sreg arguments.
        """
        self = cls()
        self.ns_uri = self._getSRegNS(success_response.message)
        if signed_only:
            args = success_response.getSignedNS(self.ns_uri)
        else:
            args = success_response.message.getArgs(self.ns_uri)
        if not args:
            return None

        # 'data_fields' is the module-level registry of valid sreg
        # field names; anything else in args is ignored.
        for field_name in data_fields:
            if field_name in args:
                self.data[field_name] = args[field_name]

        return self

    fromSuccessResponse = classmethod(fromSuccessResponse)

    def getExtensionArgs(self):
        """Get the fields to put in the simple registration namespace
        when adding them to an id_res message.

        @see: openid.extension
        """
        return self.data

    # Read-only dictionary interface (delegates to self.data, but
    # validates field names against the sreg specification).
    def get(self, field_name, default=None):
        """Like dict.get, except that it checks that the field name is
        defined by the simple registration specification"""
        checkFieldName(field_name)
        return self.data.get(field_name, default)

    def items(self):
        """All of the data values in this simple registration response
        """
        return self.data.items()

    def iteritems(self):
        return self.data.iteritems()

    def keys(self):
        return self.data.keys()

    def iterkeys(self):
        return self.data.iterkeys()

    def has_key(self, key):
        # Goes through __contains__ so the field name is validated too.
        return key in self

    def __contains__(self, field_name):
        checkFieldName(field_name)
        return field_name in self.data

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, field_name):
        checkFieldName(field_name)
        return self.data[field_name]

    def __nonzero__(self):
        # Python 2 truth-value hook: a response is truthy iff it has data.
        return bool(self.data)
| apache-2.0 |
ct-23/home-assistant | homeassistant/components/device_tracker/linksys_ap.py | 5 | 3104 | """
Support for Linksys Access Points.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.linksys_ap/
"""
import base64
import logging
import requests
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.device_tracker import DOMAIN, PLATFORM_SCHEMA
from homeassistant.const import (
CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL)
# Number of radio interfaces to poll (passed as unit=0..INTERFACES-1).
INTERFACES = 2
# Timeout handed to requests.get when talking to the access point.
DEFAULT_TIMEOUT = 10

REQUIREMENTS = ['beautifulsoup4==4.6.0']

_LOGGER = logging.getLogger(__name__)

# Extend the shared device_tracker schema with the AP connection settings.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Required(CONF_HOST): cv.string,
    vol.Required(CONF_PASSWORD): cv.string,
    vol.Required(CONF_USERNAME): cv.string,
    vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean,
})
def get_scanner(hass, config):
    """Validate the configuration and return a Linksys AP scanner.

    Returns None when the access point cannot be reached.
    """
    try:
        scanner = LinksysAPDeviceScanner(config[DOMAIN])
    except ConnectionError:
        return None
    return scanner
class LinksysAPDeviceScanner(object):
    """This class queries a Linksys Access Point."""

    def __init__(self, config):
        """Initialize the scanner and verify the AP is reachable.

        Raises ConnectionError when the status page does not answer 200.
        """
        self.host = config[CONF_HOST]
        self.username = config[CONF_USERNAME]
        self.password = config[CONF_PASSWORD]
        self.verify_ssl = config[CONF_VERIFY_SSL]
        self.last_results = []

        # Check if the access point is accessible.
        response = self._make_request()
        # BUGFIX(idiom): use '!=' instead of 'not ... == 200'.
        if response.status_code != 200:
            raise ConnectionError("Cannot connect to Linksys Access Point")

    def scan_devices(self):
        """Scan for new devices and return a list with found device IDs."""
        self._update_info()
        return self.last_results

    # pylint: disable=no-self-use
    def get_device_name(self, mac):
        """
        Return the name (if known) of the device.

        Linksys does not provide an API to get a name for a device,
        so we just return None
        """
        return None

    def _update_info(self):
        """Check for connected devices and cache them in last_results."""
        from bs4 import BeautifulSoup as BS

        _LOGGER.info("Checking Linksys AP")

        self.last_results = []
        for interface in range(INTERFACES):
            request = self._make_request(interface)
            # Second <td> of every 'section-row' holds the client MAC.
            self.last_results.extend(
                [x.find_all('td')[1].text
                 for x in BS(request.content, "html.parser")
                 .find_all(class_='section-row')]
            )

        return True

    def _make_request(self, unit=0):
        """Fetch the status page for one radio unit (credentials in cookies)."""
        # No, the '&&' is not a typo - this is expected by the web interface.
        login = base64.b64encode(bytes(self.username, 'utf8')).decode('ascii')
        pwd = base64.b64encode(bytes(self.password, 'utf8')).decode('ascii')
        url = 'https://{}/StatusClients.htm&&unit={}&vap=0'.format(
            self.host, unit)
        return requests.get(
            url, timeout=DEFAULT_TIMEOUT, verify=self.verify_ssl,
            cookies={'LoginName': login, 'LoginPWD': pwd})
| apache-2.0 |
cvsuser-chromium/chromium | ppapi/native_client/src/untrusted/pnacl_support_extension/pnacl_component_crx_gen.py | 48 | 13105 | #!/usr/bin/python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This script lays out the PNaCl translator files for a
normal Chrome installer, for one platform. Once run num-of-arches times,
the result can then be packed into a multi-CRX zip file.
This script depends on and pulls in the translator nexes and libraries
from the toolchain directory (so that must be downloaded first) and
it depends on the pnacl_irt_shim.
"""
import json
import logging
import optparse
import os
import platform
import re
import shutil
import sys
J = os.path.join
######################################################################
# Target arch and build arch junk to convert between all the
# silly conventions between SCons, Chrome and PNaCl.
# The version of the arch used by NaCl manifest files.
# This is based on the machine "building" this extension.
# We also used this to identify the arch-specific different versions of
# this extension.
def CanonicalArch(arch):
  """Translate any known arch alias to its canonical manifest name.

  Returns 'x86-64', 'x86-32', or 'arm'; None for unrecognized input.
  """
  x64_aliases = ('x86_64', 'x86-64', 'x64', 'amd64')
  arm_aliases = ('arm', 'armv7')
  x32_aliases = ('x86_32', 'x86-32', 'ia32', 'x86')
  if arch in x64_aliases:
    return 'x86-64'
  # TODO(jvoung): be more specific about the arm architecture version?
  if arch in arm_aliases:
    return 'arm'
  # 'i386', 'i686', etc. also count as 32-bit x86.
  if arch in x32_aliases or re.match('^i.86$', arch):
    return 'x86-32'
  return None
def GetBuildArch():
  """Return the canonical arch of the machine running this script."""
  return CanonicalArch(platform.machine())
# Canonical arch of the build machine (may be None on exotic hosts).
BUILD_ARCH = GetBuildArch()
# The complete set of arches this packager knows how to lay out.
ARCHES = ['x86-32', 'x86-64', 'arm']

def IsValidArch(arch):
  # True iff 'arch' is one of the supported canonical names.
  return arch in ARCHES
# The version of the arch used by configure and pnacl's build.sh.
def StandardArch(arch):
  """Map a canonical arch name to the configure/build.sh spelling.

  Raises KeyError for anything that is not a canonical arch name.
  """
  mapping = {
      'x86-32': 'i686',
      'x86-64': 'x86_64',
      'arm': 'armv7',
  }
  return mapping[arch]
######################################################################
def GetNaClRoot():
  """ Find the native_client path, relative to this script.
  This script is in ppapi/... and native_client is a sibling of ppapi.

  Walks upward from this file's location until a directory named
  'ppapi' is found, then returns its 'native_client' sibling.
  Raises Exception when no such directory exists.
  """
  script_file = os.path.abspath(__file__)
  def SearchForNaCl(cur_dir):
    if cur_dir.endswith('ppapi'):
      parent = os.path.dirname(cur_dir)
      sibling = os.path.join(parent, 'native_client')
      if not os.path.isdir(sibling):
        raise Exception('Could not find native_client relative to %s' %
                        script_file)
      return sibling
    # Detect when we've the root (linux is /, but windows is not...)
    # dirname() of the root returns the root itself, so compare.
    next_dir = os.path.dirname(cur_dir)
    if cur_dir == next_dir:
      raise Exception('Could not find native_client relative to %s' %
                      script_file)
    return SearchForNaCl(next_dir)

  return SearchForNaCl(script_file)

# Resolved once at import time; later path constants build on this.
NACL_ROOT = GetNaClRoot()
######################################################################
# Normalize the platform name to be the way SCons finds chrome binaries.
# This is based on the platform "building" the extension.
def GetBuildPlatform():
  """Return 'mac', 'linux', or 'windows' for the current host.

  Raises Exception for an unrecognized sys.platform value.
  """
  # BUGFIX(idiom): the local used to be named 'platform', shadowing the
  # imported 'platform' module; renamed to avoid the shadowing.
  if sys.platform == 'darwin':
    build_platform = 'mac'
  elif sys.platform.startswith('linux'):
    build_platform = 'linux'
  elif sys.platform in ('cygwin', 'win32'):
    build_platform = 'windows'
  else:
    raise Exception('Unknown platform: %s' % sys.platform)
  return build_platform

BUILD_PLATFORM = GetBuildPlatform()
def DetermineInstallerArches(target_arch):
  """Return the list of arches to package for 'target_arch'.

  Raises Exception for unknown arches, or for non-x86 targets on windows.
  """
  arch = CanonicalArch(target_arch)
  if not IsValidArch(arch):
    raise Exception('Unknown target_arch %s' % target_arch)
  # Everywhere but windows a single arch is packaged.
  if BUILD_PLATFORM != 'windows':
    return [arch]
  # On windows, we need x86-32 and x86-64 (assuming non-windows RT).
  if arch.startswith('x86'):
    return ['x86-32', 'x86-64']
  raise Exception('Unknown target_arch on windows w/ target_arch == %s' %
                  target_arch)
######################################################################
class PnaclPackaging(object):
  """Knows how to derive the component version and emit pnacl.json."""

  package_base = os.path.dirname(__file__)

  # File paths that are set from the command line.
  pnacl_template = None
  tool_revisions = None

  # Agreed-upon name for pnacl-specific info.
  pnacl_json = 'pnacl.json'

  @staticmethod
  def SetPnaclInfoTemplatePath(path):
    PnaclPackaging.pnacl_template = path

  @staticmethod
  def SetToolsRevisionPath(path):
    PnaclPackaging.tool_revisions = path

  @staticmethod
  def PnaclToolsRevision():
    """Parse PNACL_VERSION from the TOOL_REVISIONS file into a version quad.

    Raises Exception when the file has no PNACL_VERSION line.
    """
    with open(PnaclPackaging.tool_revisions, 'r') as f:
      for line in f.read().splitlines():
        if line.startswith('PNACL_VERSION'):
          _, version = line.split('=')
          # CWS happens to use version quads, so make it a quad too.
          # However, each component of the quad is limited to 64K max.
          # Try to handle a bit more.
          max_version = 2 ** 16
          version = int(version)
          # BUGFIX: use floor division so the components stay integers
          # under python 3 as well ('/' on ints yields a float there).
          version_more = version // max_version
          version = version % max_version
          return '0.1.%d.%d' % (version_more, version)
    raise Exception('Cannot find PNACL_VERSION in TOOL_REVISIONS file: %s' %
                    PnaclPackaging.tool_revisions)

  @staticmethod
  def GeneratePnaclInfo(target_dir, abi_version, arch):
    """Write the pnacl.json manifest for 'arch' into target_dir.

    (abi_version is currently unused here; only arch and the translator
    version are recorded.)
    """
    # A note on versions: pnacl_version is the version of translator built
    # by the NaCl repo, while abi_version is bumped when the NaCl sandbox
    # actually changes.
    pnacl_version = PnaclPackaging.PnaclToolsRevision()
    with open(PnaclPackaging.pnacl_template, 'r') as pnacl_template_fd:
      pnacl_template = json.load(pnacl_template_fd)
      out_name = J(target_dir, UseWhitelistedChars(PnaclPackaging.pnacl_json,
                                                   None))
      with open(out_name, 'w') as output_fd:
        pnacl_template['pnacl-arch'] = arch
        pnacl_template['pnacl-version'] = pnacl_version
        json.dump(pnacl_template, output_fd, sort_keys=True, indent=4)
######################################################################
class PnaclDirs(object):
  """Path bookkeeping for the translator tarball and packaging output."""

  toolchain_dir = J(NACL_ROOT, 'toolchain')
  output_dir = J(toolchain_dir, 'pnacl-package')

  @staticmethod
  def TranslatorRoot():
    return os.path.join(PnaclDirs.toolchain_dir, 'pnacl_translator')

  @staticmethod
  def LibDir(target_arch):
    return os.path.join(PnaclDirs.TranslatorRoot(), 'lib-%s' % target_arch)

  @staticmethod
  def SandboxedCompilerDir(target_arch):
    return os.path.join(PnaclDirs.toolchain_dir, 'pnacl_translator',
                        StandardArch(target_arch), 'bin')

  @staticmethod
  def SetOutputDir(d):
    PnaclDirs.output_dir = d

  @staticmethod
  def OutputDir():
    return PnaclDirs.output_dir

  @staticmethod
  def OutputAllDir(version_quad):
    return os.path.join(PnaclDirs.OutputDir(), version_quad)

  @staticmethod
  def OutputArchBase(arch):
    return '%s' % arch

  @staticmethod
  def OutputArchDir(arch):
    # Nest this in another directory so that the layout will be the same
    # as the "all"/universal version.
    base = PnaclDirs.OutputArchBase(arch)
    parent_dir = os.path.join(PnaclDirs.OutputDir(), base)
    return (parent_dir, os.path.join(parent_dir, base))
######################################################################
def StepBanner(short_desc, long_desc):
  """Log a high-visibility marker for a major packaging step."""
  logging.info("**** %s\t%s", short_desc, long_desc)
def Clean():
  """Delete the previous packaging output directory, if it exists."""
  out_dir = PnaclDirs.OutputDir()
  StepBanner('CLEAN', 'Cleaning out old packaging: %s' % out_dir)
  if not os.path.isdir(out_dir):
    logging.info('Clean skipped -- no previous output directory!')
    return
  shutil.rmtree(out_dir)
######################################################################
def UseWhitelistedChars(orig_basename, arch):
  """ Make the filename match the pattern expected by nacl_file_host.

  Currently, this assumes there is prefix "pnacl_public_" and
  that the allowed chars are in the set [a-zA-Z0-9_].

  When 'arch' is falsy the arch segment is omitted entirely.
  """
  if arch:
    target_basename = 'pnacl_public_%s_%s' % (arch, orig_basename)
  else:
    target_basename = 'pnacl_public_%s' % orig_basename
  result = re.sub(r'[^a-zA-Z0-9_]', '_', target_basename)
  # BUGFIX(idiom): pass the value as a logging argument (lazy formatting)
  # instead of pre-formatting the message with '%'.
  logging.info('UseWhitelistedChars using: %s', result)
  return result
def CopyFlattenDirsAndPrefix(src_dir, arch, dest_dir):
  """ Copy files from src_dir to dest_dir.

  When copying, also rename the files such that they match the white-listing
  pattern in chrome/browser/nacl_host/nacl_file_host.cc.
  """
  for root, _, files in os.walk(src_dir, followlinks=True):
    for basename in files:
      # Assume a flat directory.
      assert basename == os.path.basename(basename)
      src_path = os.path.join(root, basename)
      dest_path = os.path.join(dest_dir, UseWhitelistedChars(basename, arch))
      shutil.copy(src_path, dest_path)
def BuildArchForInstaller(version_quad, arch, lib_overrides):
  """ Build an architecture specific version for the chrome installer.
  """
  target_dir = PnaclDirs.OutputDir()
  StepBanner('BUILD INSTALLER',
             'Packaging for arch %s in %s' % (arch, target_dir))

  # Copy llc.nexe / ld.nexe and the native libraries, flattening the
  # directory layout and applying the whitelisted naming scheme.
  for src_dir in (PnaclDirs.SandboxedCompilerDir(arch),
                  PnaclDirs.LibDir(arch)):
    CopyFlattenDirsAndPrefix(src_dir, arch, target_dir)

  # Also copy files from the list of overrides.
  # This needs the arch tagged onto the name too, like the other files.
  for override in lib_overrides.get(arch, []):
    override_base = os.path.basename(override)
    target_name = UseWhitelistedChars(override_base, arch)
    shutil.copy(override, J(target_dir, target_name))
def BuildInstallerStyle(version_quad, lib_overrides, arches):
  """ Package the pnacl component for use within the chrome installer
  infrastructure.  These files need to be named in a special way
  so that white-listing of files is easy.
  """
  StepBanner("BUILD_ALL", "Packaging installer for version: %s" % version_quad)
  for arch in arches:
    BuildArchForInstaller(version_quad, arch, lib_overrides)
  # Generate pnacl info manifest.
  # Hack around the fact that there may be more than one arch, on Windows:
  # a single-arch list is collapsed to the bare arch string.
  arch_label = arches[0] if len(arches) == 1 else arches
  PnaclPackaging.GeneratePnaclInfo(PnaclDirs.OutputDir(), version_quad,
                                   arch_label)
######################################################################
def Main():
  """Parse command-line options and run the packaging; returns 0 on success."""
  usage = 'usage: %prog [options] version_arg'
  parser = optparse.OptionParser(usage)
  # We may want to accept a target directory to dump it in the usual
  # output directory (e.g., scons-out).
  parser.add_option('-c', '--clean', dest='clean',
                    action='store_true', default=False,
                    help='Clean out destination directory first.')
  parser.add_option('-d', '--dest', dest='dest',
                    help='The destination root for laying out the extension')
  parser.add_option('-L', '--lib_override',
                    dest='lib_overrides', action='append', default=[],
                    help='Specify path to a fresher native library ' +
                    'that overrides the tarball library with ' +
                    '(arch:libfile) tuple.')
  parser.add_option('-t', '--target_arch',
                    dest='target_arch', default=None,
                    help='Only generate the chrome installer version for arch')
  parser.add_option('--info_template_path',
                    dest='info_template_path', default=None,
                    help='Path of the info template file')
  parser.add_option('--tool_revisions_path', dest='tool_revisions_path',
                    default=None, help='Location of NaCl TOOL_REVISIONS file.')
  parser.add_option('-v', '--verbose', dest='verbose', default=False,
                    action='store_true',
                    help='Print verbose debug messages.')

  (options, args) = parser.parse_args()
  if options.verbose:
    logging.getLogger().setLevel(logging.DEBUG)
  else:
    logging.getLogger().setLevel(logging.ERROR)
  logging.info('pnacl_component_crx_gen w/ options %s and args %s\n'
               % (options, args))

  # Set destination directory before doing any cleaning, etc.
  if options.dest:
    PnaclDirs.SetOutputDir(options.dest)

  if options.clean:
    Clean()

  if options.info_template_path:
    PnaclPackaging.SetPnaclInfoTemplatePath(options.info_template_path)

  if options.tool_revisions_path:
    PnaclPackaging.SetToolsRevisionPath(options.tool_revisions_path)

  # Collect -L overrides into {canonical_arch: [lib_path, ...]}.
  lib_overrides = {}
  for o in options.lib_overrides:
    arch, override_lib = o.split(',')
    arch = CanonicalArch(arch)
    if not IsValidArch(arch):
      raise Exception('Unknown arch for -L: %s (from %s)' % (arch, o))
    if not os.path.isfile(override_lib):
      raise Exception('Override native lib not a file for -L: %s (from %s)' %
                      (override_lib, o))
    override_list = lib_overrides.get(arch, [])
    override_list.append(override_lib)
    lib_overrides[arch] = override_list

  # Exactly one positional argument: the sandbox ABI version.
  if len(args) != 1:
    parser.print_help()
    parser.error('Incorrect number of arguments')

  abi_version = int(args[0])
  arches = DetermineInstallerArches(options.target_arch)
  BuildInstallerStyle(abi_version, lib_overrides, arches)
  return 0


if __name__ == '__main__':
  sys.exit(Main())
| bsd-3-clause |
LucasGandel/ITK | Modules/Filtering/AnisotropicSmoothing/wrapping/test/GradientAnisotropicDiffusionImageFilterTest.py | 7 | 1550 | # ==========================================================================
#
# Copyright NumFOCUS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0.txt
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==========================================================================*/
import itk
from sys import argv

# Usage: <input> <output> <iterations> <time_step> <conductance>
itk.auto_progress(2)

# Float pipeline over 2-D images.
InputPixelType = itk.F
OutputPixelType = itk.F
InputImageType = itk.Image[InputPixelType, 2]
OutputImageType = itk.Image[OutputPixelType, 2]

reader = itk.ImageFileReader[InputImageType].New(FileName=argv[1])

# BUGFIX(idiom): renamed from 'filter', which shadowed the builtin filter().
diffusion_filter = itk.GradientAnisotropicDiffusionImageFilter[
    InputImageType, OutputImageType
].New(
    reader,
    NumberOfIterations=int(argv[3]),
    TimeStep=float(argv[4]),
    ConductanceParameter=float(argv[5]),
)
diffusion_filter.Update()

# Rescale the float result into unsigned char before writing.
WritePixelType = itk.UC
WriteImageType = itk.Image[WritePixelType, 2]
rescaler = itk.RescaleIntensityImageFilter[OutputImageType, WriteImageType].New(
    diffusion_filter, OutputMinimum=0, OutputMaximum=255
)
writer = itk.ImageFileWriter[WriteImageType].New(rescaler, FileName=argv[2])
writer.Update()
| apache-2.0 |
dhutty/ansible | v1/tests/TestPlayVarsFiles.py | 95 | 12363 | #!/usr/bin/env python
import os
import shutil
from tempfile import mkstemp
from tempfile import mkdtemp
from ansible.playbook.play import Play
import ansible
import unittest
from nose.plugins.skip import SkipTest
class FakeCallBacks(object):
    """Do-nothing stand-in for the playbook callbacks object Play invokes."""

    def __init__(self):
        pass

    def on_vars_prompt(self):
        pass

    def on_import_for_host(self, host, filename):
        pass
class FakeInventory(object):
    """Fake of ansible's Inventory: per-host variables in a plain dict."""

    def __init__(self):
        self.hosts = {}

    def basedir(self):
        return "."

    def src(self):
        return "fakeinventory"

    def get_variables(self, host, vault_password=None):
        # Unknown hosts simply have no variables.
        return self.hosts.get(host, {})
class FakePlayBook(object):
    """Fake of ansible's PlayBook carrying just the attributes Play reads."""

    def __init__(self):
        self.extra_vars = {}
        # All connection / privilege-escalation / tag knobs start unset.
        for attr in ('remote_user', 'remote_port', 'sudo', 'sudo_user',
                     'su', 'su_user', 'become', 'become_method',
                     'become_user', 'transport', 'only_tags', 'skip_tags',
                     'force_handlers'):
            setattr(self, attr, None)
        self.VARS_CACHE = {}
        self.SETUP_CACHE = {}
        self.inventory = FakeInventory()
        self.callbacks = FakeCallBacks()

        self.VARS_CACHE['localhost'] = {}
class TestMe(unittest.TestCase):
    """Tests for how Play loads vars_files: basic file loading, variable
    precedence between runs, and templated vars_files filenames."""

    ########################################
    # BASIC FILE LOADING BEHAVIOR TESTS
    ########################################

    def test_play_constructor(self):
        # __init__(self, playbook, ds, basedir, vault_password=None)
        playbook = FakePlayBook()
        ds = { "hosts": "localhost"}
        basedir = "."
        play = Play(playbook, ds, basedir)

    def test_vars_file(self):

        # make a vars file
        fd, temp_path = mkstemp()
        # BUGFIX: open in text mode ("w", not "wb") -- we write str data,
        # which also keeps these tests working under python 3.
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # create a play with a vars_file
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [temp_path]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)

        # make sure the variable was loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"

    def test_vars_file_nonlist_error(self):

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # create a play with a string for vars_files
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": temp_path}
        basedir = "."
        error_hit = False
        try:
            play = Play(playbook, ds, basedir)
        except Exception:
            # BUGFIX: was a bare "except:", which also swallowed
            # SystemExit / KeyboardInterrupt.
            error_hit = True
        os.remove(temp_path)

        assert error_hit == True, "no error was thrown when vars_files was not a list"

    def test_multiple_vars_files(self):

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # make a second vars file
        fd, temp_path2 = mkstemp()
        f = open(temp_path2, "w")
        f.write("baz: bang\n")
        f.close()

        # create a play with two vars_files
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [temp_path, temp_path2]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)
        os.remove(temp_path2)

        # make sure the variables were loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
        assert 'baz' in play.vars_file_vars, "vars_file2 was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['baz'] == 'bang', "baz was not set to bang in play.vars_file_vars"

    def test_vars_files_first_found(self):

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # get a random file path
        fd, temp_path2 = mkstemp()
        # make sure this file doesn't exist
        os.remove(temp_path2)

        # create a play
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [[temp_path2, temp_path]]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)

        # make sure the variable was loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"

    def test_vars_files_multiple_found(self):

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # make a second vars file
        fd, temp_path2 = mkstemp()
        f = open(temp_path2, "w")
        f.write("baz: bang\n")
        f.close()

        # create a play
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [[temp_path, temp_path2]]}
        basedir = "."
        play = Play(playbook, ds, basedir)
        os.remove(temp_path)
        os.remove(temp_path2)

        # make sure the variables were loaded
        assert 'foo' in play.vars_file_vars, "vars_file was not loaded into play.vars_file_vars"
        assert play.vars_file_vars['foo'] == 'bar', "foo was not set to bar in play.vars_file_vars"
        assert 'baz' not in play.vars_file_vars, "vars_file2 was loaded after vars_file1 was loaded"

    def test_vars_files_assert_all_found(self):

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # make a second vars file
        fd, temp_path2 = mkstemp()
        # make sure it doesn't exist
        os.remove(temp_path2)

        # create a play
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": [temp_path, temp_path2]}
        basedir = "."

        error_hit = False
        error_msg = None

        try:
            play = Play(playbook, ds, basedir)
        except ansible.errors.AnsibleError as e:
            # BUGFIX: modern "as" syntax instead of the old "except X, e"
            # form (a SyntaxError under python 3).
            error_hit = True
            error_msg = e

        os.remove(temp_path)
        assert error_hit == True, "no error was thrown for missing vars_file"

    ########################################
    # VARIABLE PRECEDENCE TESTS
    ########################################

    # On the first run vars_files are loaded into play.vars_file_vars by host == None
    #   * only files with vars from host==None will work here
    # On the secondary run(s), a host is given and the vars_files are loaded into VARS_CACHE
    #   * this only occurs if host is not None, filename2 has vars in the name, and filename3 does not

    # filename  -- the original string
    # filename2 -- filename templated with play vars
    # filename3 -- filename2 template with inject (hostvars + setup_cache + vars_cache)
    # filename4 -- path_dwim(filename3)

    def test_vars_files_for_host(self):

        # host != None
        # vars in filename2
        # no vars in filename3

        # make a vars file
        fd, temp_path = mkstemp()
        f = open(temp_path, "w")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars_files": ["{{ temp_path }}"]}
        basedir = "."
        playbook.VARS_CACHE['localhost']['temp_path'] = temp_path

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # the second run is started by calling update_vars_files
        play.update_vars_files(['localhost'])
        os.remove(temp_path)

        assert 'foo' in play.playbook.VARS_CACHE['localhost'], "vars_file vars were not loaded into vars_cache"
        assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', "foo does not equal bar"

    ########################################
    # COMPLEX FILENAME TEMPLATING TESTS
    ########################################

    def test_vars_files_two_vars_in_name(self):

        # self.vars_file_vars = ds['vars']
        # self.vars_file_vars += _get_vars() ... aka extra_vars

        # make a temp dir
        temp_dir = mkdtemp()

        # make a temp file
        fd, temp_file = mkstemp(dir=temp_dir)
        f = open(temp_file, "w")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        ds = { "hosts": "localhost",
               "vars": { "temp_dir": os.path.dirname(temp_file),
                         "temp_file": os.path.basename(temp_file) },
               "vars_files": ["{{ temp_dir + '/' + temp_file }}"]}
        basedir = "."

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # cleanup
        shutil.rmtree(temp_dir)

        assert 'foo' in play.vars_file_vars, "double var templated vars_files filename not loaded"

    def test_vars_files_two_vars_different_scope(self):

        #
        # Use a play var and an inventory var to create the filename
        #

        # self.playbook.inventory.get_variables(host)
        #   {'group_names': ['ungrouped'], 'inventory_hostname': 'localhost',
        #   'ansible_ssh_user': 'root', 'inventory_hostname_short': 'localhost'}

        # make a temp dir
        temp_dir = mkdtemp()

        # make a temp file
        fd, temp_file = mkstemp(dir=temp_dir)
        f = open(temp_file, "w")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
        ds = { "hosts": "localhost",
               "vars": { "temp_dir": os.path.dirname(temp_file)},
               "vars_files": ["{{ temp_dir + '/' + inventory_hostname }}"]}
        basedir = "."

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # do the host run
        play.update_vars_files(['localhost'])

        # cleanup
        shutil.rmtree(temp_dir)

        assert 'foo' not in play.vars_file_vars, \
            "mixed scope vars_file loaded into play vars"
        assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
            "differently scoped templated vars_files filename not loaded"
        assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
            "foo is not bar"

    def test_vars_files_two_vars_different_scope_first_found(self):

        #
        # Use a play var and an inventory var to create the filename
        #

        # make a temp dir
        temp_dir = mkdtemp()

        # make a temp file
        fd, temp_file = mkstemp(dir=temp_dir)
        f = open(temp_file, "w")
        f.write("foo: bar\n")
        f.close()

        # build play attributes
        playbook = FakePlayBook()
        playbook.inventory.hosts['localhost'] = {'inventory_hostname': os.path.basename(temp_file)}
        ds = { "hosts": "localhost",
               "vars": { "temp_dir": os.path.dirname(temp_file)},
               "vars_files": [["{{ temp_dir + '/' + inventory_hostname }}"]]}
        basedir = "."

        # create play and do first run
        play = Play(playbook, ds, basedir)

        # do the host run
        play.update_vars_files(['localhost'])

        # cleanup
        shutil.rmtree(temp_dir)

        assert 'foo' not in play.vars_file_vars, \
            "mixed scope vars_file loaded into play vars"
        assert 'foo' in play.playbook.VARS_CACHE['localhost'], \
            "differently scoped templated vars_files filename not loaded"
        assert play.playbook.VARS_CACHE['localhost']['foo'] == 'bar', \
            "foo is not bar"
| gpl-3.0 |
wbyne/QGIS | python/plugins/processing/algs/grass7/ext/i_cluster.py | 7 | 2236 | # -*- coding: utf-8 -*-
"""
***************************************************************************
i_cluster.py
------------
Date : March 2016
Copyright : (C) 2016 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
from __future__ import absolute_import
__author__ = 'Médéric Ribreux'
__date__ = 'March 2016'
__copyright__ = '(C) 2016, Médéric Ribreux'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from .i import regroupRasters, file2Output, moveFile, verifyRasterNum
from os import path
from ..Grass7Utils import Grass7Utils
def checkParameterValuesBeforeExecuting(alg):
    # i.cluster needs at least two input rasters to be meaningful.
    return verifyRasterNum(alg, 'input', 2)
def processCommand(alg):
    # GRASS writes the signature file under the mapset, keyed by basename
    # only, so we need to extract the basename of the signature file here
    # and move the real file back afterwards.
    signatureFile = alg.getOutputFromName('signaturefile')
    origSigFile = signatureFile.value
    shortSigFile = path.basename(origSigFile)
    alg.setOutputValue('signaturefile', shortSigFile)

    # Transform output files in string parameters
    signatureFile = file2Output(alg, 'signaturefile')
    reportFile = file2Output(alg, 'reportfile')

    # Regroup rasters
    group, subgroup = regroupRasters(alg, 'input', 'group', 'subgroup')

    # Re-add signature files
    alg.addOutput(signatureFile)
    alg.addOutput(reportFile)

    # Find Grass directory and move the generated signature file to the
    # location the user actually asked for.
    interSig = path.join(Grass7Utils.grassMapsetFolder(), 'PERMANENT', 'group', group, 'subgroup', subgroup, 'sig', shortSigFile)
    moveFile(alg, interSig, origSigFile)
    alg.setOutputValue('signaturefile', origSigFile)
| gpl-2.0 |
alexcepoi/pyscale | pyscale/utils/commands.py | 1 | 1148 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import subprocess as sbp
import shlex
import os
import pty
import gevent
from gevent.event import Event
from .gevsubprocess import GPopen
from ..lib.errors import PyscaleError
def command(cmd, exception=PyscaleError, sudo=False, shell=False):
    """Run *cmd* and return its stdout.

    :param cmd: command line (converted to str first).
    :param exception: exception class raised when the command produced
        stderr and no stdout; if falsy, stderr is printed instead.
    :param sudo: wrap the command in ``sudo -n bash -c '...'`` and give it
        a pseudo-terminal on stdin.
    :param shell: pass the command to the shell unsplit; otherwise it is
        tokenized with shlex.split.

    Fixes vs the previous version: the pty master/slave file descriptors
    are now closed after the child finishes (they were leaked), and the
    Python-2-only ``print err`` statement is replaced with the
    cross-version ``print(err)`` form.
    """
    # fix unicode stuff
    cmd = str(cmd)

    # parse args
    if sudo:
        # XXX: --session-command vs --command(-c)
        # session-command seems to be better but is only available on CentOS & Co.
        # cmd = "su -c '%s'" % cmd
        # NOTE(review): naive single-quote wrapping — a cmd containing "'"
        # breaks the quoting (shell-injection hazard for untrusted input).
        cmd = "sudo -n bash -c '%s'" % cmd

    if not shell:
        cmd = shlex.split(cmd)

    # execute
    master = slave = None
    try:
        if sudo:
            # give sudo/bash a pty on stdin
            master, slave = pty.openpty()
        out, err = GPopen(cmd, stdin=slave, stdout=sbp.PIPE,
                          stderr=sbp.PIPE, shell=shell).communicate()
    finally:
        # close pty fds (previously leaked on every sudo call)
        for fd in (master, slave):
            if fd is not None:
                os.close(fd)

    # handle errors: stderr with no stdout is treated as failure
    if not out and err:
        if exception:
            raise exception(err)
        else:
            print(err)
    return out
def execute(cmd, env=None):
    """Replace the current process with *cmd* (does not return on success).

    :param cmd: command line, tokenized with shlex.split.
    :param env: optional mapping of extra environment variables layered on
        top of ``os.environ``; ``None`` or empty keeps the inherited
        environment unchanged.

    Fix: the default was a shared mutable ``{}``; ``None`` is the safe
    sentinel and behaves identically (both are falsy under ``if env:``).
    """
    args = shlex.split(cmd)
    if env:
        environ = os.environ.copy()
        environ.update(env)
        os.execvpe(args[0], args, environ)
    else:
        os.execvp(args[0], args)
# main
if __name__ == '__main__':
    # print() call form works under both Python 2 and Python 3
    # (the old "print command(...)" statement is Python-2-only syntax)
    print(command('ls', sudo=True, shell=False))
| mit |
iModels/mbuild | mbuild/tests/test_gomc_conf_writer.py | 1 | 294149 | import pytest
import mbuild as mb
import mbuild.formats.gomc_conf_writer as gomc_control
from mbuild.formats.charmm_writer import Charmm
from mbuild.lattice import load_cif
from mbuild.tests.base_test import BaseTest
from mbuild.utils.io import get_fn, has_foyer
@pytest.mark.skipif(not has_foyer, reason="Foyer package not installed")
class TestGOMCControlFileWriter(BaseTest):
def test_dict_keys_to_list(
self,
):
dict = {"a": "1", "b": "2", "c": "3"}
keys = gomc_control.dict_keys_to_list(dict)
assert keys == ["a", "b", "c"]
def test_get_required_data(self):
value = gomc_control._get_required_data(description=False)
assert value == [
"charmm_object",
"ensemble_type",
"RunSteps",
"Temperature",
]
value = gomc_control._get_required_data(description=True)
assert gomc_control.dict_keys_to_list(value) == [
"charmm_object",
"ensemble_type",
"RunSteps",
"Temperature",
]
def test_get_all_possible_input_variable(self):
value = gomc_control._get_all_possible_input_variables(
description=False
)
assert value == [
"Restart",
"RestartCheckpoint",
"PRNG",
"ParaTypeCHARMM",
"ParaTypeMie",
"ParaTypeMARTINI",
"RcutCoulomb_box_0",
"RcutCoulomb_box_1",
"Pressure",
"Rcut",
"RcutLow",
"LRC",
"Exclude",
"Potential",
"Rswitch",
"ElectroStatic",
"Ewald",
"CachedFourier",
"Tolerance",
"Dielectric",
"PressureCalc",
"EqSteps",
"AdjSteps",
"VDWGeometricSigma",
"useConstantArea",
"FixVolBox0",
"ChemPot",
"Fugacity",
"CBMC_First",
"CBMC_Nth",
"CBMC_Ang",
"CBMC_Dih",
"OutputName",
"CoordinatesFreq",
"RestartFreq",
"CheckpointFreq",
"ConsoleFreq",
"BlockAverageFreq",
"HistogramFreq",
"DistName",
"HistName",
"RunNumber",
"RunLetter",
"SampleFreq",
"OutEnergy",
"OutPressure",
"OutMolNumber",
"OutDensity",
"OutVolume",
"OutSurfaceTension",
"FreeEnergyCalc",
"MoleculeType",
"InitialState",
"LambdaVDW",
"LambdaCoulomb",
"ScaleCoulomb",
"ScalePower",
"ScaleAlpha",
"MinSigma",
"DisFreq",
"RotFreq",
"IntraSwapFreq",
"SwapFreq",
"RegrowthFreq",
"CrankShaftFreq",
"VolFreq",
"MultiParticleFreq",
"IntraMEMC-1Freq",
"MEMC-1Freq",
"IntraMEMC-2Freq",
"MEMC-2Freq",
"IntraMEMC-3Freq",
"MEMC-3Freq",
"ExchangeVolumeDim",
"MEMC_DataInput",
]
value = gomc_control._get_all_possible_input_variables(description=True)
assert gomc_control.dict_keys_to_list(value) == [
"Restart",
"RestartCheckpoint",
"PRNG",
"ParaTypeCHARMM",
"ParaTypeMie",
"ParaTypeMARTINI",
"RcutCoulomb_box_0",
"RcutCoulomb_box_1",
"Pressure",
"Rcut",
"RcutLow",
"LRC",
"Exclude",
"Potential",
"Rswitch",
"ElectroStatic",
"Ewald",
"CachedFourier",
"Tolerance",
"Dielectric",
"PressureCalc",
"EqSteps",
"AdjSteps",
"VDWGeometricSigma",
"useConstantArea",
"FixVolBox0",
"ChemPot",
"Fugacity",
"CBMC_First",
"CBMC_Nth",
"CBMC_Ang",
"CBMC_Dih",
"OutputName",
"CoordinatesFreq",
"RestartFreq",
"CheckpointFreq",
"ConsoleFreq",
"BlockAverageFreq",
"HistogramFreq",
"DistName",
"HistName",
"RunNumber",
"RunLetter",
"SampleFreq",
"OutEnergy",
"OutPressure",
"OutMolNumber",
"OutDensity",
"OutVolume",
"OutSurfaceTension",
"FreeEnergyCalc",
"MoleculeType",
"InitialState",
"LambdaVDW",
"LambdaCoulomb",
"ScaleCoulomb",
"ScalePower",
"ScaleAlpha",
"MinSigma",
"DisFreq",
"RotFreq",
"IntraSwapFreq",
"SwapFreq",
"RegrowthFreq",
"CrankShaftFreq",
"VolFreq",
"MultiParticleFreq",
"IntraMEMC-1Freq",
"MEMC-1Freq",
"IntraMEMC-2Freq",
"MEMC-2Freq",
"IntraMEMC-3Freq",
"MEMC-3Freq",
"ExchangeVolumeDim",
"MEMC_DataInput",
]
def test_get_default_variables_dict(self):
value = gomc_control._get_default_variables_dict()
assert gomc_control.dict_keys_to_list(value) == [
"Restart",
"RestartCheckpoint",
"PRNG",
"ParaTypeCHARMM",
"ParaTypeMie",
"ParaTypeMARTINI",
"RcutCoulomb_box_0",
"RcutCoulomb_box_1",
"Pressure",
"Rcut",
"RcutLow",
"LRC",
"Exclude",
"coul_1_4_scaling",
"Potential",
"Rswitch",
"ElectroStatic",
"Ewald",
"CachedFourier",
"Tolerance",
"Dielectric",
"PressureCalc",
"EqSteps",
"AdjSteps",
"VDWGeometricSigma",
"useConstantArea",
"FixVolBox0",
"ChemPot",
"Fugacity",
"CBMC_First",
"CBMC_Nth",
"CBMC_Ang",
"CBMC_Dih",
"OutputName",
"CoordinatesFreq",
"RestartFreq",
"CheckpointFreq",
"ConsoleFreq",
"BlockAverageFreq",
"HistogramFreq",
"DistName",
"HistName",
"RunNumber",
"RunLetter",
"SampleFreq",
"OutEnergy",
"OutPressure",
"OutMolNumber",
"OutDensity",
"OutVolume",
"OutSurfaceTension",
"FreeEnergyCalc",
"MoleculeType",
"InitialState",
"LambdaVDW",
"LambdaCoulomb",
"ScaleCoulomb",
"ScalePower",
"ScaleAlpha",
"MinSigma",
"ExchangeVolumeDim",
"MEMC_DataInput",
"DisFreq",
"RotFreq",
"IntraSwapFreq",
"SwapFreq",
"RegrowthFreq",
"CrankShaftFreq",
"VolFreq",
"MultiParticleFreq",
"IntraMEMC-1Freq",
"MEMC-1Freq",
"IntraMEMC-2Freq",
"MEMC-2Freq",
"IntraMEMC-3Freq",
"MEMC-3Freq",
]
def test_print_ensemble_info(self):
try:
gomc_control.print_required_input(description=True)
gomc_control.print_required_input(description=False)
test_status = "PASSED"
except:
test_status = "FAILED"
assert test_status == "PASSED"
try:
gomc_control.print_valid_ensemble_input_variables(
"NVT", description=True
)
gomc_control.print_valid_ensemble_input_variables(
"NVT", description=False
)
test_status = "PASSED"
except:
test_status = "FAILED"
assert test_status == "PASSED"
try:
gomc_control.print_valid_ensemble_input_variables(
"NPT", description=True
)
gomc_control.print_valid_ensemble_input_variables(
"NPT", description=False
)
test_status = "PASSED"
except:
test_status = "FAILED"
assert test_status == "PASSED"
try:
gomc_control.print_valid_ensemble_input_variables(
"GEMC_NVT", description=True
)
gomc_control.print_valid_ensemble_input_variables(
"GEMC_NVT", description=False
)
test_status = "PASSED"
except:
test_status = "FAILED"
assert test_status == "PASSED"
try:
gomc_control.print_valid_ensemble_input_variables(
"GEMC_NPT", description=True
)
gomc_control.print_valid_ensemble_input_variables(
"GEMC_NPT", description=False
)
test_status = "PASSED"
except:
test_status = "FAILED"
assert test_status == "PASSED"
try:
gomc_control.print_valid_ensemble_input_variables(
"GCMC", description=True
)
gomc_control.print_valid_ensemble_input_variables(
"GCMC", description=False
)
test_status = "PASSED"
except:
test_status = "FAILED"
assert test_status == "PASSED"
def test_save_basic_NVT(self, ethane_gomc):
test_box_ethane_gomc = mb.fill_box(
compound=[ethane_gomc], n_compounds=[1], box=[1, 1, 1]
)
charmm = Charmm(
test_box_ethane_gomc,
"ethane",
ff_filename="ethane",
residues=[ethane_gomc.name],
forcefield_selection="oplsaa",
)
gomc_control.write_gomc_control_file(
charmm, "test_save_basic_NVT.conf", "NVT", 10, 300
)
with open("test_save_basic_NVT.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("Restart "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("PRNG "):
split_line = line.split()
assert split_line[1] == "RANDOM"
elif line.startswith("ParaTypeCHARMM "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("Parameters "):
split_line = line.split()
assert split_line[1] == "ethane.inp"
elif line.startswith("Coordinates "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "ethane.pdb"
elif line.startswith("Structure "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "ethane.psf"
elif line.startswith("Temperature "):
split_line = line.split()
assert split_line[1] == "300"
elif line.startswith("Potential "):
split_line = line.split()
assert split_line[1] == "VDW"
elif line.startswith("LRC "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("Rcut "):
split_line = line.split()
assert split_line[1] == "10"
elif line.startswith("RcutLow "):
split_line = line.split()
assert split_line[1] == "1"
elif line.startswith("VDWGeometricSigma "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("Exclude "):
split_line = line.split()
assert split_line[1] == "1-3"
elif line.startswith("Ewald "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("ElectroStatic "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("CachedFourier "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("Tolerance "):
split_line = line.split()
assert split_line[1] == "1e-05"
elif line.startswith("1-4scaling "):
split_line = line.split()
assert split_line[1] == "0.5"
elif line.startswith("PressureCalc "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("RunSteps "):
split_line = line.split()
assert split_line[1] == "10"
elif line.startswith("EqSteps "):
split_line = line.split()
assert split_line[1] == "1"
elif line.startswith("AdjSteps "):
split_line = line.split()
assert split_line[1] == "1"
elif line.startswith("DisFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("RotFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("IntraSwapFreq "):
split_line = line.split()
assert split_line[1] == "0.3"
elif line.startswith("SwapFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("RegrowthFreq "):
split_line = line.split()
assert split_line[1] == "0.3"
elif line.startswith("CrankShaftFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("VolFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MultiParticleFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("CellBasisVector1 "):
split_line = line.split()
assert split_line[1] == "0"
print("split_line[2] = " + str(split_line[2]))
assert split_line[2] == "10.0"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "10.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "0.0"
assert split_line[4] == "10.0"
elif line.startswith("CBMC_First "):
split_line = line.split()
assert split_line[1] == "12"
elif line.startswith("CBMC_Nth"):
split_line = line.split()
assert split_line[1] == "10"
elif line.startswith("CBMC_Ang "):
split_line = line.split()
assert split_line[1] == "50"
elif line.startswith("CBMC_Dih "):
split_line = line.split()
assert split_line[1] == "50"
elif line.startswith("OutputName "):
split_line = line.split()
assert split_line[1] == "Output_data"
elif line.startswith("RestartFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("CheckpointFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("CoordinatesFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("ConsoleFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("BlockAverageFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("HistogramFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "1"
elif line.startswith("DistName "):
split_line = line.split()
assert split_line[1] == "dis"
elif line.startswith("HistName "):
split_line = line.split()
assert split_line[1] == "his"
elif line.startswith("RunNumber "):
split_line = line.split()
assert split_line[1] == "1"
elif line.startswith("RunLetter "):
split_line = line.split()
assert split_line[1] == "a"
elif line.startswith("SampleFreq "):
split_line = line.split()
assert split_line[1] == "1"
elif line.startswith("OutEnergy "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "True"
elif line.startswith("OutPressure "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "True"
elif line.startswith("OutMolNumber "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "True"
elif line.startswith("OutDensity "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "True"
elif line.startswith("OutVolume "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "True"
elif line.startswith("OutSurfaceTension "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "False"
else:
pass
def test_save_basic_NPT(self, ethane_gomc):
test_box_ethane_gomc = mb.fill_box(
compound=[ethane_gomc], n_compounds=[1], box=[2, 2, 2]
)
charmm = Charmm(
test_box_ethane_gomc,
"ethane",
ff_filename="ethane",
residues=[ethane_gomc.name],
forcefield_selection="oplsaa",
)
gomc_control.write_gomc_control_file(
charmm, "test_save_basic_NPT.conf", "NPT", 1000, 500
)
with open("test_save_basic_NPT.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("Pressure "):
split_line = line.split()
assert split_line[1] == "1.01325"
elif line.startswith("Temperature "):
split_line = line.split()
assert split_line[1] == "500"
elif line.startswith("PressureCalc "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("RunSteps "):
split_line = line.split()
assert split_line[1] == "1000"
elif line.startswith("EqSteps "):
split_line = line.split()
assert split_line[1] == "100"
elif line.startswith("AdjSteps "):
split_line = line.split()
assert split_line[1] == "100"
elif line.startswith("DisFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("RotFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("IntraSwapFreq "):
split_line = line.split()
assert split_line[1] == "0.29"
elif line.startswith("SwapFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("RegrowthFreq "):
split_line = line.split()
assert split_line[1] == "0.3"
elif line.startswith("CrankShaftFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("VolFreq "):
split_line = line.split()
assert split_line[1] == "0.01"
elif line.startswith("MultiParticleFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("CellBasisVector1 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "20.0"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "20.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "0.0"
assert split_line[4] == "20.0"
elif line.startswith("RestartFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("CheckpointFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("CoordinatesFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("ConsoleFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("BlockAverageFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("HistogramFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "100"
elif line.startswith("SampleFreq "):
split_line = line.split()
assert split_line[1] == "100"
elif line.startswith("VDWGeometricSigma "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("useConstantArea "):
split_line = line.split()
assert split_line[1] == "False"
else:
pass
def test_save_basic_GCMC(self, ethane_gomc):
test_box_ethane_gomc = mb.fill_box(
compound=[ethane_gomc], n_compounds=[1], box=[2, 2, 2]
)
charmm = Charmm(
test_box_ethane_gomc,
"ethane_box_0",
structure_box_1=test_box_ethane_gomc,
filename_box_1="ethane_box_1",
ff_filename="ethane_FF",
residues=[ethane_gomc.name],
forcefield_selection="oplsaa",
)
gomc_control.write_gomc_control_file(
charmm,
"test_save_basic_GCMC.conf",
"GCMC",
100000,
500,
input_variables_dict={
"ChemPot": {"ETH": -4000},
"VDWGeometricSigma": True,
},
)
with open("test_save_basic_GCMC.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("Parameters "):
split_line = line.split()
assert split_line[1] == "ethane_FF.inp"
elif line.startswith("Coordinates 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "ethane_box_0.pdb"
elif line.startswith("Coordinates 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "ethane_box_1.pdb"
elif line.startswith("Structure 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "ethane_box_0.psf"
elif line.startswith("Structure 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "ethane_box_1.psf"
elif line.startswith("Temperature "):
split_line = line.split()
assert split_line[1] == "500"
elif line.startswith("ChemPot "):
split_line = line.split()
assert split_line[1] == "ETH"
assert split_line[2] == "-4000"
elif line.startswith("PressureCalc "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("RunSteps "):
split_line = line.split()
assert split_line[1] == "100000"
elif line.startswith("EqSteps "):
split_line = line.split()
assert split_line[1] == "10000"
elif line.startswith("AdjSteps "):
split_line = line.split()
assert split_line[1] == "1000"
elif line.startswith("DisFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("RotFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("IntraSwapFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("SwapFreq "):
split_line = line.split()
assert split_line[1] == "0.35"
elif line.startswith("RegrowthFreq "):
split_line = line.split()
assert split_line[1] == "0.15"
elif line.startswith("CrankShaftFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("VolFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MultiParticleFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("CellBasisVector1 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "20.0"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "20.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "0.0"
assert split_line[4] == "20.0"
elif line.startswith("CellBasisVector1 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "20.0"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "0.0"
assert split_line[3] == "20.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "0.0"
assert split_line[3] == "0.0"
assert split_line[4] == "20.0"
elif line.startswith("RestartFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("CheckpointFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("CoordinatesFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("ConsoleFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("BlockAverageFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("HistogramFreq "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "10000"
elif line.startswith("SampleFreq "):
split_line = line.split()
assert split_line[1] == "500"
elif line.startswith("VDWGeometricSigma "):
split_line = line.split()
assert split_line[1] == "True"
else:
pass
def test_save_basic_GEMC_NVT(self, ethane_gomc):
test_box_ethane_gomc = mb.fill_box(
compound=[ethane_gomc], n_compounds=[1], box=[2, 2, 2]
)
charmm = Charmm(
test_box_ethane_gomc,
"ethane_box_0",
structure_box_1=test_box_ethane_gomc,
filename_box_1="ethane_box_1",
ff_filename="ethane_FF",
residues=[ethane_gomc.name],
forcefield_selection="oplsaa",
)
gomc_control.write_gomc_control_file(
charmm, "test_save_basic_GEMC_NVT.conf", "GEMC_NVT", 1000000, 500
)
with open("test_save_basic_GEMC_NVT.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("DisFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("RotFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("IntraSwapFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("SwapFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("RegrowthFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("CrankShaftFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("VolFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MultiParticleFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
else:
pass
def test_save_basic_GEMC_NPT(self, ethane_gomc):
test_box_ethane_gomc = mb.fill_box(
compound=[ethane_gomc], n_compounds=[1], box=[2, 2, 2]
)
charmm = Charmm(
test_box_ethane_gomc,
"ethane_box_0",
structure_box_1=test_box_ethane_gomc,
filename_box_1="ethane_box_1",
ff_filename="ethane_FF",
residues=[ethane_gomc.name],
forcefield_selection="oplsaa",
)
gomc_control.write_gomc_control_file(
charmm,
"test_save_basic_GEMC_NPT.conf",
"GEMC_NPT",
1000000,
500,
input_variables_dict={
"Pressure": 10,
"useConstantArea": True,
"FixVolBox0": True,
},
)
with open("test_save_basic_GEMC_NPT.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("Pressure "):
split_line = line.split()
assert split_line[1] == "10"
elif line.startswith("DisFreq "):
split_line = line.split()
assert split_line[1] == "0.19"
elif line.startswith("RotFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("IntraSwapFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("SwapFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("RegrowthFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("CrankShaftFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("VolFreq "):
split_line = line.split()
assert split_line[1] == "0.01"
elif line.startswith("MultiParticleFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("useConstantArea "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("FixVolBox0 "):
split_line = line.split()
assert split_line[1] == "True"
else:
pass
def test_save_change_most_variable_NVT(self, ethane_gomc, ethanol_gomc):
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[4.0, 4.0, 4.0],
)
charmm = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
gomc_control.write_gomc_control_file(
charmm,
"test_save_change_most_variable_NVT.conf",
"NVT",
100000,
300,
input_variables_dict={
"Restart": True,
"PRNG": 123,
"ParaTypeCHARMM": True,
"ParaTypeMARTINI": False,
"ParaTypeMie": False,
"LRC": False,
"Rcut": 12,
"RcutLow": 8,
"Exclude": "1-4",
"Ewald": False,
"ElectroStatic": False,
"CachedFourier": True,
"RcutCoulomb_box_0": 14,
"PressureCalc": [False, 4],
"Tolerance": 0.01,
"DisFreq": 0.2,
"RotFreq": 0.2,
"IntraSwapFreq": 0.1,
"RegrowthFreq": 0.1,
"CrankShaftFreq": 0.2,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.05,
"IntraMEMC-2Freq": 0.05,
"IntraMEMC-3Freq": 0.05,
"CBMC_First": 55,
"CBMC_Nth": 66,
"CBMC_Ang": 33,
"CBMC_Dih": 22,
"OutputName": "test_out",
"RestartFreq": [False, 50],
"CheckpointFreq": [False, 50],
"CoordinatesFreq": [False, 50],
"ConsoleFreq": [False, 500],
"BlockAverageFreq": [False, 50],
"HistogramFreq": [False, 50],
"DistName": "dist",
"HistName": "hist",
"RunNumber": 4,
"RunLetter": "c",
"SampleFreq": 25,
"FreeEnergyCalc": [True, 50],
"MoleculeType": ["ETH", 1],
"InitialState": 3,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"OutEnergy": [False, False],
"OutPressure": [False, False],
"OutMolNumber": [False, False],
"OutDensity": [False, False],
"OutVolume": [False, False],
"OutSurfaceTension": [True, True],
},
)
with open("test_save_change_most_variable_NVT.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("Restart "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("PRNG "):
split_line = line.split()
assert split_line[1] == "INTSEED"
elif line.startswith("Random_Seed "):
split_line = line.split()
assert split_line[1] == "123"
elif line.startswith("ParaTypeCHARMM "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("Parameters "):
split_line = line.split()
assert split_line[1] == "ethane_ethanol.inp"
elif line.startswith("Coordinates "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "ethane_ethanol.pdb"
elif line.startswith("Structure "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "ethane_ethanol.psf"
elif line.startswith("Temperature "):
split_line = line.split()
assert split_line[1] == "300"
elif line.startswith("Potential "):
split_line = line.split()
assert split_line[1] == "VDW"
elif line.startswith("LRC "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("Rcut "):
split_line = line.split()
assert split_line[1] == "12"
elif line.startswith("RcutLow "):
split_line = line.split()
assert split_line[1] == "8"
elif line.startswith("Exclude "):
split_line = line.split()
assert split_line[1] == "1-4"
elif line.startswith("Ewald "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("ElectroStatic "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("CachedFourier "):
split_line = line.split()
assert split_line[1] == "True"
elif line.startswith("Tolerance "):
split_line = line.split()
assert split_line[1] == "0.01"
elif line.startswith("1-4scaling "):
split_line = line.split()
assert split_line[1] == "0.5"
elif line.startswith("RcutCoulomb 0 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "14"
elif line.startswith("PressureCalc "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "4"
elif line.startswith("RunSteps "):
split_line = line.split()
assert split_line[1] == "100000"
elif line.startswith("EqSteps "):
split_line = line.split()
assert split_line[1] == "10000"
elif line.startswith("AdjSteps "):
split_line = line.split()
assert split_line[1] == "1000"
elif line.startswith("DisFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("RotFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("IntraSwapFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("SwapFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("RegrowthFreq "):
split_line = line.split()
assert split_line[1] == "0.1"
elif line.startswith("CrankShaftFreq "):
split_line = line.split()
assert split_line[1] == "0.2"
elif line.startswith("VolFreq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("MultiParticleFreq "):
split_line = line.split()
assert split_line[1] == "0.05"
elif line.startswith("IntraMEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.05"
elif line.startswith("MEMC-1Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.05"
elif line.startswith("MEMC-2Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("IntraMEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.05"
elif line.startswith("MEMC-3Freq "):
split_line = line.split()
assert split_line[1] == "0.0"
elif line.startswith("CellBasisVector1 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "40.0"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "40.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 "):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "0.0"
assert split_line[3] == "0.0"
assert split_line[4] == "40.0"
elif line.startswith("FreeEnergyCalc "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "50"
elif line.startswith("MoleculeType "):
split_line = line.split()
assert split_line[1] == "ETH"
assert split_line[2] == "1"
elif line.startswith("InitialState "):
split_line = line.split()
assert split_line[1] == "3"
elif line.startswith("ScalePower "):
split_line = line.split()
assert split_line[1] == "2"
elif line.startswith("ScaleAlpha "):
split_line = line.split()
assert split_line[1] == "0.5"
elif line.startswith("MinSigma "):
split_line = line.split()
assert split_line[1] == "3"
elif line.startswith("ScaleCoulomb "):
split_line = line.split()
assert split_line[1] == "False"
elif line.startswith("# States "):
split_line = line.split()
assert split_line[2] == "0"
assert split_line[3] == "1"
assert split_line[4] == "2"
assert split_line[5] == "3"
elif line.startswith("LambdaVDW "):
split_line = line.split()
assert split_line[1] == "0.1"
assert split_line[2] == "0.2"
assert split_line[3] == "0.4"
assert split_line[4] == "1.0"
elif line.startswith("LambdaCoulomb "):
split_line = line.split()
assert split_line[1] == "0.1"
assert split_line[2] == "0.3"
assert split_line[3] == "0.8"
assert split_line[4] == "1.0"
elif line.startswith("CBMC_First "):
split_line = line.split()
assert split_line[1] == "55"
elif line.startswith("CBMC_Nth "):
split_line = line.split()
assert split_line[1] == "66"
elif line.startswith("CBMC_Ang "):
split_line = line.split()
assert split_line[1] == "33"
elif line.startswith("CBMC_Dih "):
split_line = line.split()
assert split_line[1] == "22"
elif line.startswith("OutputName "):
split_line = line.split()
assert split_line[1] == "test_out"
elif line.startswith("RestartFreq "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "50"
elif line.startswith("CheckpointFreq "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "50"
elif line.startswith("CoordinatesFreq "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "50"
elif line.startswith("ConsoleFreq "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "500"
elif line.startswith("BlockAverageFreq "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "50"
elif line.startswith("HistogramFreq "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "50"
elif line.startswith("DistName "):
split_line = line.split()
assert split_line[1] == "dist"
elif line.startswith("HistName "):
split_line = line.split()
assert split_line[1] == "hist"
elif line.startswith("RunNumber "):
split_line = line.split()
assert split_line[1] == "4"
elif line.startswith("RunLetter "):
split_line = line.split()
assert split_line[1] == "c"
elif line.startswith("SampleFreq "):
split_line = line.split()
assert split_line[1] == "25"
elif line.startswith("OutEnergy "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "False"
elif line.startswith("OutPressure "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "False"
elif line.startswith("OutMolNumber "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "False"
elif line.startswith("OutDensity "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "False"
elif line.startswith("OutVolume "):
split_line = line.split()
assert split_line[1] == "False"
assert split_line[2] == "False"
elif line.startswith("OutSurfaceTension "):
split_line = line.split()
assert split_line[1] == "True"
assert split_line[2] == "True"
else:
pass
def test_save_NVT_bad_lamda_value(self, ethane_gomc, ethanol_gomc):
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[1, 1, 1],
)
charmm = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
with pytest.raises(
ValueError,
match=r"ERROR: The last value in the LambdaCoulomb variable list must be a 1.0",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
100,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10],
"MoleculeType": ["ETH", 1],
"InitialState": 3,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 0.9],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The last value in the LambdaVDW variable list must be a 1.0",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
100,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10],
"MoleculeType": ["ETH", 1],
"InitialState": 3,
"LambdaVDW": [0.1, 0.2, 0.4, 0.9],
},
)
def test_save_NVT_bad_variables_part_1(self, ethane_gomc, ethanol_gomc):
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[1, 1, 1],
)
charmm = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Restart'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Restart": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RestartCheckpoint'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RestartCheckpoint": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['PRNG'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"PRNG": [1]},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ParaTypeCHARMM'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ParaTypeCHARMM": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ParaTypeMie'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ParaTypeMie": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ParaTypeMARTINI'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ParaTypeMARTINI": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RcutCoulomb_box_0'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RcutCoulomb_box_0": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['RcutCoulomb_box_1'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RcutCoulomb_box_1": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Pressure'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Pressure": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Rcut'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Rcut": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RcutLow'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RcutLow": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LRC'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"LRC": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Exclude'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Exclude": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Potential'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Potential": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Rswitch'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Rswitch": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ElectroStatic'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ElectroStatic": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Ewald'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Ewald": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CachedFourier'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CachedFourier": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Tolerance'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Tolerance": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Dielectric'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Dielectric": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['PressureCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"PressureCalc": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['EqSteps'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"EqSteps": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['EqSteps'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"EqSteps": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['useConstantArea'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NPT",
10,
300,
input_variables_dict={"useConstantArea": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ChemPot": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"Fugacity": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_First'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_First": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_Nth'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_Nth": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_Ang'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_Ang": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_Dih'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_Dih": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutputName'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutputName": 1},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CoordinatesFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CoordinatesFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RestartFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RestartFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CheckpointFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CheckpointFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ConsoleFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ConsoleFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['BlockAverageFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"BlockAverageFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['HistogramFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"HistogramFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['DistName'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"DistName": 1},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['HistName'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"HistName": 1},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RunNumber'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RunNumber": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RunLetter'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RunLetter": 1},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['SampleFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"SampleFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutEnergy'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutEnergy": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutPressure'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutPressure": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutMolNumber'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutMolNumber": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutDensity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutDensity": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutVolume'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutVolume": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutSurfaceTension'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"OutSurfaceTension": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": "s",
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", "s"],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [["ETH"], 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [{"ETH": "1"}, 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['InitialState'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": "s",
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaVDW'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": "s",
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": "s",
},
)
with pytest.raises(
ValueError,
match=r"ERROR: To utilize the free energy calculations all the "
r"following variables need to be set, and not equal to "
r"None: FreeEnergyCalc, MoleculeType, InitialState, LambdaVDW.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"FreeEnergyCalc": [True, 10000]},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ScaleCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ScaleCoulomb": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ScalePower'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ScalePower": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ScaleAlpha'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ScaleAlpha": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MinSigma'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"MinSigma": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ExchangeVolumeDim'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"ExchangeVolumeDim": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC_DataInput": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['DisFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"DisFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RotFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RotFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraSwapFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"IntraSwapFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['SwapFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"SwapFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RegrowthFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"RegrowthFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CrankShaftFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"CrankShaftFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['VolFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"VolFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MultiParticleFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"MultiParticleFreq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraMEMC-1Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"IntraMEMC-1Freq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC-1Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC-1Freq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraMEMC-2Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"IntraMEMC-2Freq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC-2Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC-2Freq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraMEMC-3Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"IntraMEMC-3Freq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC-3Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC-3Freq": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['XXXXXX'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"XXXXXX": "s"},
)
def test_save_NVT_bad_variables_part_2(self, ethane_gomc, ethanol_gomc):
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[1, 1, 1],
)
charmm = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Restart'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Restart": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RestartCheckpoint'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RestartCheckpoint": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['PRNG'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"PRNG": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ParaTypeCHARMM'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ParaTypeCHARMM": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ParaTypeMie'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ParaTypeMie": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ParaTypeMARTINI'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ParaTypeMARTINI": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RcutCoulomb_box_0'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RcutCoulomb_box_0": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['RcutCoulomb_box_1'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RcutCoulomb_box_1": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Pressure'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Pressure": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Rcut'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Rcut": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RcutLow'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RcutLow": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LRC'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"LRC": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Exclude'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Exclude": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Potential'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Potential": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Rswitch'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Rswitch": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ElectroStatic'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ElectroStatic": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Ewald'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Ewald": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CachedFourier'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CachedFourier": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Tolerance'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Tolerance": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Dielectric'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Dielectric": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['PressureCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"PressureCalc": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['EqSteps'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"EqSteps": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['AdjSteps'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"AdjSteps": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['useConstantArea'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"useConstantArea": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['FixVolBox0'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"FixVolBox0": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ChemPot": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"Fugacity": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_First'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_First": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_Nth'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_Nth": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_Ang'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_Ang": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CBMC_Dih'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CBMC_Dih": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutputName'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutputName": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CoordinatesFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CoordinatesFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RestartFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RestartFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CheckpointFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CheckpointFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ConsoleFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ConsoleFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['BlockAverageFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"BlockAverageFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['HistogramFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"HistogramFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['DistName'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"DistName": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['HistName'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"HistName": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RunNumber'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RunNumber": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RunLetter'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RunLetter": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['SampleFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"SampleFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutEnergy'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutEnergy": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutPressure'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutPressure": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutMolNumber'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutMolNumber": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutDensity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutDensity": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutVolume'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutVolume": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['OutSurfaceTension'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"OutSurfaceTension": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", []],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [["ETH"], 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [{"ETH": "1"}, 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['InitialState'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": [],
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaVDW'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: To utilize the free energy calculations all the "
r"following variables need to be set, and not equal to "
r"None: FreeEnergyCalc, MoleculeType, InitialState, LambdaVDW.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"NVT",
10,
300,
input_variables_dict={"FreeEnergyCalc": [True, 10000]},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ScaleCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ScaleCoulomb": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ScalePower'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ScalePower": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ScaleAlpha'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ScaleAlpha": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MinSigma'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"MinSigma": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ExchangeVolumeDim'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"ExchangeVolumeDim": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC_DataInput": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['DisFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"DisFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['DisFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"DisFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraSwapFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"IntraSwapFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraSwapFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"IntraSwapFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RegrowthFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"RegrowthFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['CrankShaftFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"CrankShaftFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['VolFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"VolFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MultiParticleFreq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"MultiParticleFreq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraMEMC-1Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"IntraMEMC-1Freq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC-1Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC-1Freq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraMEMC-2Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"IntraMEMC-2Freq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC-2Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC-2Freq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['IntraMEMC-3Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"IntraMEMC-3Freq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC-3Freq'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"MEMC-3Freq": []},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the correct input variables where not provided for "
r"the NVT ensemble. Please be sure to check that the keys in the "
r"input variables dictionary \(input_variables_dict\) is correct, and "
r"be aware that added spaces before or after the variable in any keys "
r"will also give this warning. The bad variable inputs ensemble "
r"inputs = \['XXXXXX'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_2.conf",
"NVT",
10,
300,
input_variables_dict={"XXXXXX": []},
)
def test_save_NVT_bad_variables_part_5(self, ethane_gomc, ethanol_gomc):
    """Verify PressureCalc input validation in write_gomc_control_file.

    Valid ``[bool, int]`` pairs must produce a control file for both the
    NPT and NVT ensembles, while a non-bool first entry or a non-int
    second entry must raise a ValueError naming 'PressureCalc'.
    """
    test_box_ethane_ethanol = mb.fill_box(
        compound=[ethane_gomc, ethanol_gomc],
        n_compounds=[1, 1],
        box=[1, 1, 1],
    )
    charmm = Charmm(
        test_box_ethane_ethanol,
        "ethane_ethanol",
        ff_filename="ethane_ethanol",
        residues=[ethane_gomc.name, ethanol_gomc.name],
        forcefield_selection="oplsaa",
    )

    # Valid [bool, int] settings: every (ensemble, value) pair must write
    # the control file without raising.
    valid_cases = [
        ("NPT", [True, 10000]),
        ("NPT", [False, 10000]),
        ("NVT", [True, 10000]),
        ("NVT", [False, 10000]),
    ]
    for ensemble, pressure_calc in valid_cases:
        try:
            value = gomc_control.write_gomc_control_file(
                charmm,
                "test_save_NVT_bad_variables_part_5.conf",
                ensemble,
                10,
                300,
                input_variables_dict={"PressureCalc": pressure_calc},
            )
        except Exception:  # any failure at all means this case failed
            value = "TEST_FAILED"
        assert value == "GOMC_CONTROL_FILE_WRITTEN"

    # Invalid settings: each must raise a ValueError whose message names
    # the offending 'PressureCalc' key.
    bad_value_error = (
        r"ERROR: The following input variables have "
        r"bad values \(check spelling and for empty spaces in the keys or that "
        r"the values are in the correct form with the acceptable values\)"
        r": \['PressureCalc'\]"
    )
    invalid_cases = [
        ("NPT", [1, 10000]),
        ("NVT", [1, 10000]),
        ("NVT", ["", 10000]),
        ("NVT", [["x"], 10000]),
        ("NVT", [{"s": 1}, 10000]),
        ("NVT", [True, 1.0]),
        ("NVT", [True, "x"]),
        ("NVT", [True, ["x"]]),
        ("NVT", [True, {"s": 1}]),
        ("NVT", [1, True]),
    ]
    for ensemble, pressure_calc in invalid_cases:
        with pytest.raises(ValueError, match=bad_value_error):
            gomc_control.write_gomc_control_file(
                charmm,
                "test_save_NVT_bad_variables_part_5.conf",
                ensemble,
                10,
                300,
                input_variables_dict={"PressureCalc": pressure_calc},
            )
def test_save_NVT_bad_variables_part_6(self, ethane_gomc, ethanol_gomc):
    """Verify OutEnergy input validation in write_gomc_control_file.

    Every ``[bool, bool]`` pair must write a control file for both the
    NPT and NVT ensembles, while a non-bool entry in either position
    must raise a ValueError naming 'OutEnergy'.
    """
    test_box_ethane_ethanol = mb.fill_box(
        compound=[ethane_gomc, ethanol_gomc],
        n_compounds=[1, 1],
        box=[1, 1, 1],
    )
    charmm = Charmm(
        test_box_ethane_ethanol,
        "ethane_ethanol",
        ff_filename="ethane_ethanol",
        residues=[ethane_gomc.name, ethanol_gomc.name],
        forcefield_selection="oplsaa",
    )

    # Valid [bool, bool] settings for both supported ensembles.
    valid_cases = [
        ("NPT", [True, True]),
        ("NPT", [False, True]),
        ("NPT", [False, False]),
        ("NVT", [True, True]),
        ("NVT", [False, True]),
        ("NVT", [False, False]),
    ]
    for ensemble, out_energy in valid_cases:
        try:
            # Always capture the return value so the assert below checks
            # THIS call rather than a stale result from a previous case.
            value = gomc_control.write_gomc_control_file(
                charmm,
                "test_save_NVT_bad_variables_part_6.conf",
                ensemble,
                10,
                300,
                input_variables_dict={"OutEnergy": out_energy},
            )
        except Exception:  # any failure at all means this case failed
            value = "TEST_FAILED"
        assert value == "GOMC_CONTROL_FILE_WRITTEN"

    # Invalid settings: a non-bool in either position must be rejected
    # with a ValueError whose message names the 'OutEnergy' key.
    bad_value_error = (
        r"ERROR: The following input variables have "
        r"bad values \(check spelling and for empty spaces in the keys or that "
        r"the values are in the correct form with the acceptable values\)"
        r": \['OutEnergy'\]"
    )
    invalid_values = [
        [1, True],
        ["", True],
        [["x"], True],
        [{"s": 1}, True],
        [True, 1.0],
        [True, "x"],
        [True, ["x"]],
        [True, {"s": 1}],
    ]
    for out_energy in invalid_values:
        with pytest.raises(ValueError, match=bad_value_error):
            gomc_control.write_gomc_control_file(
                charmm,
                "test_save_NVT_bad_variables_part_6.conf",
                "NVT",
                10,
                300,
                input_variables_dict={"OutEnergy": out_energy},
            )
def test_save_NVT_bad_variables_part_7(self, ethane_gomc, ethanol_gomc):
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[1, 1, 1],
)
charmm = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NPT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NPT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 0.9, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 0.8, 1.0],
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETH", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: To utilize the free energy calculations all the following "
r"variables need to be set, and not equal to None: FreeEnergyCalc, "
r"MoleculeType, InitialState, LambdaVDW.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: To utilize the free energy calculations all the following "
r"variables need to be set, and not equal to None: FreeEnergyCalc, "
r"MoleculeType, InitialState, LambdaVDW.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: To utilize the free energy calculations all the following "
r"variables need to be set, and not equal to None: FreeEnergyCalc, "
r"MoleculeType, InitialState, LambdaVDW.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"MoleculeType": ["ETO", 1],
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: To utilize the free energy calculations all the following "
r"variables need to be set, and not equal to None: FreeEnergyCalc, "
r"MoleculeType, InitialState, LambdaVDW.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
try:
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
# starting bad inputs for the Free engergy calcs side from not using all required variables
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [1, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": ["1", 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [["1"], 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [{"a": "1"}, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [False, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
# starting bad inputs for the Free engergy calcs side from not using all required variables
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 1.0],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, "1"],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, ["1"]],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, {"a": "1"}],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FreeEnergyCalc'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000, "s"],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
# start checking the MoleculeType variable for errors
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [1, 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [[1], 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": [{"a": "1"}, 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", "1"],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", ["1"]],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", {"a": "1"}],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MoleculeType'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETOa", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
# start checking the initial state variable
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['InitialState'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": "s",
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['InitialState'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": ["s"],
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['InitialState'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": {"a": "1"},
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['InitialState'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1.0,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
        # start checking the LambdaVDW variable
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaVDW'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": ["x", 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaVDW'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [[0.1], 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaVDW'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [{"a": "1"}, 0.2, 1.0],
"LambdaCoulomb": [0.1, 0.3, 1.0],
},
)
# start testing the LambdaCoulomb
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": ["x", 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [[0.1], 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['LambdaCoulomb'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 1.0],
"LambdaCoulomb": [{"a": "1"}, 0.3, 1.0],
},
)
# different LambdaVDW and LambdaCoulomb list lengths
with pytest.raises(
ValueError,
match=r"ERROR: The LambdaVDW and LambdaCoulomb list must be of equal length.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.2, 0.4, 1.0],
"LambdaCoulomb": [0.1, 0.3, 0.8, 1.0],
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The LambdaVDW and LambdaCoulomb list must be of equal length.",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_7.conf",
"NVT",
10,
300,
input_variables_dict={
"FreeEnergyCalc": [True, 10000],
"MoleculeType": ["ETO", 1],
"InitialState": 1,
"LambdaVDW": [0.1, 0.2, 0.4, 1.0],
"LambdaCoulomb": [0.3, 0.8, 1.0],
},
)
def test_save_NVT_bad_variables_part_8(self, ethane_gomc, ethanol_gomc):
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[4.0, 4.0, 4.0],
)
charmm = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol_box_0",
structure_box_1=test_box_ethane_ethanol,
filename_box_1="ethane_ethanol_box_1",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
test_box_ethane_ethanol = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[4.0, 4.0, 4.0],
)
charmm_NPT_NVT = Charmm(
test_box_ethane_ethanol,
"ethane_ethanol_box_0",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['RcutCoulomb_box_1'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={"RcutCoulomb_box_1": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['FixVolBox0'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={"FixVolBox0": "s"},
)
# test ExchangeVolumeDim for errors
with pytest.raises(
ValueError,
match=r"The MEMC_DataInput variable is equal to None, but at least one "
r"of the MEMC move ratios are all non-zero \(IntraMEMC_1Freq, "
r"MEMC_1Freq, IntraMEMC_2Freq, MEMC_2Freq, IntraMEMC_3Freq, "
r"and MEMC_3Freq\).",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={"MEMC-1Freq": 1},
)
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={"ExchangeVolumeDim": [1.0, 1.0, 1.0]},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={"ExchangeVolumeDim": [1, 1, 1]},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ExchangeVolumeDim'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={"ExchangeVolumeDim": ["s", 1.0, 1.0]},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ExchangeVolumeDim'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={"ExchangeVolumeDim": [1.0, [1.0], 1.0]},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ExchangeVolumeDim'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={
"ExchangeVolumeDim": [1.0, 1.0, {"a": 1.0}]
},
)
# testing failures and passes for MEMC_DataInput
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "O1"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C2", "C1"], "ETO", ["O1", "C1"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "O1"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["O1", "C1"], "ETO", ["C2", "C1"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1.0, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
["s", "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[[1], "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[{"a": "1"}, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETHa", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, 1, ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, [1], ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", [1, "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", [[1], "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", 1], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", [1]], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], 1, ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], [1], ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", [1, "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", [[1], "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", 1]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['MEMC_DataInput'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", [1]]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
        # test that the MEMC move ratios cannot be set without specifying the MEMC move parameters ("MEMC_DataInput")
with pytest.raises(
ValueError,
match=r"ERROR: The MEMC_DataInput variable is equal to None, but at least "
r"one of the MEMC move ratios are all non-zero "
r"\(IntraMEMC_1Freq, MEMC_1Freq, IntraMEMC_2Freq, MEMC_2Freq, "
r"IntraMEMC_3Freq, and MEMC_3Freq\).",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"IntraMEMC-1Freq": 0.20,
"MEMC-1Freq": 0.20,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.20,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
# test some GCMC variable errors with Chempot and fugacity
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"GCMC",
10,
300,
input_variables_dict={"ChemPot": "s"},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_1.conf",
"GCMC",
10,
300,
input_variables_dict={"Fugacity": "s"},
)
# testing the move frequency sum to 1 for all ensembles
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The sum of the Monte Carlo move ratios does not equal 1. "
r"Note: The sum that was manually entered may equal 1, but some "
r"moves may not be valid for the provided ensemble. The moves that "
r"are invalid for a given ensemble are set to zero. If the default "
r"moves are not being used, all the move frequencies which do not have "
r"default values of zero will need to be set manually so the sum equals "
r"\(DisFreq, RotFreq, IntraSwapFreq, SwapFreq, RegrowthFreq, "
r"CrankShaftFreq, and VolFreq\).",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.20,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.20,
},
)
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.1,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The sum of the Monte Carlo move ratios does not equal 1. "
r"Note: The sum that was manually entered may equal 1, but some "
r"moves may not be valid for the provided ensemble. The moves that "
r"are invalid for a given ensemble are set to zero. If the default "
r"moves are not being used, all the move frequencies which do not have "
r"default values of zero will need to be set manually so the sum equals "
r"\(DisFreq, RotFreq, IntraSwapFreq, SwapFreq, RegrowthFreq, "
r"CrankShaftFreq, and VolFreq\).",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GEMC_NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.1,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.20,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.20,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.20,
},
)
try:
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.1,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
"ChemPot": {"ETH": -4000, "ETO": 8000},
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The sum of the Monte Carlo move ratios does not equal 1. "
r"Note: The sum that was manually entered may equal 1, but some "
r"moves may not be valid for the provided ensemble. The moves that "
r"are invalid for a given ensemble are set to zero. If the default "
r"moves are not being used, all the move frequencies which do not have "
r"default values of zero will need to be set manually so the sum equals "
r"\(DisFreq, RotFreq, IntraSwapFreq, SwapFreq, RegrowthFreq, "
r"CrankShaftFreq, and VolFreq\).",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.05,
"RegrowthFreq": 0.05,
"CrankShaftFreq": 0.1,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.20,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.20,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.20,
"Fugacity": {"ETH": 0, "ETO": 1.0},
},
)
try:
value = gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.20,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The sum of the Monte Carlo move ratios does not equal 1. "
r"Note: The sum that was manually entered may equal 1, but some "
r"moves may not be valid for the provided ensemble. The moves that "
r"are invalid for a given ensemble are set to zero. If the default "
r"moves are not being used, all the move frequencies which do not have "
r"default values of zero will need to be set manually so the sum equals "
r"\(DisFreq, RotFreq, IntraSwapFreq, SwapFreq, RegrowthFreq, "
r"CrankShaftFreq, and VolFreq\).",
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.05,
"MultiParticleFreq": 0.05,
"IntraMEMC-1Freq": 0.20,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.19,
"MEMC-3Freq": 0.00,
},
)
try:
value = gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.20,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
with pytest.raises(
ValueError,
match=r"ERROR: The sum of the Monte Carlo move ratios does not equal 1. "
r"Note: The sum that was manually entered may equal 1, but some "
r"moves may not be valid for the provided ensemble. The moves that "
r"are invalid for a given ensemble are set to zero. If the default "
r"moves are not being used, all the move frequencies which do not have "
r"default values of zero will need to be set manually so the sum equals "
r"\(DisFreq, RotFreq, IntraSwapFreq, SwapFreq, RegrowthFreq, "
r"CrankShaftFreq, and VolFreq\).",
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.20,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.21,
"MEMC-3Freq": 0.00,
},
)
# test good values of Volume for NVT, and GCMC if set to zero
try:
value = gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.20,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.20,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"ChemPot": {"ETH": -4000, "ETO": -8000},
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.10,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.10,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.10,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
# test come MEMC with GCMC
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 1,
"Fugacity": {1: 0, "ETO": 1.0},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 1,
"Fugacity": {"ETH": -1, "ETO": 1.0},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 1,
"Fugacity": {"ETH": "1", "ETO": 1.0},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The MEMC_DataInput variable is not equal to None, "
r"but all the MEMC move ratios are zero \(IntraMEMC_1Freq, MEMC_1Freq, "
r"IntraMEMC_2Freq, MEMC_2Freq, IntraMEMC_3Freq, and MEMC_3Freq\).",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 1,
"Fugacity": {"ETH": 2, "ETO": 1.0},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"Fugacity": {"ETH": 0, "XXX": 1.0},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['Fugacity'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"Fugacity": {"XXX": 0, "ETO": 1.0},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"ChemPot": {1: -4000, "ETO": -8000},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"ChemPot": {"XXX": -4000, "ETO": -8000},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"ChemPot": {"ETH": -4000, "XXX": -8000},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"ChemPot": {"ETH": "40", "ETO": -8000},
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The following input variables have "
r"bad values \(check spelling and for empty spaces in the keys or that "
r"the values are in the correct form with the acceptable values\)"
r": \['ChemPot'\]",
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"DisFreq": 1,
"ChemPot": {"ETH": ["40"], "ETO": -8000},
},
)
# test bad values of Volume for NVT, and GCMC
with pytest.raises(
ValueError,
match=r"ERROR: The input variable VolFreq is non-zero \(0\). "
r'VolFreq must be zero \(0\) for the "NVT", "GEMC_NVT", '
r'and "GCMC" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.1,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: The input variable VolFreq is non-zero \(0\). "
r'VolFreq must be zero \(0\) for the "NVT", "GEMC_NVT", '
r'and "GCMC" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"ChemPot": {"ETH": -4000, "ETO": -8000},
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.1,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
# test bad values of MEMC for NVT, NPT
with pytest.raises(
ValueError,
match=r"ERROR: All the MC move input variables must be non-zero \(0\) "
r"for the SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq. "
r"The SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq need to be zero "
r'\(0\) for the "NVT" and "NPT" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the MC move input variables must be non-zero \(0\) "
r"for the SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq. "
r"The SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq need to be zero "
r'\(0\) for the "NVT" and "NPT" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the MC move input variables must be non-zero \(0\) "
r"for the SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq. "
r"The SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq need to be zero "
r'\(0\) for the "NVT" and "NPT" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NVT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.10,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the MC move input variables must be non-zero \(0\) "
r"for the SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq. "
r"The SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq need to be zero "
r'\(0\) for the "NVT" and "NPT" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the MC move input variables must be non-zero \(0\) "
r"for the SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq. "
r"The SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq need to be zero "
r'\(0\) for the "NVT" and "NPT" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
with pytest.raises(
ValueError,
match=r"ERROR: All the MC move input variables must be non-zero \(0\) "
r"for the SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq. "
r"The SwapFreq, MEMC_1Freq, MEMC_2Freq, and MEMC_3Freq need to be zero "
r'\(0\) for the "NVT" and "NPT" ensembles.',
):
gomc_control.write_gomc_control_file(
charmm_NPT_NVT,
"test_save_NVT_bad_variables_part_8.conf",
"NPT",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.10,
},
)
# test good values of MEMC with GCMC
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"ChemPot": {"ETH": -4000, "ETO": -8000},
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.10,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.10,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"ChemPot": {"ETH": -4000, "ETO": -8000},
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.10,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.00,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]]
],
"ChemPot": {"ETH": -4000, "ETO": -8000},
"DisFreq": 0.05,
"RotFreq": 0.05,
"IntraSwapFreq": 0.05,
"SwapFreq": 0.00,
"RegrowthFreq": 0.10,
"CrankShaftFreq": 0.05,
"VolFreq": 0.0,
"MultiParticleFreq": 0.1,
"IntraMEMC-1Freq": 0.10,
"MEMC-1Freq": 0.00,
"IntraMEMC-2Freq": 0.20,
"MEMC-2Freq": 0.00,
"IntraMEMC-3Freq": 0.20,
"MEMC-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
# try all case unspecific values
try:
value = gomc_control.write_gomc_control_file(
charmm,
"test_save_NVT_bad_variables_part_8.conf",
"GCMC",
10,
300,
input_variables_dict={
"MEMC_DataInput": [
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "C2"]],
[1, "ETH", ["C1", "C2"], "ETO", ["C1", "O1"]],
],
"ChEmPot": {"ETH": -4000, "ETO": -8000},
"DisFreQ": 0.05,
"RotFreq": 0.05,
"InTraSwapFreq": 0.05,
"SwaPFreq": 0.00,
"ReGrowthFreq": 0.10,
"crankshaftfreq": 0.05,
"VolFrEQ": 0.0,
"MULtiParticleFreq": 0.1,
"IntRAMEMC-1Freq": 0.10,
"MEMC-1FREQ": 0.00,
"IntrAMEMC-2Freq": 0.20,
"MEMC-2FReq": 0.00,
"intramemc-3Freq": 0.20,
"memc-3Freq": 0.10,
},
)
except:
value = "TEST_FAILED"
assert value == "GOMC_CONTROL_FILE_WRITTEN"
def test_charmm_object_has_proper_no_boxes_for_ensemble_part_9(
self, ethane_gomc, ethanol_gomc
):
test_box_ethane_ethanol_liq = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[4, 4],
box=[4.0, 4.0, 4.0],
)
test_box_ethane_ethanol_vap = mb.fill_box(
compound=[ethane_gomc, ethanol_gomc],
n_compounds=[1, 1],
box=[8.0, 8.0, 8.0],
)
charmm_one_box = Charmm(
test_box_ethane_ethanol_liq,
"ethane_ethanol_1_box_liq",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
charmm_two_boxes = Charmm(
test_box_ethane_ethanol_liq,
"ethane_ethanol_2_boxes_liq",
structure_box_1=test_box_ethane_ethanol_vap,
filename_box_1="ethane_box_2_boxes_vap",
ff_filename="ethane_ethanol",
residues=[ethane_gomc.name, ethanol_gomc.name],
forcefield_selection="oplsaa",
)
# test that it fails with the GEMC_NVT with only 1 box in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: The ensemble type selection of {} is using a Charmm "
r"object with one simulation boxes, and the {} ensemble only accepts "
r"two boxes \(box 0 and box 1\).".format("GEMC_NVT", "GEMC_NVT"),
):
gomc_control.write_gomc_control_file(
charmm_one_box,
"test_charmm_object_has_proper_no_boxes_for_ensemble_part_9_1_box",
"GEMC_NVT",
100,
300,
)
# test that it fails with the GEMC_NPT with only 1 box in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: The ensemble type selection of {} is using a Charmm "
r"object with one simulation boxes, and the {} ensemble only accepts "
r"two boxes \(box 0 and box 1\).".format("GEMC_NPT", "GEMC_NPT"),
):
gomc_control.write_gomc_control_file(
charmm_one_box,
"test_charmm_object_has_proper_no_boxes_for_ensemble_part_9_1_box",
"GEMC_NPT",
100,
300,
)
# test that it fails with the GCMC with only 1 box in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: The ensemble type selection of {} is using a Charmm "
r"object with one simulation boxes, and the {} ensemble only accepts "
r"two boxes \(box 0 and box 1\).".format("GCMC", "GCMC"),
):
gomc_control.write_gomc_control_file(
charmm_one_box,
"test_charmm_object_has_proper_no_boxes_for_ensemble_part_9_1_box",
"GCMC",
100,
300,
)
# test that it fails with the NVT with 2 boxes in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: The ensemble type selection of {} is using a Charmm "
r"object with two simulation boxes, and the {} ensemble only accepts "
r"one box \(box 0\).".format("NVT", "NVT"),
):
gomc_control.write_gomc_control_file(
charmm_two_boxes,
"test_charmm_object_has_proper_no_boxes_for_ensemble_part_9_1_box",
"NVT",
100,
300,
)
# test that it fails with the NPT with 2 boxes in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: The ensemble type selection of {} is using a Charmm "
r"object with two simulation boxes, and the {} ensemble only accepts "
r"one box \(box 0\).".format("NPT", "NPT"),
):
gomc_control.write_gomc_control_file(
charmm_two_boxes,
"test_charmm_object_has_proper_no_boxes_for_ensemble_part_9_1_box",
"NPT",
100,
300,
)
def test_save_non_othoganol_writer(self):
lattice_cif_ETV_triclinic = load_cif(
file_or_path=get_fn("ETV_triclinic.cif")
)
ETV_triclinic_1_cell = lattice_cif_ETV_triclinic.populate(x=1, y=1, z=1)
ETV_triclinic_1_cell.name = "ETV_1"
ETV_triclinic_3_cell = lattice_cif_ETV_triclinic.populate(x=3, y=3, z=3)
ETV_triclinic_3_cell.name = "ETV_3"
charmm = Charmm(
ETV_triclinic_1_cell,
"ETV_triclinic_1_cell_box_0",
structure_box_1=ETV_triclinic_3_cell,
filename_box_1="ETV_triclinic_3_cell_box_1",
ff_filename="ETV_triclinic_FF",
forcefield_selection={
ETV_triclinic_1_cell.name: get_fn(
"Charmm_writer_testing_only_zeolite.xml"
),
ETV_triclinic_3_cell.name: get_fn(
"Charmm_writer_testing_only_zeolite.xml"
),
},
residues=[ETV_triclinic_1_cell.name, ETV_triclinic_3_cell.name],
bead_to_atom_name_dict=None,
fix_residue=[ETV_triclinic_1_cell.name, ETV_triclinic_3_cell.name],
)
gomc_control.write_gomc_control_file(
charmm,
"test_save_non_othoganol_writer.conf",
"GEMC_NVT",
100000,
300,
)
with open("test_save_non_othoganol_writer.conf", "r") as fp:
out_gomc = fp.readlines()
for i, line in enumerate(out_gomc):
if line.startswith("CellBasisVector1 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "8.7503"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "-1.179131"
assert split_line[3] == "9.575585"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 0"):
split_line = line.split()
assert split_line[1] == "0"
assert split_line[2] == "-1.817231"
assert split_line[3] == "-3.027821"
assert split_line[4] == "9.645823"
if line.startswith("CellBasisVector1 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "26.2509"
assert split_line[3] == "0.0"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector2 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "-3.537381"
assert split_line[3] == "28.726735"
assert split_line[4] == "0.0"
elif line.startswith("CellBasisVector3 1"):
split_line = line.split()
assert split_line[1] == "1"
assert split_line[2] == "-5.451699"
assert split_line[3] == "-9.083469"
assert split_line[4] == "28.937455"
else:
pass
def test_box_vector_too_many_char(self):
methane = mb.Compound(name="MET")
methane_child_bead = mb.Compound(name="_CH4")
methane.add(methane_child_bead, inherit_periodicity=False)
methane_box_orth = mb.fill_box(
compound=methane, n_compounds=10, box=[1, 2, 3]
)
charmm_bad_box_0 = Charmm(
methane_box_orth,
"methane_box_0_orth",
ff_filename="methane_box_orth_bad_box_0_non_orth",
residues=[methane.name],
forcefield_selection="trappe-ua",
)
# set the vectors all too long
charmm_bad_box_0.box_0_vectors[0][0] = -0.45678901234561
charmm_bad_box_0.box_0_vectors[0][1] = -0.45678901234562
charmm_bad_box_0.box_0_vectors[0][2] = -0.45678901234563
charmm_bad_box_0.box_0_vectors[1][0] = -0.45678901234564
charmm_bad_box_0.box_0_vectors[1][1] = -0.45678901234565
charmm_bad_box_0.box_0_vectors[1][2] = -0.45678901234566
charmm_bad_box_0.box_0_vectors[2][0] = -0.45678901234567
charmm_bad_box_0.box_0_vectors[2][1] = -0.45678901234568
charmm_bad_box_0.box_0_vectors[2][2] = -0.45678901234569
charmm_bad_box_1 = Charmm(
methane_box_orth,
"methane_box_0_orth",
structure_box_1=methane_box_orth,
filename_box_1="methane_box_1_orth",
ff_filename="methane_box_orth_bad_box_1_non_orth",
residues=[methane.name],
forcefield_selection="trappe-ua",
)
# set the vectors all too long
charmm_bad_box_1.box_1_vectors[0][0] = -0.45678901234561
charmm_bad_box_1.box_1_vectors[0][1] = -0.45678901234562
charmm_bad_box_1.box_1_vectors[0][2] = -0.45678901234563
charmm_bad_box_1.box_1_vectors[1][0] = -0.45678901234564
charmm_bad_box_1.box_1_vectors[1][1] = -0.45678901234565
charmm_bad_box_1.box_1_vectors[1][2] = -0.45678901234566
charmm_bad_box_1.box_1_vectors[2][0] = -0.45678901234567
charmm_bad_box_1.box_1_vectors[2][1] = -0.45678901234568
charmm_bad_box_1.box_1_vectors[2][2] = -0.45678901234569
# test that it fails with the GEMC_NVT with only 1 box in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: At lease one of the individual box {} vectors are too large "
"or greater than {} characters."
"".format(0, 16),
):
gomc_control.write_gomc_control_file(
charmm_bad_box_0,
"test_box_vector_too_many_char_box_0",
"NVT",
100,
300,
)
# test that it fails with the GEMC_NPT with only 1 box in the Charmm object
with pytest.raises(
ValueError,
match=r"ERROR: At lease one of the individual box {} vectors are too large "
"or greater than {} characters."
"".format(1, 16),
):
gomc_control.write_gomc_control_file(
charmm_bad_box_1,
"test_box_vector_too_many_char_box_1",
"GCMC",
100,
300,
)
| mit |
Ban3/Limnoria | plugins/MessageParser/plugin.py | 3 | 18794 | ###
# Copyright (c) 2010, Daniel Folkinshteyn
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
import supybot.utils as utils
from supybot.commands import *
import supybot.utils.minisix as minisix
import supybot.plugins as plugins
import supybot.ircutils as ircutils
import supybot.callbacks as callbacks
import supybot.conf as conf
import supybot.ircdb as ircdb
import re
import os
import sys
import time
try:
from supybot.i18n import PluginInternationalization
from supybot.i18n import internationalizeDocstring
_ = PluginInternationalization('MessageParser')
except:
# This are useless functions that's allow to run the plugin on a bot
# without the i18n plugin
_ = lambda x:x
internationalizeDocstring = lambda x:x
#try:
#import sqlite
#except ImportError:
#raise callbacks.Error, 'You need to have PySQLite installed to use this ' \
#'plugin. Download it at ' \
#'<http://code.google.com/p/pysqlite/>'
import sqlite3
# these are needed cuz we are overriding getdb
import threading
import supybot.world as world
import supybot.log as log
class MessageParser(callbacks.Plugin, plugins.ChannelDBHandler):
    """This plugin can set regexp triggers to activate the bot.
    Use 'add' command to add regexp trigger, 'remove' to remove."""
    threaded = True
    def __init__(self, irc):
        # Initialize both parents: Plugin wires up the command framework,
        # ChannelDBHandler supplies the per-channel database cache.
        callbacks.Plugin.__init__(self, irc)
        plugins.ChannelDBHandler.__init__(self)
    def makeDb(self, filename):
        """Create the database and connect to it."""
        if os.path.exists(filename):
            db = sqlite3.connect(filename)
            if minisix.PY2:
                # On Python 2 return bytestrings instead of unicode objects.
                db.text_factory = str
            return db
        # First use: create the schema before handing out the connection.
        db = sqlite3.connect(filename)
        if minisix.PY2:
            db.text_factory = str
        cursor = db.cursor()
        cursor.execute("""CREATE TABLE triggers (
                id INTEGER PRIMARY KEY,
                regexp TEXT UNIQUE ON CONFLICT REPLACE,
                added_by TEXT,
                added_at TIMESTAMP,
                usage_count INTEGER,
                action TEXT,
                locked BOOLEAN
                )""")
        db.commit()
        return db
    # override this because sqlite3 doesn't have autocommit
    # use isolation_level instead.
    def getDb(self, channel):
        """Use this to get a database for a specific channel."""
        currentThread = threading.currentThread()
        # Only cache connections created on the main thread; sqlite3
        # connections cannot safely be shared across threads, so worker
        # threads get a fresh (uncached) connection instead.
        if channel not in self.dbCache and currentThread == world.mainThread:
            self.dbCache[channel] = self.makeDb(self.makeFilename(channel))
        if currentThread != world.mainThread:
            db = self.makeDb(self.makeFilename(channel))
        else:
            db = self.dbCache[channel]
        # isolation_level=None puts sqlite3 in autocommit mode.
        db.isolation_level = None
        return db
    def _updateRank(self, channel, regexp):
        """Increment the usage counter of a trigger, if rank keeping is on."""
        subfolder = None if channel == 'global' else channel
        if self.registryValue('keepRankInfo', subfolder):
            db = self.getDb(channel)
            cursor = db.cursor()
            cursor.execute("""SELECT usage_count
                              FROM triggers
                              WHERE regexp=?""", (regexp,))
            old_count = cursor.fetchall()[0][0]
            cursor.execute("UPDATE triggers SET usage_count=? WHERE regexp=?", (old_count + 1, regexp,))
            db.commit()
    def _runCommandFunction(self, irc, msg, command):
        """Run a command from message, as if command was sent over IRC."""
        tokens = callbacks.tokenize(command)
        try:
            self.Proxy(irc.irc, msg, tokens)
        except Exception as e:
            # Log but never propagate: a broken trigger action must not
            # crash message processing.
            log.exception('Uncaught exception in function called by MessageParser:')
    def _checkManageCapabilities(self, irc, msg, channel):
        """Check if the user has any of the required capabilities to manage
        the regexp database."""
        capabilities = self.registryValue('requireManageCapability')
        if capabilities:
            # Semicolon-separated list; any single match grants access.
            for capability in re.split(r'\s*;\s*', capabilities):
                if capability.startswith('channel,'):
                    # 'channel,<cap>' means the capability is scoped to
                    # the current channel (unless we are in global scope).
                    capability = capability[8:]
                    if channel != 'global':
                        capability = ircdb.makeChannelCapability(channel, capability)
                if capability and ircdb.checkCapability(msg.prefix, capability):
                    #print "has capability:", capability
                    return True
            return False
        else:
            # No capability configured: everyone may manage triggers.
            return True
    def do_privmsg_notice(self, irc, msg):
        """Match all channel + global triggers against msg and run actions."""
        channel = msg.args[0]
        if not irc.isChannel(channel):
            return
        if self.registryValue('enable', channel):
            actions = []
            results = []
            # Collect triggers from both the channel DB and the global DB.
            for channel in set(map(plugins.getChannel, (channel, 'global'))):
                db = self.getDb(channel)
                cursor = db.cursor()
                cursor.execute("SELECT regexp, action FROM triggers")
                # Fetch results and prepend channel name or 'global'. This
                # prevents duplicating the following lines.
                results.extend([(channel,)+x for x in cursor.fetchall()])
            if len(results) == 0:
                return
            max_triggers = self.registryValue('maxTriggers', channel)
            for (channel, regexp, action) in results:
                for match in re.finditer(regexp, msg.args[1]):
                    if match is not None:
                        thisaction = action
                        self._updateRank(channel, regexp)
                        # Interpolate $1, $2, ... with the match groups.
                        for (i, j) in enumerate(match.groups()):
                            if match.group(i+1):
                                thisaction = re.sub(r'\$' + str(i+1), match.group(i+1), thisaction)
                        actions.append(thisaction)
                        # 0 means unlimited; otherwise stop at the cap.
                        if max_triggers != 0 and max_triggers == len(actions):
                            break
                if max_triggers != 0 and max_triggers == len(actions):
                    break
            for action in actions:
                self._runCommandFunction(irc, msg, action)
    def doPrivmsg(self, irc, msg):
        """PRIVMSG hook: only fire triggers on non-addressed messages."""
        if not callbacks.addressed(irc.nick, msg): #message is not direct command
            self.do_privmsg_notice(irc, msg)
    def doNotice(self, irc, msg):
        """NOTICE hook: fire triggers only if enabled for notices."""
        if self.registryValue('enableForNotices', msg.args[0]):
            self.do_privmsg_notice(irc, msg)
    @internationalizeDocstring
    def add(self, irc, msg, args, channel, regexp, action):
        """[<channel>|global] <regexp> <action>
        Associates <regexp> with <action>. <channel> is only
        necessary if the message isn't sent on the channel
        itself. Action is echoed upon regexp match, with variables $1, $2,
        etc. being interpolated from the regexp match groups."""
        if not self._checkManageCapabilities(irc, msg, channel):
            capabilities = self.registryValue('requireManageCapability')
            irc.errorNoCapability(capabilities, Raise=True)
        db = self.getDb(channel)
        cursor = db.cursor()
        cursor.execute("SELECT id, usage_count, locked FROM triggers WHERE regexp=?", (regexp,))
        results = cursor.fetchall()
        if len(results) != 0:
            # Existing trigger: keep its usage count; honor its lock flag.
            (id, usage_count, locked) = list(map(int, results[0]))
        else:
            locked = 0
            usage_count = 0
        if not locked:
            try:
                # Validate the regexp before storing it.
                re.compile(regexp)
            except Exception as e:
                irc.error(_('Invalid python regexp: %s') % (e,))
                return
            if ircdb.users.hasUser(msg.prefix):
                name = ircdb.users.getUser(msg.prefix).name
            else:
                name = msg.nick
            # UNIQUE ON CONFLICT REPLACE in the schema makes this an upsert.
            cursor.execute("""INSERT INTO triggers VALUES
                              (NULL, ?, ?, ?, ?, ?, ?)""",
                           (regexp, name, int(time.time()), usage_count, action, locked,))
            db.commit()
            irc.replySuccess()
        else:
            irc.error(_('That trigger is locked.'))
            return
    add = wrap(add, ['channelOrGlobal', 'something', 'something'])
    @internationalizeDocstring
    def remove(self, irc, msg, args, channel, optlist, regexp):
        """[<channel>|global] [--id] <regexp>]
        Removes the trigger for <regexp> from the triggers database.
        <channel> is only necessary if
        the message isn't sent in the channel itself.
        If option --id specified, will retrieve by regexp id, not content.
        """
        if not self._checkManageCapabilities(irc, msg, channel):
            capabilities = self.registryValue('requireManageCapability')
            irc.errorNoCapability(capabilities, Raise=True)
        db = self.getDb(channel)
        cursor = db.cursor()
        target = 'regexp'
        for (option, arg) in optlist:
            if option == 'id':
                target = 'id'
        # target is restricted to 'regexp'/'id' above, so the string
        # interpolation here cannot inject arbitrary SQL.
        sql = "SELECT id, locked FROM triggers WHERE %s=?" % (target,)
        cursor.execute(sql, (regexp,))
        results = cursor.fetchall()
        if len(results) != 0:
            (id, locked) = list(map(int, results[0]))
        else:
            irc.error(_('There is no such regexp trigger.'))
            return
        if locked:
            irc.error(_('This regexp trigger is locked.'))
            return
        cursor.execute("""DELETE FROM triggers WHERE id=?""", (id,))
        db.commit()
        irc.replySuccess()
    remove = wrap(remove, ['channelOrGlobal',
                           getopts({'id': '',}),
                           'something'])
    @internationalizeDocstring
    def lock(self, irc, msg, args, channel, regexp):
        """[<channel>|global] <regexp>
        Locks the <regexp> so that it cannot be
        removed or overwritten to. <channel> is only necessary if the message isn't
        sent in the channel itself.
        """
        if not self._checkManageCapabilities(irc, msg, channel):
            capabilities = self.registryValue('requireManageCapability')
            irc.errorNoCapability(capabilities, Raise=True)
        db = self.getDb(channel)
        cursor = db.cursor()
        cursor.execute("SELECT id FROM triggers WHERE regexp=?", (regexp,))
        results = cursor.fetchall()
        if len(results) == 0:
            irc.error(_('There is no such regexp trigger.'))
            return
        cursor.execute("UPDATE triggers SET locked=1 WHERE regexp=?", (regexp,))
        db.commit()
        irc.replySuccess()
    lock = wrap(lock, ['channelOrGlobal', 'text'])
    @internationalizeDocstring
    def unlock(self, irc, msg, args, channel, regexp):
        """[<channel>|global] <regexp>
        Unlocks the entry associated with <regexp> so that it can be
        removed or overwritten. <channel> is only necessary if the message isn't
        sent in the channel itself.
        """
        if not self._checkManageCapabilities(irc, msg, channel):
            capabilities = self.registryValue('requireManageCapability')
            irc.errorNoCapability(capabilities, Raise=True)
        db = self.getDb(channel)
        cursor = db.cursor()
        cursor.execute("SELECT id FROM triggers WHERE regexp=?", (regexp,))
        results = cursor.fetchall()
        if len(results) == 0:
            irc.error(_('There is no such regexp trigger.'))
            return
        cursor.execute("UPDATE triggers SET locked=0 WHERE regexp=?", (regexp,))
        db.commit()
        irc.replySuccess()
    unlock = wrap(unlock, ['channelOrGlobal', 'text'])
    @internationalizeDocstring
    def show(self, irc, msg, args, channel, optlist, regexp):
        """[<channel>|global] [--id] <regexp>
        Looks up the value of <regexp> in the triggers database.
        <channel> is only necessary if the message isn't sent in the channel
        itself.
        If option --id specified, will retrieve by regexp id, not content.
        """
        db = self.getDb(channel)
        cursor = db.cursor()
        target = 'regexp'
        for (option, arg) in optlist:
            if option == 'id':
                target = 'id'
        # target is restricted to 'regexp'/'id' above (no SQL injection).
        sql = "SELECT regexp, action FROM triggers WHERE %s=?" % (target,)
        cursor.execute(sql, (regexp,))
        results = cursor.fetchall()
        if len(results) != 0:
            (regexp, action) = results[0]
        else:
            irc.error(_('There is no such regexp trigger.'))
            return
        irc.reply("The action for regexp trigger \"%s\" is \"%s\"" % (regexp, action))
    show = wrap(show, ['channelOrGlobal',
                       getopts({'id': '',}),
                       'something'])
    @internationalizeDocstring
    def info(self, irc, msg, args, channel, optlist, regexp):
        """[<channel>|global] [--id] <regexp>
        Display information about <regexp> in the triggers database.
        <channel> is only necessary if the message isn't sent in the channel
        itself.
        If option --id specified, will retrieve by regexp id, not content.
        """
        db = self.getDb(channel)
        cursor = db.cursor()
        target = 'regexp'
        for (option, arg) in optlist:
            if option == 'id':
                target = 'id'
        # target is restricted to 'regexp'/'id' above (no SQL injection).
        sql = "SELECT * FROM triggers WHERE %s=?" % (target,)
        cursor.execute(sql, (regexp,))
        results = cursor.fetchall()
        if len(results) != 0:
            # Column order matches the CREATE TABLE statement in makeDb.
            (id, regexp, added_by, added_at, usage_count,
                action, locked) = results[0]
        else:
            irc.error(_('There is no such regexp trigger.'))
            return
        irc.reply(_("The regexp id is %d, regexp is \"%s\", and action is"
                    " \"%s\". It was added by user %s on %s, has been "
                    "triggered %d times, and is %s.") % (id,
                        regexp,
                        action,
                        added_by,
                        time.strftime(conf.supybot.reply.format.time(),
                                      time.localtime(int(added_at))),
                        usage_count,
                        locked and _("locked") or _("not locked"),))
    info = wrap(info, ['channelOrGlobal',
                       getopts({'id': '',}),
                       'something'])
    @internationalizeDocstring
    def list(self, irc, msg, args, channel):
        """[<channel>|global]
        Lists regexps present in the triggers database.
        <channel> is only necessary if the message isn't sent in the channel
        itself. Regexp ID listed in parentheses.
        """
        db = self.getDb(channel)
        cursor = db.cursor()
        cursor.execute("SELECT regexp, id FROM triggers ORDER BY id")
        results = cursor.fetchall()
        if len(results) != 0:
            regexps = results
        else:
            irc.reply(_('There are no regexp triggers in the database.'))
            return
        s = [ "%s: %s" % (ircutils.bold('#'+str(regexp[1])), regexp[0]) for regexp in regexps ]
        separator = self.registryValue('listSeparator', channel)
        irc.reply(separator.join(s))
    list = wrap(list, ['channelOrGlobal'])
    @internationalizeDocstring
    def rank(self, irc, msg, args, channel):
        """[<channel>|global]
        Returns a list of top-ranked regexps, sorted by usage count
        (rank). The number of regexps returned is set by the
        rankListLength registry value. <channel> is only necessary if the
        message isn't sent in the channel itself.
        """
        numregexps = self.registryValue('rankListLength', channel)
        db = self.getDb(channel)
        cursor = db.cursor()
        cursor.execute("""SELECT regexp, usage_count
                          FROM triggers
                          ORDER BY usage_count DESC
                          LIMIT ?""", (numregexps,))
        regexps = cursor.fetchall()
        if len(regexps) == 0:
            irc.reply(_('There are no regexp triggers in the database.'))
            return
        s = [ "#%d \"%s\" (%d)" % (i+1, regexp[0], regexp[1]) for i, regexp in enumerate(regexps) ]
        irc.reply(", ".join(s))
    rank = wrap(rank, ['channelOrGlobal'])
    @internationalizeDocstring
    def vacuum(self, irc, msg, args, channel):
        """[<channel>|global]
        Vacuums the database for <channel>.
        See SQLite vacuum doc here: http://www.sqlite.org/lang_vacuum.html
        <channel> is only necessary if the message isn't sent in
        the channel itself.
        First check if user has the required capability specified in plugin
        config requireVacuumCapability.
        """
        capability = self.registryValue('requireVacuumCapability')
        if capability:
            if not ircdb.checkCapability(msg.prefix, capability):
                irc.errorNoCapability(capability, Raise=True)
        db = self.getDb(channel)
        cursor = db.cursor()
        cursor.execute("""VACUUM""")
        db.commit()
        irc.replySuccess()
    vacuum = wrap(vacuum, ['channelOrGlobal'])
# Apply i18n to the class docstring and expose the plugin entry point
# under the conventional name 'Class' expected by the supybot loader.
MessageParser = internationalizeDocstring(MessageParser)
Class = MessageParser
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| bsd-3-clause |
AICIDNN/cloudbiolinux | cloudbio/custom/cloudman.py | 10 | 4819 | """Custom install scripts for CloudMan environment.
From Enis Afgan: https://bitbucket.org/afgane/mi-deployment
"""
import os
import contextlib
from fabric.api import cd
from fabric.contrib.files import settings, hide
from cloudbio.custom.shared import (_make_tmp_dir, _setup_conf_file)
from cloudbio.cloudman import (_configure_cloudman, _configure_novnc,
_configure_desktop, _configure_ec2_autorun)
from cloudbio.galaxy import _install_nginx
CDN_ROOT_URL = "http://linuxcourse.rutgers.edu/rate/Clusters/download"
REPO_ROOT_URL = "https://bitbucket.org/afgane/mi-deployment/raw/tip"
def install_cloudman(env):
    """ A meta method for installing all of CloudMan components.
    Allows CloudMan and all of its dependencies to be installed via:
    fab -f fabfile.py -i <key> -H ubuntu@<IP> install_custom:cloudman
    """
    env.logger.debug("Installing CloudMan")
    # Configure CloudMan itself first, then its service dependencies.
    _configure_cloudman(env, use_repo_autorun=False)
    install_nginx(env)
    install_proftpd(env)
    install_sge(env)
    install_novnc(env)
def install_ec2_autorun(env):
    """Set up the EC2 autorun script that starts CloudMan at boot."""
    _configure_ec2_autorun(env)
def install_novnc(env):
    """Install noVNC (browser-based VNC) plus the desktop environment."""
    _configure_novnc(env)
    _configure_desktop(env)
def install_nginx(env):
    """Install the nginx web server (delegates to the Galaxy installer)."""
    _install_nginx(env)
def install_proftpd(env):
    """Highly configurable GPL-licensed FTP server software.
    http://proftpd.org/
    """
    version = "1.3.4c"
    postgres_ver = "9.1"
    url = "ftp://ftp.tpnet.pl/pub/linux/proftpd/distrib/source/proftpd-%s.tar.gz" % version
    # Core module set: SQL auth backed by PostgreSQL with hashed passwords.
    modules = "mod_sql:mod_sql_postgres:mod_sql_passwd"
    extra_modules = env.get("extra_proftp_modules", "")  # Comma separated list of extra modules
    if extra_modules:
        # configure expects a colon-separated module list.
        modules = "%s:%s" % (modules, extra_modules.replace(",", ":"))
    install_dir = os.path.join(env.install_dir, 'proftpd')
    remote_conf_dir = os.path.join(install_dir, "etc")
    # Skip install if already available
    if env.safe_exists(remote_conf_dir):
        env.logger.debug("ProFTPd seems to already be installed in {0}".format(install_dir))
        return
    # Download, build, and install from source in a throwaway directory.
    with _make_tmp_dir() as work_dir:
        with cd(work_dir):
            env.safe_run("wget %s" % url)
            with settings(hide('stdout')):
                env.safe_run("tar xvzf %s" % os.path.split(url)[1])
            with cd("proftpd-%s" % version):
                env.safe_run("CFLAGS='-I/usr/include/postgresql' ./configure --prefix=%s "
                             "--disable-auth-file --disable-ncurses --disable-ident --disable-shadow "
                             "--enable-openssl --with-modules=%s "
                             "--with-libraries=/usr/lib/postgresql/%s/lib" % (install_dir, modules, postgres_ver))
                env.safe_sudo("make")
                env.safe_sudo("make install")
                env.safe_sudo("make clean")
    # Get the init.d startup script
    initd_script = 'proftpd.initd'
    initd_url = os.path.join(REPO_ROOT_URL, 'conf_files', initd_script)
    remote_file = "/etc/init.d/proftpd"
    env.safe_sudo("wget --output-document=%s %s" % (remote_file, initd_url))
    # Point the init script at the actual install location.
    env.safe_sed(remote_file, 'REPLACE_THIS_WITH_CUSTOM_INSTALL_DIR', install_dir, use_sudo=True)
    env.safe_sudo("chmod 755 %s" % remote_file)
    # Set the configuration file
    conf_file = 'proftpd.conf'
    remote_file = os.path.join(remote_conf_dir, conf_file)
    if "postgres_port" not in env:
        env.postgres_port = '5910'
    if "galaxy_ftp_user_password" not in env:
        env.galaxy_ftp_user_password = 'fu5yOj2sn'
    proftpd_conf = {'galaxy_uid': env.safe_run('id -u galaxy'),
                    'galaxy_fs': '/mnt/galaxy',  # Should be a var but uncertain how to get it
                    'install_dir': install_dir}
    _setup_conf_file(env, remote_file, conf_file, overrides=proftpd_conf,
                     default_source="proftpd.conf.template")
    # Get the custom welcome msg file
    welcome_msg_file = 'welcome_msg.txt'
    welcome_url = os.path.join(REPO_ROOT_URL, 'conf_files', welcome_msg_file)
    env.safe_sudo("wget --output-document=%s %s" %
                  (os.path.join(remote_conf_dir, welcome_msg_file), welcome_url))
    # Stow
    env.safe_sudo("cd %s; stow proftpd" % env.install_dir)
    env.logger.debug("----- ProFTPd %s installed to %s -----" % (version, install_dir))
def install_sge(env):
    """Sun Grid Engine.

    Downloads the pre-built SGE 6.2u5 tarball from the CDN and unpacks it
    into ``env.install_dir``. Skips the install if the target directory
    already exists.
    """
    out_dir = "ge6.2u5"
    url = "%s/ge62u5_lx24-amd64.tar.gz" % CDN_ROOT_URL
    install_dir = env.install_dir
    # Idempotency: do nothing if SGE is already unpacked.
    if env.safe_exists(os.path.join(install_dir, out_dir)):
        return
    with _make_tmp_dir() as work_dir:
        # Multi-manager 'with' replaces contextlib.nested, which is
        # deprecated since Python 2.7 and removed in Python 3.
        with cd(work_dir), settings(hide('stdout')):
            env.safe_run("wget %s" % url)
            # Ensure the invoking user can write into the install dir.
            env.safe_sudo("chown %s %s" % (env.user, install_dir))
            env.safe_run("tar -C %s -xvzf %s" % (install_dir, os.path.split(url)[1]))
    env.logger.debug("SGE setup")
| mit |
vrenaville/bank-payment | __unported__/account_banking_nl_multibank/__openerp__.py | 3 | 1409 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2009 EduSense BV (<http://www.edusense.nl>).
# All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Banking - NL Multibank import',
'version': '0.62',
'license': 'AGPL-3',
'author': 'EduSense BV',
'website': 'http://www.edusense.nl',
'category': 'Account Banking',
'depends': ['account_banking'],
'description': '''
Module to import Dutch Multibank format transation files.
This modules contains no logic, just an import filter for account_banking.
''',
'installable': False,
}
| agpl-3.0 |
JoshuaSkelly/TroubleInCloudLand | ui/infobubble.py | 3 | 3780 | from core import actor
from core.actor import *
from utils import utility
class InfoBubble(actor.Actor):
    """A speech-balloon actor that follows a target actor on screen.

    Wraps an arbitrary pre-rendered surface in a white-on-dark balloon
    with an up- or down-pointing tail, keeps itself positioned relative
    to the target each frame, and optionally expires after life_timer
    frames (a negative timer never reaches zero, so it lives forever).
    """
    def __init__(self, surface, target, life_timer = -1):
        actor.Actor.__init__(self)
        # Content to display inside the balloon.
        self.surface = surface
        self.surface_rect = self.surface.get_rect()
        self.mounted = False
        # Actor the bubble tracks.
        self.target = target
        self.image = None
        self.balloon_pointer_down = utility.load_image('balloonPointerDown')
        self.balloon_pointer_up = utility.load_image('balloonPointerUp')
        self.balloon_pointer_direction = 'Down'
        self.rect = None
        self.velocity = vector.Vector2d.zero
        self.bounds = 0, 0, SCREEN_WIDTH, SCREEN_HEIGHT
        self.bound_style = BOUND_STYLE_CUSTOM
        # Displacement from the target's position; set via set_offset().
        self.offset = vector.Vector2d.zero
        # Remaining lifetime in frames; -1 means never expire.
        self.life_timer = life_timer
        self.position = target.position + self.offset
        self.mounted = True
        self.balloon_pointer_rect = None
        self.create_bubble()
        self.update()
    def actor_update(self):
        """Per-frame update: follow the target and count down lifetime."""
        if self.life_timer:
            if self.mounted:
                # Track the target, including its hitrect offset.
                self.position = self.target.position + self.offset + vector.Vector2d(self.target.hitrect_offset_x, self.target.hitrect_offset_y)
            self.life_timer -= 1
            if not self.life_timer:
                self.die()
    def set_offset(self, offSet):
        """Set the bubble's displacement from the target and apply it."""
        self.offset = offSet
        self.position += self.offset
    def create_bubble(self):
        """(Re)build the composite balloon image around self.surface."""
        # White inner box with a slightly larger dark border box behind it.
        white_box = pygame.Surface((self.surface.get_width() + 6, self.surface.get_height() + 6))
        white_box.fill((255,255,255))
        white_box_rect = white_box.get_rect()
        dark_box = pygame.Surface((self.surface.get_width() + 14, self.surface.get_height() + 14))
        dark_box.fill(FONT_COLOR)
        dark_box_rect = dark_box.get_rect()
        self.balloon_pointer_rect = self.balloon_pointer_down.get_rect()
        # Extra vertical room (38 px) leaves space for the pointer tail.
        self.image = pygame.Surface((dark_box.get_width(), dark_box.get_height() + 38))
        self.image.set_colorkey((0,0,0))
        self.rect = self.image.get_rect()
        # Center all layers on the composite surface.
        dark_box_rect.center = self.rect.center
        white_box_rect.center = dark_box_rect.center
        self.surface_rect.center = white_box_rect.center
        self.balloon_pointer_rect.center = white_box_rect.center
        self.image.blit(dark_box,dark_box_rect)
        self.image.blit(white_box,white_box_rect)
        self.image.blit(self.surface, self.surface_rect)
        # Choose the pointer tail direction based on which side of the
        # target the bubble sits (offset.y sign), flipping state as we go.
        if self.offset.y <= 0 and self.balloon_pointer_direction == 'Down':
            self.balloon_pointer_rect.top = white_box_rect.bottom
            self.image.blit(self.balloon_pointer_down, self.balloon_pointer_rect)
            self.balloon_pointer_direction = 'Up'
        if self.offset.y > 0 and self.balloon_pointer_direction == 'Up':
            self.balloon_pointer_rect.bottom = white_box_rect.top
            self.image.blit(self.balloon_pointer_up, self.balloon_pointer_rect)
            self.balloon_pointer_direction = 'Down'
        # Shrink the movement bounds so the full bubble stays on screen.
        self.bounds = self.image.get_width() / 2, self.image.get_height() / 2 , SCREEN_WIDTH - (self.image.get_width() / 2), SCREEN_HEIGHT - (self.image.get_height() / 2)
    def draw(self, screen):
        """Blit the composed balloon onto the given screen surface."""
        screen.blit(self.image, self.rect)
    def custom_bounds(self):
        """Keep the bubble on screen; flip above/below the target at edges."""
        if self.position.y < self.bounds[TOP] or self.position.y > self.bounds[BOTTOM]:
            # Vertical overflow: mirror the offset and rebuild the balloon
            # so the pointer tail flips to the other side.
            self.offset *= -1
            self.create_bubble()
        if self.position.x < self.bounds[LEFT]:
            self.position = vector.Vector2d(self.bounds[LEFT], self.position.y)
        elif self.position.x > self.bounds[RIGHT]:
            self.position = vector.Vector2d(self.bounds[RIGHT], self.position.y)
| mit |
jcftang/ansible | lib/ansible/modules/utilities/logic/include_role.py | 34 | 2684 | #!/usr/bin/python
# -*- mode: python -*-
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = '''
---
author:
- "Ansible Core Team (@ansible)"
module: include_role
short_description: Load and execute a role
description:
- "Loads and executes a role as a task, this frees roles from the `role:` directive and allows them to be treated more as tasks."
version_added: "2.2"
options:
name:
description:
- The name of the role to be executed.
required: True
tasks_from:
description:
- "File to load from a Role's tasks/ directory."
required: False
default: 'main'
vars_from:
description:
- "File to load from a Role's vars/ directory."
required: False
default: 'main'
defaults_from:
description:
- "File to load from a Role's defaults/ directory."
required: False
default: 'main'
allow_duplicates:
description:
- Overrides the role's metadata setting to allow using a role more than once with the same parameters.
required: False
default: True
private:
description:
- If True the variables from defaults/ and vars/ in a role will not be made available to the rest of the play.
default: None
notes:
- Handlers are made available to the whole play.
- simple dependencies seem to work fine.
- As with C(include) this task can be static or dynamic, If static it implies that it won't need templating nor loops nor conditionals and will show included tasks in the --list options. Ansible will try to autodetect what is needed, but you can set `static` to `yes` or `no` at task level to control this.
'''
EXAMPLES = """
- include_role:
name: myrole
- name: Run tasks/other.yml instead of 'main'
include_role:
name: myrole
tasks_from: other
- name: Pass variables to role
include_role:
name: myrole
vars:
rolevar1: 'value from task'
- name: Use role in loop
include_role:
name: myrole
with_items:
- '{{ roleinput1 }}'
- '{{ roleinput2 }}'
loop_control:
loop_var: roleinputvar
- name: conditional role
include_role:
name: myrole
when: not idontwanttorun
"""
RETURN = """
# this module does not return anything except tasks to execute
"""
| gpl-3.0 |
zenodo/zenodo | tests/unit/records/test_api_views.py | 1 | 6739 | # -*- coding: utf-8 -*-
#
# This file is part of Zenodo.
# Copyright (C) 2019 CERN.
#
# Zenodo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Zenodo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Zenodo. If not, see <http://www.gnu.org/licenses/>.
#
# In applying this licence, CERN does not waive the privileges and immunities
# granted to it by virtue of its status as an Intergovernmental Organization
# or submit itself to any jurisdiction.
"""Unit tests Zenodo JSON deserializer."""
from urlparse import parse_qs, urlparse
import pytest
from flask import url_for
from invenio_indexer.api import RecordIndexer
from invenio_search import current_search
@pytest.mark.parametrize(('val', 'status', 'error_message'), [
    # Valid four-number bounds are accepted.
    ('-1.43,-1.53, 2.45,1.63', 200, None),
    ('1.23, -1.43 , 1.53 , 2.34', 200, None),
    # Wrong arity, non-numeric parts, out-of-range coordinates, NaN/Infinity,
    # and inverted corners must all return 400 with a field-level error.
    ('2.45,1.63', 400,
     'Invalid bounds: four comma-separated numbers required. '
     'Example: 143.37158,-38.99357,146.90918,-37.35269'),
    ('2.45,\'1.63\',-1.43,-1.53', 400, 'Invalid number in bounds.'),
    ('2.45,\' \',-1.43,-1.53', 400, 'Invalid number in bounds.'),
    ('2.45,\'\',-1.43,-1.53', 400, 'Invalid number in bounds.'),
    ('2.45,,-1.43,-1.53', 400, 'Invalid number in bounds.'),
    ('2.45, ,-1.43,-1.53', 400, 'Invalid number in bounds.'),
    ('2.45;1.63,-1.43,-1.53', 400,
     'Invalid bounds: four comma-separated numbers required. '
     'Example: 143.37158,-38.99357,146.90918,-37.35269'),
    ('181,1.63,-181,-1.53', 400, 'Longitude must be between -180 and 180.'),
    ('2.45,91,-1.43,-91', 400, 'Latitude must be between -90 and 90.'),
    ('2.45,1.63,NaN,-1.53', 400,
     'Invalid number: "NaN" is not a permitted value.'),
    ('2.45,1.63,Infinity,-1.53', 400,
     'Longitude must be between -180 and 180.'),
    ('-1.43,1.63,2.45,-1.53', 400,
     'Top-right latitude must be greater than bottom-left latitude.'),
])
def test_geographical_search_validation(
        es, api, json_headers, record_with_bucket, val, status, error_message):
    """Test geographical search validation."""
    pid, record = record_with_bucket
    RecordIndexer().index(record)
    with api.test_request_context():
        with api.test_client() as client:
            res = client.get(
                url_for('invenio_records_rest.recid_list', bounds=val),
                headers=json_headers
            )
            assert res.status_code == status
            if error_message:
                # Errors are reported per-field under a generic message.
                assert res.json['message'] == 'Validation error.'
                assert len(res.json['errors']) == 1
                assert res.json['errors'][0]['field'] == 'bounds'
                assert res.json['errors'][0]['message'] == error_message
def test_geographical_search(es, api, json_headers, record_with_bucket):
    """A bounding-box query returns only records with coordinates inside it."""
    pid, record = record_with_bucket
    # One location with coordinates (inside the queried bounds) and one
    # place-only location without coordinates.
    record['locations'] = [
        {'lat': 46.204391, 'lon': 6.143158, 'place': 'Geneva'},
        {'place': 'New York'}
    ]
    RecordIndexer().index(record)
    current_search.flush_and_refresh(index='records')
    with api.test_request_context(), api.test_client() as client:
        search_url = url_for(
            'invenio_records_rest.recid_list',
            bounds='6.059634,46.167928,6.230161,46.244911')
        response = client.get(search_url, headers=json_headers)
        assert len(response.json) == 1
@pytest.mark.parametrize(('val', 'status', 'error_message'), [
    ('[dwc:family]:[Felidae]', 200, None),
    ('[dwc:foobar]:[Felidae]', 400, 'The "dwc:foobar" term is not supported.'),
    ('[dwc:family]:Felidae', 400,
     'The parameter should have the format: custom=[term]:[value].'),
    ('[dwc:family]', 400,
     'The parameter should have the format: custom=[term]:[value].'),
    (':Felidae', 400,
     'The parameter should have the format: custom=[term]:[value].'),
])
def test_custom_search_validation(
        es, api, json_headers, val, status, error_message):
    """Validate error reporting for malformed custom-metadata queries."""
    with api.test_request_context(), api.test_client() as client:
        response = client.get(
            url_for('invenio_records_rest.recid_list', custom=val),
            headers=json_headers,
        )
        assert response.status_code == status
        if error_message:
            payload = response.json
            assert payload['message'] == 'Validation error.'
            assert len(payload['errors']) == 1
            error = payload['errors'][0]
            assert error['field'] == 'custom'
            assert error['message'] == error_message
@pytest.mark.parametrize(('query', 'result'), [
    ('[dwc:family]:[Felidae]', 1),
    ('[dwc:family]:[foobar]', 0),
    ('[obo:RO_0002453]:[Cat:]', 1),
    ('[obo:RO_0002453]:[:"Cat flea"]', 1),
    ('[obo:RO_0002453]:[foobar:]', 0),
    ('[obo:RO_0002453]:[(foobar OR "Felis catus"):]', 1),
    ('[obo:RO_0002453]:["Felis catus":"Cat flea"]', 1),
    ('[obo:RO_0002453]:["Felis catus":foobar]', 0),
])
def test_custom_search(es, api, json_headers, record_with_bucket,
                       custom_metadata, query, result):
    """Search by custom metadata terms and check the number of hits."""
    _, record = record_with_bucket
    record['custom'] = custom_metadata
    RecordIndexer().index(record)
    current_search.flush_and_refresh(index='records')
    with api.test_request_context(), api.test_client() as client:
        response = client.get(
            url_for('invenio_records_rest.recid_list', custom=query),
            headers=json_headers)
        assert len(response.json) == result
@pytest.mark.parametrize(('query', 'result'), [
    ("", None),
    ("?all_versions", "true"),
    ("?all_versions=true", "true"),
    ("?all_versions=True", "true"),
    ("?all_versions=1", "true"),
    ("?all_versions=1234", "1234"),
    ("?all_versions=test", "test"),
])
def test_apply_version_filters(es, api, query, result):
    """Check how the all_versions flag is echoed back in the self link."""
    with api.test_request_context(), api.test_client() as client:
        response = client.get(
            url_for('invenio_records_rest.recid_list') + query)
        self_link = urlparse(response.json["links"]["self"])
        params = parse_qs(self_link.query, keep_blank_values=True)
        assert params.get("all_versions", [None]) == [result]
| gpl-2.0 |
ykim362/mxnet | example/rcnn/rcnn/tools/test_rcnn.py | 41 | 5671 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import argparse
import pprint
import mxnet as mx
from ..logger import logger
from ..config import config, default, generate_config
from ..symbol import *
from ..dataset import *
from ..core.loader import TestLoader
from ..core.tester import Predictor, pred_eval
from ..utils.load_model import load_param
def test_rcnn(network, dataset, image_set, root_path, dataset_path,
              ctx, prefix, epoch,
              vis, shuffle, has_rpn, proposal, thresh):
    """Evaluate an R-CNN checkpoint (``prefix``-``epoch``) on ``image_set``.

    ``network`` and ``dataset`` are symbolic names resolved via ``eval``
    against the star-imported symbol/dataset modules; ``ctx`` is the mxnet
    device context. With ``has_rpn`` proposals are generated on the fly
    (end-to-end model); otherwise precomputed ``proposal`` roidbs are used.
    """
    # set config
    if has_rpn:
        config.TEST.HAS_RPN = True
    # print config
    pprint.pprint(config)
    # load symbol and testing data
    if has_rpn:
        # End-to-end model: ground-truth roidb is enough, proposals come from the RPN.
        sym = eval('get_' + network + '_test')(num_classes=config.NUM_CLASSES, num_anchors=config.NUM_ANCHORS)
        imdb = eval(dataset)(image_set, root_path, dataset_path)
        roidb = imdb.gt_roidb()
    else:
        # Fast R-CNN: load external proposals (e.g. selective search) named by `proposal`.
        sym = eval('get_' + network + '_rcnn_test')(num_classes=config.NUM_CLASSES)
        imdb = eval(dataset)(image_set, root_path, dataset_path)
        gt_roidb = imdb.gt_roidb()
        roidb = eval('imdb.' + proposal + '_roidb')(gt_roidb)
    # get test data iter
    test_data = TestLoader(roidb, batch_size=1, shuffle=shuffle, has_rpn=has_rpn)
    # load model
    arg_params, aux_params = load_param(prefix, epoch, convert=True, ctx=ctx, process=True)
    # infer shape
    data_shape_dict = dict(test_data.provide_data)
    arg_shape, _, aux_shape = sym.infer_shape(**data_shape_dict)
    arg_shape_dict = dict(zip(sym.list_arguments(), arg_shape))
    aux_shape_dict = dict(zip(sym.list_auxiliary_states(), aux_shape))
    # check parameters: every learnable argument (excluding data/labels) must be
    # present in the checkpoint with a shape matching the inferred one
    for k in sym.list_arguments():
        if k in data_shape_dict or 'label' in k:
            continue
        assert k in arg_params, k + ' not initialized'
        assert arg_params[k].shape == arg_shape_dict[k], \
            'shape inconsistent for ' + k + ' inferred ' + str(arg_shape_dict[k]) + ' provided ' + str(arg_params[k].shape)
    for k in sym.list_auxiliary_states():
        assert k in aux_params, k + ' not initialized'
        assert aux_params[k].shape == aux_shape_dict[k], \
            'shape inconsistent for ' + k + ' inferred ' + str(aux_shape_dict[k]) + ' provided ' + str(aux_params[k].shape)
    # decide maximum shape (largest configured image scale, used for pre-allocation)
    data_names = [k[0] for k in test_data.provide_data]
    label_names = None
    max_data_shape = [('data', (1, 3, max([v[0] for v in config.SCALES]), max([v[1] for v in config.SCALES])))]
    if not has_rpn:
        # rois buffer sized at post-NMS proposal count plus slack
        # (presumably for appended ground-truth boxes — TODO confirm)
        max_data_shape.append(('rois', (1, config.TEST.PROPOSAL_POST_NMS_TOP_N + 30, 5)))
    # create predictor
    predictor = Predictor(sym, data_names, label_names,
                          context=ctx, max_data_shapes=max_data_shape,
                          provide_data=test_data.provide_data, provide_label=test_data.provide_label,
                          arg_params=arg_params, aux_params=aux_params)
    # start detection
    pred_eval(predictor, test_data, imdb, vis=vis, thresh=thresh)
def parse_args():
    """Parse command-line options for R-CNN testing.

    --network and --dataset are parsed first (parse_known_args) so that
    generate_config() can install their defaults before the remaining
    options are declared.
    """
    arg_parser = argparse.ArgumentParser(description='Test a Fast R-CNN network')
    # general: these two drive config generation and must be known up front
    arg_parser.add_argument('--network', help='network name', default=default.network, type=str)
    arg_parser.add_argument('--dataset', help='dataset name', default=default.dataset, type=str)
    known, _ = arg_parser.parse_known_args()
    generate_config(known.network, known.dataset)
    arg_parser.add_argument('--image_set', help='image_set name', default=default.test_image_set, type=str)
    arg_parser.add_argument('--root_path', help='output data folder', default=default.root_path, type=str)
    arg_parser.add_argument('--dataset_path', help='dataset path', default=default.dataset_path, type=str)
    # testing checkpoint
    arg_parser.add_argument('--prefix', help='model to test with', default=default.rcnn_prefix, type=str)
    arg_parser.add_argument('--epoch', help='model to test with', default=default.rcnn_epoch, type=int)
    arg_parser.add_argument('--gpu', help='GPU device to test with', default=0, type=int)
    # rcnn evaluation switches
    arg_parser.add_argument('--vis', help='turn on visualization', action='store_true')
    arg_parser.add_argument('--thresh', help='valid detection threshold', default=1e-3, type=float)
    arg_parser.add_argument('--shuffle', help='shuffle data on visualization', action='store_true')
    arg_parser.add_argument('--has_rpn', help='generate proposals on the fly', action='store_true')
    arg_parser.add_argument('--proposal', help='can be ss for selective search or rpn', default='rpn', type=str)
    return arg_parser.parse_args()
def main():
    """Entry point: parse CLI arguments and evaluate the model on one GPU."""
    args = parse_args()
    logger.info('Called with argument: %s' % args)
    device = mx.gpu(args.gpu)
    test_rcnn(args.network, args.dataset, args.image_set, args.root_path,
              args.dataset_path, device, args.prefix, args.epoch,
              args.vis, args.shuffle, args.has_rpn, args.proposal, args.thresh)


if __name__ == '__main__':
    main()
| apache-2.0 |
luisibanez/ITKExamples | utilities/ext/sphinxcontrib/doxylink/doxylink.py | 1 | 18225 | # -*- coding: utf-8 -*-
import os
import xml.etree.ElementTree as ET
import urlparse
import re
import itertools
from docutils import nodes, utils
from sphinx.util.nodes import split_explicit_title
from sphinx.util.console import bold, standout
from parsing import normalise, ParseException
def find_url(doc, symbol):
    """
    Return the URL for a given symbol.

    This is where the magic happens. This function could be a lot more
    clever: at present it requires the passed symbol to be almost exactly
    the same as the entries in the Doxygen tag file.

    .. todo::

        Maybe print a list of all possible matches as a warning (but still only return the first)

    :Parameters:
        doc : xml.etree.ElementTree
            The XML DOM object of the Doxygen tag file
        symbol : string
            The symbol to lookup in the file. E.g. something like 'PolyVox::Array' or 'tidyUpMemory'

    :return: dict with the filename part of the URL under 'file' and the
        Doxygen kind under 'kind', or None if no match was found
    """
    # First check for an exact match with a top-level object (namespaces, objects etc.)
    matches = []
    for compound in doc.findall('.//compound'):
        if compound.find('name').text == symbol:
            matches += [{'file': compound.find('filename').text, 'kind': compound.get('kind')}]
    if len(matches) > 1:
        # TODO warn about the ambiguity; for now the first match wins below
        pass
    if len(matches) == 1:
        return matches[0]

    # Strip off first namespace bit of the compound name so that 'ArraySizes'
    # can match 'PolyVox::ArraySizes'
    for compound in doc.findall('.//compound'):
        symbol_list = compound.find('name').text.split('::', 1)
        if len(symbol_list) == 2:
            if symbol_list[1] == symbol:
                return {'file': compound.find('filename').text, 'kind': compound.get('kind')}

    # Now split the symbol by '::'. Find an exact match for the first part and
    # then a member match for the second. So PolyVox::Array::operator[] becomes
    # like {namespace: "PolyVox::Array", endsymbol: "operator[]"}
    symbol_list = symbol.rsplit('::', 1)
    if len(symbol_list) == 2:
        namespace, endsymbol = symbol_list
        for compound in doc.findall('.//compound'):
            if compound.find('name').text == namespace:
                for member in compound.findall('member'):
                    # If this compound object contains the matching member then return it
                    if member.find('name').text == endsymbol:
                        return {'file': (member.findtext('anchorfile') or compound.findtext('filename')) + '#' + member.find('anchor').text,
                                'kind': member.get('kind')}

    # Finally look at unqualified members. Iterate compound-by-compound so that
    # a member lacking an <anchorfile> falls back to its *own* parent compound's
    # <filename>. (Previously this loop used doc.findall('.//member') and read a
    # stale `compound` loop variable from the loops above, yielding the wrong
    # filename — or a NameError on a document with no compounds.)
    for compound in doc.findall('.//compound'):
        for member in compound.findall('member'):
            if member.find('name').text == symbol:
                return {'file': (member.findtext('anchorfile') or compound.findtext('filename')) + '#' + member.find('anchor').text,
                        'kind': member.get('kind')}

    return None
def parse_tag_file(doc):
    """
    Takes in an XML tree from a Doxygen tag file and returns a dictionary that looks something like:

    .. code-block:: python

        {'PolyVox': {'file': 'namespace_poly_vox.html',
        'kind': 'namespace'},
        'PolyVox::Array': {'file': 'class_poly_vox_1_1_array.html',
        'kind': 'class'},
        'PolyVox::Array1DDouble': {'file': 'namespace_poly_vox.html#a7a1f5fd5c4f7fbb4258a495d707b5c13',
        'kind': 'typedef'},
        'QScriptContext::throwError': {'arglist': {'( Error error, const QString & text )': 'qscriptcontext.html#throwError',
        '( const QString & text )': 'qscriptcontext.html#throwError-2'},
        'kind': 'function'},
        'QScriptContext::toString': {'arglist': {'()': 'qscriptcontext.html#toString'},
        'kind': 'function'}}

    Note the different form for functions. This is required to allow for
    'overloading by argument type'. To access a filename for a symbol you do:

    .. code-block:: python

        symbol_mapping = mapping[symbol]
        if symbol_mapping['kind'] == 'function':
            url = symbol_mapping['arglist'][argument_string]
        else:
            url = symbol_mapping['file']

    :Parameters:
        doc : xml.etree.ElementTree
            The XML DOM object

    :return: a dictionary mapping fully qualified symbols to files
    """
    mapping = {}
    function_list = [] #This is a list of function to be parsed and inserted into mapping at the end of the function.
    for compound in doc.findall("./compound"):
        compound_kind = compound.get('kind')
        if compound_kind != 'namespace' and compound_kind != 'class' and compound_kind!= 'struct' and compound_kind != 'file':
            continue #Skip everything that isn't a namespace, class, struct or file
        compound_name = compound.findtext('name')
        compound_filename = compound.findtext('filename')
        #TODO The following is a hack bug fix I think
        #Doxygen doesn't seem to include the file extension to <compound kind="file"><filename> entries
        #If it's a 'file' type, check if it _does_ have an extension, if not append '.html'
        if compound_kind == 'file' and not os.path.splitext(compound_filename)[1]:
            compound_filename = join(compound_filename, '.html')
        #If it's a compound we can simply add it
        mapping[compound_name] = {'kind' : compound_kind, 'file' : compound_filename}
        for member in compound.findall('member'):
            #If the member doesn't have an <anchorfile> element, use the parent compounds <filename> instead
            #This is the way it is in the qt.tag and is perhaps an artefact of old Doxygen
            anchorfile = member.findtext('anchorfile') or compound_filename
            member_symbol = join(compound_name, '::', member.findtext('name'))
            member_kind = member.get('kind')
            arglist_text = member.findtext('./arglist') #If it has an <arglist> then we assume it's a function. Empty <arglist> returns '', not None. Things like typedefs and enums can have empty arglists
            if arglist_text and member_kind != 'variable' and member_kind != 'typedef' and member_kind != 'enumeration':
                function_list.append((member_symbol, arglist_text, member_kind, join(anchorfile,'#',member.findtext('anchor'))))
            else:
                mapping[member_symbol] = {'kind' : member.get('kind'), 'file' : join(anchorfile,'#',member.findtext('anchor'))}
    # Second pass: normalise every collected function signature and merge the
    # overloads into per-symbol 'arglist' dicts. itertools.imap is the lazy
    # Python 2 map; normalise() returns a (error-or-signature, arglist) pair.
    for old_tuple, normalised_tuple in zip(function_list, itertools.imap(normalise, (member_tuple[1] for member_tuple in function_list))):
        member_symbol = old_tuple[0]
        original_arglist = old_tuple[1]
        kind = old_tuple[2]
        anchor_link = old_tuple[3]
        normalised_arglist = normalised_tuple[1]
        if normalised_tuple[1] is not None: #This is a 'flag' for a ParseException having happened
            if mapping.get(member_symbol):
                # Symbol already seen: record this overload alongside the others.
                mapping[member_symbol]['arglist'][normalised_arglist] = anchor_link
            else:
                mapping[member_symbol] = {'kind' : kind, 'arglist' : {normalised_arglist : anchor_link}}
        else:
            # Unparseable signature: skip this overload but keep going.
            print('Skipping %s %s%s. Error reported from parser was: %s' % (old_tuple[2], old_tuple[0], old_tuple[1], normalised_tuple[0]))
    #from pprint import pprint; pprint(mapping)
    return mapping
def find_url2(mapping, symbol):
    """
    Return the URL for a given symbol.

    This is where the magic happens.

    .. todo::

        Maybe print a list of all possible matches as a warning (but still only return the first)

    :Parameters:
        mapping : dictionary
            A dictionary of the form returned by :py:func:`parse_tag_file`
        symbol : string
            The symbol to lookup in the file. E.g. something like 'PolyVox::Array' or 'tidyUpMemory'

    :return: String representing the filename part of the URL

    :raises:
        LookupError
            Raised if the symbol could not be matched in the file
    """
    try:
        symbol, normalised_arglist = normalise(symbol)
    except ParseException as error:
        raise LookupError(error)

    # If we have an exact match then return it.
    if mapping.get(symbol):
        return return_from_mapping(mapping[symbol], normalised_arglist)

    # Otherwise keep only entries whose trailing '::'-separated pieces match
    # the requested symbol.
    piecewise_list = find_url_piecewise(mapping, symbol)

    # If there is only one match, return it. next(iter(...)) replaces the
    # Python 2-only .values()[0] and `== 1` replaces the `is 1` identity test.
    if len(piecewise_list) == 1:
        return return_from_mapping(next(iter(piecewise_list.values())), normalised_arglist)

    # More than one match: often the symbol matches both a class and the name
    # of its constructor, so prefer the class entry.
    classes_list = find_url_classes(piecewise_list, symbol)
    if len(classes_list) == 1:
        # NOTE(review): returned raw (not via return_from_mapping), matching
        # the historical behaviour for class entries.
        return next(iter(classes_list.values()))

    # If requiring classes exhausted the list, fall back to the unfiltered one.
    if not classes_list:
        classes_list = piecewise_list

    # Disambiguate templated entries by dropping tag names containing '<'.
    no_templates_list = find_url_remove_templates(classes_list, symbol)
    if len(no_templates_list) == 1:
        return return_from_mapping(next(iter(no_templates_list.values())), normalised_arglist)

    # Still ambiguous: just return the first candidate.
    if no_templates_list:
        # TODO return a warning here?
        return return_from_mapping(next(iter(no_templates_list.values())), normalised_arglist)

    # Nothing matched. Previously this LookupError was instantiated but never
    # raised, so callers silently received None instead of the documented error.
    raise LookupError('Could not find a match')
def return_from_mapping(mapping_entry, normalised_arglist=''):
    """
    Reduce a single mapping entry to a ``{'kind': ..., 'file': ...}`` dict.

    Function entries carry an ``arglist`` dictionary mapping normalised
    signatures to URLs (needed for 'overloading by argument type'); all other
    entries already map straight to a file and are returned intact.

    :Parameters:
        mapping_entry : dict
            a single entry from the mapping returned by :py:func:`parse_tag_file`.
        normalised_arglist : string
            the normalised form of the arglist the user requested. May be
            empty, in which case the first overload is returned. Ignored
            unless ``mapping_entry['kind'] == 'function'``.

    :return: dictionary something like ``{'kind': 'function', 'file': 'something.html#foo'}``
    """
    # If it's a function we need to grab the right signature from the arglist.
    if mapping_entry['kind'] == 'function':
        arglist = mapping_entry['arglist']
        if normalised_arglist:
            filename = arglist.get(normalised_arglist)
            if not filename:
                # Requested signature unknown: fall back to an arbitrary
                # overload. next(iter(...)) instead of .values()[0] so this
                # also works on Python 3, where dict views are not indexable.
                # TODO return a warning here!
                filename = next(iter(arglist.values()))
        else:
            # No specific overload requested, so any one will do.
            filename = next(iter(arglist.values()))
        return {'kind' : 'function', 'file' : filename}
    elif mapping_entry.get('arglist'):
        # This arglist should only be one entry long and that entry should have '' as its key
        return {'kind' : mapping_entry['kind'], 'file' : mapping_entry['arglist']['']}
    # If it's not a function, then return it raw
    return mapping_entry
def find_url_piecewise(mapping, symbol):
    """
    Match the requested symbol against the tag names reverse piecewise.

    Both the symbol and each candidate tag name are split on ``::`` and
    compared from the right-hand end, truncated to the shorter of the two
    sequences. Searching for ``PolyVox::Volume`` therefore keeps
    ``PolyVox::Volume`` (tails ``['Volume', 'PolyVox']`` agree) but discards
    ``PolyVox::Volume::FloatVolume`` (its tail starts with ``'FloatVolume'``).

    :return: dict of all mapping entries whose truncated reversed pieces
        exactly equal the symbol's.
    """
    wanted = symbol.split('::')
    wanted.reverse()
    candidates = {}
    for name, entry in mapping.items():
        pieces = name.split('::')
        pieces.reverse()
        depth = min(len(wanted), len(pieces))
        if wanted[:depth] == pieces[:depth]:
            candidates[name] = entry
    return candidates
def find_url_classes(mapping, symbol):
    """Filter *mapping* down to its class entries.

    Used to prefer a class over the identically named constructor when a
    symbol matches both.
    """
    return dict((name, entry) for name, entry in mapping.items()
                if entry['kind'] == 'class')
def find_url_remove_templates(mapping, symbol):
    """Drop templated tag names (those containing ``<``) from *mapping*.

    Disambiguates e.g. ``PolyVox::Array< 1, ElementType >::operator[]`` from
    ``PolyVox::Array::operator[]`` when both match ``operator[]``.
    """
    return dict((name, entry) for name, entry in mapping.items()
                if '<' not in name)
def join(*args):
    """Concatenate all string arguments into one string (no separator)."""
    return ''.join(args)
def create_role(app, tag_filename, rootdir):
    """Build and return the docutils role function for one Doxygen tag file.

    Parses ``tag_filename`` (reusing a cached parse stored on ``app.env`` when
    the file's mtime is unchanged) and returns a role that resolves
    ``:name:`symbol``` references to URLs under ``rootdir``.
    """
    #Tidy up the root directory path
    if not rootdir.endswith(('/', '\\')):
        rootdir = join(rootdir, os.sep)
    try:
        tag_file = ET.parse(tag_filename)
        cache_name = os.path.basename(tag_filename)
        app.info(bold('Checking tag file cache for %s: ' % cache_name), nonl=True)
        if not hasattr(app.env, 'doxylink_cache'):
            # no cache present at all, initialise it
            app.info('No cache at all, rebuilding...')
            mapping = parse_tag_file(tag_file)
            app.env.doxylink_cache = { cache_name : {'mapping' : mapping, 'mtime' : os.path.getmtime(tag_filename)}}
        elif not app.env.doxylink_cache.get(cache_name):
            # Main cache is there but the specific sub-cache for this tag file is not
            app.info('Sub cache is missing, rebuilding...')
            mapping = parse_tag_file(tag_file)
            app.env.doxylink_cache[cache_name] = {'mapping' : mapping, 'mtime' : os.path.getmtime(tag_filename)}
        elif app.env.doxylink_cache[cache_name]['mtime'] < os.path.getmtime(tag_filename):
            # tag file has been modified since sub-cache creation
            app.info('Sub-cache is out of date, rebuilding...')
            mapping = parse_tag_file(tag_file)
            app.env.doxylink_cache[cache_name] = {'mapping' : mapping, 'mtime' : os.path.getmtime(tag_filename)}
        else:
            #The cache is up to date
            app.info('Sub-cache is up-to-date')
    except IOError:
        # Missing/unreadable tag file: the role below will then only emit warnings.
        tag_file = None
        app.warn(standout('Could not open tag file %s. Make sure your `doxylink` config variable is set correctly.' % tag_filename))
    def find_doxygen_link(name, rawtext, text, lineno, inliner, options={}, content=[]):
        # Role function closure over tag_file/cache_name/rootdir.
        # NOTE: the mutable default args follow the docutils role signature convention.
        text = utils.unescape(text)
        # from :name:`title <part>`
        has_explicit_title, title, part = split_explicit_title(text)
        warning_messages = []
        if tag_file:
            url = find_url(tag_file, part)
            # NOTE(review): the find_url() result above survives only if
            # find_url2() raises LookupError; otherwise it is immediately
            # overwritten — presumably kept as a fallback. TODO confirm.
            try:
                url = find_url2(app.env.doxylink_cache[cache_name]['mapping'], part)
            except LookupError as error:
                warning_messages.append('Error while parsing `%s`. Is not a well-formed C++ function call or symbol. If this is not the case, it is a doxylink bug so please report it. Error reported was: %s' % (part, error))
            if url:
                #If it's an absolute path then the link will work regardless of the document directory
                #Also check if it is a URL (i.e. it has a 'scheme' like 'http' or 'file')
                if os.path.isabs(rootdir) or urlparse.urlparse(rootdir).scheme:
                    full_url = join(rootdir, url['file'])
                #But otherwise we need to add the relative path of the current document to the root source directory to the link
                else:
                    relative_path_to_docsrc = os.path.relpath(app.env.srcdir, os.path.dirname(inliner.document.current_source))
                    full_url = join(relative_path_to_docsrc, '/', rootdir, url['file']) #We always use the '/' here rather than os.sep since this is a web link avoids problems like documentation/.\../library/doc/ (mixed slashes)
                if url['kind'] == 'function' and app.config.add_function_parentheses and not normalise(title)[1]:
                    # Append '()' to function links unless the title already carries an arglist.
                    title = join(title, '()')
                pnode = nodes.reference(title, title, internal=False, refuri=full_url)
                return [pnode], []
            #By here, no match was found
            warning_messages.append('Could not find match for `%s` in `%s` tag file' % (part, tag_filename))
        else:
            warning_messages.append('Could not find match for `%s` because tag file not found' % (part))
        # Fall back to plain inline text plus one warning per failure recorded above.
        pnode = nodes.inline(rawsource=title, text=title)
        return [pnode], [inliner.reporter.warning(message, line=lineno) for message in warning_messages]
    return find_doxygen_link
def setup_doxylink_roles(app):
    """Register one Sphinx role per entry in the ``doxylink`` config value.

    Each config entry maps a role name to ``[tag_filename, rootdir]``.
    """
    # .items() instead of the Python 2-only .iteritems(): it behaves the same
    # for iteration on both Python versions (the mapping is small, so the
    # intermediate list built on Python 2 is harmless).
    for name, [tag_filename, rootdir] in app.config.doxylink.items():
        app.add_role(name, create_role(app, tag_filename, rootdir))
def setup(app):
    """Sphinx extension entry point: declare config and hook role creation."""
    app.add_config_value('doxylink', {}, 'env')
    # Roles are created at builder-inited so the config is fully resolved first.
    app.connect('builder-inited', setup_doxylink_roles)
| apache-2.0 |
uogbuji/versa | test/py/test_postgres.py | 1 | 7595 | '''
Requires http://pytest.org/ e.g.:
pip install pytest
----
Recommended: first set up environment. On BASH:
export VUSER=versa
export VPASS=password
(Replacing "vuser" & "password" accordingly)
Then before running test:
createdb -U $VUSER versa_test "A temp DB for Versa test suite"
Pass in your PG password, e.g.:
py.test test/py/test_postgres.py --user=$VUSER --pass=$VPASS
or to debug:
py.test test/py/test_postgres.py --user=$VUSER --pass=$VPASS --debug
----
If something breaks and the temp DB isn't cleaned up, you can do:
VHOST=localhost; python -c "import psycopg2; from versa.driver import postgres; c = postgres.connection('host=$VHOST dbname=versa_test user=$VUSER password=$VPASS'); c.drop_space()"
Replace localhost as needed
If you want to set up the temp DB for playing around, do:
createdb -U $VUSER versa_test "Test DB for Versa."
psql -U $VUSER versa_test < test/py/test1.sql
Then you can fiddle around:
psql -U $VUSER versa_test
SELECT relationship.rawid, relationship.subj, relationship.pred, relationship.obj FROM relationship, attribute WHERE relationship.subj = 'http://copia.ogbuji.net';
SELECT relationship.rawid, relationship.subj, relationship.pred, relationship.obj FROM relationship, attribute WHERE relationship.subj = 'http://copia.ogbuji.net' AND relationship.rawid = attribute.rawid AND attribute.name = '@context' AND attribute.value = 'http://copia.ogbuji.net#metadata';
SELECT relationship.rawid, relationship.subj, relationship.pred, relationship.obj, attribute.name, attribute.value FROM relationship, attribute WHERE relationship.subj = 'http://copia.ogbuji.net' AND relationship.rawid = attribute.rawid AND attribute.name = '@context' AND attribute.value = 'http://copia.ogbuji.net#metadata';
versa_test=# SELECT relationship.rawid, attribute.rawid, relationship.subj, relationship.pred, relationship.obj, attribute.name, attribute.value FROM relationship FULL JOIN attribute ON relationship.rawid = attribute.rawid WHERE relationship.subj = 'http://copia.ogbuji.net' AND EXISTS (SELECT 1 from attribute AS subattr WHERE subattr.rawid = relationship.rawid AND subattr.name = '@context' AND subattr.value = 'http://copia.ogbuji.net#_metadata');
rawid | rawid | subj | pred | obj | name | value
-------+-------+-------------------------+-----------------------------------------+-------------+----------+-----------------------------------
1 | 1 | http://copia.ogbuji.net | http://purl.org/dc/elements/1.1/creator | Uche Ogbuji | @context | http://copia.ogbuji.net#_metadata
2 | 2 | http://copia.ogbuji.net | http://purl.org/dc/elements/1.1/title | Copia | @context | http://copia.ogbuji.net#_metadata
2 | 2 | http://copia.ogbuji.net | http://purl.org/dc/elements/1.1/title | Copia | @lang | en
(3 rows)
versa_test=# SELECT relationship.rawid, attribute.rawid, relationship.subj, relationship.pred, relationship.obj, attribute.name, attribute.value FROM relationship FULL JOIN attribute ON relationship.rawid = attribute.rawid WHERE relationship.subj = 'http://uche.ogbuji.net' AND EXISTS (SELECT 1 from attribute AS subattr WHERE subattr.rawid = relationship.rawid AND subattr.name = '@context' AND subattr.value = 'http://uche.ogbuji.net#_metadata') ORDER BY relationship.rawid;
rawid | rawid | subj | pred | obj | name | value
-------+-------+------------------------+-----------------------------------------+-------------+----------+----------------------------------
3 | 3 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/creator | Uche Ogbuji | @context | http://uche.ogbuji.net#_metadata
4 | 4 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/title | Uche's home | @context | http://uche.ogbuji.net#_metadata
4 | 4 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/title | Uche's home | @lang | en
5 | 5 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/title | Ulo Uche | @context | http://uche.ogbuji.net#_metadata
5 | 5 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/title | Ulo Uche | @lang | ig
(5 rows)
versa_test=# SELECT relationship.rawid, attribute.rawid, relationship.subj, relationship.pred, relationship.obj, attribute.name, attribute.value FROM relationship FULL JOIN attribute ON relationship.rawid = attribute.rawid WHERE relationship.subj = 'http://uche.ogbuji.net' AND EXISTS (SELECT 1 from attribute AS subattr WHERE subattr.rawid = relationship.rawid AND subattr.name = '@context' AND subattr.value = 'http://uche.ogbuji.net#_metadata') AND EXISTS (SELECT 1 from attribute AS subattr WHERE subattr.rawid = relationship.rawid AND subattr.name = '@lang' AND subattr.value = 'ig') ORDER BY relationship.rawid;
rawid | rawid | subj | pred | obj | name | value
-------+-------+------------------------+---------------------------------------+----------+----------+----------------------------------
5 | 5 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/title | Ulo Uche | @context | http://uche.ogbuji.net#_metadata
5 | 5 | http://uche.ogbuji.net | http://purl.org/dc/elements/1.1/title | Ulo Uche | @lang | ig
(2 rows)
See also:
* http://www.postgresql.org/docs/9.1/static/app-pgdump.html
* http://www.postgresql.org/docs/9.1/static/app-psql.html
'''
import logging
from versa.driver import postgres
#If you do this you also need --nologcapture
#Handle --tc=debug:y option
#if config.get('debug', 'n').startswith('y'):
# logging.basicConfig(level=logging.DEBUG)
#@with_setup(pg_setup, pg_teardown)
def test_basics(pgdb):
    """Exercise add/size/match round-trips against the Postgres driver."""
    connection = pgdb
    for (subj, pred, obj, attrs) in RELS_1:
        connection.add(subj, pred, obj, attrs)
    assert connection.size() == len(RELS_1)

    matched = connection.match(origin='http://copia.ogbuji.net')
    logging.debug('BASICS PART 1')
    for item in matched:
        logging.debug('Result: {0}'.format(repr(item)))

    matched = connection.match(origin='http://uche.ogbuji.net',
                               attrs={u'@lang': u'ig'})
    logging.debug('BASICS PART 2')
    matched = list(matched)
    for item in matched:
        logging.debug('Result: {0}'.format(repr(item)))
    expected = ('http://uche.ogbuji.net',
                'http://purl.org/dc/elements/1.1/title',
                'Ulo Uche',
                {'@context': 'http://uche.ogbuji.net#_metadata', '@lang': 'ig'})
    assert matched[0] == expected, (matched[0], expected)
# Fixture data: (origin, relationship, target, attributes) quads over two
# origins. '@context' marks the metadata context for each relationship and
# '@lang' distinguishes the two titles stored for http://uche.ogbuji.net.
RELS_1 = [
    ("http://copia.ogbuji.net", "http://purl.org/dc/elements/1.1/creator", "Uche Ogbuji", {"@context": "http://copia.ogbuji.net#_metadata"}),
    ("http://copia.ogbuji.net", "http://purl.org/dc/elements/1.1/title", "Copia", {"@context": "http://copia.ogbuji.net#_metadata", '@lang': 'en'}),
    ("http://uche.ogbuji.net", "http://purl.org/dc/elements/1.1/creator", "Uche Ogbuji", {"@context": "http://uche.ogbuji.net#_metadata"}),
    ("http://uche.ogbuji.net", "http://purl.org/dc/elements/1.1/title", "Uche's home", {"@context": "http://uche.ogbuji.net#_metadata", '@lang': 'en'}),
    ("http://uche.ogbuji.net", "http://purl.org/dc/elements/1.1/title", "Ulo Uche", {"@context": "http://uche.ogbuji.net#_metadata", '@lang': 'ig'}),
]
if __name__ == '__main__':
    # This module is a pytest suite, not a script.
    raise SystemExit("Run with py.test")
| apache-2.0 |
taichatha/youtube-dl | youtube_dl/extractor/mooshare.py | 128 | 3640 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
compat_urllib_parse,
)
from ..utils import (
ExtractorError,
)
class MooshareIE(InfoExtractor):
    """Extractor for Mooshare.biz video pages.

    The site gates the video page behind a download form containing a
    hidden ``hash`` token; we re-POST that form (after a mandatory wait)
    to reach the page that embeds the actual SD/HD/RTMP stream URLs.
    """
    IE_NAME = 'mooshare'
    IE_DESC = 'Mooshare.biz'
    _VALID_URL = r'http://(?:www\.)?mooshare\.biz/(?P<id>[\da-z]{12})'

    _TESTS = [
        {
            'url': 'http://mooshare.biz/8dqtk4bjbp8g',
            'md5': '4e14f9562928aecd2e42c6f341c8feba',
            'info_dict': {
                'id': '8dqtk4bjbp8g',
                'ext': 'mp4',
                'title': 'Comedy Football 2011 - (part 1-2)',
                'duration': 893,
            },
        },
        {
            'url': 'http://mooshare.biz/aipjtoc4g95j',
            'info_dict': {
                'id': 'aipjtoc4g95j',
                'ext': 'mp4',
                'title': 'Orange Caramel Dashing Through the Snow',
                'duration': 212,
            },
            'params': {
                # rtmp download
                'skip_download': True,
            }
        }
    ]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        page = self._download_webpage(url, video_id, 'Downloading page')

        if re.search(r'>Video Not Found or Deleted<', page) is not None:
            raise ExtractorError('Video %s does not exist' % video_id, expected=True)

        # The hidden "hash" field must be echoed back in the download form.
        hash_key = self._html_search_regex(r'<input type="hidden" name="hash" value="([^"]+)">', page, 'hash')
        title = self._html_search_regex(r'(?m)<div class="blockTitle">\s*<h2>Watch ([^<]+)</h2>', page, 'title')

        download_form = {
            'op': 'download1',
            'id': video_id,
            'hash': hash_key,
        }

        request = compat_urllib_request.Request(
            'http://mooshare.biz/%s' % video_id, compat_urllib_parse.urlencode(download_form))
        request.add_header('Content-Type', 'application/x-www-form-urlencoded')

        # The site enforces a short delay before the form may be submitted.
        self._sleep(5, video_id)

        video_page = self._download_webpage(request, video_id, 'Downloading video page')

        thumbnail = self._html_search_regex(r'image:\s*"([^"]+)",', video_page, 'thumbnail', fatal=False)
        duration_str = self._html_search_regex(r'duration:\s*"(\d+)",', video_page, 'duration', fatal=False)
        duration = int(duration_str) if duration_str is not None else None

        formats = []

        # SD video
        mobj = re.search(r'(?m)file:\s*"(?P<url>[^"]+)",\s*provider:', video_page)
        if mobj is not None:
            formats.append({
                'url': mobj.group('url'),
                'format_id': 'sd',
                'format': 'SD',
            })

        # HD video
        mobj = re.search(r'\'hd-2\': { file: \'(?P<url>[^\']+)\' },', video_page)
        if mobj is not None:
            formats.append({
                'url': mobj.group('url'),
                'format_id': 'hd',
                'format': 'HD',
            })

        # rtmp video
        mobj = re.search(r'(?m)file: "(?P<playpath>[^"]+)",\s*streamer: "(?P<rtmpurl>rtmp://[^"]+)",', video_page)
        if mobj is not None:
            formats.append({
                'url': mobj.group('rtmpurl'),
                'play_path': mobj.group('playpath'),
                'rtmp_live': False,
                'ext': 'mp4',
                'format_id': 'rtmp',
                'format': 'HD',
            })

        return {
            'id': video_id,
            'title': title,
            'thumbnail': thumbnail,
            'duration': duration,
            'formats': formats,
        }
| unlicense |
ceibal-tatu/sugar | src/jarabe/desktop/schoolserver.py | 4 | 5556 | # Copyright (C) 2007, 2008 One Laptop Per Child
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import logging
from gettext import gettext as _
import xmlrpclib
import socket
import httplib
import os
from string import ascii_uppercase
import random
import time
import uuid
import sys
from gi.repository import GConf
from sugar3 import env
from sugar3.profile import get_profile
_REGISTER_URL = 'http://schoolserver:8080/'
_REGISTER_TIMEOUT = 8
_OFW_TREE = '/ofw'
_PROC_TREE = '/proc/device-tree'
_MFG_SN = 'mfg-data/SN'
_MFG_UUID = 'mfg-data/U#'
def _generate_serial_number():
""" Generates a serial number based on 3 random uppercase letters
and the last 8 digits of the current unix seconds. """
serial_part1 = []
for y_ in range(3):
serial_part1.append(random.choice(ascii_uppercase))
serial_part1 = ''.join(serial_part1)
serial_part2 = str(int(time.time()))[-8:]
serial = serial_part1 + serial_part2
return serial
def _write_identifier(identifier_path, filename, value):
    """Replace a single identifier file under *identifier_path* with *value*.

    Removes any stale file first (mirroring the historical behaviour),
    then writes the new contents via a context manager so the file handle
    is closed even if the write raises.
    """
    file_path = os.path.join(identifier_path, filename)
    if os.path.exists(file_path):
        os.remove(file_path)
    with open(file_path, 'w') as identifier_file:
        identifier_file.write(value)


def _store_identifiers(serial_number, uuid_, backup_url):
    """ Stores the serial number, uuid and backup_url
    in the identifier folder inside the profile directory
    so that these identifiers can be used for backup. """
    identifier_path = os.path.join(env.get_profile_path(), 'identifiers')
    if not os.path.exists(identifier_path):
        os.mkdir(identifier_path)

    # One file per identifier: sn, uuid and backup_url.
    _write_identifier(identifier_path, 'sn', serial_number)
    _write_identifier(identifier_path, 'uuid', uuid_)
    _write_identifier(identifier_path, 'backup_url', backup_url)
class RegisterError(Exception):
    """Raised when registration with the school server cannot complete."""
class _TimeoutHTTP(httplib.HTTP):
    """httplib.HTTP subclass that forces the registration timeout.

    NOTE: the *timeout* constructor argument is accepted but ignored;
    the connection is always created with _REGISTER_TIMEOUT.
    """
    def __init__(self, host='', port=None, strict=None, timeout=None):
        if port == 0:
            port = None
        # FIXME: Depending on undocumented internals that can break between
        # Python releases. Please have a look at SL #2350
        self._setup(self._connection_class(host,
                    port, strict, timeout=_REGISTER_TIMEOUT))
class _TimeoutTransport(xmlrpclib.Transport):
    """XML-RPC transport that opens connections through _TimeoutHTTP so
    registration calls cannot hang indefinitely (used on Python < 2.7,
    see register_laptop's hexversion check)."""
    def make_connection(self, host):
        host, extra_headers, x509_ = self.get_host_info(host)
        return _TimeoutHTTP(host, timeout=_REGISTER_TIMEOUT)
def register_laptop(url=_REGISTER_URL):
    """Register this laptop with the school server over XML-RPC.

    Serial number and UUID are read from manufacturing data when present
    (Open Firmware tree or /proc/device-tree) and generated otherwise.
    On success the jabber server and backup URL returned by the server
    are stored in GConf.  Raises RegisterError when the server cannot be
    reached or rejects the request.  Returns True on success.
    """
    profile = get_profile()
    client = GConf.Client.get_default()

    if _have_ofw_tree():
        sn = _read_mfg_data(os.path.join(_OFW_TREE, _MFG_SN))
        uuid_ = _read_mfg_data(os.path.join(_OFW_TREE, _MFG_UUID))
    elif _have_proc_device_tree():
        sn = _read_mfg_data(os.path.join(_PROC_TREE, _MFG_SN))
        uuid_ = _read_mfg_data(os.path.join(_PROC_TREE, _MFG_UUID))
    else:
        sn = _generate_serial_number()
        uuid_ = str(uuid.uuid1())
    # Fall back to fixed placeholders when mfg data exists but is empty.
    sn = sn or 'SHF00000000'
    uuid_ = uuid_ or '00000000-0000-0000-0000-000000000000'

    setting_name = '/desktop/sugar/collaboration/jabber_server'
    jabber_server = client.get_string(setting_name)

    _store_identifiers(sn, uuid_, jabber_server)

    # Prefer the configured jabber server over the default register URL.
    if jabber_server:
        url = 'http://' + jabber_server + ':8080/'

    nick = client.get_string('/desktop/sugar/user/nick')

    if sys.hexversion < 0x2070000:
        # Older xmlrpclib lacks per-connection timeouts, so a custom
        # transport is needed; newer Pythons use the socket default.
        server = xmlrpclib.ServerProxy(url, _TimeoutTransport())
    else:
        socket.setdefaulttimeout(_REGISTER_TIMEOUT)
        server = xmlrpclib.ServerProxy(url)
    try:
        data = server.register(sn, nick, uuid_, profile.pubkey)
    except (xmlrpclib.Error, TypeError, socket.error):
        logging.exception('Registration: cannot connect to server')
        raise RegisterError(_('Cannot connect to the server.'))
    finally:
        # Always restore the global socket timeout.
        socket.setdefaulttimeout(None)

    if data['success'] != 'OK':
        logging.error('Registration: server could not complete request: %s',
                      data['error'])
        raise RegisterError(_('The server could not complete the request.'))

    client.set_string('/desktop/sugar/collaboration/jabber_server',
                      data['jabberserver'])
    client.set_string('/desktop/sugar/backup_url', data['backupurl'])

    return True
def _have_ofw_tree():
    # True when the Open Firmware mfg-data mount point (/ofw) exists.
    return os.path.exists(_OFW_TREE)
def _have_proc_device_tree():
    # True when the kernel exposes the device tree under /proc.
    return os.path.exists(_PROC_TREE)
def _read_mfg_data(path):
if not os.path.exists(path):
return None
fh = open(path, 'r')
data = fh.read().rstrip('\0\n')
fh.close()
return data
| gpl-2.0 |
spthaolt/socorro | socorro/app/example_app.py | 11 | 1884 | #! /usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""demonstrates using configman to make a Socorro app"""
# This app can be invoked like this:
# .../socorro/app/example_app.py --help
# set your path to make that simpler
# set both socorro and configman in your PYTHONPATH
import datetime
from socorro.app.generic_app import App, main
from configman import Namespace
#==============================================================================
class ExampleApp(App):
    """Minimal configman-based Socorro app that echoes a name and time."""
    app_name = 'example'
    app_version = '0.1'
    app_description = __doc__

    #--------------------------------------------------------------------------
    # in this section, define any configuration requirements
    required_config = Namespace()
    required_config.add_option('name',
                               default='Wilma',
                               doc='a name to echo')
    required_config.add_option('time',
                               default=datetime.datetime.now(),
                               doc='the time of day')

    #--------------------------------------------------------------------------
    # implementing this constructor is only necessary when there is more
    # initialization to be done before main can be called
    #def __init__(self, config):
        #super(ExampleApp,self).__init__(config)

    #--------------------------------------------------------------------------
    def main(self):
        # this is where we'd implement the app
        # the configuration is already setup as self.config
        # NOTE: Python 2 print statement; this module is not Python 3 ready.
        print 'hello, %s. The time is: %s' % (self.config.name,
                                              self.config.time)
# generic_app.main handles config parsing and app instantiation.
if __name__ == '__main__':
    main(ExampleApp)
| mpl-2.0 |
mstriemer/olympia | src/olympia/editors/views_themes.py | 4 | 18329 | import datetime
import json
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist
from django.db.models import Q
from django.forms.formsets import formset_factory
from django.shortcuts import get_object_or_404, redirect
from django.utils.datastructures import MultiValueDictKeyError
from django.utils.translation import ugettext as _, ungettext as ngettext
from olympia import amo
from olympia.constants import editors as rvw
from olympia.access import acl
from olympia.addons.models import Addon, Persona
from olympia.amo.decorators import json_view, post_required
from olympia.amo.urlresolvers import reverse
from olympia.amo.utils import paginate, render
from olympia.devhub.models import ActivityLog
from olympia.editors import forms
from olympia.editors.models import RereviewQueueTheme, ReviewerScore, ThemeLock
from olympia.editors.views import base_context as context
from olympia.search.views import name_only_query
from olympia.zadmin.decorators import admin_required
from .decorators import personas_reviewer_required
QUEUE_PER_PAGE = 100
@personas_reviewer_required
def home(request):
    """Theme-review dashboard: recent review tallies plus queue counts."""
    data = context(
        reviews_total=ActivityLog.objects.total_reviews(theme=True)[:5],
        reviews_monthly=ActivityLog.objects.monthly_reviews(theme=True)[:5],
        queue_counts=queue_counts_themes(request)
    )
    return render(request, 'editors/themes/home.html', data)
def queue_counts_themes(request):
    """Return the number of themes in each reviewable queue.

    Always reports the pending-themes count; reviewers with the
    SeniorPersonasTools permission additionally get the flagged and
    re-review queue counts.
    """
    counts = {
        'themes': Persona.objects.no_cache()
        .filter(addon__status=amo.STATUS_PENDING)
        .count(),
    }
    if acl.action_allowed(request, 'SeniorPersonasTools', 'View'):
        counts.update({
            'flagged_themes': (Persona.objects.no_cache()
                               .filter(addon__status=amo.STATUS_REVIEW_PENDING)
                               .count()),
            'rereview_themes': RereviewQueueTheme.objects.count()
        })

    # The previous implementation filtered ``counts`` against a ``type``
    # variable that was never a parameter of this function — it tested the
    # *builtin* ``type``, so the isinstance checks could never match and
    # every entry was copied through unchanged.  Returning the dict
    # directly is behaviour-identical and drops the dead (py2-only) code.
    return counts
@personas_reviewer_required
def themes_list(request, flagged=False, rereview=False):
    """Themes queue in list format (pending, flagged or re-review)."""
    themes = []
    if flagged:
        # TODO (ngoke): rename to STATUS_FLAGGED.
        themes = Addon.objects.filter(status=amo.STATUS_REVIEW_PENDING,
                                      type=amo.ADDON_PERSONA,
                                      persona__isnull=False)
    elif rereview:
        themes = [
            rqt.theme.addon for rqt in
            RereviewQueueTheme.objects.select_related('theme__addon')]
    else:
        themes = Addon.objects.filter(status=amo.STATUS_PENDING,
                                      type=amo.ADDON_PERSONA,
                                      persona__isnull=False)

    search_form = forms.ThemeSearchForm(request.GET)
    per_page = request.GET.get('per_page', QUEUE_PER_PAGE)
    pager = paginate(request, themes, per_page)

    return render(request, 'editors/themes/queue_list.html', context(
        **{'addons': pager.object_list,
           'flagged': flagged,
           'pager': pager,
           'rereview': rereview,
           'theme_search_form': search_form,
           'statuses': dict((k, unicode(v)) for k, v in
                            amo.STATUS_CHOICES_API.items()),
           'tab': ('rereview_themes' if rereview else
                   'flagged_themes' if flagged else 'pending_themes')}))
def _themes_queue(request, flagged=False, rereview=False):
    """Themes queue in interactive format.

    Checks out themes for the current reviewer (see _get_themes) and
    pairs each with a ThemeReviewForm for inline reviewing.
    """
    themes = _get_themes(request, request.user, flagged=flagged,
                         rereview=rereview)

    ThemeReviewFormset = formset_factory(forms.ThemeReviewForm)
    # Re-review rows wrap the real theme; resolve the FK for the form id.
    formset = ThemeReviewFormset(
        initial=[{'theme': _rereview_to_theme(rereview, theme).id} for theme
                 in themes])

    return render(request, 'editors/themes/queue.html', context(
        **{'actions': get_actions_json(),
           'formset': formset,
           'flagged': flagged,
           'reject_reasons': rvw.THEME_REJECT_REASONS,
           'rereview': rereview,
           'reviewable': True,
           'theme_formsets': zip(themes, formset),
           'theme_count': len(themes),
           'tab': (
               'flagged' if flagged else
               'rereview' if rereview else 'pending')}))
def _get_themes(request, reviewer, flagged=False, rereview=False):
    """Check out themes.

    Selects up to THEME_INITIAL_LOCKS themes for *reviewer* and creates
    ThemeLock rows for them; expired locks held by other reviewers may
    be stolen when the unlocked pool is empty.

    :param flagged: Flagged themes (amo.STATUS_REVIEW_PENDING)
    :param rereview: Re-uploaded themes (RereviewQueueTheme)
    """
    num = 0
    themes = []
    locks = []

    status = (amo.STATUS_REVIEW_PENDING if flagged else
              amo.STATUS_PUBLIC if rereview else amo.STATUS_PENDING)

    if rereview:
        # Rereview themes.
        num, themes, locks = _get_rereview_themes(reviewer)
    else:
        # Pending and flagged themes.
        locks = ThemeLock.objects.no_cache().filter(
            reviewer=reviewer, theme__addon__status=status)
        num, themes = _calc_num_themes_checkout(locks)
        if themes:
            return themes
        themes = Persona.objects.no_cache().filter(
            addon__status=status, themelock=None)

    # Don't allow self-reviews.
    if (not settings.ALLOW_SELF_REVIEWS and
            not acl.action_allowed(request, 'Admin', '%')):
        if rereview:
            themes = themes.exclude(theme__addon__addonuser__user=reviewer)
        else:
            themes = themes.exclude(addon__addonuser__user=reviewer)

    # Check out themes by setting lock.
    themes = list(themes)[:num]
    expiry = get_updated_expiry()
    for theme in themes:
        ThemeLock.objects.create(theme=_rereview_to_theme(rereview, theme),
                                 reviewer=reviewer, expiry=expiry)

    # Empty pool? Go look for some expired locks.
    if not themes:
        expired_locks = ThemeLock.objects.filter(
            expiry__lte=datetime.datetime.now(),
            theme__addon__status=status)[:rvw.THEME_INITIAL_LOCKS]
        # Steal expired locks.
        for lock in expired_locks:
            lock.reviewer = reviewer
            lock.expiry = expiry
            lock.save()
        if expired_locks:
            locks = expired_locks

    if rereview:
        return (RereviewQueueTheme.objects.no_cache()
                .filter(theme__themelock__reviewer=reviewer)
                .exclude(theme__addon__status=amo.STATUS_REJECTED))

    # New theme locks may have been created, grab all reviewer's themes again.
    return [lock.theme for lock in locks]
@json_view
@personas_reviewer_required
def themes_search(request):
    """JSON name-search over the theme queues, backed by Elasticsearch.

    NOTE(review): when the search form is invalid this falls through and
    implicitly returns None — presumably handled by @json_view; confirm.
    """
    search_form = forms.ThemeSearchForm(request.GET)
    if search_form.is_valid():
        q = search_form.cleaned_data['q']
        rereview = search_form.cleaned_data['queue_type'] == 'rereview'
        flagged = search_form.cleaned_data['queue_type'] == 'flagged'

        # ES query on name.
        themes = Addon.search().filter(type=amo.ADDON_PERSONA)
        if rereview:
            themes = themes.filter(has_theme_rereview=True)
        else:
            themes = themes.filter(status=(amo.STATUS_REVIEW_PENDING if flagged
                                           else amo.STATUS_PENDING),
                                   has_theme_rereview=False)
        themes = themes.query(or_=name_only_query(q))[:100]

        now = datetime.datetime.now()
        reviewers = []
        # Report who currently holds an unexpired lock on each theme.
        for theme in themes:
            try:
                themelock = theme.persona.themelock
                if themelock.expiry > now:
                    reviewers.append(themelock.reviewer.email)
                else:
                    reviewers.append('')
            except ObjectDoesNotExist:
                reviewers.append('')

        themes = list(themes.values_dict('name', 'slug', 'status'))

        for theme, reviewer in zip(themes, reviewers):
            # Collapse single value fields from a list.
            theme['id'] = theme['id'][0]
            theme['slug'] = theme['slug'][0]
            theme['status'] = theme['status'][0]
            # Dehydrate.
            theme['reviewer'] = reviewer

        return {'objects': themes, 'meta': {'total_count': len(themes)}}
@personas_reviewer_required
def themes_queue(request):
    """Interactive queue of pending themes."""
    # After a commit, send the reviewer back to this queue.
    redirect_url = reverse('editors.themes.queue_themes')
    request.session['theme_redirect_url'] = redirect_url
    return _themes_queue(request)
@admin_required(theme_reviewers=True)
def themes_queue_flagged(request):
    """Interactive queue of flagged themes (senior reviewers only)."""
    # After a commit, send the reviewer back to this queue.
    redirect_url = reverse('editors.themes.queue_flagged')
    request.session['theme_redirect_url'] = redirect_url
    return _themes_queue(request, flagged=True)
@admin_required(theme_reviewers=True)
def themes_queue_rereview(request):
    """Interactive queue of re-uploaded themes (senior reviewers only)."""
    # After a commit, send the reviewer back to this queue.
    redirect_url = reverse('editors.themes.queue_rereview')
    request.session['theme_redirect_url'] = redirect_url
    return _themes_queue(request, rereview=True)
def _rereview_to_theme(rereview, theme):
"""
Follows foreign key of RereviewQueueTheme object to theme if in rereview
queue.
"""
if rereview:
return theme.theme
return theme
def _calc_num_themes_checkout(locks):
    """
    Calculate number of themes to check out based on how many themes user
    currently has checked out.

    Returns a (number_to_check_out, already_checked_out_themes) pair;
    the list is non-empty only when the reviewer is already at the cap.
    """
    current_num = locks.count()
    if current_num < rvw.THEME_INITIAL_LOCKS:
        # Check out themes from the pool if none or not enough checked out.
        return rvw.THEME_INITIAL_LOCKS - current_num, []
    else:
        # Update the expiry on currently checked-out themes.
        locks.update(expiry=get_updated_expiry())
        return 0, [lock.theme for lock in locks]
def _get_rereview_themes(reviewer):
    """Check out re-uploaded themes.

    Returns (num_to_check_out, unlocked_rereview_themes, reviewer_locks).
    Rejected add-ons are excluded throughout.
    """
    locks = (ThemeLock.objects.select_related().no_cache()
             .filter(reviewer=reviewer,
                     theme__rereviewqueuetheme__isnull=False)
             .exclude(theme__addon__status=amo.STATUS_REJECTED))

    num, updated_locks = _calc_num_themes_checkout(locks)
    if updated_locks:
        locks = updated_locks

    themes = (RereviewQueueTheme.objects.no_cache()
              .filter(theme__addon__isnull=False, theme__themelock=None)
              .exclude(theme__addon__status=amo.STATUS_REJECTED))
    return num, themes, locks
@post_required
@personas_reviewer_required
def themes_commit(request):
    """Process a batch of theme review decisions and award points.

    Only forms whose theme is still locked by this reviewer are saved.
    Redirects back to the queue (or page) the review started from.
    """
    ThemeReviewFormset = formset_factory(forms.ThemeReviewForm)
    formset = ThemeReviewFormset(request.POST)

    scores = []
    for form in formset:
        try:
            lock = ThemeLock.objects.filter(
                theme_id=form.data[form.prefix + '-theme'],
                reviewer=request.user)
        except MultiValueDictKeyError:
            # Address off-by-one error caused by management form.
            continue
        if lock and form.is_valid():
            scores.append(form.save())

    # Success message.
    points = sum(scores)
    success = ngettext(
        # L10n: {0} is the number of reviews. {1} is the points just earned.
        # L10n: {2} is the total number of points the reviewer has overall.
        '{0} theme review successfully processed (+{1} points, {2} total).',
        '{0} theme reviews successfully processed (+{1} points, {2} total).',
        len(scores)).format(len(scores), points,
                            ReviewerScore.get_total(request.user))
    amo.messages.success(request, success)

    if 'theme_redirect_url' in request.session:
        return redirect(request.session['theme_redirect_url'])
    else:
        return redirect(reverse('editors.themes.queue_themes'))
@personas_reviewer_required
def release_locks(request):
    """Drop every theme lock held by the requesting reviewer."""
    ThemeLock.objects.filter(reviewer=request.user).delete()
    message = _(
        'Your theme locks have successfully been released. '
        'Other reviewers may now review those released themes. '
        'You may have to refresh the page to see the changes reflected in '
        'the table below.')
    amo.messages.success(request, message)
    return redirect(reverse('editors.themes.list'))
@personas_reviewer_required
def themes_single(request, slug):
    """
    Like a detail page, manually review a single theme if it is pending
    and isn't locked.

    Acquires (or steals an expired) ThemeLock for the current reviewer
    before rendering; ``reviewable`` is False for already-reviewed,
    self-authored or otherwise-locked themes.
    """
    reviewer = request.user
    reviewable = True

    # Don't review an already reviewed theme.
    theme = get_object_or_404(Persona, addon__slug=slug)
    if (theme.addon.status != amo.STATUS_PENDING and
            not theme.rereviewqueuetheme_set.all()):
        reviewable = False

    if (not settings.ALLOW_SELF_REVIEWS and
            not acl.action_allowed(request, 'Admin', '%') and
            theme.addon.has_author(request.user)):
        reviewable = False
    else:
        # Don't review a locked theme (that's not locked to self).
        try:
            lock = theme.themelock
            if (lock.reviewer.id != reviewer.id and
                    lock.expiry > datetime.datetime.now()):
                reviewable = False
            elif (lock.reviewer.id != reviewer.id and
                    lock.expiry < datetime.datetime.now()):
                # Steal expired lock.
                lock.reviewer = reviewer
                lock.expiry = get_updated_expiry()
                lock.save()
            else:
                # Update expiry.
                lock.expiry = get_updated_expiry()
                lock.save()
        except ThemeLock.DoesNotExist:
            # Create lock if not created.
            ThemeLock.objects.create(theme=theme, reviewer=reviewer,
                                     expiry=get_updated_expiry())

    ThemeReviewFormset = formset_factory(forms.ThemeReviewForm)
    formset = ThemeReviewFormset(initial=[{'theme': theme.id}])

    # Since we started the review on the single page, we want to return to the
    # single page rather than get shot back to the queue.
    request.session['theme_redirect_url'] = reverse('editors.themes.single',
                                                    args=[theme.addon.slug])

    rereview = (theme.rereviewqueuetheme_set.all()[0] if
                theme.rereviewqueuetheme_set.exists() else None)

    return render(request, 'editors/themes/single.html', context(
        **{'formset': formset,
           'theme': rereview if rereview else theme,
           'theme_formsets': zip([rereview if rereview else theme], formset),
           'theme_reviews': paginate(request, ActivityLog.objects.filter(
               action=amo.LOG.THEME_REVIEW.id,
               _arguments__contains=theme.addon.id)),
           'actions': get_actions_json(),
           'theme_count': 1,
           'rereview': rereview,
           'reviewable': reviewable,
           'reject_reasons': rvw.THEME_REJECT_REASONS,
           'action_dict': rvw.REVIEW_ACTIONS,
           'tab': ('flagged' if theme.addon.status == amo.STATUS_REVIEW_PENDING
                   else 'rereview' if rereview else 'pending')}))
@personas_reviewer_required
def themes_logs(request):
    """Filterable log of past theme reviews; defaults to the current month."""
    data = request.GET.copy()

    if not data.get('start') and not data.get('end'):
        today = datetime.date.today()
        data['start'] = datetime.date(today.year, today.month, 1)

    form = forms.ReviewThemeLogForm(data)

    theme_logs = ActivityLog.objects.filter(action=amo.LOG.THEME_REVIEW.id)

    if form.is_valid():
        data = form.cleaned_data
        if data.get('start'):
            theme_logs = theme_logs.filter(created__gte=data['start'])
        if data.get('end'):
            theme_logs = theme_logs.filter(created__lte=data['end'])
        if data.get('search'):
            term = data['search']
            # Match either the log details or the reviewer's name/username.
            theme_logs = theme_logs.filter(
                Q(_details__icontains=term) |
                Q(user__display_name__icontains=term) |
                Q(user__username__icontains=term)).distinct()

    pager = paginate(request, theme_logs, 30)
    data = context(form=form, pager=pager,
                   ACTION_DICT=rvw.REVIEW_ACTIONS,
                   REJECT_REASONS=rvw.THEME_REJECT_REASONS, tab='themes')
    return render(request, 'editors/themes/logs.html', data)
@admin_required(theme_reviewers=True)
def deleted_themes(request):
    """Admin-only log of deleted themes, filterable by date and name."""
    data = request.GET.copy()
    deleted = Addon.unfiltered.filter(type=amo.ADDON_PERSONA,
                                      status=amo.STATUS_DELETED)

    if not data.get('start') and not data.get('end'):
        today = datetime.date.today()
        data['start'] = datetime.date(today.year, today.month, 1)

    form = forms.DeletedThemeLogForm(data)
    if form.is_valid():
        data = form.cleaned_data
        if data.get('start'):
            deleted = deleted.filter(modified__gte=data['start'])
        if data.get('end'):
            deleted = deleted.filter(modified__lte=data['end'])
        if data.get('search'):
            term = data['search']
            deleted = deleted.filter(
                Q(name__localized_string__icontains=term))

    return render(request, 'editors/themes/deleted.html', {
        'form': form,
        'pager': paginate(request, deleted.order_by('-modified'), 30),
        'tab': 'deleted'
    })
@personas_reviewer_required
def themes_history(request, username):
    """Per-reviewer theme review history; defaults to the current user."""
    if not username:
        username = request.user.username

    return render(request, 'editors/themes/history.html', context(
        **{'theme_reviews':
            paginate(request, ActivityLog.objects.filter(
                action=amo.LOG.THEME_REVIEW.id, user__username=username), 20),
           'user_history': True,
           'username': username,
           'reject_reasons': rvw.THEME_REJECT_REASONS,
           'action_dict': rvw.REVIEW_ACTIONS}))
def get_actions_json():
    """JSON-encode the reviewer action-code mapping used by the queue JS."""
    actions = {
        'moreinfo': rvw.ACTION_MOREINFO,
        'flag': rvw.ACTION_FLAG,
        'duplicate': rvw.ACTION_DUPLICATE,
        'reject': rvw.ACTION_REJECT,
        'approve': rvw.ACTION_APPROVE,
    }
    return json.dumps(actions)
def get_updated_expiry():
    """Timestamp at which a freshly-touched theme lock will expire."""
    lock_duration = datetime.timedelta(minutes=rvw.THEME_LOCK_EXPIRY)
    return datetime.datetime.now() + lock_duration
| bsd-3-clause |
pieterdp/phd_ewout | python/matcher.py | 1 | 1687 | from lib.dbMatch import dbMatch
from lib.aMatch import aMatch
from lib.cFile import cFile
from lib.dbConnect import dbConnect
from progressbar import ProgressBar, Percentage, Bar
import sys
import getopt
"""
Command-line options
"""
def parse_cli (argv):
    """Parse the matcher's command-line arguments.

    Options:
      -f/--filter   column used to filter matches
      -c/--columns  comma-separated column names (kept as the raw string;
                    callers split on ',' themselves)
      -t/--table    database table to match against

    Returns (filter_column, columns, table, source); unspecified options
    come back as None, and *source* is the positional args joined together.
    """
    # Initialise defaults so omitting an option no longer raises
    # NameError when the return tuple is built.
    filter_column = None
    columns = None
    table = None
    try:
        opts, args = getopt.getopt (argv, "f:c:t:", ["filter=", "columns=", "table="])
    except getopt.GetoptError:
        usage ()
        sys.exit (2)
    for opt, arg in opts:
        if opt in ('-f', '--filter'):
            filter_column = arg
        elif opt in ('-c', '--columns'):
            # NOTE: the old code called columns.split(',') and discarded
            # the result (str.split is not in-place) — dropped as dead code.
            columns = arg
        elif opt in ('-t', '--table'):
            table = arg
        else:
            usage ()
            sys.exit (2)
    source = "".join (args)
    return (filter_column, columns, table, source)
def usage ():
    """Print CLI usage information (placeholder; not yet implemented)."""
    # Intentionally a no-op for now: parse_cli calls this before exiting
    # on bad arguments.
    return None
"""
Configuration options
"""
cf = cFile ('etc/settings.conf')
"""
Application
"""
if __name__ == "__main__":
cli = parse_cli (sys.argv[1:])
else:
sys.exit (-1)
print ('Preparing matching:')
db_match = dbMatch (cli[2], cli[2] + "_match")
db_match.config_start ()
db_match.connect ()
db_match.matchPepare (cli[0], cli[1].split (','), 'ID')
print ('Merging tables')
print ('Comparing')
single = db_match.suggest_single (db_match.filterViews['Wakkerdam'], [('Naam_a', 'Naam_b'), ('Voornaam_a', 'Voornaam_b')], ('ID_a', 2049), 'ID_b', 'ID')
amatch = aMatch ()
r = amatch.matchCompare (single, ['Naam_a', 'Voornaam_a'])
print (r)
sys.exit ()
m = db_match.match_single(r)
print (m)
for (id, sc, av) in m:
print (id)
sf = db_match.get_single_item_by_id (id, 'app_test')
sf = [str (s) for s in sf]
print (" ".join (sf)) | gpl-3.0 |
Sbalbp/DIRAC | RequestManagementSystem/private/RequestTask.py | 1 | 16433 | ########################################################################
# $HeadURL $
# File: RequestTask.py
# Author: Krzysztof.Ciba@NOSPAMgmail.com
# Date: 2013/03/13 12:42:45
########################################################################
""" :mod: RequestTask
=================
.. module: RequestTask
:synopsis: request processing task
.. moduleauthor:: Krzysztof.Ciba@NOSPAMgmail.com
request processing task to be used inside ProcessTask created in RequesteExecutingAgent
"""
__RCSID__ = "$Id $"
# #
# @file RequestTask.py
# @author Krzysztof.Ciba@NOSPAMgmail.com
# @date 2013/03/13 12:42:54
# @brief Definition of RequestTask class.
# # imports
import os, time
# # from DIRAC
from DIRAC import gLogger, S_OK, S_ERROR, gMonitor, gConfig
from DIRAC.RequestManagementSystem.Client.ReqClient import ReqClient
from DIRAC.RequestManagementSystem.Client.Request import Request
from DIRAC.RequestManagementSystem.private.OperationHandlerBase import OperationHandlerBase
from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.Core.DISET.RPCClient import RPCClient
from DIRAC.Core.Security import CS
########################################################################
class RequestTask( object ):
"""
.. class:: RequestTask
request's processing task
"""
  def __init__( self, requestJSON, handlersDict, csPath, agentName, standalone = False ):
    """c'tor

    :param self: self reference
    :param str requestJSON: request serialized to JSON
    :param dict handlersDict: operation type -> handler module path
    :param str csPath: CS path of the owning agent
    :param str agentName: name of the agent that created this task
    :param bool standalone: flag for standalone (out-of-agent) execution
    """
    self.request = Request( requestJSON )
    # # csPath
    self.csPath = csPath
    # # agent name
    self.agentName = agentName
    # # standalone flag
    self.standalone = standalone
    # # handlers dict
    self.handlersDict = handlersDict
    # # handlers class def
    self.handlers = {}
    # # own sublogger, keyed by pid and request name for parallel tasks
    self.log = gLogger.getSubLogger( "pid_%s/%s" % ( os.getpid(), self.request.RequestName ) )
    # # get shifters info; a failure here is logged but not fatal
    self.__managersDict = {}
    shifterProxies = self.__setupManagerProxies()
    if not shifterProxies["OK"]:
      self.log.error( shifterProxies["Message"] )
    # # initialize gMonitor
    gMonitor.setComponentType( gMonitor.COMPONENT_AGENT )
    gMonitor.setComponentName( self.agentName )
    gMonitor.initialize()
    # # own gMonitor activities
    gMonitor.registerActivity( "RequestAtt", "Requests processed",
                               "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "RequestFail", "Requests failed",
                               "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM )
    gMonitor.registerActivity( "RequestOK", "Requests done",
                               "RequestExecutingAgent", "Requests/min", gMonitor.OP_SUM )
    self.requestClient = ReqClient()
  def __setupManagerProxies( self ):
    """ setup grid proxy for all defined managers

    Walks the Operations "Shifter" sections, downloads a (VOMS) proxy for
    each shifter and caches the credentials in self.__managersDict.
    Individual shifter failures are logged and skipped; a proxy download
    failure aborts with S_ERROR.
    """
    oHelper = Operations()
    shifters = oHelper.getSections( "Shifter" )
    if not shifters["OK"]:
      self.log.error( shifters["Message"] )
      return shifters
    shifters = shifters["Value"]
    for shifter in shifters:
      shifterDict = oHelper.getOptionsDict( "Shifter/%s" % shifter )
      if not shifterDict["OK"]:
        self.log.error( shifterDict["Message"] )
        continue
      userName = shifterDict["Value"].get( "User", "" )
      userGroup = shifterDict["Value"].get( "Group", "" )
      userDN = CS.getDNForUsername( userName )
      if not userDN["OK"]:
        self.log.error( userDN["Message"] )
        continue
      userDN = userDN["Value"][0]
      vomsAttr = CS.getVOMSAttributeForGroup( userGroup )
      # Download a VOMS proxy when the group carries a VOMS attribute,
      # a plain proxy otherwise.
      if vomsAttr:
        self.log.debug( "getting VOMS [%s] proxy for shifter %s@%s (%s)" % ( vomsAttr, userName,
                                                                             userGroup, userDN ) )
        getProxy = gProxyManager.downloadVOMSProxyToFile( userDN, userGroup,
                                                          requiredTimeLeft = 1200,
                                                          cacheTime = 4 * 43200 )
      else:
        self.log.debug( "getting proxy for shifter %s@%s (%s)" % ( userName, userGroup, userDN ) )
        getProxy = gProxyManager.downloadProxyToFile( userDN, userGroup,
                                                      requiredTimeLeft = 1200,
                                                      cacheTime = 4 * 43200 )
      if not getProxy["OK"]:
        self.log.error( getProxy["Message" ] )
        return S_ERROR( "unable to setup shifter proxy for %s: %s" % ( shifter, getProxy["Message"] ) )
      chain = getProxy["chain"]
      fileName = getProxy["Value" ]
      self.log.debug( "got %s: %s %s" % ( shifter, userName, userGroup ) )
      self.__managersDict[shifter] = { "ShifterDN" : userDN,
                                       "ShifterName" : userName,
                                       "ShifterGroup" : userGroup,
                                       "Chain" : chain,
                                       "ProxyFile" : fileName }
    return S_OK()
  def setupProxy( self ):
    """ download and dump request owner proxy to file and env

    If the request owner matches one of the configured shifters, that
    shifter's cached proxy is reused; otherwise the owner's own VOMS
    proxy is downloaded.  X509_USER_PROXY is pointed at the proxy file.

    :return: S_OK with name of newly created owner proxy file and shifter name if any
    """
    self.__managersDict = {}
    shifterProxies = self.__setupManagerProxies()
    if not shifterProxies["OK"]:
      self.log.error( shifterProxies["Message"] )

    ownerDN = self.request.OwnerDN
    ownerGroup = self.request.OwnerGroup
    isShifter = []
    for shifter, creds in self.__managersDict.items():
      if creds["ShifterDN"] == ownerDN and creds["ShifterGroup"] == ownerGroup:
        isShifter.append( shifter )
    if isShifter:
      proxyFile = self.__managersDict[isShifter[0]]["ProxyFile"]
      os.environ["X509_USER_PROXY"] = proxyFile
      return S_OK( { "Shifter": isShifter, "ProxyFile": proxyFile } )

    # # if we're here owner is not a shifter at all
    ownerProxy = gProxyManager.downloadVOMSProxy( ownerDN, ownerGroup )
    if not ownerProxy["OK"] or not ownerProxy["Value"]:
      reason = ownerProxy["Message"] if "Message" in ownerProxy else "No valid proxy found in ProxyManager."
      return S_ERROR( "Change proxy error for '%s'@'%s': %s" % ( ownerDN, ownerGroup, reason ) )
    ownerProxyFile = ownerProxy["Value"].dumpAllToFile()
    if not ownerProxyFile["OK"]:
      return S_ERROR( ownerProxyFile["Message"] )
    ownerProxyFile = ownerProxyFile["Value"]
    os.environ["X509_USER_PROXY"] = ownerProxyFile
    return S_OK( { "Shifter": isShifter, "ProxyFile": ownerProxyFile } )
  @staticmethod
  def getPluginName( pluginPath ):
    """ Return the trailing class name from a dotted or slash-separated
    plugin path; empty string for an empty path. """
    if not pluginPath:
      return ''
    if "/" in pluginPath:
      # Normalise "A/B/C" into dotted form before taking the last chunk.
      pluginPath = ".".join( [ chunk for chunk in pluginPath.split( "/" ) if chunk ] )
    return pluginPath.split( "." )[-1]
  @staticmethod
  def loadHandler( pluginPath ):
    """ Create an instance of requested plugin class, loading and importing it when needed.

    This function could raise ImportError when plugin cannot be find or TypeError when
    loaded class object isn't inherited from BaseOperation class.

    :param str pluginPath: dotted path to plugin, specified as in import statement, i.e.
    "DIRAC.CheesShopSystem.private.Cheddar" or alternatively in 'normal' path format
    "DIRAC/CheesShopSystem/private/Cheddar"

    :return: plugin class object (note: despite the docstring, the class
    itself is returned, not an instance)

    This function try to load and instantiate an object from given path. It is assumed that:

    - :pluginPath: is pointing to module directory "importable" by python interpreter, i.e.: it's
      package's top level directory is in $PYTHONPATH env variable,
    - the module should consist a class definition following module name,
    - the class itself is inherited from DIRAC.RequestManagementSystem.private.BaseOperation.BaseOperation

    If above conditions aren't meet, function is throwing exceptions:

    - ImportError when class cannot be imported
    - TypeError when class isn't inherited from OperationHandlerBase
    """
    if "/" in pluginPath:
      pluginPath = ".".join( [ chunk for chunk in pluginPath.split( "/" ) if chunk ] )
    pluginName = pluginPath.split( "." )[-1]
    # Import only once; subsequent calls reuse the already-loaded class.
    if pluginName not in globals():
      mod = __import__( pluginPath, globals(), fromlist = [ pluginName ] )
      pluginClassObj = getattr( mod, pluginName )
    else:
      pluginClassObj = globals()[pluginName]
    if not issubclass( pluginClassObj, OperationHandlerBase ):
      raise TypeError( "operation handler '%s' isn't inherited from OperationHandlerBase class" % pluginName )
    # Register per-handler monitoring counters (attempted/successful/failed).
    for key, status in ( ( "Att", "Attempted" ), ( "OK", "Successful" ) , ( "Fail", "Failed" ) ):
      gMonitor.registerActivity( "%s%s" % ( pluginName, key ), "%s operations %s" % ( pluginName, status ),
                                 "RequestExecutingAgent", "Operations/min", gMonitor.OP_SUM )
    # # return an instance
    return pluginClassObj
  def getHandler( self, operation ):
    """ Return an instance of a handler for a given operation type on demand.
    All created handlers are kept in the self.handlers dict for further use.

    :param Operation operation: Operation instance
    :return: S_OK( handler instance ) or S_ERROR when no handler is registered
             for the operation type or when loading/instantiating it fails
    """
    if operation.Type not in self.handlersDict:
      return S_ERROR( "handler for operation '%s' not set" % operation.Type )
    handler = self.handlers.get( operation.Type, None )
    if not handler:
      # first use of this operation type: load the class and cache an instance
      try:
        handlerCls = self.loadHandler( self.handlersDict[operation.Type] )
        self.handlers[operation.Type] = handlerCls( csPath = "%s/OperationHandlers/%s" % ( self.csPath,
                                                                                           operation.Type ) )
        handler = self.handlers[ operation.Type ]
      except ( ImportError, TypeError ), error:
        self.log.exception( "getHandler: %s" % str( error ), lException = error )
        return S_ERROR( str( error ) )
    # # set operation for this handler
    handler.setOperation( operation )
    # # and return
    return S_OK( handler )
def updateRequest( self ):
""" put back request to the RequestDB """
updateRequest = self.requestClient.putRequest( self.request, useFailoverProxy = False, retryMainServer = 2 )
if not updateRequest["OK"]:
self.log.error( updateRequest["Message"] )
return updateRequest
  def __call__( self ):
    """ Request processing: set up the proxy for the request owner, then execute
    waiting operations one by one until the request leaves the "Waiting" state,
    updating monitoring counters and finalizing the associated job if any.

    :return: S_OK( self.request ) in the normal flow, S_ERROR on a caught
             exception during operation execution, or the raw error structure
             when fetching a waiting operation fails
    """
    self.log.debug( "about to execute request" )
    gMonitor.addMark( "RequestAtt", 1 )
    # # setup proxy for request owner
    setupProxy = self.setupProxy()
    if not setupProxy["OK"]:
      self.request.Error = setupProxy["Message"]
      if 'has no proxy registered' in setupProxy["Message"]:
        self.log.error( 'Request set to Failed:', setupProxy["Message"] )
        # If user is no longer registered, fail the request
        for operation in self.request:
          for opFile in operation:
            opFile.Status = 'Failed'
          operation.Status = 'Failed'
      else:
        self.log.error( setupProxy["Message"] )
      return S_OK( self.request )
    shifter = setupProxy["Value"]["Shifter"]
    proxyFile = setupProxy["Value"]["ProxyFile"]
    error = None
    while self.request.Status == "Waiting":
      # # get waiting operation
      operation = self.request.getWaiting()
      if not operation["OK"]:
        self.log.error( operation["Message"] )
        return operation
      operation = operation["Value"]
      self.log.info( "executing operation #%s '%s'" % ( operation.Order, operation.Type ) )
      # # and handler for it
      handler = self.getHandler( operation )
      if not handler["OK"]:
        self.log.error( "unable to process operation %s: %s" % ( operation.Type, handler["Message"] ) )
        # gMonitor.addMark( "%s%s" % ( operation.Type, "Fail" ), 1 )
        operation.Error = handler["Message"]
        break
      handler = handler["Value"]
      # # set shifters list in the handler
      handler.shifter = shifter
      # # and execute
      pluginName = self.getPluginName( self.handlersDict.get( operation.Type ) )
      if self.standalone:
        useServerCertificate = gConfig.useServerCertificate()
      else:
        # Always use server certificates if executed within an agent
        useServerCertificate = True
      try:
        if pluginName:
          gMonitor.addMark( "%s%s" % ( pluginName, "Att" ), 1 )
        # Always use request owner proxy: temporarily disable the server
        # certificate around the handler call, then restore it
        if useServerCertificate:
          gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'false' )
        exe = handler()
        if useServerCertificate:
          gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'true' )
        if not exe["OK"]:
          self.log.error( "unable to process operation %s: %s" % ( operation.Type, exe["Message"] ) )
          if pluginName:
            gMonitor.addMark( "%s%s" % ( pluginName, "Fail" ), 1 )
          gMonitor.addMark( "RequestFail", 1 )
          if self.request.JobID:
            # Check if the job exists
            monitorServer = RPCClient( "WorkloadManagement/JobMonitoring", useCertificates = True )
            res = monitorServer.getJobPrimarySummary( int( self.request.JobID ) )
            if not res["OK"]:
              self.log.error( "RequestTask: Failed to get job %d status" % self.request.JobID )
            elif not res['Value']:
              self.log.warn( "RequestTask: job %d does not exist (anymore): failed request" % self.request.JobID )
              for opFile in operation:
                opFile.Status = 'Failed'
              if operation.Status != 'Failed':
                operation.Status = 'Failed'
              self.request.Error = 'Job no longer exists'
      except Exception, error:
        # 'error' stays bound after the except clause (Python 2 semantics) and
        # triggers the S_ERROR return after the loop
        self.log.exception( "hit by exception: %s" % str( error ) )
        if pluginName:
          gMonitor.addMark( "%s%s" % ( pluginName, "Fail" ), 1 )
        gMonitor.addMark( "RequestFail", 1 )
        if useServerCertificate:
          gConfigurationData.setOptionInCFG( '/DIRAC/Security/UseServerCertificate', 'true' )
        break
      # # operation status check
      if operation.Status == "Done" and pluginName:
        gMonitor.addMark( "%s%s" % ( pluginName, "OK" ), 1 )
      elif operation.Status == "Failed" and pluginName:
        gMonitor.addMark( "%s%s" % ( pluginName, "Fail" ), 1 )
      elif operation.Status in ( "Waiting", "Scheduled" ):
        # # no update for waiting or all files scheduled
        break
    # # not a shifter at all? delete temp proxy file
    if not shifter:
      os.unlink( proxyFile )
    gMonitor.flush()
    if error:
      return S_ERROR( error )
    # # request done?
    if self.request.Status == "Done":
      # # update request to the RequestDB
      self.log.info( 'updating request with status %s' % self.request.Status )
      update = self.updateRequest()
      if not update["OK"]:
        self.log.error( update["Message"] )
        return update
      self.log.info( "request '%s' is done" % self.request.RequestName )
      gMonitor.addMark( "RequestOK", 1 )
      # # and there is a job waiting for it? finalize!
      if self.request.JobID:
        attempts = 0
        while True:
          finalizeRequest = self.requestClient.finalizeRequest( self.request.RequestName, self.request.JobID )
          if not finalizeRequest["OK"]:
            if not attempts:
              self.log.error( "unable to finalize request %s: %s, will retry" % ( self.request.RequestName,
                                                                                  finalizeRequest["Message"] ) )
            self.log.verbose( "Waiting 10 seconds" )
            attempts += 1
            if attempts == 10:
              self.log.error( "giving up finalize request after %d attempts" % attempts )
              return S_ERROR( 'Could not finalize request' )
            time.sleep( 10 )
          else:
            self.log.info( "request '%s' is finalized%s" % ( self.request.RequestName,
                                                             ( ' after %d attempts' % attempts ) if attempts else '' ) )
            break
    # Request will be updated by the callBack method
    return S_OK( self.request )
| gpl-3.0 |
dou800/php-buildpack-legacy | builds/runtimes/python-2.7.6/lib/python2.7/encodings/iso2022_jp_ext.py | 816 | 1069 | #
# iso2022_jp_ext.py: Python Unicode Codec for ISO2022_JP_EXT
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_jp_ext')
class Codec(codecs.Codec):
    # Stateless encode/decode entry points, delegated to the C codec object.
    encode = codec.encode
    decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    # Incremental (chunk-by-chunk) encoder backed by the multibyte codec.
    codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    # Incremental (chunk-by-chunk) decoder backed by the multibyte codec.
    codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    # Stream reader combining the stateless codec with multibyte stream support.
    codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    # Stream writer combining the stateless codec with multibyte stream support.
    codec = codec
def getregentry():
    """Codec-registry hook: bundle all entry points into a CodecInfo."""
    stateless = Codec()
    return codecs.CodecInfo(
        name='iso2022_jp_ext',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| mit |
lbdreyer/iris | lib/iris/tests/unit/common/mixin/test__get_valid_standard_name.py | 3 | 2542 | # Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
Unit tests for the :func:`iris.common.mixin._get_valid_standard_name`.
"""
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from iris.common.mixin import _get_valid_standard_name
class Test(tests.IrisTest):
    """Unit tests for :func:`iris.common.mixin._get_valid_standard_name`."""

    def setUp(self):
        # Expected ValueError message template for invalid names.
        self.emsg = "'{}' is not a valid standard_name"

    def test_pass_thru_none(self):
        """None is passed through unchanged."""
        name = None
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_pass_thru_empty(self):
        """An empty string is passed through unchanged."""
        name = ""
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_pass_thru_whitespace(self):
        """A whitespace-only string is passed through unchanged."""
        name = " "
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_valid_standard_name(self):
        name = "air_temperature"
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_standard_name_alias(self):
        # An aliased standard name is accepted as-is.
        name = "atmosphere_optical_thickness_due_to_pm1_ambient_aerosol"
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_invalid_standard_name(self):
        name = "not_a_standard_name"
        with self.assertRaisesRegex(ValueError, self.emsg.format(name)):
            _get_valid_standard_name(name)

    def test_valid_standard_name_valid_modifier(self):
        # "<standard_name> <modifier>" form is accepted.
        name = "air_temperature standard_error"
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_valid_standard_name_valid_modifier_extra_spaces(self):
        name = "air_temperature  standard_error"
        self.assertEqual(_get_valid_standard_name(name), name)

    def test_invalid_standard_name_valid_modifier(self):
        # Modifier does not rescue an invalid base name.
        name = "not_a_standard_name standard_error"
        with self.assertRaisesRegex(ValueError, self.emsg.format(name)):
            _get_valid_standard_name(name)

    def test_valid_standard_invalid_name_modifier(self):
        # Extra tokens between name and modifier are rejected.
        name = "air_temperature extra_names standard_error"
        with self.assertRaisesRegex(ValueError, self.emsg.format(name)):
            _get_valid_standard_name(name)

    def test_valid_standard_valid_name_modifier_extra_names(self):
        # Trailing tokens after the modifier are rejected.
        name = "air_temperature standard_error extra words"
        with self.assertRaisesRegex(ValueError, self.emsg.format(name)):
            _get_valid_standard_name(name)
if __name__ == "__main__":
tests.main()
| lgpl-3.0 |
kaleb-himes/RIOT | tests/gnrc_sock_udp/tests/01-run.py | 24 | 3816 | #!/usr/bin/env python3
# Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import os
import sys
from datetime import datetime
sys.path.append(os.path.join(os.environ['RIOTBASE'], 'dist/tools/testrunner'))
import testrunner
class InvalidTimeout(Exception):
    """Raised when the measured receive timeout deviates too much from the expected one."""
    pass
def testfunc(child):
    """Drive the gnrc_sock_udp test firmware via pexpect.

    Expects each test banner in order, and additionally measures the wall-clock
    duration of the receive-timeout test against the timeout printed by the
    device (must be within 5%).

    :param child: pexpect spawn object attached to the node under test
    :raises InvalidTimeout: when the measured timeout is off by more than 5%
    """
    child.expect_exact(u"Calling test_sock_udp_create__EADDRINUSE()")
    child.expect_exact(u"Calling test_sock_udp_create__EAFNOSUPPORT()")
    child.expect_exact(u"Calling test_sock_udp_create__EINVAL_addr()")
    child.expect_exact(u"Calling test_sock_udp_create__EINVAL_netif()")
    child.expect_exact(u"Calling test_sock_udp_create__no_endpoints()")
    child.expect_exact(u"Calling test_sock_udp_create__only_local()")
    child.expect_exact(u"Calling test_sock_udp_create__only_local_reuse_ep()")
    child.expect_exact(u"Calling test_sock_udp_create__only_remote()")
    child.expect_exact(u"Calling test_sock_udp_create__full()")
    child.expect_exact(u"Calling test_sock_udp_recv__EADDRNOTAVAIL()")
    child.expect_exact(u"Calling test_sock_udp_recv__EAGAIN()")
    child.expect_exact(u"Calling test_sock_udp_recv__ENOBUFS()")
    child.expect_exact(u"Calling test_sock_udp_recv__EPROTO()")
    child.expect_exact(u"Calling test_sock_udp_recv__ETIMEDOUT()")
    child.match # get to ensure program reached that point
    start = datetime.now()
    child.expect_exact(u" * Calling sock_udp_recv()")
    child.expect(u" \\* \\(timed out with timeout (\\d+)\\)")
    exp_diff = int(child.match.group(1))    # expected timeout in microseconds
    stop = datetime.now()
    diff = (stop - start)
    diff = (diff.seconds * 1000000) + diff.microseconds
    # fail if the measured duration is more than 5% away from the expected one
    if diff > (exp_diff + (exp_diff * 0.05)) or \
       diff < (exp_diff - (exp_diff * 0.05)):
        raise InvalidTimeout("Invalid timeout %d (expected %d)" % (diff, exp_diff));
    else:
        print("Timed out correctly: %d (expected %d)" % (diff, exp_diff))
    child.expect_exact(u"Calling test_sock_udp_recv__socketed()")
    child.expect_exact(u"Calling test_sock_udp_recv__socketed_with_remote()")
    child.expect_exact(u"Calling test_sock_udp_recv__unsocketed()")
    child.expect_exact(u"Calling test_sock_udp_recv__unsocketed_with_remote()")
    child.expect_exact(u"Calling test_sock_udp_recv__with_timeout()")
    child.expect_exact(u"Calling test_sock_udp_recv__non_blocking()")
    child.expect_exact(u"Calling test_sock_udp_send__EAFNOSUPPORT()")
    child.expect_exact(u"Calling test_sock_udp_send__EINVAL_addr()")
    child.expect_exact(u"Calling test_sock_udp_send__EINVAL_netif()")
    child.expect_exact(u"Calling test_sock_udp_send__EINVAL_port()")
    child.expect_exact(u"Calling test_sock_udp_send__ENOTCONN()")
    child.expect_exact(u"Calling test_sock_udp_send__socketed_no_local_no_netif()")
    child.expect_exact(u"Calling test_sock_udp_send__socketed_no_netif()")
    child.expect_exact(u"Calling test_sock_udp_send__socketed_no_local()")
    child.expect_exact(u"Calling test_sock_udp_send__socketed()")
    child.expect_exact(u"Calling test_sock_udp_send__socketed_other_remote()")
    child.expect_exact(u"Calling test_sock_udp_send__unsocketed_no_local_no_netif()")
    child.expect_exact(u"Calling test_sock_udp_send__unsocketed_no_netif()")
    child.expect_exact(u"Calling test_sock_udp_send__unsocketed_no_local()")
    child.expect_exact(u"Calling test_sock_udp_send__unsocketed()")
    child.expect_exact(u"Calling test_sock_udp_send__no_sock_no_netif()")
    child.expect_exact(u"Calling test_sock_udp_send__no_sock()")
    child.expect_exact(u"ALL TESTS SUCCESSFUL")
# Script entry point: hand control to the RIOT test runner.
if __name__ == "__main__":
    sys.exit(testrunner.run(testfunc))
| lgpl-2.1 |
bingosummer/azure-quickstart-templates | elasticsearch-centos-3node/elasticinstall.py | 168 | 2840 | #!/usr/bin/env python
import subprocess
import socket
import sys
# Positional command-line arguments (supplied by the ARM template).
clustername = sys.argv[1]
number_nodes = sys.argv[2]
accountname = sys.argv[3]
accountkey = sys.argv[4]
print"inputs:\n"
print "clustername = " + clustername
# NOTE(review): "accontname" label typo below is in the runtime string, left as-is
print "accontname = " + accountname
print "accountkey = " + accountkey
hostname = socket.gethostname()
print "hostname: " + hostname
# Base of the static private-IP scheme; node index is appended (10.0.2.1<n>).
hostbase = "10.0.2.1"
print "hostbase: " + hostbase
def RunCommand(cmd):
    """Run *cmd* through the shell, echo its output, and return the output.

    Fix: previously the captured output was printed but the bare ``return``
    handed back None, so call sites like ``print RunCommand(...)`` printed
    "None". Returning the output is backward compatible (other callers
    ignore the return value). ``print(ret)`` works identically for a single
    argument under Python 2 and 3.

    :param cmd: shell command line (script-local, trusted input)
    :return: the command's stdout as returned by subprocess.check_output
    :raises subprocess.CalledProcessError: when the command exits non-zero
    """
    ret = subprocess.check_output(cmd, shell=True)
    print(ret)
    return ret
# Install Java, Elasticsearch 1.7.3 and its plugins (HQ UI + Azure cloud).
cmds = ["yum -y install nano",
        "yum -y install java-1.8.0-openjdk.x86_64",
        "curl 'https://download.elastic.co/elasticsearch/elasticsearch/elasticsearch-1.7.3.noarch.rpm' -o 'elasticsearch-1.7.3.noarch.rpm'",
        "rpm -ivh elasticsearch-1.7.3.noarch.rpm",
        "systemctl enable elasticsearch.service",
        "/usr/share/elasticsearch/bin/plugin -install royrusso/elasticsearch-HQ",
        "/usr/share/elasticsearch/bin/plugin -install elasticsearch/elasticsearch-cloud-azure/2.8.2"]
print "start running installs"
for cmd in cmds:
    RunCommand(cmd)
# Partition, format and mount the attached data disk (/dev/sdc) at /data.
print "prep data disk for use"
cmds=["sfdisk /dev/sdc < sdc.layout",
      "mkfs -t ext4 /dev/sdc1",
      "mkdir /data",
      "mount /dev/sdc1 /data"]
for cmd in cmds:
    RunCommand(cmd)
# Persist the mount in fstab, keyed by the partition UUID.
# NOTE(review): uuid is sliced by fixed offsets from blkid output — fragile,
# assumes the exact 'UUID="..."' layout; verify on the target OS image.
temp = subprocess.check_output("blkid /dev/sdc1", shell=True)
uuid = temp[17:53]
with open("/etc/fstab", "a") as fstab:
    fstab.write("UUID="+uuid+"\t/data\text4\tdefaults\t1\t2\n")
print RunCommand("chmod go+w /data")
# Dedicated Elasticsearch data directory, owned by the service user.
datapath = "/data/elastic"
cmds=["mkdir " + datapath,
      "chown -R elasticsearch:elasticsearch " + datapath,
      "chmod 755 " + datapath]
for cmd in cmds:
    RunCommand(cmd)
#re-write conf for heap
sysconf = '/etc/sysconfig/elasticsearch'
RunCommand("mv " + sysconf + " " + sysconf + ".bak")
heapsize="2g"
sysconfig = open(sysconf, 'w')
sysconfig.truncate()
sysconfig.write("ES_HEAP_SIZE=" + heapsize + "\n")
sysconfig.close()
print "start writing elastic config"
# write config: unicast host list is hostbase + node index for every node
hosts=""
for n in range(0, int(number_nodes)):
    hosts=hosts+hostbase+str(n)+","
hosts=hosts[:-1]
filename = '/etc/elasticsearch/elasticsearch.yml'
RunCommand("mv " + filename + " " + filename + ".bak")
config = open(filename, 'w')
config.truncate()
config.write("cluster.name: " + clustername + "\n")
config.write("node.name: " + hostname + "\n")
config.write("path.data: " + datapath + "\n")
config.write("discovery.zen.ping.multicast.enabled: false\n")
config.write("discovery.zen.ping.unicast.hosts: " + hosts + "\n")
config.write("node.master: true\n")
config.write("node.data: true\n")
config.write("cloud:\n")
config.write("    azure:\n")
config.write("        storage:\n")
config.write("            account: " + accountname + "\n")
config.write("            key: " + accountkey + "\n")
config.close()
print "finished writing config file"
RunCommand("systemctl start elasticsearch")
print "elastic install script finished"
lucafavatella/intellij-community | python/helpers/coveragepy/coverage/codeunit.py | 215 | 4487 | """Code unit (module) handling for Coverage."""
import glob, os
from coverage.backward import open_source, string_class, StringIO
from coverage.misc import CoverageException
def code_unit_factory(morfs, file_locator):
    """Construct a list of CodeUnits from polymorphic inputs.

    `morfs` is a module or a filename, or a list of same.
    `file_locator` is a FileLocator that can help resolve filenames.

    Returns a list of CodeUnit objects.

    """
    # Normalise the input to a list.
    if not isinstance(morfs, (list, tuple)):
        morfs = [morfs]

    # On Windows, the shell doesn't expand wildcards. Do it here.
    expanded = []
    for morf in morfs:
        if isinstance(morf, string_class) and ('?' in morf or '*' in morf):
            expanded += glob.glob(morf)
        else:
            expanded += [morf]

    return [CodeUnit(morf, file_locator) for morf in expanded]
class CodeUnit(object):
    """Code unit: a filename or module.

    Instance attributes:

    `name` is a human-readable name for this code unit.
    `filename` is the os path from which we can read the source.
    `relative` is a boolean.

    """
    def __init__(self, morf, file_locator):
        self.file_locator = file_locator

        # A module object carries its own file path; otherwise morf IS the path.
        if hasattr(morf, '__file__'):
            f = morf.__file__
        else:
            f = morf
        # .pyc files should always refer to a .py instead.
        if f.endswith('.pyc') or f.endswith('.pyo'):
            f = f[:-1]
        elif f.endswith('$py.class'): # Jython
            f = f[:-9] + ".py"
        self.filename = self.file_locator.canonical_filename(f)

        if hasattr(morf, '__name__'):
            n = modname = morf.__name__
            self.relative = True
        else:
            n = os.path.splitext(morf)[0]
            rel = self.file_locator.relative_filename(n)
            if os.path.isabs(n):
                self.relative = (rel != n)
            else:
                self.relative = True
            n = rel
            modname = None
        self.name = n
        self.modname = modname

    def __repr__(self):
        return "<CodeUnit name=%r filename=%r>" % (self.name, self.filename)

    # Annoying comparison operators. Py3k wants __lt__ etc, and Py2k needs all
    # of them defined. All comparisons are by `name`.

    def __lt__(self, other):
        return self.name < other.name
    def __le__(self, other):
        return self.name <= other.name
    def __eq__(self, other):
        return self.name == other.name
    def __ne__(self, other):
        return self.name != other.name
    def __gt__(self, other):
        return self.name > other.name
    def __ge__(self, other):
        return self.name >= other.name

    def flat_rootname(self):
        """A base for a flat filename to correspond to this code unit.

        Useful for writing files about the code where you want all the files in
        the same directory, but need to differentiate same-named files from
        different directories.

        For example, the file a/b/c.py might return 'a_b_c'

        """
        if self.modname:
            return self.modname.replace('.', '_')
        else:
            # Drop the drive letter (Windows), then flatten separators and dots.
            root = os.path.splitdrive(self.name)[1]
            return root.replace('\\', '_').replace('/', '_').replace('.', '_')

    def source_file(self):
        """Return an open file for reading the source of the code unit."""
        if os.path.exists(self.filename):
            # A regular text file: open it.
            return open_source(self.filename)

        # Maybe it's in a zip file?
        source = self.file_locator.get_zip_data(self.filename)
        if source is not None:
            return StringIO(source)

        # Couldn't find source.
        raise CoverageException(
            "No source for code '%s'." % self.filename
            )

    def should_be_python(self):
        """Does it seem like this file should contain Python?

        This is used to decide if a file reported as part of the exection of
        a program was really likely to have contained Python in the first
        place.

        """
        # Get the file extension.
        _, ext = os.path.splitext(self.filename)

        # Anything named *.py* should be Python.
        if ext.startswith('.py'):
            return True
        # A file with no extension should be Python.
        if not ext:
            return True
        # Everything else is probably not Python.
        return False
| apache-2.0 |
kernelmilowill/PDMQBACKTEST | vn.trader/ltsGateway/ltsGateway.py | 6 | 48098 | # encoding: UTF-8
'''
vn.lts的gateway接入
'''
import os
import json
from vnltsmd import MdApi
from vnltstd import TdApi
from vnltsqry import QryApi
from ltsDataType import *
from vtGateway import *
# The following dictionaries map VT-framework constants to LTS API constants.
# price type mapping
priceTypeMap= {}
priceTypeMap[PRICETYPE_LIMITPRICE] = defineDict["SECURITY_FTDC_OPT_LimitPrice"]
priceTypeMap[PRICETYPE_MARKETPRICE] = defineDict["SECURITY_FTDC_OPT_AnyPrice"]
priceTypeMap[PRICETYPE_FAK] = defineDict["SECURITY_FTDC_OPT_BestPrice"]
priceTypeMap[PRICETYPE_FOK] = defineDict["SECURITY_FTDC_OPT_AllLimitPrice"]
priceTypeMapReverse = {v: k for k, v in priceTypeMap.items()}

# direction (buy/sell) mapping
directionMap = {}
directionMap[DIRECTION_LONG] = defineDict["SECURITY_FTDC_D_Buy"]
directionMap[DIRECTION_SHORT] = defineDict["SECURITY_FTDC_D_Sell"]
directionMapReverse = {v: k for k, v in directionMap.items()}

# offset (open/close) mapping
offsetMap = {}
offsetMap[OFFSET_OPEN] = defineDict["SECURITY_FTDC_OF_Open"]
offsetMap[OFFSET_CLOSE] = defineDict["SECURITY_FTDC_OF_Close"]
offsetMap[OFFSET_CLOSETODAY] = defineDict["SECURITY_FTDC_OF_CloseToday"]
offsetMap[OFFSET_CLOSEYESTERDAY] = defineDict["SECURITY_FTDC_OF_CloseYesterday"]
offsetMapReverse = {v:k for k,v in offsetMap.items()}

# exchange code mapping
exchangeMap = {}
exchangeMap[EXCHANGE_SSE] = 'SSE'
exchangeMap[EXCHANGE_SZSE] = 'SZE'
exchangeMap[EXCHANGE_HKEX] = 'HGE'
exchangeMapReverse = {v:k for k,v in exchangeMap.items()}

# position direction mapping
posiDirectionMap = {}
posiDirectionMap[DIRECTION_NET] = defineDict["SECURITY_FTDC_PD_Net"]
posiDirectionMap[DIRECTION_LONG] = defineDict["SECURITY_FTDC_PD_Long"]
posiDirectionMap[DIRECTION_SHORT] = defineDict["SECURITY_FTDC_PD_Short"]
posiDirectionMapReverse = {v:k for k,v in posiDirectionMap.items()}
########################################################################################
class LtsGateway(VtGateway):
    """LTS gateway: bundles the market-data, trading and query APIs."""

    #----------------------------------------------------------------------
    def __init__(self, eventEngine, gatewayName='LTS'):
        """Constructor"""
        super(LtsGateway, self).__init__(eventEngine, gatewayName)

        self.mdApi = LtsMdApi(self)
        self.tdApi = LtsTdApi(self)
        self.qryApi = LtsQryApi(self)

        self.mdConnected = False
        self.tdConnected = False
        self.qryConnected = False

        self.qryEnabled = False         # whether to start the polling query loop

    #----------------------------------------------------------------------
    def connect(self):
        """Connect all three APIs using settings from the JSON config file."""
        # load the json settings file
        fileName = self.gatewayName + '_connect.json'
        path = os.path.abspath(os.path.dirname(__file__))
        fileName = os.path.join(path, fileName)

        try:
            f = file(fileName)
        except IOError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'读取连接配置出错,请检查'
            self.onLog(log)
            return

        # parse the json settings
        setting = json.load(f)
        try:
            userID = str(setting['userID'])
            mdPassword = str(setting['mdPassword'])
            tdPassword = str(setting['tdPassword'])
            brokerID = str(setting['brokerID'])
            tdAddress = str(setting['tdAddress'])
            mdAddress = str(setting['mdAddress'])
            qryAddress = str(setting['qryAddress'])
            productInfo = str(setting['productInfo'])
            authCode = str(setting['authCode'])
        except KeyError:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'连接配置缺少字段,请检查'
            self.onLog(log)
            return

        # connect the market data, trading and query APIs
        self.mdApi.connect(userID, mdPassword, brokerID, mdAddress)
        self.tdApi.connect(userID, tdPassword, brokerID, tdAddress, productInfo, authCode)
        self.qryApi.connect(userID, tdPassword, brokerID, qryAddress, productInfo, authCode)

        # initialize and start the polling query loop
        # NOTE(review): initQuery() already calls startQuery() when qryEnabled
        # is set, so the timer handler may be registered twice here — confirm
        # the event engine de-duplicates handlers
        self.initQuery()
        self.startQuery()

    #----------------------------------------------------------------------
    def subscribe(self, subscribeReq):
        """Subscribe to market data."""
        self.mdApi.subscribe(subscribeReq)

    #----------------------------------------------------------------------
    def sendOrder(self, orderReq):
        """Send an order."""
        return self.tdApi.sendOrder(orderReq)

    #----------------------------------------------------------------------
    def cancelOrder(self, cancelOrderReq):
        """Cancel an order."""
        self.tdApi.cancelOrder(cancelOrderReq)

    #----------------------------------------------------------------------
    def qryAccount(self):
        """Query account funds."""
        self.qryApi.qryAccount()

    #----------------------------------------------------------------------
    def qryPosition(self):
        """Query positions."""
        self.qryApi.qryPosition()

    #----------------------------------------------------------------------
    def close(self):
        """Close every connected API."""
        if self.mdConnected:
            self.mdApi.close()
        if self.tdConnected:
            self.tdApi.close()
        if self.qryConnected:
            self.qryApi.close()

    #----------------------------------------------------------------------
    def initQuery(self):
        """Initialise the continuous query loop."""
        if self.qryEnabled:
            # list of query functions to cycle through
            self.qryFunctionList = [self.qryAccount, self.qryPosition]

            self.qryCount = 0           # countdown until next query
            self.qryTrigger = 2         # number of timer ticks per query
            self.qryNextFunction = 0    # index of the next query function to run

            self.startQuery()

    #----------------------------------------------------------------------
    def query(self, event):
        """Timer-event handler registered on the event engine."""
        self.qryCount += 1

        if self.qryCount > self.qryTrigger:
            # reset the countdown
            self.qryCount = 0

            # run the scheduled query function
            function = self.qryFunctionList[self.qryNextFunction]
            function()

            # advance to the next query function, wrapping around at the end
            self.qryNextFunction += 1
            if self.qryNextFunction == len(self.qryFunctionList):
                self.qryNextFunction = 0

    #----------------------------------------------------------------------
    def startQuery(self):
        """Register the query handler on the timer event."""
        self.eventEngine.register(EVENT_TIMER, self.query)

    #----------------------------------------------------------------------
    def setQryEnabled(self, qryEnabled):
        """Enable or disable the continuous query loop."""
        self.qryEnabled = qryEnabled
########################################################################
class LtsMdApi(MdApi):
"""Lts行情API实现"""
#----------------------------------------------------------------------
def __init__(self, gateway):
"""Constructor"""
super(LtsMdApi, self).__init__()
self.gateway = gateway #gateway对象
self.gatewayName = gateway.gatewayName #gateway对象名称
self.reqID = EMPTY_INT # 操作请求编号
self.connectionStatus = False # 连接状态
self.loginStatus = False # 登陆状态
self.subscribedSymbols = set()
self.userID = EMPTY_STRING # 账号
self.password = EMPTY_STRING # 密码
self.brokerID = EMPTY_STRING # 经纪商代码
self.address = EMPTY_STRING # 服务器地址
#----------------------------------------------------------------------
def onFrontConnected(self):
"""服务器连接"""
self.connectionStatus = True
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'行情服务器连接成功'
self.gateway.onLog(log)
self.login()
#----------------------------------------------------------------------
def onFrontDisconnected(self,n):
"""服务器断开"""
self.connectionStatus= False
self.loginStatus = False
self.gateway.mdConnected = False
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'行情服务器连接断开'
self.gateway.onLog(log)
#----------------------------------------------------------------------
def onHeartBeatWarning(self, n):
"""心跳报警"""
pass
#----------------------------------------------------------------------
def onRspError(self,error,n,last):
"""错误回报"""
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
#----------------------------------------------------------------------
def onRspUserLogin(self, data, error, n, last):
"""登陆回报"""
# 如果登录成功,推送日志信息
if error['ErrorID'] == 0:
self.loginStatus = True
self.gateway.mdConnected = True
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'行情服务器登录完成'
self.gateway.onLog(log)
# 重新订阅之前订阅的合约
for subscribeReq in self.subscribedSymbols:
self.subscribe(subscribeReq)
# 否则,推送错误信息
else:
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
#----------------------------------------------------------------------
def onRspUserLogout(self, data, error, n, last):
"""登出回报"""
# 如果登出成功,推送日志信息
if error['ErrorID'] == 0:
self.loginStatus = False
self.gateway.tdConnected = False
log = VtLogData()
log.gatewayName = self.gatewayName
log.logContent = u'行情服务器登出完成'
self.gateway.onLog(log)
# 否则,推送错误信息
else:
err = VtErrorData()
err.gatewayName = self.gatewayName
err.errorID = error['ErrorID']
err.errorMsg = error['ErrorMsg'].decode('gbk')
self.gateway.onError(err)
#----------------------------------------------------------------------
def onRspSubMarketData(self, data, error, n, last):
"""订阅合约回报"""
# 通常不在乎订阅错误,选择忽略
pass
#----------------------------------------------------------------------
def onRspUnSubMarketData(self, data, error, n, last):
"""退订合约回报"""
# 同上
pass
#----------------------------------------------------------------------
def onRtnDepthMarketData(self, data):
"""行情推送"""
tick = VtTickData()
tick.gatewayName = self.gatewayName
tick.symbol = data['InstrumentID']
tick.exchange = exchangeMapReverse.get(data['ExchangeID'], u'未知')
tick.vtSymbol = '.'.join([tick.symbol, tick.exchange])
tick.lastPrice = data['LastPrice']
tick.volume = data['Volume']
tick.openInterest = data['OpenInterest']
tick.time = '.'.join([data['UpdateTime'], str(data['UpdateMillisec']/100)])
tick.date = data['TradingDay']
tick.openPrice = data['OpenPrice']
tick.highPrice = data['HighestPrice']
tick.lowPrice = data['LowestPrice']
tick.preClosePrice = data['PreClosePrice']
tick.upperLimit = data['UpperLimitPrice']
tick.lowerLimit = data['LowerLimitPrice']
# LTS有5档行情
tick.bidPrice1 = data['BidPrice1']
tick.bidVolume1 = data['BidVolume1']
tick.askPrice1 = data['AskPrice1']
tick.askVolume1 = data['AskVolume1']
tick.bidPrice2 = data['BidPrice2']
tick.bidVolume2 = data['BidVolume2']
tick.askPrice2 = data['AskPrice2']
tick.askVolume2 = data['AskVolume2']
tick.bidPrice3 = data['BidPrice3']
tick.bidVolume3 = data['BidVolume3']
tick.askPrice3 = data['AskPrice3']
tick.askVolume3 = data['AskVolume3']
tick.bidPrice4 = data['BidPrice4']
tick.bidVolume4 = data['BidVolume4']
tick.askPrice4 = data['AskPrice4']
tick.askVolume4 = data['AskVolume4']
tick.bidPrice5 = data['BidPrice5']
tick.bidVolume5 = data['BidVolume5']
tick.askPrice5 = data['AskPrice5']
tick.askVolume5 = data['AskVolume5']
self.gateway.onTick(tick)
#----------------------------------------------------------------------
def connect(self, userID, password, brokerID, address):
"""初始化连接"""
self.userID = userID # 账号
self.password = password # 密码
self.brokerID = brokerID # 经纪商代码
self.address = address # 服务器地址
# 如果尚未建立服务器连接,则进行连接
if not self.connectionStatus:
# 创建C++环境中的API对象,这里传入的参数是需要用来保存.con文件的文件夹路径
path = os.getcwd() + '/temp/' + self.gatewayName + '/'
if not os.path.exists(path):
os.makedirs(path)
self.createFtdcMdApi(path)
# 注册服务器地址
self.registerFront(self.address)
# 初始化连接,成功会调用onFrontConnected
self.init()
# 若已经连接但尚未登录,则进行登录
else:
if not self.loginStatus:
self.login()
#----------------------------------------------------------------------
def subscribe(self, subscribeReq):
    """Subscribe to market data for one instrument.

    If called before login completes, the request is only cached in
    ``subscribedSymbols``; cached requests are replayed automatically
    once login succeeds (see the login callback).
    """
    req = {}
    req['InstrumentID'] = str(subscribeReq.symbol)
    req['ExchangeID'] = exchangeMap.get(str(subscribeReq.exchange), '')

    if self.loginStatus:
        self.subscribeMarketData(req)

    # Always remember the request so it can be replayed after (re)login.
    self.subscribedSymbols.add(subscribeReq)
#----------------------------------------------------------------------
def login(self):
    """Send the market-data login request.

    Does nothing unless user id, password and broker id have all been
    provided via connect().
    """
    if self.userID and self.password and self.brokerID:
        req = {}
        req['UserID'] = self.userID
        req['Password'] = self.password
        req['BrokerID'] = self.brokerID
        self.reqID += 1
        self.reqUserLogin(req, self.reqID)
#----------------------------------------------------------------------
def close(self):
    """Shut down and release the underlying C++ API object."""
    self.exit()
########################################################################
class LtsTdApi(TdApi):
    """LTS trade API implementation.

    Wraps the C++ FTDC trader API: drives the
    connect -> fetch-random-code -> login handshake, and translates
    order, trade and error callbacks into VT event objects pushed
    through the owning gateway.
    """

    #----------------------------------------------------------------------
    def __init__(self, gateway):
        """Store the owning gateway and reset all connection state."""
        super(LtsTdApi, self).__init__()

        self.gateway = gateway                  # owning gateway object
        self.gatewayName = gateway.gatewayName  # gateway name

        self.reqID = EMPTY_INT                  # request sequence number
        self.orderRef = EMPTY_INT               # local order reference

        self.connectionStatus = False           # front connection status
        self.loginStatus = False                # login status

        self.userID = EMPTY_STRING              # account id
        self.password = EMPTY_STRING            # password
        self.brokerID = EMPTY_STRING            # broker id
        self.address = EMPTY_STRING             # front server address
        self.productInfo = EMPTY_STRING         # client product name
        self.authCode = EMPTY_STRING            # authentication code
        self.randCode = EMPTY_STRING            # random code fetched before login

        self.frontID = EMPTY_INT                # front machine id
        self.sessionID = EMPTY_INT              # session id

    #----------------------------------------------------------------------
    def _pushError(self, error):
        """Translate an FTDC error dict into VtErrorData and push it."""
        err = VtErrorData()
        err.gatewayName = self.gatewayName
        err.errorID = error['ErrorID']
        err.errorMsg = error['ErrorMsg'].decode('gbk')
        self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onFrontConnected(self):
        """Front connected: log it and request the auth random code."""
        self.connectionStatus = True

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'交易服务器连接成功'
        self.gateway.onLog(log)

        # After the front connects, fetch the random code needed for login.
        self.reqID += 1
        self.reqFetchAuthRandCode({}, self.reqID)

    #----------------------------------------------------------------------
    def onFrontDisconnected(self, n):
        """Front disconnected: reset connection and login flags."""
        self.connectionStatus = False
        self.loginStatus = False
        self.gateway.tdConnected = False

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'交易服务器连接断开'
        self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onHeartBeatWarning(self, n):
        """Heartbeat warning: ignored."""
        pass

    #----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, n, last):
        """Login response: record session info on success, else push error."""
        if error['ErrorID'] == 0:
            self.frontID = str(data['FrontID'])
            self.sessionID = str(data['SessionID'])
            self.loginStatus = True
            # BUGFIX: this is the trade API, so flag tdConnected
            # (original set mdConnected, which belongs to the MD API;
            # logout/disconnect here already clear tdConnected).
            self.gateway.tdConnected = True

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'交易服务器登录完成'
            self.gateway.onLog(log)
        else:
            # BUGFIX: original assigned err.gatewayName = self.gateway
            # (the object) instead of the name string.
            self._pushError(error)

    #----------------------------------------------------------------------
    def onRspUserLogout(self, data, error, n, last):
        """Logout response: clear login flags on success, else push error."""
        if error['ErrorID'] == 0:
            self.loginStatus = False
            self.gateway.tdConnected = False

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'交易服务器登出完成'
            self.gateway.onLog(log)
        else:
            self._pushError(error)

    #----------------------------------------------------------------------
    def onRspFetchAuthRandCode(self, data, error, n, last):
        """Random auth code received: store it and proceed to login."""
        self.randCode = data['RandCode']
        self.login()

    #----------------------------------------------------------------------
    def onRspUserPasswordUpdate(self, data, error, n, last):
        """Password update response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspTradingAccountPasswordUpdate(self, data, error, n, last):
        """Trading-account password update response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspOrderInsert(self, data, error, n, last):
        """Order insert error (from the broker counter)."""
        self._pushError(error)

    #----------------------------------------------------------------------
    def onRspOrderAction(self, data, error, n, last):
        """Order cancel error (from the broker counter)."""
        self._pushError(error)

    #----------------------------------------------------------------------
    def onRspError(self, error, n, last):
        """Generic error response."""
        self._pushError(error)

    #----------------------------------------------------------------------
    def onRtnOrder(self, data):
        """Order return: convert to VtOrderData and push."""
        # Keep our local order reference ahead of anything seen on the wire.
        newref = data['OrderRef']
        self.orderRef = max(self.orderRef, int(newref))

        order = VtOrderData()
        order.gatewayName = self.gatewayName

        # Symbol and order identifiers.
        order.symbol = data['InstrumentID']
        order.exchange = exchangeMapReverse.get(data['ExchangeID'], '')
        order.vtSymbol = '.'.join([order.symbol, order.exchange])
        order.orderID = data['OrderRef']

        # Direction.
        if data['Direction'] == '0':
            order.direction = DIRECTION_LONG
        elif data['Direction'] == '1':
            order.direction = DIRECTION_SHORT
        else:
            order.direction = DIRECTION_UNKNOWN

        # Offset (open/close).
        if data['CombOffsetFlag'] == '0':
            order.offset = OFFSET_OPEN
        elif data['CombOffsetFlag'] == '1':
            order.offset = OFFSET_CLOSE
        else:
            order.offset = OFFSET_UNKNOWN

        # Status.
        if data['OrderStatus'] == '0':
            order.status = STATUS_ALLTRADED
        elif data['OrderStatus'] == '1':
            order.status = STATUS_PARTTRADED
        elif data['OrderStatus'] == '3':
            order.status = STATUS_NOTTRADED
        elif data['OrderStatus'] == '5':
            order.status = STATUS_CANCELLED
        else:
            order.status = STATUS_UNKNOWN

        # Price, volumes, times and session identifiers.
        order.price = float(data['LimitPrice'])
        order.totalVolume = data['VolumeTotalOriginal']
        order.tradedVolume = data['VolumeTraded']
        order.orderTime = data['InsertTime']
        order.cancelTime = data['CancelTime']
        order.frontID = data['FrontID']
        order.sessionID = data['SessionID']

        # NOTE(review): full CTP-style order identity needs
        # (frontID, sessionID, orderID); here only gatewayName.orderID
        # is used — kept as original behaviour.
        order.vtOrderID = '.'.join([self.gatewayName, order.orderID])

        self.gateway.onOrder(order)

    #----------------------------------------------------------------------
    def onRtnTrade(self, data):
        """Trade return: convert to VtTradeData and push."""
        trade = VtTradeData()
        trade.gatewayName = self.gatewayName

        # Symbol and identifiers.
        trade.symbol = data['InstrumentID']
        trade.exchange = exchangeMapReverse.get(data['ExchangeID'], '')
        trade.vtSymbol = '.'.join([trade.symbol, trade.exchange])

        trade.tradeID = data['TradeID']
        trade.vtTradeID = '.'.join([self.gatewayName, trade.tradeID])

        trade.orderID = data['OrderRef']
        trade.vtOrderID = '.'.join([self.gatewayName, trade.orderID])

        # Direction and offset, mapped back to VT constants.
        trade.direction = directionMapReverse.get(data['Direction'], '')
        trade.offset = offsetMapReverse.get(data['OffsetFlag'], '')

        # Price, volume, time.
        trade.price = float(data['Price'])
        trade.volume = data['Volume']
        trade.tradeTime = data['TradeTime']

        self.gateway.onTrade(trade)

    #----------------------------------------------------------------------
    def onErrRtnOrderInsert(self, data, error):
        """Order insert error return (from the exchange)."""
        self._pushError(error)

    #----------------------------------------------------------------------
    def onErrRtnOrderAction(self, data, error):
        """Order cancel error return (from the exchange)."""
        self._pushError(error)

    #----------------------------------------------------------------------
    def onRspFundOutByLiber(self, data, error, n, last):
        """LTS fund-out response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRtnFundOutByLiber(self, data):
        """LTS fund-out notification: unused."""
        pass

    #----------------------------------------------------------------------
    def onErrRtnFundOutByLiber(self, data, error):
        """LTS fund-out error return: unused."""
        pass

    #----------------------------------------------------------------------
    def onRtnFundInByBank(self, data):
        """Bank fund-in notification: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspFundInterTransfer(self, data, error, n, last):
        """Internal fund transfer response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRtnFundInterTransferSerial(self, data):
        """Internal fund transfer serial notification: unused."""
        pass

    #----------------------------------------------------------------------
    def onErrRtnFundInterTransfer(self, data, error):
        """Internal fund transfer error return: unused."""
        pass

    #----------------------------------------------------------------------
    def connect(self, userID, password, brokerID, address, productInfo, authCode):
        """Initialise the trade connection.

        Creates the C++ API object on first use, subscribes topics,
        then connects (onFrontConnected fires on success); if already
        connected but not logged in, logs in directly.
        """
        self.userID = userID            # account id
        self.password = password        # password
        self.brokerID = brokerID        # broker id
        self.address = address          # front server address
        self.productInfo = productInfo
        self.authCode = authCode

        if not self.connectionStatus:
            # The C++ API object stores its .con files under this path.
            path = os.getcwd() + '/temp/' + self.gatewayName + '/'
            if not os.path.exists(path):
                os.makedirs(path)
            self.createFtdcTraderApi(path)

            # Push all private/public topic data from today onwards.
            self.subscribePrivateTopic(0)
            self.subscribePublicTopic(0)

            # Register the front address and start connecting.
            self.registerFront(self.address)
            self.init()
        else:
            if not self.loginStatus:
                self.login()

    #----------------------------------------------------------------------
    def login(self):
        """Send the trade login request (needs credentials and randCode)."""
        if self.userID and self.password and self.brokerID:
            req = {}
            req['UserID'] = self.userID
            req['Password'] = self.password
            req['BrokerID'] = self.brokerID
            req['UserProductInfo'] = self.productInfo
            req['AuthCode'] = self.authCode
            req['RandCode'] = self.randCode
            self.reqID += 1
            self.reqUserLogin(req, self.reqID)

    #----------------------------------------------------------------------
    def sendOrder(self, orderReq):
        """Send a limit order.

        Returns the vtOrderID string so callers can track the order,
        or '' when the request uses an enum value this API does not
        support.
        """
        self.reqID += 1
        self.orderRef += 1

        req = {}
        req['InstrumentID'] = str(orderReq.symbol)
        req['LimitPrice'] = str(orderReq.price)             # LTS expects price as a string
        req['VolumeTotalOriginal'] = int(orderReq.volume)

        # Map VT enums to LTS codes; unsupported values abort with ''.
        # (The original also pre-set ExchangeID via .get() before this
        # block; that value was always overwritten or discarded, so the
        # redundant assignment is removed.)
        try:
            req['OrderPriceType'] = priceTypeMap[orderReq.priceType]
            req['Direction'] = directionMap[orderReq.direction]
            req['CombOffsetFlag'] = offsetMap[orderReq.offset]
            req['ExchangeID'] = exchangeMap[orderReq.exchange]
        except KeyError:
            return ''

        req['OrderRef'] = str(self.orderRef)
        req['InvestorID'] = self.userID
        req['UserID'] = self.userID
        req['BrokerID'] = self.brokerID

        req['CombHedgeFlag'] = defineDict['SECURITY_FTDC_HF_Speculation']       # speculation
        req['ContingentCondition'] = defineDict['SECURITY_FTDC_CC_Immediately'] # send immediately
        req['ForceCloseReason'] = defineDict['SECURITY_FTDC_FCC_NotForceClose'] # not a forced close
        req['IsAutoSuspend'] = 0                                                # no auto suspend
        req['TimeCondition'] = defineDict['SECURITY_FTDC_TC_GFD']               # good for day
        req['VolumeCondition'] = defineDict['SECURITY_FTDC_VC_AV']              # any volume
        req['MinVolume'] = 1                                                    # minimum fill 1
        req['UserForceClose'] = 0

        self.reqOrderInsert(req, self.reqID)

        # Return the order id string so algorithms can manage it.
        vtOrderID = '.'.join([self.gatewayName, str(self.orderRef)])
        return vtOrderID

    #----------------------------------------------------------------------
    def cancelOrder(self, cancelOrderReq):
        """Cancel an existing order."""
        self.reqID += 1

        req = {}
        req['InstrumentID'] = cancelOrderReq.symbol
        req['ExchangeID'] = cancelOrderReq.exchange
        req['OrderRef'] = cancelOrderReq.orderID
        req['FrontID'] = cancelOrderReq.frontID
        req['SessionID'] = cancelOrderReq.sessionID

        req['ActionFlag'] = defineDict['SECURITY_FTDC_AF_Delete']
        req['BrokerID'] = self.brokerID
        req['InvestorID'] = self.userID

        self.reqOrderAction(req, self.reqID)

    #----------------------------------------------------------------------
    def close(self):
        """Shut down and release the underlying C++ API object."""
        self.exit()
########################################################################
class LtsQryApi(QryApi):
    """LTS account query API implementation.

    Wraps the C++ FTDC query API: handles the connect/login handshake
    and converts instrument, account and position query responses into
    VT objects pushed through the owning gateway.
    """

    #----------------------------------------------------------------------
    def __init__(self, gateway):
        """Store the owning gateway and reset all connection state."""
        super(LtsQryApi, self).__init__()

        self.gateway = gateway                  # owning gateway object
        self.gatewayName = gateway.gatewayName  # gateway name

        self.reqID = EMPTY_INT                  # request sequence number
        self.orderRef = EMPTY_INT               # order reference (unused here)

        self.connectionStatus = False           # front connection status
        self.loginStatus = False                # login status

        self.userID = EMPTY_STRING              # account id
        self.password = EMPTY_STRING            # password
        self.brokerID = EMPTY_STRING            # broker id
        self.address = EMPTY_STRING             # front server address
        self.productInfo = EMPTY_STRING         # client product name
        self.authCode = EMPTY_STRING            # authentication code
        self.randCode = EMPTY_STRING            # random code fetched before login

        self.frontID = EMPTY_INT                # front machine id
        self.sessionID = EMPTY_INT              # session id

    #----------------------------------------------------------------------
    def _pushError(self, error):
        """Translate an FTDC error dict into VtErrorData and push it."""
        err = VtErrorData()
        err.gatewayName = self.gatewayName
        err.errorID = error['ErrorID']
        err.errorMsg = error['ErrorMsg'].decode('gbk')
        self.gateway.onError(err)

    #----------------------------------------------------------------------
    def onFrontConnected(self):
        """Front connected: log it and request the auth random code."""
        self.connectionStatus = True

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'查询服务器连接成功'
        self.gateway.onLog(log)

        # After the front connects, fetch the random code needed for login.
        self.reqID += 1
        self.reqFetchAuthRandCode({}, self.reqID)

    #----------------------------------------------------------------------
    def onFrontDisconnected(self, n):
        """Front disconnected: reset connection and login flags."""
        self.connectionStatus = False
        self.loginStatus = False
        # BUGFIX: this is the query API, so clear qryConnected
        # (original cleared tdConnected, the trade API's flag, leaving
        # qryConnected stale — cf. onRspUserLogin/onRspUserLogout here).
        self.gateway.qryConnected = False

        log = VtLogData()
        log.gatewayName = self.gatewayName
        log.logContent = u'查询服务器连接断开'
        self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onHeartBeatWarning(self, n):
        """Heartbeat warning: ignored."""
        pass

    #----------------------------------------------------------------------
    def onRspError(self, error, n, last):
        """Generic error response."""
        self._pushError(error)

    #----------------------------------------------------------------------
    def onRspUserLogin(self, data, error, n, last):
        """Login response: on success store session info and query instruments."""
        if error['ErrorID'] == 0:
            self.frontID = str(data['FrontID'])
            self.sessionID = str(data['SessionID'])
            self.loginStatus = True
            self.gateway.qryConnected = True

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'查询服务器登录完成'
            self.gateway.onLog(log)

            # Kick off the instrument (contract) query right after login.
            self.reqID += 1
            self.reqQryInstrument({}, self.reqID)
        else:
            # BUGFIX: original assigned err.gatewayName = self.gateway
            # (the object) instead of the name string.
            self._pushError(error)

    #----------------------------------------------------------------------
    def onRspUserLogout(self, data, error, n, last):
        """Logout response: clear login flags on success, else push error."""
        if error['ErrorID'] == 0:
            self.loginStatus = False
            self.gateway.qryConnected = False

            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'查询服务器登出完成'
            self.gateway.onLog(log)
        else:
            self._pushError(error)

    #----------------------------------------------------------------------
    def onRspFetchAuthRandCode(self, data, error, n, last):
        """Random auth code received: store it and proceed to login."""
        self.randCode = data['RandCode']
        self.login()

    #----------------------------------------------------------------------
    def onRspQryExchange(self, data, error, n, last):
        """Exchange query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryInstrument(self, data, error, n, last):
        """Instrument query response: convert to VtContractData and push."""
        contract = VtContractData()
        contract.gatewayName = self.gatewayName

        contract.symbol = data['InstrumentID']
        contract.exchange = exchangeMapReverse[data['ExchangeID']]
        contract.vtSymbol = '.'.join([contract.symbol, contract.exchange])
        contract.name = data['InstrumentName'].decode('GBK')

        # Numeric contract attributes.
        contract.size = data['VolumeMultiple']
        contract.priceTick = data['PriceTick']
        contract.strikePrice = data['ExecPrice']
        contract.underlyingSymbol = data['MarketID']

        # Product class mapping; both '6' and '8' map to equity
        # (kept from the original — TODO confirm the distinction).
        if data['ProductClass'] == '1':
            contract.productClass = PRODUCT_FUTURES
        elif data['ProductClass'] == '2':
            contract.productClass = PRODUCT_OPTION
        elif data['ProductClass'] == '3':
            contract.productClass = PRODUCT_COMBINATION
        elif data['ProductClass'] == '6':
            contract.productClass = PRODUCT_EQUITY
        elif data['ProductClass'] == '8':
            contract.productClass = PRODUCT_EQUITY
        else:
            # Unknown product class: surface it for debugging.
            # (print() form is valid on both Python 2 and 3.)
            print(data['ProductClass'])

        # Option type.
        if data['InstrumentType'] == '1':
            contract.optionType = OPTION_CALL
        elif data['InstrumentType'] == '2':
            contract.optionType = OPTION_PUT

        self.gateway.onContract(contract)

        if last:
            log = VtLogData()
            log.gatewayName = self.gatewayName
            log.logContent = u'交易合约信息获取完成'
            self.gateway.onLog(log)

    #----------------------------------------------------------------------
    def onRspQryInvestor(self, data, error, n, last):
        """Investor query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryTradingCode(self, data, error, n, last):
        """Trading code query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryTradingAccount(self, data, error, n, last):
        """Trading account query response: convert to VtAccountData and push."""
        account = VtAccountData()
        account.gatewayName = self.gatewayName

        # Account identifiers.
        account.accountID = data['AccountID']
        account.vtAccountID = '.'.join([self.gatewayName, account.accountID])

        # Numeric fields.
        account.preBalance = data['PreBalance']
        account.available = data['Available']
        account.commission = data['Commission']
        account.margin = data['CurrMargin']
        #account.closeProfit = data['CloseProfit']
        #account.positionProfit = data['PositionProfit']

        # NOTE(review): unclear whether this Balance matches the
        # terminal's account balance — needs verification (original note).
        account.balance = data['Balance']

        self.gateway.onAccount(account)

    #----------------------------------------------------------------------
    def onRspQryBondInterest(self, data, error, n, last):
        """Bond interest query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryMarketRationInfo(self, data, error, n, last):
        """Market-value placement query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryInstrumentCommissionRate(self, data, error, n, last):
        """Commission rate query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryETFInstrument(self, data, error, n, last):
        """ETF instrument query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryETFBasket(self, data, error, n, last):
        """ETF basket query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryOFInstrument(self, data, error, n, last):
        """OF instrument query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQrySFInstrument(self, data, error, n, last):
        """SF instrument query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryInstrumentUnitMargin(self, data, error, n, last):
        """Per-unit margin query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryPreDelivInfo(self, data, error, n, last):
        """Pre-delivery info query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRsyQryCreditStockAssignInfo(self, data, error, n, last):
        """Creditable stock assignment query response: unused.

        NOTE: 'Rsy' typo is the C++ binding's callback name — do not rename.
        """
        pass

    #----------------------------------------------------------------------
    def onRspQryCreditCashAssignInfo(self, data, error, n, last):
        """Creditable cash assignment query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRsyQryConversionRate(self, data, error, n, last):
        """Security conversion rate query response: unused.

        NOTE: 'Rsy' typo is the C++ binding's callback name — do not rename.
        """
        pass

    #----------------------------------------------------------------------
    def onRspQryHisCreditDebtInfo(self, data, error, n, last):
        """Historical credit debt query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryMarketDataStaticInfo(self, data, error, n, last):
        """Market data static info query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryExpireRepurchInfo(self, data, error, n, last):
        """Expiring repurchase info query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryBondPledgeRate(self, data, error, n, last):
        """Bond pledge rate query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryPledgeBond(self, data, error, n, last):
        """Pledged bond code mapping query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryOrder(self, data, error, n, last):
        """Order query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryTrade(self, data, error, n, last):
        """Trade query response: unused."""
        pass

    #----------------------------------------------------------------------
    def onRspQryInvestorPosition(self, data, error, n, last):
        """Position query response: convert to VtPositionData and push."""
        pos = VtPositionData()
        pos.gatewayName = self.gatewayName

        # Symbol.
        pos.symbol = data['InstrumentID']
        pos.exchange = exchangeMapReverse.get(data['ExchangeID'], '')
        pos.vtSymbol = '.'.join([pos.symbol, pos.exchange])

        # Direction and frozen volume (net/long use LongFrozen).
        pos.direction = posiDirectionMapReverse.get(data['PosiDirection'], '')
        if pos.direction == DIRECTION_NET or pos.direction == DIRECTION_LONG:
            pos.frozen = data['LongFrozen']
        elif pos.direction == DIRECTION_SHORT:
            pos.frozen = data['ShortFrozen']

        # Volumes.
        pos.position = data['Position']
        pos.ydPosition = data['YdPosition']

        # Average open price (guard against division by zero on empty position).
        if pos.position:
            pos.price = data['OpenCost'] / pos.position

        # VT position name.
        pos.vtPositionName = '.'.join([pos.vtSymbol, pos.direction])

        self.gateway.onPosition(pos)

    #----------------------------------------------------------------------
    def OnRspQryFundTransferSerial(self, data, error, n, last):
        """Fund transfer serial query response: unused.

        NOTE: capital 'On' is the C++ binding's callback name — do not rename.
        """
        pass

    #----------------------------------------------------------------------
    def onRspQryFundInterTransferSerial(self, data, error, n, last):
        """Internal fund transfer serial query response: unused."""
        pass

    #----------------------------------------------------------------------
    def connect(self, userID, password, brokerID, address, productInfo, authCode):
        """Initialise the query connection.

        Creates the C++ API object on first use, then connects
        (onFrontConnected fires on success); if already connected but
        not logged in, logs in directly.
        """
        self.userID = userID            # account id
        self.password = password        # password
        self.brokerID = brokerID        # broker id
        self.address = address          # front server address
        self.productInfo = productInfo
        self.authCode = authCode

        if not self.connectionStatus:
            # The C++ API object stores its .con files under this path.
            path = os.getcwd() + '/temp/' + self.gatewayName + '/'
            if not os.path.exists(path):
                os.makedirs(path)
            self.createFtdcQueryApi(path)

            # Register the front address and start connecting.
            self.registerFront(self.address)
            self.init()
        else:
            if not self.loginStatus:
                self.login()

    #----------------------------------------------------------------------
    def login(self):
        """Send the query login request (needs credentials and randCode)."""
        if self.userID and self.password and self.brokerID:
            req = {}
            req['UserID'] = self.userID
            req['Password'] = self.password
            req['BrokerID'] = self.brokerID
            req['UserProductInfo'] = self.productInfo
            req['AuthCode'] = self.authCode
            req['RandCode'] = self.randCode
            self.reqID += 1
            self.reqUserLogin(req, self.reqID)

    #----------------------------------------------------------------------
    def qryAccount(self):
        """Query the trading account."""
        self.reqID += 1
        req = {}
        req['BrokerID'] = self.brokerID
        req['InvestorID'] = self.userID
        self.reqQryTradingAccount(req, self.reqID)

    #----------------------------------------------------------------------
    def qryPosition(self):
        """Query current positions."""
        self.reqID += 1
        req = {}
        req['BrokerID'] = self.brokerID
        req['InvestorID'] = self.userID
        self.reqQryInvestorPosition(req, self.reqID)

    #----------------------------------------------------------------------
    def close(self):
        """Shut down and release the underlying C++ API object."""
        self.exit()
| mit |
botswana-harvard/tshilo-dikotla | td_infant/models/infant_congenital_anomalies.py | 1 | 13443 | from django.db import models
from edc_base.model.fields import OtherCharField
# from edc_base.audit_trail import AuditTrail
from edc_base.model.models.base_uuid_model import BaseUuidModel
from edc_constants.choices import CONFIRMED_SUSPECTED
from edc_visit_tracking.models import CrfInlineModelMixin
from edc_export.models import ExportTrackingFieldsMixin
from edc_sync.models import SyncModelMixin, SyncHistoricalRecords
from tshilo_dikotla.choices import (
CNS_ABNORMALITIES, FACIAL_DEFECT, CLEFT_DISORDER, MOUTH_UP_GASTROINT_DISORDER,
CARDIOVASCULAR_DISORDER, RESPIRATORY_DEFECT, LOWER_GASTROINTESTINAL_ABNORMALITY,
FEM_GENITAL_ANOMALY, MALE_GENITAL_ANOMALY, RENAL_ANOMALY, MUSCULOSKELETAL_ABNORMALITY,
SKIN_ABNORMALITY, TRISOME_CHROSOMESOME_ABNORMALITY, OTHER_DEFECT)
from ..managers import (InfantCnsManager, InfantFacialDefectManager,
InfantCleftDisorderManager, InfantMouthUpGiManager, InfantOtherAbnormalityItemsManager,
InfantCardioDisorderManager, InfantRespiratoryDefectManager, InfantLowerGiManager,
InfantFemaleGenitalManager, InfantMaleGenitalManager, InfantRenalManager,
InfantMusculoskeletalManager, InfantSkinManager, InfantTrisomiesManager)
from .infant_crf_model import InfantCrfModel
class InfantCongenitalAnomalies(InfantCrfModel):
    """ A model completed by the user on the infant's congenital anomalies. """

    # Parent CRF: the per-system anomaly models below link to this record
    # through BaseCnsItem.congenital_anomalies.

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies"
class BaseCnsItem(CrfInlineModelMixin, SyncModelMixin, ExportTrackingFieldsMixin, BaseUuidModel):
    """Abstract base for inline anomaly records attached to the
    InfantCongenitalAnomalies CRF, with sync/export support."""

    # Link to the parent CRF record.
    congenital_anomalies = models.ForeignKey(InfantCongenitalAnomalies)

    # Historical/audit trail used by the sync framework.
    history = SyncHistoricalRecords()

    class Meta:
        abstract = True
class InfantCns(BaseCnsItem):
    """One central-nervous-system anomaly recorded for an infant."""

    cns = models.CharField(
        max_length=250,
        choices=CNS_ABNORMALITIES,
        verbose_name="Central nervous system abnormality",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    cns_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantCnsManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.cns, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Cns"
        unique_together = ('cns', 'congenital_anomalies')
class InfantFacialDefect(BaseCnsItem):
    """One facial defect recorded for an infant."""

    facial_defect = models.CharField(
        max_length=250,
        choices=FACIAL_DEFECT,
        verbose_name="Facial defects",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    facial_defects_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantFacialDefectManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.facial_defect, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Facial"
        unique_together = ('facial_defect', 'congenital_anomalies')
class InfantCleftDisorder(BaseCnsItem):
    """One cleft disorder recorded for an infant."""

    cleft_disorder = models.CharField(
        max_length=250,
        choices=CLEFT_DISORDER,
        verbose_name="Cleft disorders",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    cleft_disorders_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantCleftDisorderManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.cleft_disorder, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Cleft"
        unique_together = ('cleft_disorder', 'congenital_anomalies')
class InfantMouthUpGi(BaseCnsItem):
    """One mouth / upper-gastrointestinal disorder recorded for an infant."""

    mouth_up_gi = models.CharField(
        max_length=250,
        choices=MOUTH_UP_GASTROINT_DISORDER,
        verbose_name="Mouth and upper gastrointestinal disorders",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    mouth_up_gi_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True
    )

    objects = InfantMouthUpGiManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.mouth_up_gi, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:MouthUp"
        unique_together = ('mouth_up_gi', 'congenital_anomalies')
class InfantCardioDisorder(BaseCnsItem):
    """One cardiovascular disorder recorded for an infant."""

    cardio_disorder = models.CharField(
        max_length=250,
        choices=CARDIOVASCULAR_DISORDER,
        verbose_name="Cardiovascular disorders",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    cardiovascular_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantCardioDisorderManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.cardio_disorder, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Cardio"
        unique_together = ('cardio_disorder', 'congenital_anomalies')
class InfantRespiratoryDefect(BaseCnsItem):
    """One respiratory defect recorded for an infant."""

    respiratory_defect = models.CharField(
        max_length=250,
        choices=RESPIRATORY_DEFECT,
        verbose_name="Respiratory defects",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    respiratory_defects_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantRespiratoryDefectManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.respiratory_defect, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Respiratory"
        unique_together = ('respiratory_defect', 'congenital_anomalies')
class InfantLowerGi(BaseCnsItem):
    """One lower-gastrointestinal abnormality recorded for an infant."""

    lower_gi = models.CharField(
        max_length=250,
        choices=LOWER_GASTROINTESTINAL_ABNORMALITY,
        verbose_name="Lower gastrointestinal abnormalities",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    lower_gi_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify",
        blank=True,
        null=True,
    )

    objects = InfantLowerGiManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.lower_gi, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Lower GI"
        unique_together = ('lower_gi', 'congenital_anomalies')
class InfantFemaleGenital(BaseCnsItem):
    """One female genital anomaly recorded for an infant."""

    female_genital = models.CharField(
        max_length=250,
        choices=FEM_GENITAL_ANOMALY,
        verbose_name="Female genital anomaly",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    female_genital_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantFemaleGenitalManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.female_genital, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Female Gen"
        unique_together = ('female_genital', 'congenital_anomalies')
class InfantMaleGenital(BaseCnsItem):
    """One male genital anomaly recorded for an infant."""

    male_genital = models.CharField(
        max_length=250,
        choices=MALE_GENITAL_ANOMALY,
        verbose_name="Male genital anomaly",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    male_genital_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantMaleGenitalManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.male_genital, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies:Male Gen"
        unique_together = ('male_genital', 'congenital_anomalies')
class InfantRenal(BaseCnsItem):
    """One renal anomaly recorded for an infant."""

    renal = models.CharField(
        max_length=250,
        choices=RENAL_ANOMALY,
        verbose_name="Renal anomalies",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    renal_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantRenalManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.renal, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies: Renal"
        unique_together = ('renal', 'congenital_anomalies')
class InfantMusculoskeletal(BaseCnsItem):
    """One musculo-skeletal abnormality recorded for an infant."""

    musculo_skeletal = models.CharField(
        max_length=250,
        choices=MUSCULOSKELETAL_ABNORMALITY,
        verbose_name="Musculo-skeletal abnomalities",
    )

    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )

    # Free text when the 'other' choice is selected.
    musculo_skeletal_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )

    objects = InfantMusculoskeletalManager()

    def natural_key(self):
        """Natural key for serialization: anomaly choice + parent CRF key."""
        return (self.musculo_skeletal, ) + self.congenital_anomalies.natural_key()

    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies: Musculo-sk"
        unique_together = ('musculo_skeletal', 'congenital_anomalies')
class InfantSkin(BaseCnsItem):
    """Congenital anomaly record: skin findings for an infant.

    Uniqueness is enforced per (skin, congenital_anomalies) pair.
    """
    # Which skin anomaly was observed, from a fixed choice list.
    # NOTE(review): "hemangloma" in the help text is a runtime string and
    # likely a typo for "hemangioma"; left untouched here.
    skin = models.CharField(
        max_length=250,
        choices=SKIN_ABNORMALITY,
        verbose_name="Skin abnormalities",
        help_text="Excludes cafe au lait spots, Mongolian spots, port wine stains, "
                  "nevus, hemangloma <4 cm in diameter. If hemangloma is >4 cm, specify",
    )
    # Whether the anomaly is confirmed or only suspected.
    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )
    # Free text captured when the "Other" choice is selected.
    skin_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )
    objects = InfantSkinManager()
    def natural_key(self):
        # Serialization key: this row's choice value plus the parent
        # record's natural key.
        return (self.skin, ) + self.congenital_anomalies.natural_key()
    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies: Skin"
        unique_together = ('skin', 'congenital_anomalies')
class InfantTrisomies(BaseCnsItem):
    """Congenital anomaly record: trisomy / chromosomal findings.

    Uniqueness is enforced per (trisomies, congenital_anomalies) pair.
    """
    # Which chromosomal anomaly was observed, from a fixed choice list.
    trisomies = models.CharField(
        max_length=250,
        choices=TRISOME_CHROSOMESOME_ABNORMALITY,
        verbose_name="Trisomies / chromosomes abnormalities",
    )
    # Whether the anomaly is confirmed or only suspected.
    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )
    # Free text captured when the "Other" choice is selected.
    trisomies_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )
    objects = InfantTrisomiesManager()
    def natural_key(self):
        # Serialization key: this row's choice value plus the parent
        # record's natural key.
        return (self.trisomies, ) + self.congenital_anomalies.natural_key()
    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies: Trisomes"
        unique_together = ('trisomies', 'congenital_anomalies')
class InfantOtherAbnormalityItems(BaseCnsItem):
    """Congenital anomaly record: catch-all for other defect types.

    Uniqueness is enforced per (other_abnormalities, congenital_anomalies).
    """
    # Which "other" defect was observed, from a fixed choice list.
    other_abnormalities = models.CharField(
        max_length=250,
        choices=OTHER_DEFECT,
        verbose_name="Other",
    )
    # Whether the anomaly is confirmed or only suspected.
    abnormality_status = models.CharField(
        max_length=35,
        choices=CONFIRMED_SUSPECTED,
        verbose_name="Abnormality status",
    )
    # Free text captured when the "Other" choice is selected.
    other_abnormalities_other = OtherCharField(
        max_length=250,
        verbose_name="if other specify...",
        blank=True,
        null=True,
    )
    objects = InfantOtherAbnormalityItemsManager()
    def natural_key(self):
        # Serialization key: this row's choice value plus the parent
        # record's natural key.
        return (self.other_abnormalities, ) + self.congenital_anomalies.natural_key()
    class Meta:
        app_label = 'td_infant'
        verbose_name = "Congenital Anomalies: Other"
        unique_together = ('other_abnormalities', 'congenital_anomalies')
| gpl-2.0 |
simonjduff/rattle-media | rattlemediaplayer.py | 1 | 7063 | import config
from gmusicapi import Mobileclient
import logging
from gi.repository import Gst, GLib
from collections import deque
from gevent import Greenlet
import gevent
class PlayerStates:
    """String constants naming the three logical playback states.

    Used both as the controller's dispatch keys and as the values
    returned by ``RattleMediaPlayer.get_state``.
    """
    Playing = "Playing"
    Paused = "Paused"
    Stopped = "Stopped"
class RattleMediaPlayer:
    """Thin wrapper around a GStreamer ``playbin`` element.

    Translates between GStreamer states and the module-level
    ``PlayerStates`` strings, and polls the GStreamer bus from a
    greenlet so end-of-stream / state-change callbacks fire.
    """
    def __init__(self):
        self._logger = logging.getLogger('rattlemedia')
        Gst.init(None)
        self._player = Gst.ElementFactory.make('playbin', None)
        if not self._player:
            raise Exception('Player is None')
        # Start in NULL (stopped) state until a track is queued.
        self._player.set_state(Gst.State.NULL)
        self._logger.info('Starting to watch for gstreamer signals')
        # Background greenlet that polls the bus forever.
        Greenlet.spawn(self.watch_for_message)
    def watch_for_message(self):
        """Poll the GStreamer bus forever, dispatching EOS callbacks."""
        bus = self._player.get_bus()
        if not bus:
            raise Exception('Couldn\'t create bus')
        # Ideally we'd be using signal_watch on bus to fire on an event basis
        # but getting the GLib main loop to work with gevent has proved problematic
        # Polling works, but isn't as elegant
        while True:
            message = bus.pop()
            if message:
                self._logger.debug('Message received: {0}'.format(message.type))
                if message.type == Gst.MessageType.EOS:
                    self._logger.info('End of stream received')
                    self.end_of_stream_event_handler()
                elif message.type == Gst.MessageType.STATE_CHANGED:
                    self._logger.debug('State changed {0}'.format(self._player.get_state(100)[1]))
            if not message:
                # Bus empty: yield to other greenlets before polling again.
                gevent.sleep(0.5)
    def _set_state(self, state):
        # Map a PlayerStates value onto the underlying Gst.State; the
        # state-change callback fires even if the mapping raises.
        try:
            if state == PlayerStates.Stopped:
                self._player.set_state(Gst.State.NULL)
            elif state == PlayerStates.Paused:
                self._player.set_state(Gst.State.PAUSED)
            elif state == PlayerStates.Playing:
                self._player.set_state(Gst.State.PLAYING)
            else:
                raise Exception('Unknown state')
        finally:
            self.state_change_event_handler()
    def get_state(self):
        """Return the current state as a PlayerStates value (None if unknown)."""
        # Blocking query: waits until the pending state change settles.
        current_state = self._player.get_state(Gst.CLOCK_TIME_NONE)[1]
        if current_state == Gst.State.NULL:
            return PlayerStates.Stopped
        elif current_state == Gst.State.PAUSED:
            return PlayerStates.Paused
        elif current_state == Gst.State.PLAYING:
            return PlayerStates.Playing
        else:
            # Falls through returning None — callers treat that as unknown.
            self._logger.error('GStreamer player in unknown state {0}'.format(current_state))
    def play_track(self, track_url):
        """Point playbin at *track_url* and start playback."""
        self._player.set_property('uri', track_url)
        self._set_state(PlayerStates.Playing)
    def stop(self):
        self._set_state(PlayerStates.Stopped)
    def pause(self):
        self._set_state(PlayerStates.Paused)
    def play(self):
        self._set_state(PlayerStates.Playing)
    # Override with function to call on end of stream
    def end_of_stream_event_handler(self):
        pass
    # Override with function to call on state change
    def state_change_event_handler(self):
        pass
class ControllerState:
    """Base class of the controller's state machine.

    Default behaviour: play()/next() dequeue and play the next track,
    stop() stops the player, toggle() is a no-op. Subclasses override
    per state.
    """
    def __init__(self, controller, player):
        self._player = player
        self._controller = controller
        self._logger = logging.getLogger('rattlemedia')
    def __play_next_track(self):
        # Name-mangled private helper shared by play() and next().
        self._logger.info('Playing')
        try:
            # This sucks a bit. Should state own the api?
            # popleft() raises IndexError when the queue is empty.
            track_url = self._controller._api.get_stream_url(self._controller._queue.popleft(), config.google_device_id)
            self._player.play_track(track_url)
        except IndexError:
            self._logger.info('Queue empty. Stopping.')
            self._player.stop()
        finally:
            self._controller.update_state()
    def play(self):
        self.__play_next_track()
    def stop(self):
        self._logger.info('Stopping')
        self._player.stop()
    def toggle(self):
        pass
    def next(self):
        self.__play_next_track()
class ControllerStatePlaying(ControllerState):
    """State while a track is playing: play() is a no-op, toggle() pauses."""
    def toggle(self):
        # Pausing is the only meaningful transition out of "playing".
        self._player.pause()
    def play(self):
        # Already playing; nothing to do.
        pass
class ControllerStateStopped(ControllerState):
    """State while stopped: stop() and toggle() are both no-ops."""
    def toggle(self):
        # Nothing to toggle when no track is loaded.
        pass
    def stop(self):
        # Already stopped.
        pass
class ControllerStatePaused(ControllerState):
    """State while paused: play() and toggle() both resume playback."""
    def toggle(self):
        # Toggling from paused means resuming.
        self.play()
    def play(self):
        self._player.play()
class RattleMediaController:
    """Facade tying the Google Music API, the track queue and the player
    together behind a per-state dispatch (State pattern)."""
    # Shared state-instance lookup table, keyed by PlayerStates values.
    _states = None
    def __init__(self):
        # NOTE(review): logs into Google Music as a side effect of
        # construction — network I/O in __init__.
        api = Mobileclient()
        api.login(config.google_username, config.google_password, config.google_device_id)
        self._api = api
        self._logger = logging.getLogger('rattlemedia')
        self._player = RattleMediaPlayer()
        # Wire player callbacks back into this controller.
        self._player.end_of_stream_event_handler = self.end_of_stream_event
        self._player.state_change_event_handler = self.update_state
        # Queue of pending track ids (popped from the left when playing).
        self._queue = deque([])
        RattleMediaController._states = {PlayerStates.Paused: ControllerStatePaused(self, self._player),
                                         PlayerStates.Stopped: ControllerStateStopped(self, self._player),
                                         PlayerStates.Playing: ControllerStatePlaying(self, self._player),
                                         'Unknown': ControllerState(self, self._player)}
        self.state = ControllerState(self, self._player)
        self.update_state()
    def end_of_stream_event(self):
        # On EOS: reset the player then advance to the next queued track.
        self._player.stop()
        self.play()
    def search(self, search_term):
        """Search Google Music All Access for *search_term*."""
        self._logger.debug('Searching for {0}'.format(search_term))
        return self._api.search_all_access(search_term)
    def enqueue(self, song_id):
        """Append a single track id to the play queue."""
        self._logger.info('Enqueuing {0}'.format(song_id))
        self._queue.append(song_id)
    def play(self):
        self.state.play()
    def stop(self):
        # Stopping also discards everything still queued.
        self.state.stop()
        self._queue.clear()
    def toggle_playback(self):
        self.state.toggle()
    def next(self):
        self.state.next()
    def play_album(self, album_id):
        """Replace the current queue with *album_id*'s tracks and play."""
        self._logger.info('Playing album {0}'.format(album_id))
        self.stop()
        self.enqueue_album(album_id)
        self.play()
    def enqueue_album(self, album_id):
        """Append every track of *album_id* to the play queue."""
        album = self._api.get_album_info(album_id)
        tracks = album['tracks']
        for track in tracks:
            self._queue.append(track['nid'])
    def update_state(self):
        """Re-read the player state and switch to the matching state object."""
        current_state = None
        try:
            current_state = self._player.get_state()
            self._logger.debug('Switching state to {0}'.format(current_state))
            self.state = self._states[current_state]
            self._logger.info('Switched state to {0}'.format(self.state))
        except KeyError:
            # get_state() returned something unmapped (e.g. None).
            self._logger.warn('Switching to unknown state {0}'.format(current_state))
            self.state = self._states['Unknown']
        finally:
            self.state_change_callback(current_state)
    # Override with callback if required
    def state_change_callback(self, new_state):
        pass
| mit |
ondrokrc/gramps | gramps/gen/config.py | 1 | 14703 | # -*- coding: utf-8 -*-
#
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2005-2007 Donald N. Allingham
# Copyright (C) 2008-2009 Gary Burton
# Copyright (C) 2009-2012 Doug Blank <doug.blank@gmail.com>
# Copyright (C) 2011 Tim G L Lyons
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
"""
This package implements access to GRAMPS configuration.
"""
#---------------------------------------------------------------
#
# Gramps imports
#
#---------------------------------------------------------------
import os, sys
import logging
#---------------------------------------------------------------
#
# Gramps imports
#
#---------------------------------------------------------------
from .const import GRAMPS_LOCALE as glocale
_ = glocale.translation.gettext
from .const import HOME_DIR, USER_HOME, VERSION_DIR
from .utils.configmanager import ConfigManager
#---------------------------------------------------------------
#
# Constants
#
#---------------------------------------------------------------
INIFILE = os.path.join(VERSION_DIR, "gramps.ini")
#---------------------------------------------------------------
#
# Module functions
#
#---------------------------------------------------------------
def register(key, value):
    """ Module shortcut to register key with its default value """
    return CONFIGMAN.register(key, value)
def get(key):
    """ Module shortcut to get the current value for key """
    return CONFIGMAN.get(key)
def get_default(key):
    """ Module shortcut to get the registered default for key """
    return CONFIGMAN.get_default(key)
def has_default(key):
    """ Module shortcut to see if there is a default for key """
    return CONFIGMAN.has_default(key)
def get_sections():
    """ Module shortcut to get all section names of settings """
    return CONFIGMAN.get_sections()
def get_section_settings(section):
    """ Module shortcut to get all settings of a section """
    return CONFIGMAN.get_section_settings(section)
def set(key, value):
    """ Module shortcut to set key to value (shadows builtin ``set``) """
    return CONFIGMAN.set(key, value)
def is_set(key):
    """ Module shortcut to check whether key has a value set """
    return CONFIGMAN.is_set(key)
def save(filename=None):
    """ Module shortcut to save config data to filename (default INI file) """
    return CONFIGMAN.save(filename)
def connect(key, func):
    """
    Module shortcut to connect a key to a callback func.
    Returns a unique callback ID number, usable with disconnect().
    """
    return CONFIGMAN.connect(key, func)
def disconnect(callback_id):
    """ Module shortcut to remove a callback by its connect() ID number """
    return CONFIGMAN.disconnect(callback_id)
def reset(key=None):
    """ Module shortcut to reset some or all config data (None = all) """
    return CONFIGMAN.reset(key)
def load(filename=None, oldstyle=False):
    """ Module shortcut to load an INI file into config data """
    return CONFIGMAN.load(filename, oldstyle)
def emit(key):
    """ Module shortcut to call all callbacks associated with key """
    return CONFIGMAN.emit(key)
#---------------------------------------------------------------
#
# Register the system-wide settings in a singleton config manager
#
#---------------------------------------------------------------
CONFIGMAN = ConfigManager(INIFILE, "plugins")
register('behavior.addmedia-image-dir', '')
register('behavior.addmedia-relative-path', False)
register('behavior.autoload', False)
register('behavior.avg-generation-gap', 20)
register('behavior.betawarn', False)
register('behavior.check-for-updates', 0)
register('behavior.check-for-update-types', ["new"])
register('behavior.last-check-for-updates', "1970/01/01")
register('behavior.previously-seen-updates', [])
register('behavior.do-not-show-previously-seen-updates', True)
register('behavior.database-path', os.path.join( HOME_DIR, 'grampsdb'))
register('behavior.database-backend', 'bsddb')
register('behavior.date-about-range', 50)
register('behavior.date-after-range', 50)
register('behavior.date-before-range', 50)
register('behavior.generation-depth', 15)
register('behavior.max-age-prob-alive', 110)
register('behavior.max-sib-age-diff', 20)
register('behavior.min-generation-years', 13)
register('behavior.owner-warn', False)
register('behavior.pop-plugin-status', False)
register('behavior.recent-export-type', 3)
register('behavior.spellcheck', False)
register('behavior.startup', 0)
register('behavior.surname-guessing', 0)
register('behavior.use-tips', False)
register('behavior.welcome', 100)
register('behavior.web-search-url', 'http://google.com/#&q=%(text)s')
register('behavior.addons-url', "https://raw.githubusercontent.com/gramps-project/addons/master/gramps50")
register('export.proxy-order', [
["privacy", 0],
["living", 0],
["person", 0],
["note", 0],
["reference", 0],
])
register('geography.center-lon', 0.0)
register('geography.lock', False)
register('geography.center-lat', 0.0)
register('geography.map', "person")
register('geography.map_service', 1)
register('geography.zoom', 0)
register('geography.zoom_when_center', 12)
register('geography.show_cross', False)
register('geography.path', "")
register('geography.use-keypad', True)
register('interface.address-height', 450)
register('interface.address-width', 650)
register('interface.attribute-height', 350)
register('interface.attribute-width', 600)
register('interface.child-ref-height', 450)
register('interface.child-ref-width', 600)
register('interface.citation-height', 450)
register('interface.citation-sel-height', 450)
register('interface.citation-sel-width', 600)
register('interface.citation-width', 600)
register('interface.clipboard-height', 300)
register('interface.clipboard-width', 300)
register('interface.dont-ask', False)
register('interface.view-categories',
["Dashboard", "People", "Relationships", "Families",
"Ancestry", "Events", "Places", "Geography", "Sources",
"Citations", "Repositories", "Media", "Notes"])
register('interface.edit-filter-width', 500)
register('interface.edit-filter-height', 420)
register('interface.edit-rule-width', 600)
register('interface.edit-rule-height', 450)
register('interface.event-height', 450)
register('interface.event-ref-height', 450)
register('interface.event-ref-width', 600)
register('interface.event-sel-height', 450)
register('interface.event-sel-width', 600)
register('interface.event-width', 600)
register('interface.family-height', 500)
register('interface.family-sel-height', 450)
register('interface.family-sel-width', 600)
register('interface.family-width', 700)
register('interface.filter', False)
register('interface.filter-editor-width', 400)
register('interface.filter-editor-height', 350)
register('interface.fullscreen', False)
register('interface.grampletbar-close', False)
register('interface.height', 500)
register('interface.ignore-gexiv2', False)
register('interface.ignore-osmgpsmap', False)
register('interface.lds-height', 450)
register('interface.lds-width', 600)
register('interface.location-height', 250)
register('interface.location-width', 600)
register('interface.mapservice', 'OpenStreetMap')
register('interface.media-height', 450)
register('interface.media-ref-height', 450)
register('interface.media-ref-width', 600)
register('interface.media-sel-height', 450)
register('interface.media-sel-width', 600)
register('interface.media-width', 650)
register('interface.name-height', 350)
register('interface.name-width', 600)
register('interface.note-height', 500)
register('interface.note-sel-height', 450)
register('interface.note-sel-width', 600)
register('interface.note-width', 700)
register('interface.open-with-default-viewer', False)
register('interface.pedview-layout', 0)
register('interface.pedview-show-images', True)
register('interface.pedview-show-marriage', False)
register('interface.pedview-tree-size', 5)
register('interface.pedview-tree-direction', 2)
register('interface.pedview-show-unknown-people', False)
register('interface.person-height', 550)
register('interface.person-ref-height', 350)
register('interface.person-ref-width', 600)
register('interface.person-sel-height', 450)
register('interface.person-sel-width', 600)
register('interface.person-width', 750)
register('interface.place-height', 450)
register('interface.place-name-height', 100)
register('interface.place-name-width', 450)
register('interface.place-ref-height', 450)
register('interface.place-ref-width', 600)
register('interface.place-sel-height', 450)
register('interface.place-sel-width', 600)
register('interface.place-width', 650)
register('interface.repo-height', 450)
register('interface.repo-ref-height', 450)
register('interface.repo-ref-width', 600)
register('interface.repo-sel-height', 450)
register('interface.repo-sel-width', 600)
register('interface.repo-width', 650)
register('interface.sidebar-text', True)
register('interface.size-checked', False)
register('interface.source-height', 450)
register('interface.source-ref-height', 450)
register('interface.source-ref-width', 600)
register('interface.source-sel-height', 450)
register('interface.source-sel-width', 600)
register('interface.source-width', 600)
register('interface.statusbar', 1)
register('interface.toolbar-on', True)
register('interface.url-height', 150)
register('interface.url-width', 600)
register('interface.view', True)
register('interface.width', 775)
register('interface.surname-box-height', 150)
register('paths.recent-export-dir', '')
register('paths.recent-file', '')
register('paths.recent-import-dir', '')
register('paths.report-directory', USER_HOME)
register('paths.website-directory', USER_HOME)
register('paths.quick-backup-directory', USER_HOME)
register('paths.quick-backup-filename',
"%(filename)s_%(year)d-%(month)02d-%(day)02d.%(extension)s")
register('preferences.date-format', 0)
register('preferences.calendar-format-report', 0)
register('preferences.cprefix', 'C%04d')
register('preferences.default-source', False)
register('preferences.tag-on-import', False)
register('preferences.tag-on-import-format', _("Imported %Y/%m/%d %H:%M:%S"))
register('preferences.eprefix', 'E%04d')
register('preferences.family-warn', True)
register('preferences.fprefix', 'F%04d')
register('preferences.hide-ep-msg', False)
register('preferences.invalid-date-format', "<b>%s</b>")
register('preferences.iprefix', 'I%04d')
register('preferences.name-format', 1)
register('preferences.place-auto', True)
register('preferences.place-number', False)
register('preferences.place-reverse', False)
register('preferences.place-restrict', 0)
register('preferences.place-lang', '')
register('preferences.patronimic-surname', False)
register('preferences.no-given-text', "[%s]" % _("Missing Given Name"))
register('preferences.no-record-text', "[%s]" % _("Missing Record"))
register('preferences.no-surname-text', "[%s]" % _("Missing Surname"))
register('preferences.nprefix', 'N%04d')
register('preferences.online-maps', False)
register('preferences.oprefix', 'O%04d')
register('preferences.paper-metric', 0)
register('preferences.paper-preference', 'Letter')
register('preferences.pprefix', 'P%04d')
register('preferences.private-given-text', "[%s]" % _("Living"))
register('preferences.private-record-text', "[%s]" % _("Private Record"))
register('preferences.private-surname-text', "[%s]" % _("Living"))
register('preferences.rprefix', 'R%04d')
register('preferences.sprefix', 'S%04d')
register('preferences.use-last-view', False)
register('preferences.last-view', '')
register('preferences.last-views', [])
register('preferences.family-relation-type', 3) # UNKNOWN
register('preferences.age-display-precision', 1)
register('preferences.color-gender-male-alive', '#b8cee6')
register('preferences.color-gender-male-death', '#b8cee6')
register('preferences.color-gender-female-alive', '#feccf0')
register('preferences.color-gender-female-death', '#feccf0')
register('preferences.color-gender-unknown-alive', '#f3dbb6')
register('preferences.color-gender-unknown-death', '#f3dbb6')
#register('preferences.color-gender-other-alive', '#fcaf3e')
#register('preferences.color-gender-other-death', '#fcaf3e')
register('preferences.bordercolor-gender-male-alive', '#1f4986')
register('preferences.bordercolor-gender-male-death', '#000000')
register('preferences.bordercolor-gender-female-alive', '#861f69')
register('preferences.bordercolor-gender-female-death', '#000000')
register('preferences.bordercolor-gender-unknown-alive', '#8e5801')
register('preferences.bordercolor-gender-unknown-death', '#000000')
#register('preferences.bordercolor-gender-other-alive', '#f57900')
#register('preferences.bordercolor-gender-other-death', '#000000')
register('researcher.researcher-addr', '')
register('researcher.researcher-locality', '')
register('researcher.researcher-city', '')
register('researcher.researcher-country', '')
register('researcher.researcher-email', '')
register('researcher.researcher-name', '')
register('researcher.researcher-phone', '')
register('researcher.researcher-postal', '')
register('researcher.researcher-state', '')
register('plugin.hiddenplugins', [])
register('plugin.addonplugins', [])
#---------------------------------------------------------------
#
# Upgrade Conversions go here.
#
#---------------------------------------------------------------
# If we have not already upgraded to this version,
# we can tell by seeing if there is a key file for this version:
if not os.path.exists(CONFIGMAN.filename):
    # First run of this Gramps version: no per-version INI file yet.
    # If not, let's read old if there:
    if os.path.exists(os.path.join(HOME_DIR, "keys.ini")):
        # read it in old style:
        logging.warning("Importing old key file 'keys.ini'...")
        CONFIGMAN.load(os.path.join(HOME_DIR, "keys.ini"),
                       oldstyle=True)
        logging.warning("Done importing old key file 'keys.ini'")
    # other version upgrades here...
#---------------------------------------------------------------
#
# Now, load the settings from the config file, if one
#
#---------------------------------------------------------------
CONFIGMAN.load()
# Public alias so callers can use ``config.get(...)`` etc. directly.
config = CONFIGMAN
| gpl-2.0 |
mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/pandas/tests/io/json/test_json_table_schema.py | 9 | 18572 | """Tests for Table Schema integration."""
import json
from collections import OrderedDict
import numpy as np
import pandas as pd
import pytest
from pandas import DataFrame
from pandas.core.dtypes.dtypes import (
PeriodDtype, CategoricalDtype, DatetimeTZDtype)
from pandas.io.json.table_schema import (
as_json_table_type,
build_table_schema,
make_field,
set_default_names)
class TestBuildSchema(object):
    """Tests for ``build_table_schema`` on DataFrames and Series."""

    def setup_method(self, method):
        # One column of each basic dtype, plus a named integer index.
        self.df = DataFrame(
            {'A': [1, 2, 3, 4],
             'B': ['a', 'b', 'c', 'c'],
             'C': pd.date_range('2016-01-01', freq='d', periods=4),
             'D': pd.timedelta_range('1H', periods=4, freq='T'),
             },
            index=pd.Index(range(4), name='idx'))

    def test_build_table_schema(self):
        result = build_table_schema(self.df, version=False)
        expected = {
            'fields': [{'name': 'idx', 'type': 'integer'},
                       {'name': 'A', 'type': 'integer'},
                       {'name': 'B', 'type': 'string'},
                       {'name': 'C', 'type': 'datetime'},
                       {'name': 'D', 'type': 'duration'},
                       ],
            'primaryKey': ['idx']
        }
        assert result == expected
        # With version=True (default) a pandas_version key is included.
        result = build_table_schema(self.df)
        assert "pandas_version" in result

    def test_series(self):
        s = pd.Series([1, 2, 3], name='foo')
        result = build_table_schema(s, version=False)
        expected = {'fields': [{'name': 'index', 'type': 'integer'},
                               {'name': 'foo', 'type': 'integer'}],
                    'primaryKey': ['index']}
        assert result == expected
        result = build_table_schema(s)
        assert 'pandas_version' in result

    def test_series_unnamed(self):
        # BUG FIX: this method was misspelled ``tets_series_unnamed``,
        # so pytest never collected or ran it.
        result = build_table_schema(pd.Series([1, 2, 3]), version=False)
        expected = {'fields': [{'name': 'index', 'type': 'integer'},
                               {'name': 'values', 'type': 'integer'}],
                    'primaryKey': ['index']}
        assert result == expected

    def test_multiindex(self):
        df = self.df.copy()
        idx = pd.MultiIndex.from_product([('a', 'b'), (1, 2)])
        df.index = idx
        result = build_table_schema(df, version=False)
        expected = {
            'fields': [{'name': 'level_0', 'type': 'string'},
                       {'name': 'level_1', 'type': 'integer'},
                       {'name': 'A', 'type': 'integer'},
                       {'name': 'B', 'type': 'string'},
                       {'name': 'C', 'type': 'datetime'},
                       {'name': 'D', 'type': 'duration'},
                       ],
            'primaryKey': ['level_0', 'level_1']
        }
        assert result == expected
        # Partially-named MultiIndex levels keep their given names.
        df.index.names = ['idx0', None]
        expected['fields'][0]['name'] = 'idx0'
        expected['primaryKey'] = ['idx0', 'level_1']
        result = build_table_schema(df, version=False)
        assert result == expected
class TestTableSchemaType(object):
    """Tests mapping numpy/pandas data and dtypes to Table Schema types.

    NOTE(review): ``np.int`` / ``np.float`` / ``np.bool`` are deprecated
    aliases of the builtins and were removed in NumPy 1.24 — fine for the
    versions this file targets, but will break on modern NumPy; confirm
    before reusing.
    """
    # --- array/series inputs ---
    def test_as_json_table_type_int_data(self):
        int_data = [1, 2, 3]
        int_types = [np.int, np.int16, np.int32, np.int64]
        for t in int_types:
            assert as_json_table_type(np.array(
                int_data, dtype=t)) == 'integer'
    def test_as_json_table_type_float_data(self):
        float_data = [1., 2., 3.]
        float_types = [np.float, np.float16, np.float32, np.float64]
        for t in float_types:
            assert as_json_table_type(np.array(
                float_data, dtype=t)) == 'number'
    def test_as_json_table_type_bool_data(self):
        bool_data = [True, False]
        bool_types = [bool, np.bool]
        for t in bool_types:
            assert as_json_table_type(np.array(
                bool_data, dtype=t)) == 'boolean'
    def test_as_json_table_type_date_data(self):
        # Naive, tz-aware and period data all map to 'datetime'.
        date_data = [pd.to_datetime(['2016']),
                     pd.to_datetime(['2016'], utc=True),
                     pd.Series(pd.to_datetime(['2016'])),
                     pd.Series(pd.to_datetime(['2016'], utc=True)),
                     pd.period_range('2016', freq='A', periods=3)]
        for arr in date_data:
            assert as_json_table_type(arr) == 'datetime'
    def test_as_json_table_type_string_data(self):
        strings = [pd.Series(['a', 'b']), pd.Index(['a', 'b'])]
        for t in strings:
            assert as_json_table_type(t) == 'string'
    def test_as_json_table_type_categorical_data(self):
        # Categoricals always map to 'any' regardless of category dtype.
        assert as_json_table_type(pd.Categorical(['a'])) == 'any'
        assert as_json_table_type(pd.Categorical([1])) == 'any'
        assert as_json_table_type(pd.Series(pd.Categorical([1]))) == 'any'
        assert as_json_table_type(pd.CategoricalIndex([1])) == 'any'
        assert as_json_table_type(pd.Categorical([1])) == 'any'
    # ------
    # dtypes
    # ------
    def test_as_json_table_type_int_dtypes(self):
        integers = [np.int, np.int16, np.int32, np.int64]
        for t in integers:
            assert as_json_table_type(t) == 'integer'
    def test_as_json_table_type_float_dtypes(self):
        floats = [np.float, np.float16, np.float32, np.float64]
        for t in floats:
            assert as_json_table_type(t) == 'number'
    def test_as_json_table_type_bool_dtypes(self):
        bools = [bool, np.bool]
        for t in bools:
            assert as_json_table_type(t) == 'boolean'
    def test_as_json_table_type_date_dtypes(self):
        # TODO: datedate.date? datetime.time?
        dates = [np.datetime64, np.dtype("<M8[ns]"), PeriodDtype(),
                 DatetimeTZDtype('ns', 'US/Central')]
        for t in dates:
            assert as_json_table_type(t) == 'datetime'
    def test_as_json_table_type_timedelta_dtypes(self):
        durations = [np.timedelta64, np.dtype("<m8[ns]")]
        for t in durations:
            assert as_json_table_type(t) == 'duration'
    def test_as_json_table_type_string_dtypes(self):
        strings = [object]  # TODO
        for t in strings:
            assert as_json_table_type(t) == 'string'
    def test_as_json_table_type_categorical_dtypes(self):
        assert as_json_table_type(pd.Categorical) == 'any'
        assert as_json_table_type(CategoricalDtype()) == 'any'
class TestTableOrient(object):
    def setup_method(self, method):
        # Fixture frame: one column per interesting dtype (int, string,
        # datetime, timedelta, unordered/ordered categorical, float,
        # tz-aware datetime) with a named integer index.
        self.df = DataFrame(
            {'A': [1, 2, 3, 4],
             'B': ['a', 'b', 'c', 'c'],
             'C': pd.date_range('2016-01-01', freq='d', periods=4),
             'D': pd.timedelta_range('1H', periods=4, freq='T'),
             'E': pd.Series(pd.Categorical(['a', 'b', 'c', 'c'])),
             'F': pd.Series(pd.Categorical(['a', 'b', 'c', 'c'],
                                           ordered=True)),
             'G': [1., 2., 3, 4.],
             'H': pd.date_range('2016-01-01', freq='d', periods=4,
                                tz='US/Central'),
             },
            index=pd.Index(range(4), name='idx'))
    def test_build_series(self):
        # Round-trip a named Series through orient='table' and compare
        # the full schema + data payload.
        s = pd.Series([1, 2], name='a')
        s.index.name = 'id'
        result = s.to_json(orient='table', date_format='iso')
        result = json.loads(result, object_pairs_hook=OrderedDict)
        assert "pandas_version" in result['schema']
        # Drop the version key so the comparison is version-independent.
        result['schema'].pop('pandas_version')
        fields = [{'name': 'id', 'type': 'integer'},
                  {'name': 'a', 'type': 'integer'}]
        schema = {
            'fields': fields,
            'primaryKey': ['id'],
        }
        expected = OrderedDict([
            ('schema', schema),
            ('data', [OrderedDict([('id', 0), ('a', 1)]),
                      OrderedDict([('id', 1), ('a', 2)])])])
        assert result == expected
    def test_to_json(self):
        # Golden test: full schema + data payload for the fixture frame,
        # covering every dtype in setup_method.
        df = self.df.copy()
        df.index.name = 'idx'
        result = df.to_json(orient='table', date_format='iso')
        result = json.loads(result, object_pairs_hook=OrderedDict)
        assert "pandas_version" in result['schema']
        result['schema'].pop('pandas_version')
        fields = [
            {'name': 'idx', 'type': 'integer'},
            {'name': 'A', 'type': 'integer'},
            {'name': 'B', 'type': 'string'},
            {'name': 'C', 'type': 'datetime'},
            {'name': 'D', 'type': 'duration'},
            # Categoricals carry their categories and orderedness.
            {'constraints': {'enum': ['a', 'b', 'c']},
             'name': 'E',
             'ordered': False,
             'type': 'any'},
            {'constraints': {'enum': ['a', 'b', 'c']},
             'name': 'F',
             'ordered': True,
             'type': 'any'},
            {'name': 'G', 'type': 'number'},
            # tz-aware datetimes record their timezone.
            {'name': 'H', 'type': 'datetime', 'tz': 'US/Central'}
        ]
        schema = {
            'fields': fields,
            'primaryKey': ['idx'],
        }
        # Datetimes serialize as ISO-8601 UTC; timedeltas as ISO durations.
        data = [
            OrderedDict([('idx', 0), ('A', 1), ('B', 'a'),
                         ('C', '2016-01-01T00:00:00.000Z'),
                         ('D', 'P0DT1H0M0S'),
                         ('E', 'a'), ('F', 'a'), ('G', 1.),
                         ('H', '2016-01-01T06:00:00.000Z')
                         ]),
            OrderedDict([('idx', 1), ('A', 2), ('B', 'b'),
                         ('C', '2016-01-02T00:00:00.000Z'),
                         ('D', 'P0DT1H1M0S'),
                         ('E', 'b'), ('F', 'b'), ('G', 2.),
                         ('H', '2016-01-02T06:00:00.000Z')
                         ]),
            OrderedDict([('idx', 2), ('A', 3), ('B', 'c'),
                         ('C', '2016-01-03T00:00:00.000Z'),
                         ('D', 'P0DT1H2M0S'),
                         ('E', 'c'), ('F', 'c'), ('G', 3.),
                         ('H', '2016-01-03T06:00:00.000Z')
                         ]),
            OrderedDict([('idx', 3), ('A', 4), ('B', 'c'),
                         ('C', '2016-01-04T00:00:00.000Z'),
                         ('D', 'P0DT1H3M0S'),
                         ('E', 'c'), ('F', 'c'), ('G', 4.),
                         ('H', '2016-01-04T06:00:00.000Z')
                         ]),
        ]
        expected = OrderedDict([('schema', schema), ('data', data)])
        assert result == expected
    def test_to_json_float_index(self):
        # A float index maps to Table Schema type 'number'.
        data = pd.Series(1, index=[1., 2.])
        result = data.to_json(orient='table', date_format='iso')
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result['schema'].pop('pandas_version')
        expected = (
            OrderedDict([('schema', {
                'fields': [{'name': 'index', 'type': 'number'},
                           {'name': 'values', 'type': 'integer'}],
                'primaryKey': ['index']
            }),
                ('data', [OrderedDict([('index', 1.0), ('values', 1)]),
                          OrderedDict([('index', 2.0), ('values', 1)])])])
        )
        assert result == expected
    def test_to_json_period_index(self):
        # A PeriodIndex serializes as 'datetime' fields carrying the freq,
        # with each period rendered as its ISO start timestamp.
        idx = pd.period_range('2016', freq='Q-JAN', periods=2)
        data = pd.Series(1, idx)
        result = data.to_json(orient='table', date_format='iso')
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result['schema'].pop('pandas_version')
        fields = [{'freq': 'Q-JAN', 'name': 'index', 'type': 'datetime'},
                  {'name': 'values', 'type': 'integer'}]
        schema = {'fields': fields, 'primaryKey': ['index']}
        data = [OrderedDict([('index', '2015-11-01T00:00:00.000Z'),
                             ('values', 1)]),
                OrderedDict([('index', '2016-02-01T00:00:00.000Z'),
                             ('values', 1)])]
        expected = OrderedDict([('schema', schema), ('data', data)])
        assert result == expected
    def test_to_json_categorical_index(self):
        # A CategoricalIndex serializes as type 'any' with an enum
        # constraint listing the categories.
        data = pd.Series(1, pd.CategoricalIndex(['a', 'b']))
        result = data.to_json(orient='table', date_format='iso')
        result = json.loads(result, object_pairs_hook=OrderedDict)
        result['schema'].pop('pandas_version')
        expected = (
            OrderedDict([('schema',
                          {'fields': [{'name': 'index', 'type': 'any',
                                       'constraints': {'enum': ['a', 'b']},
                                       'ordered': False},
                                      {'name': 'values', 'type': 'integer'}],
                           'primaryKey': ['index']}),
                         ('data', [
                             OrderedDict([('index', 'a'),
                                          ('values', 1)]),
                             OrderedDict([('index', 'b'), ('values', 1)])])])
        )
        assert result == expected
def test_date_format_raises(self):
    """orient='table' rejects date_format='epoch'; 'iso' and the
    default remain accepted."""
    with pytest.raises(ValueError):
        self.df.to_json(orient='table', date_format='epoch')
    # The remaining variants must not raise.
    for kwargs in ({'date_format': 'iso'}, {}):
        self.df.to_json(orient='table', **kwargs)
def test_make_field_int(self):
    """Integer Series and Index both map to a table-schema 'integer' field."""
    values = [1, 2, 3]
    expected = {"name": "name", "type": 'integer'}
    for container in (pd.Series(values, name='name'),
                      pd.Index(values, name='name')):
        assert make_field(container) == expected
def test_make_field_float(self):
    """Float Series and Index both map to a table-schema 'number' field."""
    values = [1., 2., 3.]
    expected = {"name": "name", "type": 'number'}
    for container in (pd.Series(values, name='name'),
                      pd.Index(values, name='name')):
        assert make_field(container) == expected
def test_make_field_datetime(self):
    """Datetime data maps to 'datetime'; tz-aware data additionally
    records its timezone, and period data records its frequency."""
    data = [1., 2., 3.]
    # Naive datetimes: plain 'datetime' type with no extra metadata.
    kinds = [pd.Series(pd.to_datetime(data), name='values'),
             pd.to_datetime(data)]
    for kind in kinds:
        result = make_field(kind)
        expected = {"name": "values", "type": 'datetime'}
        assert result == expected
    # tz-aware datetimes carry a "tz" entry.
    kinds = [pd.Series(pd.to_datetime(data, utc=True), name='values'),
             pd.to_datetime(data, utc=True)]
    for kind in kinds:
        result = make_field(kind)
        expected = {"name": "values", "type": 'datetime', "tz": "UTC"}
        assert result == expected
    # A PeriodIndex carries its frequency string instead.
    arr = pd.period_range('2016', freq='A-DEC', periods=4)
    result = make_field(arr)
    expected = {"name": "values", "type": 'datetime', "freq": "A-DEC"}
    assert result == expected
def test_make_field_categorical(self):
    """Categorical Series and CategoricalIndex both yield an 'any' field
    with an enum constraint and the categorical's ordered flag."""
    data = ['a', 'b', 'c']
    ordereds = [True, False]
    for ordered in ordereds:
        arr = pd.Series(pd.Categorical(data, ordered=ordered), name='cats')
        result = make_field(arr)
        expected = {"name": "cats", "type": "any",
                    "constraints": {"enum": data},
                    "ordered": ordered}
        assert result == expected
        # Same expectation for the index flavour of the categorical.
        arr = pd.CategoricalIndex(data, ordered=ordered, name='cats')
        result = make_field(arr)
        expected = {"name": "cats", "type": "any",
                    "constraints": {"enum": data},
                    "ordered": ordered}
        assert result == expected
def test_categorical(self):
    """End-to-end orient='table' output for a categorical Series: the
    schema uses an enum constraint and the data carries the labels."""
    s = pd.Series(pd.Categorical(['a', 'b', 'a']))
    s.index.name = 'idx'
    result = s.to_json(orient='table', date_format='iso')
    result = json.loads(result, object_pairs_hook=OrderedDict)
    # Drop the version stamp so the comparison is version-independent.
    result['schema'].pop('pandas_version')
    fields = [{'name': 'idx', 'type': 'integer'},
              {'constraints': {'enum': ['a', 'b']},
               'name': 'values',
               'ordered': False,
               'type': 'any'}]
    expected = OrderedDict([
        ('schema', {'fields': fields,
                    'primaryKey': ['idx']}),
        ('data', [OrderedDict([('idx', 0), ('values', 'a')]),
                  OrderedDict([('idx', 1), ('values', 'b')]),
                  OrderedDict([('idx', 2), ('values', 'a')])])])
    assert result == expected
def test_set_default_names_unset(self):
    """An unnamed index receives the default name 'index'."""
    series = pd.Series(1, pd.Index([1]))
    assert set_default_names(series).index.name == 'index'
def test_set_default_names_set(self):
    """An already-named index is left untouched."""
    series = pd.Series(1, pd.Index([1], name='myname'))
    assert set_default_names(series).index.name == 'myname'
def test_set_default_names_mi_unset(self):
    """Unnamed MultiIndex levels get 'level_N' default names."""
    mi = pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')])
    renamed = set_default_names(pd.Series(1, mi))
    assert renamed.index.names == ['level_0', 'level_1']
def test_set_default_names_mi_set(self):
    """Fully named MultiIndex levels are left untouched."""
    mi = pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')],
                                    names=['n1', 'n2'])
    renamed = set_default_names(pd.Series(1, mi))
    assert renamed.index.names == ['n1', 'n2']
def test_set_default_names_mi_partion(self):
    """Only the unnamed levels of a partially named MultiIndex get
    'level_N' defaults; named levels keep their names."""
    mi = pd.MultiIndex.from_product([('a', 'b'), ('c', 'd')],
                                    names=['n1', None])
    renamed = set_default_names(pd.Series(1, mi))
    assert renamed.index.names == ['n1', 'level_1']
def test_timestamp_in_columns(self):
    """Timestamp/Timedelta column labels serialize as epoch-milliseconds
    and milliseconds respectively in the schema field names."""
    frame = pd.DataFrame([[1, 2]],
                         columns=[pd.Timestamp('2016'),
                                  pd.Timedelta(10, unit='s')])
    fields = json.loads(frame.to_json(orient="table"))['schema']['fields']
    # fields[0] is the index field; the two data columns follow it.
    assert fields[1]['name'] == 1451606400000
    assert fields[2]['name'] == 10000
def test_overlapping_names(self):
    """Index/column name collisions raise a ValueError whose message
    mentions 'Overlapping'."""
    mi = pd.MultiIndex.from_arrays([['a'], [1]], names=["A", "a"])
    cases = [
        pd.Series([1], index=pd.Index([1], name='a'), name='a'),
        pd.DataFrame({"A": [1]}, index=pd.Index([1], name="A")),
        pd.DataFrame({"A": [1]}, index=mi),
    ]
    for case in cases:
        with pytest.raises(ValueError) as excinfo:
            case.to_json(orient='table')
        assert 'Overlapping' in str(excinfo.value)
def test_mi_falsey_name(self):
    # GH 16203: falsey (None) MultiIndex names must still yield
    # 'level_N' field names in the generated schema.
    index = pd.MultiIndex.from_product([('A', 'B'), ('a', 'b')])
    frame = pd.DataFrame(np.random.randn(4, 4), index=index)
    names = [field['name'] for field in build_table_schema(frame)['fields']]
    assert names == ['level_0', 'level_1', 0, 1, 2, 3]
| mit |
haifeng60902/osgswig | examples/python/gtkosgwidget.py | 2 | 5623 | #!/usr/bin/env python
"""
gtkosgwidget.py - OpenGL-capable gtk drawingarea widget with osg viewer embedded
2008 - Gerwin de Haan
After simple.c in PyGtkGLExt, Alif Wahid, March 2003. Rewritten in object-oriented style, Naofumi
"""
import sys
import pygtk
pygtk.require('2.0')
import gtk
import gtk.gtkgl
from OpenGL.GL import *
import osg
import osgDB
import osgUtil
import osgViewer
import osgGA
# Create OpenGL-capable gtk.DrawingArea by subclassing
# gtk.gtkgl.Widget mixin.
class SimpleDrawingArea(gtk.DrawingArea, gtk.gtkgl.Widget):
    """OpenGL drawing area for simple demo.

    Embeds an osgViewer.Viewer inside a GTK drawing area by routing GTK
    realize/configure/expose/keyboard/mouse signals into the viewer's
    OSG event queue, and rendering a viewer frame on each expose.
    """
    def __init__(self, glconfig,viewer=None,window=None):
        # NOTE(review): the `window` parameter is accepted but never used here.
        gtk.DrawingArea.__init__(self)
        # Set OpenGL-capability to the drawing area
        self.set_gl_capability(glconfig)
        # Connect the relevant signals.
        self.connect_after('realize', self._on_realize)
        self.connect('configure_event', self._on_configure_event)
        self.connect('expose_event', self._on_expose_event)
        self.connect("key_press_event", self._on_key_press_event)
        self.add_events(gtk.gdk.BUTTON_PRESS_MASK |
                        gtk.gdk.BUTTON_RELEASE_MASK)
        # Accumulate the motion-related event masks, then register them.
        self.__motion_events=0
        self.__motion_events |= gtk.gdk.BUTTON_MOTION_MASK
        self.__motion_events |= gtk.gdk.POINTER_MOTION_MASK
        self.add_events(self.__motion_events)
        self.connect('motion_notify_event', self._on_mouse_event)
        self.connect("button_press_event", self._on_mouse_event)
        self.connect("button_release_event", self._on_mouse_event)
        #experimental: Stereo in a window
        #self.ds = osg.DisplaySettings_instance()
        #self.ds.setStereo(True)
        #self.ds.setStereoMode(osg.DisplaySettings.QUAD_BUFFER)
        # Use the caller-supplied viewer if given, otherwise create one.
        if viewer is None:
            self.viewer = osgViewer.Viewer()
        else:
            self.viewer = viewer
        self.osgwindow = self.viewer.setUpViewerAsEmbeddedInWindow(0,0,200,200)
        # one could try using a pythonic alternative:
        # self.osgwindow = self.setup_osgwindow(0,0,200,200)
        self.viewer.setCameraManipulator(osgGA.TrackballManipulator())
        self.viewer.addEventHandler(osgViewer.StatsHandler())
        self.viewer.addEventHandler(osgViewer.HelpHandler())
        # Ref-counted root transform so loaded models can be swapped safely.
        self.rootnode = osg.MatrixTransformRef(osg.MatrixTransform())
        self.viewer.setSceneData(self.rootnode.get())
        # Last known mouse position, updated by motion events and used
        # when forwarding button presses/releases to OSG.
        self._x,self._y =0,0

    def setup_osgwindow(self,x,y,width,height):
        """
        pythonic alternative to setUpViewerAsEmbeddedInWindow
        """
        self.traits = osg.GraphicsContext.Traits()
        self.traits.x = x
        self.traits.y = y
        self.traits.width = width
        self.traits.height = height
        #stereo is possible:
        # self.traits.quadBufferStereo = True
        # self.traits.screenNum = 1
        gw = osgViewer.GraphicsWindowEmbedded(self.traits);
        self.viewer.getCamera().setViewport(osg.Viewport(0,0,width,height));
        self.viewer.getCamera().setProjectionMatrixAsPerspective(30.0,float(width)/float(height), 1.0, 10000.0);
        self.viewer.getCamera().setGraphicsContext(gw);
        return gw

    def load_file (self,pFileName):
        """Load a model file into the scene, replacing any previous model."""
        print "Opening file ", pFileName
        # NOTE(review): getNumChildren is referenced but not called, so this
        # compares the bound method itself (always truthy) -- confirm intent.
        if self.rootnode.getNumChildren>0:
            self.rootnode.removeChild(0)
        self.objnode = osgDB.readNodeFile(pFileName)
        self.rootnode.addChild(self.objnode)

    def register_key_events(self, focus_window):
        """Forward key press/release events from focus_window to this widget."""
        # Add key events to focus_window
        focus_window.connect('key_press_event', self._on_key_press_event)
        focus_window.connect('key_release_event', self._on_key_press_event)
        focus_window.add_events(gtk.gdk.KEY_PRESS_MASK |
                                gtk.gdk.KEY_RELEASE_MASK)

    def _on_realize(self, *args):
        """GTK realize handler: enter and leave the GL context once."""
        # Obtain a reference to the OpenGL drawable
        # and rendering context.
        gldrawable = self.get_gl_drawable()
        glcontext = self.get_gl_context()
        # OpenGL begin.
        if not gldrawable.gl_begin(glcontext):
            return
        gldrawable.gl_end()

    def _on_configure_event(self, *args):
        """Propagate widget resizes to the embedded OSG window."""
        self.osgwindow.resized(0,0,self.allocation.width,self.allocation.height)
        return False

    def _on_expose_event(self, *args):
        """Render one viewer frame and swap/flush the GL buffers."""
        # Obtain a reference to the OpenGL drawable
        # and rendering context.
        gldrawable = self.get_gl_drawable()
        self.viewer.frame()
        if gldrawable.is_double_buffered():
            gldrawable.swap_buffers()
        else:
            glFlush()
        return False

    def update (self,*args):
        """Request a redraw; returns True so it can serve as a GTK
        timeout/idle callback that keeps repeating."""
        #print "Update ", self.window
        self.window.invalidate_rect(self.allocation, False)
        #self.window.process_updates(False)
        return True

    def _on_key_press_event (self,widget,event):
        """Forward GTK key presses into the OSG event queue."""
        #print "_on_key_press_event", widget, event,event.type
        if event.type== gtk.gdk.KEY_PRESS:
            q = self.viewer.getEventQueue()
            q.keyPress(event.keyval)
        return False

    def _on_mouse_event(self,widget,event):
        """Translate GTK mouse button/motion events into OSG queue events."""
        #print "_on_mouse_event", widget, event
        q = self.viewer.getEventQueue()
        if event.type==gtk.gdk.BUTTON_PRESS:
            q.mouseButtonPress(self._x, self._y, event.button)
        elif event.type==gtk.gdk.BUTTON_RELEASE:
            q.mouseButtonRelease(self._x, self._y, event.button)
        elif event.type==gtk.gdk.MOTION_NOTIFY:
            self._x = event.x
            self._y = event.y
            q.mouseMotion(self._x,self._y)
        return False
| mit |
aperigault/ansible | packaging/release/versionhelper/version_helper.py | 124 | 6838 | from __future__ import absolute_import, division, print_function
__metaclass__ = type
import argparse
import os
import re
import sys
from packaging.version import Version, VERSION_PATTERN
class AnsibleVersionMunger(object):
    """Translate a PEP 440 Ansible version string into deb and rpm
    package version/release components.

    The raw version is parsed twice: with packaging.version.Version
    (for is_prerelease / base_version) and against VERSION_PATTERN so
    the named regex groups (pre, pre_l, pre_n, dev, dev_n, post,
    post_n) are available to the properties below.
    """
    # Numeric offsets that order prerelease tags within an RPM release
    # field when no explicit revision is supplied (dev < a < b < rc).
    tag_offsets = dict(
        dev=0,
        a=100,
        b=200,
        rc=1000
    )

    # TODO: allow overrides here for packaging bump etc
    def __init__(self, raw_version, revision=None, codename=None):
        """
        raw_version: PEP 440 version string (e.g. "2.9.0b1.dev3").
        revision: optional package revision override; None picks defaults.
        codename: optional release codename; None renders as "UNKNOWN".
        """
        self._raw_version = raw_version
        self._revision = revision
        self._parsed_version = Version(raw_version)
        self._codename = codename
        self._parsed_regex_match = re.match(VERSION_PATTERN, raw_version, re.VERBOSE | re.IGNORECASE)

    @property
    def deb_version(self):
        """Debian upstream version: the base version, with any
        pre/dev/post tag appended after '~' so it sorts earlier."""
        v = self._parsed_version
        match = self._parsed_regex_match
        # treat dev/post as prerelease for now; treat dev/post as equivalent and disallow together
        if v.is_prerelease or match.group('dev') or match.group('post'):
            if match.group('dev') and match.group('post'):
                raise Exception("dev and post may not currently be used together")
            if match.group('pre'):
                # e.g. 'b1' with an optional trailing '~dev3' / '~post1'
                tag_value = match.group('pre')
                tag_type = match.group('pre_l')
                if match.group('dev'):
                    tag_value += ('~%s' % match.group('dev').strip('.'))
                if match.group('post'):
                    tag_value += ('~%s' % match.group('post').strip('.'))
            elif match.group('dev'):
                tag_type = "dev"
                tag_value = match.group('dev').strip('.')
            elif match.group('post'):
                # post releases are folded into the 'dev' bucket here
                tag_type = "dev"
                tag_value = match.group('post').strip('.')
            else:
                raise Exception("unknown prerelease type for version {0}".format(self._raw_version))
        else:
            tag_type = None
            tag_value = ''

        # not a pre/post/dev release, just return base version
        if not tag_type:
            return '{base_version}'.format(base_version=self.base_version)

        # it is a pre/dev release, include the tag value with a ~
        return '{base_version}~{tag_value}'.format(base_version=self.base_version, tag_value=tag_value)

    @property
    def deb_release(self):
        """Debian package release number; defaults to '1'."""
        return '1' if self._revision is None else str(self._revision)

    @property
    def rpm_release(self):
        """RPM release field; prereleases become '0.<offset+n>.<tag>' so
        they sort before the final '<revision>' release."""
        v = self._parsed_version
        match = self._parsed_regex_match

        # treat presence of dev/post as prerelease for now; treat dev/post the same and disallow together
        if v.is_prerelease or match.group('dev') or match.group('post'):
            if match.group('dev') and match.group('post'):
                raise Exception("dev and post may not currently be used together")
            if match.group('pre'):
                tag_value = match.group('pre')
                tag_type = match.group('pre_l')
                tag_ver = match.group('pre_n')
                if match.group('dev'):
                    tag_value += match.group('dev')
                if match.group('post'):
                    tag_value += match.group('post')
            elif match.group('dev'):
                tag_type = "dev"
                tag_value = match.group('dev')
                tag_ver = match.group('dev_n')
            elif match.group('post'):
                tag_type = "dev"
                tag_value = match.group('post')
                tag_ver = match.group('post_n')
            else:
                raise Exception("unknown prerelease type for version {0}".format(self._raw_version))
        else:
            tag_type = None
            tag_value = ''
            tag_ver = 0

        # not a pre/post/dev release, just append revision (default 1)
        if not tag_type:
            if self._revision is None:
                # NOTE(review): reading this property mutates _revision as
                # a side effect when no revision was supplied.
                self._revision = 1
            return '{revision}'.format(revision=self._revision)

        # cleanse tag value in case it starts with .
        tag_value = tag_value.strip('.')

        # coerce to int and None == 0
        tag_ver = int(tag_ver if tag_ver else 0)

        if self._revision is None:
            tag_offset = self.tag_offsets.get(tag_type)
            if tag_offset is None:
                raise Exception('no tag offset defined for tag {0}'.format(tag_type))
            pkgrel = '0.{0}'.format(tag_offset + tag_ver)
        else:
            pkgrel = self._revision

        return '{pkgrel}.{tag_value}'.format(pkgrel=pkgrel, tag_value=tag_value)

    @property
    def raw(self):
        """The version string exactly as passed in."""
        return self._raw_version

    # return the x.y.z version without any other modifiers present
    @property
    def base_version(self):
        return self._parsed_version.base_version

    # return the x.y version without any other modifiers present
    @property
    def major_version(self):
        # NOTE(review): the '.' in this pattern is unescaped, so it matches
        # any character; fine for well-formed 'X.Y...' version strings.
        return re.match(r'^(\d+.\d+)', self._raw_version).group(1)

    @property
    def codename(self):
        """Release codename, or the literal "UNKNOWN" when unset."""
        return self._codename if self._codename else "UNKNOWN"
def main():
    """Command-line entry point.

    Parses exactly one output selector (plus an optional --revision
    override), loads release metadata from the in-tree ansible package,
    and prints the requested version representation(s).
    """
    parser = argparse.ArgumentParser(
        description='Extract/transform Ansible versions to various packaging formats')

    # Exactly one of these selector flags must be supplied.
    group = parser.add_mutually_exclusive_group(required=True)
    for flag in ('--raw', '--majorversion', '--baseversion', '--debversion',
                 '--debrelease', '--rpmrelease', '--codename', '--all'):
        group.add_argument(flag, action='store_true')
    parser.add_argument('--revision', action='store', default='auto')
    args = parser.parse_args()

    # Make the in-tree ansible package importable regardless of cwd.
    lib_dir = os.path.normpath(os.path.dirname(__file__) + '/../../../lib')
    sys.path.insert(0, lib_dir)
    from ansible import release

    revision = None if args.revision == 'auto' else args.revision
    munger = AnsibleVersionMunger(release.__version__, revision=revision,
                                  codename=release.__codename__)

    if args.all:
        # Dump every property defined on the munger class.
        for prop_name, member in vars(AnsibleVersionMunger).items():
            if isinstance(member, property):
                print('{0}: {1}'.format(prop_name, getattr(munger, prop_name)))
        return

    # Map each selector flag to the corresponding munger attribute.
    selector_to_attr = (
        ('raw', 'raw'),
        ('baseversion', 'base_version'),
        ('majorversion', 'major_version'),
        ('debversion', 'deb_version'),
        ('debrelease', 'deb_release'),
        ('rpmrelease', 'rpm_release'),
        ('codename', 'codename'),
    )
    for arg_name, attr_name in selector_to_attr:
        if getattr(args, arg_name):
            print(getattr(munger, attr_name))
            return
| gpl-3.0 |
li-xiao-nan/gyp_tools | test/win/gyptest-link-ltcg.py | 204 | 1097 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure LTCG is working properly.
"""
import TestGyp
import sys
if sys.platform == 'win32':
    test = TestGyp.TestGyp(formats=['msvs', 'ninja'])

    CHDIR = 'linker-flags'
    test.run_gyp('ltcg.gyp', chdir=CHDIR)

    # Here we expect LTCG is able to inline functions beyond compile unit.
    # Note: This marker is embedded in 'inline_test_main.cc'
    INLINE_MARKER = '==== inlined ===='

    # test 'LinkTimeCodeGenerationOptionDefault'
    test.build('ltcg.gyp', 'test_ltcg_off', chdir=CHDIR)
    test.run_built_executable('test_ltcg_off', chdir=CHDIR)
    # Without LTCG, inlining must NOT cross translation units.
    test.must_not_contain_any_line(test.stdout(), [INLINE_MARKER])

    # test 'LinkTimeCodeGenerationOptionUse'
    test.build('ltcg.gyp', 'test_ltcg_on', chdir=CHDIR)
    # The linker announces LTCG work with a 'Generating code' message.
    test.must_contain_any_line(test.stdout(), ['Generating code'])
    test.run_built_executable('test_ltcg_on', chdir=CHDIR)
    # With LTCG on, the marker must appear: inlining crossed TUs.
    test.must_contain_any_line(test.stdout(), [INLINE_MARKER])

    test.pass_test()
michaelpacer/python-future | src/libfuturize/fixes/fix_absolute_import.py | 62 | 3141 | """
Fixer for import statements, with a __future__ import line.
Based on lib2to3/fixes/fix_import.py, but extended slightly so it also
supports Cython modules.
If spam is being imported from the local directory, this import:
from spam import eggs
becomes:
from __future__ import absolute_import
from .spam import eggs
and this import:
import spam
becomes:
from __future__ import absolute_import
from . import spam
"""
from os.path import dirname, join, exists, sep
from lib2to3.fixes.fix_import import FixImport
from lib2to3.fixer_util import FromImport, syms
from lib2to3.fixes.fix_import import traverse_imports
from libfuturize.fixer_util import future_import
class FixAbsoluteImport(FixImport):
    """lib2to3 fixer that rewrites implicit relative imports as explicit
    relative ones and adds ``from __future__ import absolute_import``.

    Unlike the stock FixImport it also recognises Cython modules (.pyx).
    """
    run_order = 9

    def transform(self, node, results):
        """
        Copied from FixImport.transform(), but with this line added in
        any modules that had implicit relative imports changed:
            from __future__ import absolute_import"
        """
        if self.skip:
            return
        imp = results['imp']

        if node.type == syms.import_from:
            # Some imps are top-level (eg: 'import ham')
            # some are first level (eg: 'import ham.eggs')
            # some are third level (eg: 'import ham.eggs as spam')
            # Hence, the loop
            while not hasattr(imp, 'value'):
                imp = imp.children[0]
            if self.probably_a_local_import(imp.value):
                imp.value = u"." + imp.value
                imp.changed()
                future_import(u"absolute_import", node)
        else:
            have_local = False
            have_absolute = False
            for mod_name in traverse_imports(imp):
                if self.probably_a_local_import(mod_name):
                    have_local = True
                else:
                    have_absolute = True
            if have_absolute:
                if have_local:
                    # We won't handle both sibling and absolute imports in the
                    # same statement at the moment.
                    self.warning(node, "absolute and local imports together")
                return

            # All imported names are local: rewrite as an explicit
            # relative import and add the __future__ line.
            new = FromImport(u".", [imp])
            new.prefix = node.prefix
            future_import(u"absolute_import", node)
            return new

    def probably_a_local_import(self, imp_name):
        """
        Like the corresponding method in the base class, but this also
        supports Cython modules.
        """
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file its not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        # Accept source, package dir, bytecode, extension and Cython files.
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd", ".pyx"]:
            if exists(base_path + ext):
                return True
        return False
| mit |
Pedram26/Humans-vs-Aliens | HumansAliens.app/Contents/Resources/lib/python2.7/pygame/tests/mask_test.py | 6 | 13761 | if __name__ == '__main__':
import sys
import os
pkg_dir = os.path.split(os.path.abspath(__file__))[0]
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
import unittest
import pygame
import pygame.mask
from pygame.locals import *
import random
def random_mask(size=(100, 100)):
    """Return a pygame.Mask of *size* with roughly half its bits set.

    Bits are chosen uniformly at random; duplicate picks mean the true
    count can be somewhat below width*height/2.
    """
    width, height = size[0], size[1]
    mask = pygame.Mask(size)
    for _ in range(width * height // 2):
        point = (random.randint(0, width - 1), random.randint(0, height - 1))
        mask.set_at(point)
    return mask
def maskFromSurface(surface, threshold=127):
    """Build a pygame.Mask from *surface*.

    If the surface has a colorkey, every non-colorkey pixel is set.
    Otherwise a pixel is set when its alpha value exceeds *threshold*.

    Bug fix: the colorkey branch previously called
    ``surface.get_at((x + 0.1, y + 0.1))`` with float coordinates;
    ``Surface.get_at`` expects integer pixel coordinates, so pass
    ``(x, y)`` directly.
    """
    mask = pygame.Mask(surface.get_size())
    key = surface.get_colorkey()
    if key:
        for y in range(surface.get_height()):
            for x in range(surface.get_width()):
                if surface.get_at((x, y)) != key:
                    mask.set_at((x, y), 1)
    else:
        for y in range(surface.get_height()):
            for x in range(surface.get_width()):
                # Alpha channel test: strictly greater than the threshold.
                if surface.get_at((x, y))[3] > threshold:
                    mask.set_at((x, y), 1)
    return mask
#pygame.init()
#pygame.display.set_mode((10,10))
class MaskTypeTest( unittest.TestCase ):
    """Unit tests for the pygame.Mask type (access, drawing, outline,
    convolution, connected components and bounding rects)."""

    def assertMaskEquals(self, m1, m2):
        """Assert that two masks have the same size and identical bits."""
        self.assertEquals(m1.get_size(), m2.get_size())
        for i in range(m1.get_size()[0]):
            for j in range(m1.get_size()[1]):
                self.assertEquals(m1.get_at((i,j)), m2.get_at((i,j)))

    def todo_test_get_at(self):
        # __doc__ (as of 2008-08-02) for pygame.mask.Mask.get_at:
        # Mask.get_at((x,y)) -> int
        # Returns nonzero if the bit at (x,y) is set.
        #
        # Coordinates start at (0,0) is top left - just like Surfaces.
        self.fail()

    def todo_test_get_size(self):
        # __doc__ (as of 2008-08-02) for pygame.mask.Mask.get_size:
        # Mask.get_size() -> width,height
        # Returns the size of the mask.
        self.fail()

    def todo_test_overlap(self):
        # __doc__ (as of 2008-08-02) for pygame.mask.Mask.overlap:
        # Mask.overlap(othermask, offset) -> x,y
        # Returns the point of intersection if the masks overlap with the
        # given offset - or None if it does not overlap.
        # The overlap tests uses the following offsets (which may be negative):
        #    +----+----------..
        #    |A   | yoffset
        #    |  +-+----------..
        #    +--|B
        #       |xoffset
        #       |  |
        #       :  :
        self.fail()

    def todo_test_overlap_area(self):
        # __doc__ (as of 2008-08-02) for pygame.mask.Mask.overlap_area:
        # Mask.overlap_area(othermask, offset) -> numpixels
        # Returns the number of overlapping 'pixels'.
        #
        # You can see how many pixels overlap with the other mask given. This
        # can be used to see in which direction things collide, or to see how
        # much the two masks collide.
        self.fail()

    def todo_test_set_at(self):
        # __doc__ (as of 2008-08-02) for pygame.mask.Mask.set_at:
        # Mask.set_at((x,y),value)
        # Sets the position in the mask given by x and y.
        self.fail()

    def test_mask_access( self ):
        """ do the set_at, and get_at parts work correctly?
        """
        m = pygame.Mask((10,10))
        m.set_at((0,0), 1)
        self.assertEqual(m.get_at((0,0)), 1)
        m.set_at((9,0), 1)
        self.assertEqual(m.get_at((9,0)), 1)

        #s = pygame.Surface((10,10))
        #s.set_at((1,0), (0, 0, 1, 255))
        #self.assertEqual(s.get_at((1,0)), (0, 0, 1, 255))
        #s.set_at((-1,0), (0, 0, 1, 255))

        # out of bounds, should get IndexError
        self.assertRaises(IndexError, lambda : m.get_at((-1,0)) )
        self.assertRaises(IndexError, lambda : m.set_at((-1,0), 1) )
        self.assertRaises(IndexError, lambda : m.set_at((10,0), 1) )
        self.assertRaises(IndexError, lambda : m.set_at((0,10), 1) )

    def test_drawing(self):
        """ Test fill, clear, invert, draw, erase
        """
        m = pygame.Mask((100,100))
        self.assertEqual(m.count(), 0)
        m.fill()
        self.assertEqual(m.count(), 10000)
        # Erase a filled 10x10 mask from the middle: 100 bits cleared.
        m2 = pygame.Mask((10,10))
        m2.fill()
        m.erase(m2, (50,50))
        self.assertEqual(m.count(), 9900)
        m.invert()
        self.assertEqual(m.count(), 100)
        m.draw(m2, (0,0))
        self.assertEqual(m.count(), 200)
        m.clear()
        self.assertEqual(m.count(), 0)

    def test_outline(self):
        """Outline tracing: empty mask, isolated points, and the
        optional step ('every Nth point') argument."""
        m = pygame.Mask((20,20))
        self.assertEqual(m.outline(), [])
        m.set_at((10,10), 1)
        self.assertEqual(m.outline(), [(10,10)])
        m.set_at((10,12), 1)
        self.assertEqual(m.outline(10), [(10,10)])
        m.set_at((11,11), 1)
        self.assertEqual(m.outline(), [(10,10), (11,11), (10,12), (11,11), (10,10)])
        self.assertEqual(m.outline(2), [(10,10), (10,12), (10,10)])
        #TODO: Test more corner case outlines.

    def test_convolve__size(self):
        """Convolution output size is (w1 + w2 - 1, h1 + h2 - 1)."""
        sizes = [(1,1), (31,31), (32,32), (100,100)]
        for s1 in sizes:
            m1 = pygame.Mask(s1)
            for s2 in sizes:
                m2 = pygame.Mask(s2)
                o = m1.convolve(m2)
                for i in (0,1):
                    self.assertEquals(o.get_size()[i], m1.get_size()[i] + m2.get_size()[i] - 1)

    def test_convolve__point_identities(self):
        """Convolving with a single point is the identity, while convolving a point with something flips it."""
        m = random_mask((100,100))
        k = pygame.Mask((1,1))
        k.set_at((0,0))

        self.assertMaskEquals(m,m.convolve(k))
        self.assertMaskEquals(m,k.convolve(k.convolve(m)))

    def test_convolve__with_output(self):
        """checks that convolution modifies only the correct portion of the output"""
        m = random_mask((10,10))
        k = pygame.Mask((2,2))
        k.set_at((0,0))

        o = pygame.Mask((50,50))
        test = pygame.Mask((50,50))

        m.convolve(k,o)
        test.draw(m,(1,1))
        self.assertMaskEquals(o, test)

        o.clear()
        test.clear()

        # With an explicit offset the result must land at (offset + 1).
        m.convolve(k,o, (10,10))
        test.draw(m,(11,11))
        self.assertMaskEquals(o, test)

    def test_convolve__out_of_range(self):
        # Offsets that push the kernel partially or fully off the output.
        full = pygame.Mask((2,2))
        full.fill()

        self.assertEquals(full.convolve(full, None, ( 0, 3)).count(), 0)
        self.assertEquals(full.convolve(full, None, ( 0, 2)).count(), 3)
        self.assertEquals(full.convolve(full, None, (-2, -2)).count(), 1)
        self.assertEquals(full.convolve(full, None, (-3, -3)).count(), 0)

    def test_convolve(self):
        """Tests the definition of convolution"""
        m1 = random_mask((100,100))
        m2 = random_mask((100,100))
        conv = m1.convolve(m2)

        # A convolution bit is zero exactly when the masks do not
        # overlap at the corresponding offset.
        for i in range(conv.get_size()[0]):
            for j in range(conv.get_size()[1]):
                self.assertEquals(conv.get_at((i,j)) == 0, m1.overlap(m2, (i - 99, j - 99)) is None)

    def test_connected_components(self):
        """connected_component(s) with varying minimum component sizes."""
        m = pygame.Mask((10,10))
        self.assertEquals(repr(m.connected_components()), "[]")

        comp = m.connected_component()
        self.assertEquals(m.count(), comp.count())

        # Two diagonally adjacent bits form a single 2-bit component.
        m.set_at((0,0), 1)
        m.set_at((1,1), 1)
        comp = m.connected_component()
        comps = m.connected_components()
        comps1 = m.connected_components(1)
        comps2 = m.connected_components(2)
        comps3 = m.connected_components(3)
        self.assertEquals(comp.count(), comps[0].count())
        self.assertEquals(comps1[0].count(), 2)
        self.assertEquals(comps2[0].count(), 2)
        self.assertEquals(repr(comps3), "[]")

        # Add an isolated bit: now two components of sizes 2 and 1.
        m.set_at((9, 9), 1)
        comp = m.connected_component()
        comp1 = m.connected_component((1, 1))
        comp2 = m.connected_component((2, 2))
        comps = m.connected_components()
        comps1 = m.connected_components(1)
        comps2 = m.connected_components(2)
        comps3 = m.connected_components(3)
        self.assertEquals(comp.count(), 2)
        self.assertEquals(comp1.count(), 2)
        self.assertEquals(comp2.count(), 0)
        self.assertEquals(len(comps), 2)
        self.assertEquals(len(comps1), 2)
        self.assertEquals(len(comps2), 1)
        self.assertEquals(len(comps3), 0)

    def test_get_bounding_rects(self):
        """Bounding rects of connected regions, for several bit layouts."""
        m = pygame.Mask((10,10))
        m.set_at((0,0), 1)
        m.set_at((1,0), 1)

        m.set_at((0,1), 1)

        m.set_at((0,3), 1)
        m.set_at((3,3), 1)

        r = m.get_bounding_rects()

        self.assertEquals(repr(r), "[<rect(0, 0, 2, 2)>, <rect(0, 3, 1, 1)>, <rect(3, 3, 1, 1)>]")

        #1100
        #1111
        m = pygame.Mask((4,2))
        m.set_at((0,0), 1)
        m.set_at((1,0), 1)
        m.set_at((2,0), 0)
        m.set_at((3,0), 0)

        m.set_at((0,1), 1)
        m.set_at((1,1), 1)
        m.set_at((2,1), 1)
        m.set_at((3,1), 1)

        r = m.get_bounding_rects()
        self.assertEquals(repr(r), "[<rect(0, 0, 4, 2)>]")

        #00100
        #01110
        #00100
        m = pygame.Mask((5,3))
        m.set_at((0,0), 0)
        m.set_at((1,0), 0)
        m.set_at((2,0), 1)
        m.set_at((3,0), 0)
        m.set_at((4,0), 0)

        m.set_at((0,1), 0)
        m.set_at((1,1), 1)
        m.set_at((2,1), 1)
        m.set_at((3,1), 1)
        m.set_at((4,1), 0)

        m.set_at((0,2), 0)
        m.set_at((1,2), 0)
        m.set_at((2,2), 1)
        m.set_at((3,2), 0)
        m.set_at((4,2), 0)

        r = m.get_bounding_rects()
        self.assertEquals(repr(r), "[<rect(1, 0, 3, 3)>]")

        #00010
        #00100
        #01000
        m = pygame.Mask((5,3))
        m.set_at((0,0), 0)
        m.set_at((1,0), 0)
        m.set_at((2,0), 0)
        m.set_at((3,0), 1)
        m.set_at((4,0), 0)

        m.set_at((0,1), 0)
        m.set_at((1,1), 0)
        m.set_at((2,1), 1)
        m.set_at((3,1), 0)
        m.set_at((4,1), 0)

        m.set_at((0,2), 0)
        m.set_at((1,2), 1)
        m.set_at((2,2), 0)
        m.set_at((3,2), 0)
        m.set_at((4,2), 0)

        r = m.get_bounding_rects()
        self.assertEquals(repr(r), "[<rect(1, 0, 3, 3)>]")

        #00011
        #11111
        m = pygame.Mask((5,2))
        m.set_at((0,0), 0)
        m.set_at((1,0), 0)
        m.set_at((2,0), 0)
        m.set_at((3,0), 1)
        m.set_at((4,0), 1)

        m.set_at((0,1), 1)
        m.set_at((1,1), 1)
        m.set_at((2,1), 1)
        m.set_at((3,1), 1)
        m.set_at((3,1), 1)

        r = m.get_bounding_rects()
        #TODO: this should really make one bounding rect.
        #self.assertEquals(repr(r), "[<rect(0, 0, 5, 2)>]")
class MaskModuleTest(unittest.TestCase):
    """Tests for the module-level mask constructors."""

    def test_from_surface(self):
        """ Does the mask.from_surface() work correctly?
        """
        mask_from_surface = pygame.mask.from_surface

        surf = pygame.Surface((70,70), SRCALPHA, 32)
        surf.fill((255,255,255,255))

        amask = pygame.mask.from_surface(surf)
        #amask = mask_from_surface(surf)

        self.assertEqual(amask.get_at((0,0)), 1)
        self.assertEqual(amask.get_at((66,1)), 1)
        self.assertEqual(amask.get_at((69,1)), 1)

        # Alpha at or below the default threshold (127) clears the bit;
        # strictly above it sets the bit.
        surf.set_at((0,0), (255,255,255,127))
        surf.set_at((1,0), (255,255,255,128))
        surf.set_at((2,0), (255,255,255,0))
        surf.set_at((3,0), (255,255,255,255))

        amask = mask_from_surface(surf)
        self.assertEqual(amask.get_at((0,0)), 0)
        self.assertEqual(amask.get_at((1,0)), 1)
        self.assertEqual(amask.get_at((2,0)), 0)
        self.assertEqual(amask.get_at((3,0)), 1)

        surf.fill((255,255,255,0))
        amask = mask_from_surface(surf)
        self.assertEqual(amask.get_at((0,0)), 0)

        #TODO: test a color key surface.

    def test_from_threshold(self):
        """ Does mask.from_threshold() work correctly?
        """
        a = [16, 24, 32]

        # Without a comparison surface: select pixels near the given color.
        for i in a:
            surf = pygame.surface.Surface((70,70), 0, i)
            surf.fill((100,50,200),(20,20,20,20))
            mask = pygame.mask.from_threshold(surf,(100,50,200,255),(10,10,10,255))

            self.assertEqual(mask.count(), 400)
            self.assertEqual(mask.get_bounding_rects(), [pygame.Rect((20,20,20,20))])

        # With a comparison surface: select pixels where both surfaces match.
        for i in a:
            surf = pygame.surface.Surface((70,70), 0, i)
            surf2 = pygame.surface.Surface((70,70), 0, i)
            surf.fill((100,100,100))
            surf2.fill((150,150,150))
            surf2.fill((100,100,100), (40,40,10,10))
            mask = pygame.mask.from_threshold(surf, (0,0,0,0), (10,10,10,255), surf2)

            self.assertEqual(mask.count(), 100)
            self.assertEqual(mask.get_bounding_rects(), [pygame.Rect((40,40,10,10))])
if __name__ == '__main__':
    # The `if 1:` toggle selects between running the full suite (normal
    # case) and a small manual debugging snippet kept in the dead branch.
    if 1:
        unittest.main()
    else:
        mask_from_surface = maskFromSurface

        surf = pygame.Surface((70,70), SRCALPHA, 32)
        #surf = surf.convert_alpha()
        surf.set_at((0,0), (255,255,255,0))

        print (surf.get_at((0,0)))
        print ("asdf")
        print (surf)
messagebird/python-rest-api | tests/test_conversation_message.py | 1 | 2141 | import unittest
from datetime import datetime
from unittest.mock import Mock
from dateutil.tz import tzutc
from messagebird import Client
class TestConversationMessage(unittest.TestCase):
def test_conversation_list_messages(self):
http_client = Mock()
http_client.request.return_value = '{"count":1,"items":[{"id":"54445534","conversationId":"54345543543","channelId":"4535434354","type":"text","content":{"text":"Hello"},"direction":"sent","status":"delivered","createdDatetime":"2019-04-02T08:54:54.608157775Z","updatedDatetime":"2019-04-02T08:54:54.63910221Z"}],"limit":10,"offset":0,"totalCount":1}'
msg = Client('', http_client).conversation_list_messages(54567)
self.assertEqual(1, msg.count)
self.assertEqual('54445534', msg.items[0].id)
http_client.request.assert_called_once_with('conversations/54567/messages?limit=10&offset=0', 'GET', None)
def test_conversation_read_message(self):
http_client = Mock()
http_client.request.return_value = '{}'
Client('', http_client).conversation_read_message('message-id')
http_client.request.assert_called_once_with('messages/message-id', 'GET', None)
def test_create_message(self):
http_client = Mock()
http_client.request.return_value = '{"id":"id","conversationId":"conversation-id","channelId":"channel-id","type":"text","content":{"text":"Example Text Message"},"direction":"sent","status":"pending","createdDatetime":"2019-04-02T11:57:52.142641447Z","updatedDatetime":"2019-04-02T11:57:53.142641447Z"}'
data = {
'channelId': 1234,
'type': 'text',
'content': {
'text': 'this is a message'
},
}
msg = Client('', http_client).conversation_create_message('conversation-id', data)
self.assertEqual(datetime(2019, 4, 2, 11, 57, 53, tzinfo=tzutc()), msg.updatedDatetime)
self.assertEqual(datetime(2019, 4, 2, 11, 57, 52, tzinfo=tzutc()), msg.createdDatetime)
http_client.request.assert_called_once_with('conversations/conversation-id/messages', 'POST', data)
| bsd-2-clause |
tttthemanCorp/CardmeleonAppEngine | django/template/defaultfilters.py | 150 | 29467 | """Default variable filters."""
import re
from decimal import Decimal, InvalidOperation, ROUND_HALF_UP
import random as random_module
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.template.base import Variable, Library
from django.conf import settings
from django.utils import formats
from django.utils.encoding import force_unicode, iri_to_uri
from django.utils.html import conditional_escape
from django.utils.safestring import mark_safe, SafeData
from django.utils.translation import ugettext, ungettext
register = Library()
#######################
# STRING DECORATOR #
#######################
def stringfilter(func):
    """
    Decorator for filters which should only receive unicode objects. The object
    passed as the first positional argument will be converted to a unicode
    object.
    """
    def _dec(*args, **kwargs):
        if args:
            args = list(args)
            args[0] = force_unicode(args[0])
            # If the input was marked safe and the filter declares itself
            # safe, re-mark the filter's output as safe.
            if isinstance(args[0], SafeData) and getattr(func, 'is_safe', False):
                return mark_safe(func(*args, **kwargs))
        return func(*args, **kwargs)

    # Include a reference to the real function (used to check original
    # arguments by the template parser).
    _dec._decorated_function = getattr(func, '_decorated_function', func)
    # Propagate filter metadata flags onto the wrapper.
    for attr in ('is_safe', 'needs_autoescape'):
        if hasattr(func, attr):
            setattr(_dec, attr, getattr(func, attr))
    return wraps(func)(_dec)
###################
# STRINGS #
###################
def addslashes(value):
    """
    Backslash-escape backslashes and both quote characters in the value.
    Useful for escaping strings in CSV, for example. Less useful for
    escaping JavaScript; use the ``escapejs`` filter instead.
    """
    # Backslashes must be doubled first so the quote escapes stay intact.
    for old, new in (('\\', '\\\\'), ('"', '\\"'), ("'", "\\'")):
        value = value.replace(old, new)
    return value
addslashes.is_safe = True
addslashes = stringfilter(addslashes)
def capfirst(value):
    """Upper-case only the first character, leaving the rest untouched."""
    if not value:
        return value
    return value[0].upper() + value[1:]
capfirst.is_safe = True
capfirst = stringfilter(capfirst)
def escapejs(value):
    """Hex encodes characters for use in JavaScript strings."""
    # Deferred import keeps django.utils.html out of module import time.
    from django.utils.html import escapejs
    return escapejs(value)
escapejs = stringfilter(escapejs)


def fix_ampersands(value):
    """Replaces ampersands with ``&amp;`` entities."""
    from django.utils.html import fix_ampersands
    return fix_ampersands(value)
fix_ampersands.is_safe=True
fix_ampersands = stringfilter(fix_ampersands)
# Values for testing floatformat input against infinity and NaN representations,
# which differ across platforms and Python versions. Some (i.e. old Windows
# ones) are not recognized by Decimal but we want to return them unchanged vs.
# returning an empty string as we do for completely invalid input. Note these
# need to be built up from values that are not inf/nan, since inf/nan values do
# not reload properly from .pyc files on Windows prior to some level of Python 2.5
# (see Python Issue757815 and Issue1080440).
pos_inf = 1e200 * 1e200
neg_inf = -1e200 * 1e200
nan = (1e200 * 1e200) / (1e200 * 1e200)
special_floats = [str(pos_inf), str(neg_inf), str(nan)]

def floatformat(text, arg=-1):
    """
    Displays a float to a specified number of decimal places.

    If called without an argument, it displays the floating point number with
    one decimal place -- but only if there's a decimal place to be displayed:

    * num1 = 34.23234
    * num2 = 34.00000
    * num3 = 34.26000
    * {{ num1|floatformat }} displays "34.2"
    * {{ num2|floatformat }} displays "34"
    * {{ num3|floatformat }} displays "34.3"

    If arg is positive, it will always display exactly arg number of decimal
    places:

    * {{ num1|floatformat:3 }} displays "34.232"
    * {{ num2|floatformat:3 }} displays "34.000"
    * {{ num3|floatformat:3 }} displays "34.260"

    If arg is negative, it will display arg number of decimal places -- but
    only if there are places to be displayed:

    * {{ num1|floatformat:"-3" }} displays "34.232"
    * {{ num2|floatformat:"-3" }} displays "34"
    * {{ num3|floatformat:"-3" }} displays "34.260"

    If the input float is infinity or NaN, the (platform-dependent) string
    representation of that value will be displayed.
    """
    try:
        input_val = force_unicode(text)
        d = Decimal(input_val)
    except UnicodeEncodeError:
        return u''
    except InvalidOperation:
        # Not directly parseable as a Decimal: pass platform inf/nan strings
        # through unchanged, otherwise retry via float() (covers objects that
        # only define __float__).
        if input_val in special_floats:
            return input_val
        try:
            d = Decimal(force_unicode(float(text)))
        except (ValueError, InvalidOperation, TypeError, UnicodeEncodeError):
            return u''
    try:
        p = int(arg)
    except ValueError:
        return input_val
    try:
        # m is the (negated) fractional part; zero when d is integral.
        m = int(d) - d
    except (ValueError, OverflowError, InvalidOperation):
        return input_val
    # Negative precision on an integral value: drop the decimals entirely.
    if not m and p < 0:
        return mark_safe(formats.number_format(u'%d' % (int(d)), 0))
    if p == 0:
        exp = Decimal(1)
    else:
        exp = Decimal(u'1.0') / (Decimal(10) ** abs(p))
    try:
        # Avoid conversion to scientific notation by accessing `sign`, `digits`
        # and `exponent` from `Decimal.as_tuple()` directly.
        sign, digits, exponent = d.quantize(exp, ROUND_HALF_UP).as_tuple()
        digits = [unicode(digit) for digit in reversed(digits)]
        while len(digits) <= abs(exponent):
            digits.append(u'0')
        digits.insert(-exponent, u'.')
        if sign:
            digits.append(u'-')
        number = u''.join(reversed(digits))
        return mark_safe(formats.number_format(number, abs(p)))
    except InvalidOperation:
        # quantize() can fail (e.g. result wider than the context precision).
        return input_val
floatformat.is_safe = True
def iriencode(value):
    """Escapes an IRI value for use in a URL."""
    return force_unicode(iri_to_uri(value))
iriencode.is_safe = True
iriencode = stringfilter(iriencode)


def linenumbers(value, autoescape=None):
    """Displays text with line numbers."""
    from django.utils.html import escape
    lines = value.split(u'\n')
    # Find the maximum width of the line count, for use with zero padding
    # string format command
    width = unicode(len(unicode(len(lines))))
    if not autoescape or isinstance(value, SafeData):
        # Already-safe (or non-autoescaping) content is emitted verbatim.
        for i, line in enumerate(lines):
            lines[i] = (u"%0" + width + u"d. %s") % (i + 1, line)
    else:
        for i, line in enumerate(lines):
            lines[i] = (u"%0" + width + u"d. %s") % (i + 1, escape(line))
    return mark_safe(u'\n'.join(lines))
linenumbers.is_safe = True
linenumbers.needs_autoescape = True
linenumbers = stringfilter(linenumbers)
def lower(value):
    """Convert the string to all lower case."""
    return value.lower()
lower.is_safe = True
lower = stringfilter(lower)
def make_list(value):
    """
    Return the value turned into a list: a string becomes its characters;
    an integer (coerced to text by the stringfilter wrapper) its digits.
    """
    return [item for item in value]
make_list.is_safe = False
make_list = stringfilter(make_list)
def slugify(value):
    """
    Normalizes string, converts to lowercase, removes non-alpha characters,
    and converts spaces to hyphens.
    """
    import unicodedata
    # NFKD + ASCII-ignore strips accents (e.g. u'\xe9' -> 'e').
    value = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    value = unicode(re.sub('[^\w\s-]', '', value).strip().lower())
    # Collapse runs of whitespace/hyphens into a single hyphen.
    return mark_safe(re.sub('[-\s]+', '-', value))
slugify.is_safe = True
slugify = stringfilter(slugify)


def stringformat(value, arg):
    """
    Formats the variable according to the arg, a string formatting specifier.
    This specifier uses Python string formating syntax, with the exception that
    the leading "%" is dropped.

    See http://docs.python.org/lib/typesseq-strings.html for documentation
    of Python string formatting
    """
    try:
        return (u"%" + unicode(arg)) % value
    except (ValueError, TypeError):
        # Bad specifier or incompatible value: fail silently.
        return u""
stringformat.is_safe = True
def title(value):
    """Convert the string into title case, repairing apostrophes and digits."""
    def _lower_match(match):
        return match.group(0).lower()
    # str.title() upper-cases after apostrophes ("joe'S") and after digits
    # ("2Nd"); undo both.
    titled = re.sub(r"([a-z])'([A-Z])", _lower_match, value.title())
    return re.sub(r"\d([A-Z])", _lower_match, titled)
title.is_safe = True
title = stringfilter(title)
def truncatewords(value, arg):
    """
    Truncates a string after a certain number of words.

    Argument: Number of words to truncate after.

    Newlines within the string are removed.
    """
    from django.utils.text import truncate_words
    try:
        length = int(arg)
    except ValueError: # Invalid literal for int().
        return value # Fail silently.
    return truncate_words(value, length)
truncatewords.is_safe = True
truncatewords = stringfilter(truncatewords)


def truncatewords_html(value, arg):
    """
    Truncates HTML after a certain number of words.

    Argument: Number of words to truncate after.

    Newlines in the HTML are preserved.
    """
    # HTML-aware variant of ``truncatewords``; delegates to django.utils.text.
    from django.utils.text import truncate_html_words
    try:
        length = int(arg)
    except ValueError: # invalid literal for int()
        return value # Fail silently.
    return truncate_html_words(value, length)
truncatewords_html.is_safe = True
truncatewords_html = stringfilter(truncatewords_html)
def upper(value):
    """Convert the string to all upper case."""
    return value.upper()
upper.is_safe = False
upper = stringfilter(upper)
def urlencode(value, safe=None):
    """
    Escapes a value for use in a URL.

    Takes an optional ``safe`` parameter used to determine the characters which
    should not be escaped by Django's ``urlquote`` method. If not provided, the
    default safe characters will be used (but an empty string can be provided
    when *all* characters should be escaped).
    """
    from django.utils.http import urlquote
    # Only pass ``safe`` through when explicitly given, so urlquote can apply
    # its own default otherwise.
    kwargs = {}
    if safe is not None:
        kwargs['safe'] = safe
    return urlquote(value, **kwargs)
urlencode.is_safe = False
urlencode = stringfilter(urlencode)


def urlize(value, autoescape=None):
    """Converts URLs in plain text into clickable links."""
    from django.utils.html import urlize
    # nofollow=True discourages spamming via the generated links.
    return mark_safe(urlize(value, nofollow=True, autoescape=autoescape))
urlize.is_safe=True
urlize.needs_autoescape = True
urlize = stringfilter(urlize)


def urlizetrunc(value, limit, autoescape=None):
    """
    Converts URLs into clickable links, truncating URLs to the given character
    limit, and adding 'rel=nofollow' attribute to discourage spamming.

    Argument: Length to truncate URLs to.
    """
    from django.utils.html import urlize
    return mark_safe(urlize(value, trim_url_limit=int(limit), nofollow=True,
                            autoescape=autoescape))
urlizetrunc.is_safe = True
urlizetrunc.needs_autoescape = True
urlizetrunc = stringfilter(urlizetrunc)
def wordcount(value):
    """Count the whitespace-separated words in the string."""
    return len(value.split())
wordcount.is_safe = False
wordcount = stringfilter(wordcount)
def wordwrap(value, arg):
    """
    Wraps words at specified line length.

    Argument: number of characters to wrap the text at.
    """
    from django.utils.text import wrap
    return wrap(value, int(arg))
wordwrap.is_safe = True
wordwrap = stringfilter(wordwrap)
def ljust(value, arg):
    """Left-align the value in a field ``arg`` characters wide."""
    width = int(arg)
    return value.ljust(width)
ljust.is_safe = True
ljust = stringfilter(ljust)
def rjust(value, arg):
    """Right-align the value in a field ``arg`` characters wide."""
    width = int(arg)
    return value.rjust(width)
rjust.is_safe = True
rjust = stringfilter(rjust)
def center(value, arg):
    """Center the value in a field ``arg`` characters wide."""
    width = int(arg)
    return value.center(width)
center.is_safe = True
center = stringfilter(center)
def cut(value, arg):
    """
    Removes all values of arg from the given string.
    """
    safe = isinstance(value, SafeData)
    value = value.replace(arg, u'')
    # Removing ';' from an already-escaped string can corrupt HTML entities
    # (e.g. '&amp;' -> '&amp'), so in that one case the result is not
    # re-marked as safe.
    if safe and arg != ';':
        return mark_safe(value)
    return value
cut = stringfilter(cut)
###################
# HTML STRINGS    #
###################

def escape(value):
    """
    Marks the value as a string that should be auto-escaped when output
    (the actual escaping is deferred via ``mark_for_escaping``).
    """
    from django.utils.safestring import mark_for_escaping
    return mark_for_escaping(value)
escape.is_safe = True
escape = stringfilter(escape)


def force_escape(value):
    """
    Escapes a string's HTML. This returns a new string containing the escaped
    characters (as opposed to "escape", which marks the content for later
    possible escaping).
    """
    from django.utils.html import escape
    return mark_safe(escape(value))
force_escape = stringfilter(force_escape)
force_escape.is_safe = True


def linebreaks(value, autoescape=None):
    """
    Replaces line breaks in plain text with appropriate HTML; a single
    newline becomes an HTML line break (``<br />``) and a new line
    followed by a blank line becomes a paragraph break (``</p>``).
    """
    from django.utils.html import linebreaks
    # Don't escape content that is already marked safe.
    autoescape = autoescape and not isinstance(value, SafeData)
    return mark_safe(linebreaks(value, autoescape))
linebreaks.is_safe = True
linebreaks.needs_autoescape = True
linebreaks = stringfilter(linebreaks)


def linebreaksbr(value, autoescape=None):
    """
    Converts all newlines in a piece of plain text to HTML line breaks
    (``<br />``).
    """
    if autoescape and not isinstance(value, SafeData):
        from django.utils.html import escape
        value = escape(value)
    return mark_safe(value.replace('\n', '<br />'))
linebreaksbr.is_safe = True
linebreaksbr.needs_autoescape = True
linebreaksbr = stringfilter(linebreaksbr)


def safe(value):
    """
    Marks the value as a string that should not be auto-escaped.
    """
    return mark_safe(value)
safe.is_safe = True
safe = stringfilter(safe)


def safeseq(value):
    """
    A "safe" filter for sequences. Marks each element in the sequence,
    individually, as safe, after converting them to unicode. Returns a list
    with the results.
    """
    return [mark_safe(force_unicode(obj)) for obj in value]
safeseq.is_safe = True


def removetags(value, tags):
    """Removes a space separated list of [X]HTML tags from the output."""
    # Build one alternation of the (regex-escaped) tag names, then strip both
    # opening tags (with any attributes, or self-closing) and closing tags.
    tags = [re.escape(tag) for tag in tags.split()]
    tags_re = u'(%s)' % u'|'.join(tags)
    starttag_re = re.compile(ur'<%s(/?>|(\s+[^>]*>))' % tags_re, re.U)
    endtag_re = re.compile(u'</%s>' % tags_re)
    value = starttag_re.sub(u'', value)
    value = endtag_re.sub(u'', value)
    return value
removetags.is_safe = True
removetags = stringfilter(removetags)


def striptags(value):
    """Strips all [X]HTML tags."""
    from django.utils.html import strip_tags
    return strip_tags(value)
striptags.is_safe = True
striptags = stringfilter(striptags)
###################
# LISTS           #
###################

def dictsort(value, arg):
    """
    Takes a list of dicts, returns that list sorted by the property given in
    the argument.
    """
    # Variable(arg).resolve performs the template-style property lookup on
    # each element.
    return sorted(value, key=Variable(arg).resolve)
dictsort.is_safe = False


def dictsortreversed(value, arg):
    """
    Takes a list of dicts, returns that list sorted in reverse order by the
    property given in the argument.
    """
    return sorted(value, key=Variable(arg).resolve, reverse=True)
dictsortreversed.is_safe = False
def first(value):
    """Return the first item of the sequence, or u'' when it is empty."""
    try:
        return value[0]
    except IndexError:
        # Empty sequence: fail silently.
        return u''
first.is_safe = False
def join(value, arg, autoescape=None):
    """
    Joins a list with a string, like Python's ``str.join(list)``.
    """
    value = map(force_unicode, value)
    if autoescape:
        value = [conditional_escape(v) for v in value]
    try:
        data = conditional_escape(arg).join(value)
    except AttributeError: # fail silently but nicely
        # arg was not string-like; return the (stringified) list untouched.
        return value
    return mark_safe(data)
join.is_safe = True
join.needs_autoescape = True
def last(value):
    """Return the final item of the sequence, or u'' when it is empty."""
    try:
        return value[-1]
    except IndexError:
        # Empty sequence: fail silently.
        return u''
last.is_safe = True
def length(value):
    """Return ``len(value)``; unsized values yield an empty string."""
    try:
        return len(value)
    except (ValueError, TypeError):
        return ''
length.is_safe = True
def length_is(value, arg):
    """
    Return whether ``len(value)`` equals ``int(arg)``; invalid input
    yields an empty string.
    """
    try:
        return len(value) == int(arg)
    except (ValueError, TypeError):
        return ''
length_is.is_safe = False
def random(value):
    """Return a randomly chosen item from the sequence."""
    return random_module.choice(value)
random.is_safe = True
def slice_(value, arg):
    """
    Return a slice of the sequence using Python slice syntax in ``arg``
    (e.g. "1:3", ":2", "::2"); invalid input returns the value unchanged.
    """
    try:
        parts = []
        for piece in arg.split(u':'):
            if piece:
                parts.append(int(piece))
            else:
                # Empty position means "use the default" for that bound.
                parts.append(None)
        return value[slice(*parts)]
    except (ValueError, TypeError):
        return value # Fail silently.
slice_.is_safe = True
def unordered_list(value, autoescape=None):
    """
    Recursively takes a self-nested list and returns an HTML unordered list --
    WITHOUT opening and closing <ul> tags.

    The list is assumed to be in the proper format. For example, if ``var``
    contains: ``['States', ['Kansas', ['Lawrence', 'Topeka'], 'Illinois']]``,
    then ``{{ var|unordered_list }}`` would return::

        <li>States
        <ul>
                <li>Kansas
                <ul>
                        <li>Lawrence</li>
                        <li>Topeka</li>
                </ul>
                </li>
                <li>Illinois</li>
        </ul>
        </li>
    """
    if autoescape:
        from django.utils.html import conditional_escape
        escaper = conditional_escape
    else:
        escaper = lambda x: x

    def convert_old_style_list(list_):
        """
        Converts old style lists to the new easier to understand format.

        The old list format looked like:
            ['Item 1', [['Item 1.1', []], ['Item 1.2', []]]]

        And it is converted to:
            ['Item 1', ['Item 1.1', 'Item 1.2']]

        Returns a (list, was_old_style) pair.
        """
        if not isinstance(list_, (tuple, list)) or len(list_) != 2:
            return list_, False
        first_item, second_item = list_
        if second_item == []:
            return [first_item], True
        try:
            it = iter(second_item) # see if second item is iterable
        except TypeError:
            return list_, False
        old_style_list = True
        new_second_item = []
        for sublist in second_item:
            item, old_style_list = convert_old_style_list(sublist)
            if not old_style_list:
                break
            new_second_item.extend(item)
        if old_style_list:
            second_item = new_second_item
        return [first_item, second_item], old_style_list

    def _helper(list_, tabs=1):
        indent = u'\t' * tabs
        output = []
        list_length = len(list_)
        i = 0
        while i < list_length:
            title = list_[i]
            sublist = ''
            sublist_item = None
            if isinstance(title, (list, tuple)):
                # A bare nested list renders with an empty title.
                sublist_item = title
                title = ''
            elif i < list_length - 1:
                next_item = list_[i+1]
                if next_item and isinstance(next_item, (list, tuple)):
                    # The next item is a sub-list.
                    sublist_item = next_item
                    # We've processed the next item now too.
                    i += 1
            if sublist_item:
                sublist = _helper(sublist_item, tabs+1)
                sublist = '\n%s<ul>\n%s\n%s</ul>\n%s' % (indent, sublist,
                                                         indent, indent)
            output.append('%s<li>%s%s</li>' % (indent,
                    escaper(force_unicode(title)), sublist))
            i += 1
        return '\n'.join(output)

    # The converted flag is unused here; conversion is best-effort.
    value, converted = convert_old_style_list(value)
    return mark_safe(_helper(value))
unordered_list.is_safe = True
unordered_list.needs_autoescape = True
###################
# INTEGERS #
###################
def add(value, arg):
    """
    Add ``arg`` to ``value``.

    Both operands are first coerced to ``int`` (so ``"1"|add:"2"`` gives 3);
    if that fails, plain ``+`` is tried (lists, strings, ...); if that also
    fails, ``value`` is returned unchanged.
    """
    try:
        return int(value) + int(arg)
    except (ValueError, TypeError):
        try:
            return value + arg
        except Exception:
            # Was a bare ``except:``; still fail silently on any addition
            # error, but let SystemExit/KeyboardInterrupt propagate.
            return value
add.is_safe = False
def get_digit(value, arg):
    """
    Given a whole number, returns the requested digit of it, where 1 is the
    right-most digit, 2 is the second-right-most digit, etc. Returns the
    original value for invalid input (if input or argument is not an integer,
    or if argument is less than 1). Otherwise, output is always an integer.
    """
    try:
        position = int(arg)
        number = int(value)
    except ValueError:
        return value # Fail silently for an invalid argument.
    if position < 1:
        # Match historical behaviour: the int-converted value is returned.
        return number
    try:
        return int(str(number)[-position])
    except IndexError:
        # Position beyond the number of digits.
        return 0
get_digit.is_safe = False
###################
# DATES           #
###################

def date(value, arg=None):
    """Formats a date according to the given format."""
    from django.utils.dateformat import format
    if not value:
        return u''
    if arg is None:
        arg = settings.DATE_FORMAT
    try:
        # Try the locale-aware formatter first.
        return formats.date_format(value, arg)
    except AttributeError:
        try:
            # Fall back to the raw dateformat machinery.
            return format(value, arg)
        except AttributeError:
            # value is not date-like; fail silently.
            return ''
date.is_safe = False


def time(value, arg=None):
    """Formats a time according to the given format."""
    from django.utils import dateformat
    if value in (None, u''):
        return u''
    if arg is None:
        arg = settings.TIME_FORMAT
    try:
        return formats.time_format(value, arg)
    except AttributeError:
        try:
            return dateformat.time_format(value, arg)
        except AttributeError:
            return ''
time.is_safe = False


def timesince(value, arg=None):
    """Formats a date as the time since that date (i.e. "4 days, 6 hours")."""
    from django.utils.timesince import timesince
    if not value:
        return u''
    try:
        if arg:
            # arg, when given, is presumably the reference "now" date.
            return timesince(value, arg)
        return timesince(value)
    except (ValueError, TypeError):
        return u''
timesince.is_safe = False


def timeuntil(value, arg=None):
    """Formats a date as the time until that date (i.e. "4 days, 6 hours")."""
    from django.utils.timesince import timeuntil
    if not value:
        return u''
    try:
        return timeuntil(value, arg)
    except (ValueError, TypeError):
        return u''
timeuntil.is_safe = False
###################
# LOGIC #
###################
def default(value, arg):
    """Return ``arg`` when the value is falsy, otherwise the value itself."""
    if value:
        return value
    return arg
default.is_safe = False
def default_if_none(value, arg):
    """Return ``arg`` only when the value is None (other falsy values pass)."""
    if value is not None:
        return value
    return arg
default_if_none.is_safe = False
def divisibleby(value, arg):
    """Return True if the value is evenly divisible by the argument."""
    return not int(value) % int(arg)
divisibleby.is_safe = False
def yesno(value, arg=None):
    """
    Given a string mapping values for true, false and (optionally) None,
    returns one of those strings according to the value:

    ==========  ======================  ==================================
    Value       Argument                Outputs
    ==========  ======================  ==================================
    ``True``    ``"yeah,no,maybe"``     ``yeah``
    ``False``   ``"yeah,no,maybe"``     ``no``
    ``None``    ``"yeah,no,maybe"``     ``maybe``
    ``None``    ``"yeah,no"``           ``"no"`` (converts None to False
                                        if no mapping for None is given)
    ==========  ======================  ==================================
    """
    if arg is None:
        arg = ugettext('yes,no,maybe')
    bits = arg.split(u',')
    if len(bits) < 2:
        return value # Invalid arg.
    yes, no = bits[0], bits[1]
    if len(bits) == 3:
        maybe = bits[2]
    else:
        # No mapping for None given: treat None like False.
        maybe = no
    if value is None:
        return maybe
    if value:
        return yes
    return no
yesno.is_safe = False
###################
# MISC #
###################
def filesizeformat(bytes):
    """
    Formats the value like a 'human-readable' file size (i.e. 13 KB, 4.1 MB,
    102 bytes, etc).
    """
    try:
        bytes = float(bytes)
    except (TypeError,ValueError,UnicodeDecodeError):
        # Unparseable input renders as "0 bytes".
        return ungettext("%(size)d byte", "%(size)d bytes", 0) % {'size': 0}

    # One decimal place, localized.
    filesize_number_format = lambda value: formats.number_format(round(value, 1), 1)

    # Binary (1024-based) thresholds, bytes through petabytes.
    if bytes < 1024:
        return ungettext("%(size)d byte", "%(size)d bytes", bytes) % {'size': bytes}
    if bytes < 1024 * 1024:
        return ugettext("%s KB") % filesize_number_format(bytes / 1024)
    if bytes < 1024 * 1024 * 1024:
        return ugettext("%s MB") % filesize_number_format(bytes / (1024 * 1024))
    if bytes < 1024 * 1024 * 1024 * 1024:
        return ugettext("%s GB") % filesize_number_format(bytes / (1024 * 1024 * 1024))
    if bytes < 1024 * 1024 * 1024 * 1024 * 1024:
        return ugettext("%s TB") % filesize_number_format(bytes / (1024 * 1024 * 1024 * 1024))
    return ugettext("%s PB") % filesize_number_format(bytes / (1024 * 1024 * 1024 * 1024 * 1024))
filesizeformat.is_safe = True
def pluralize(value, arg=u's'):
    """
    Return a plural suffix when the value is not 1 (or not of length 1).

    By default the suffix is ``'s'``; a custom suffix may be given
    (``'es'``), or a ``"singular,plural"`` pair (``'y,ies'``):

    * vote{{ value|pluralize }} -> "1 vote" / "2 votes"
    * class{{ value|pluralize:"es" }} -> "1 class" / "2 classes"
    * cand{{ value|pluralize:"y,ies" }} -> "1 candy" / "2 candies"

    More than one comma in the argument yields an empty string.
    """
    if u',' not in arg:
        # Implicit empty singular suffix.
        arg = u',' + arg
    bits = arg.split(u',')
    if len(bits) > 2:
        return u''
    singular, plural = bits[:2]
    try:
        is_plural = int(value) != 1
    except ValueError: # Invalid string that's not a number.
        return singular
    except TypeError: # Value isn't a string or a number; maybe it's a list?
        try:
            is_plural = len(value) != 1
        except TypeError: # len() of unsized object.
            return singular
    if is_plural:
        return plural
    return singular
pluralize.is_safe = False
def phone2numeric(value):
    """Takes a phone number and converts it in to its numerical equivalent."""
    from django.utils.text import phone2numeric
    return phone2numeric(value)
phone2numeric.is_safe = True


def pprint(value):
    """A wrapper around pprint.pprint -- for debugging, really."""
    from pprint import pformat
    try:
        return pformat(value)
    except Exception, e: # Python 2 syntax; catches anything pformat raises.
        return u"Error in formatting: %s" % force_unicode(e, errors="replace")
pprint.is_safe = True
# Syntax: register.filter(name of filter, callback)
# Register every filter defined above under its function name; only
# 'slice' needs an explicit name because slice_ avoids shadowing the
# builtin.
register.filter(add)
register.filter(addslashes)
register.filter(capfirst)
register.filter(center)
register.filter(cut)
register.filter(date)
register.filter(default)
register.filter(default_if_none)
register.filter(dictsort)
register.filter(dictsortreversed)
register.filter(divisibleby)
register.filter(escape)
register.filter(escapejs)
register.filter(filesizeformat)
register.filter(first)
register.filter(fix_ampersands)
register.filter(floatformat)
register.filter(force_escape)
register.filter(get_digit)
register.filter(iriencode)
register.filter(join)
register.filter(last)
register.filter(length)
register.filter(length_is)
register.filter(linebreaks)
register.filter(linebreaksbr)
register.filter(linenumbers)
register.filter(ljust)
register.filter(lower)
register.filter(make_list)
register.filter(phone2numeric)
register.filter(pluralize)
register.filter(pprint)
register.filter(removetags)
register.filter(random)
register.filter(rjust)
register.filter(safe)
register.filter(safeseq)
register.filter('slice', slice_)
register.filter(slugify)
register.filter(stringformat)
register.filter(striptags)
register.filter(time)
register.filter(timesince)
register.filter(timeuntil)
register.filter(title)
register.filter(truncatewords)
register.filter(truncatewords_html)
register.filter(unordered_list)
register.filter(upper)
register.filter(urlencode)
register.filter(urlize)
register.filter(urlizetrunc)
register.filter(wordcount)
register.filter(wordwrap)
register.filter(yesno)
| bsd-3-clause |
toddeye/home-assistant | tests/components/sensor/test_command_sensor.py | 2 | 2037 | """
tests.components.sensor.command_sensor
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Tests command sensor.
"""
import unittest
import homeassistant.core as ha
from homeassistant.components.sensor import command_sensor
class TestCommandSensorSensor(unittest.TestCase):
    """ Test the command-line sensor platform. """

    def setUp(self):
        # Fresh Home Assistant core instance per test.
        self.hass = ha.HomeAssistant()

    def tearDown(self):
        """ Stop down stuff we started. """
        self.hass.stop()

    def test_setup(self):
        """ Test sensor setup """
        config = {'name': 'Test',
                  'unit_of_measurement': 'in',
                  'command': 'echo 5'}
        devices = []

        def add_dev_callback(devs):
            """ callback to add device """
            for dev in devs:
                devices.append(dev)

        command_sensor.setup_platform(
            self.hass, config, add_dev_callback)

        self.assertEqual(1, len(devices))
        entity = devices[0]
        self.assertEqual('Test', entity.name)
        self.assertEqual('in', entity.unit_of_measurement)
        # The stdout of 'echo 5' becomes the state string.
        self.assertEqual('5', entity.state)

    def test_setup_bad_config(self):
        """ Test setup with a bad (empty) config: platform must refuse it. """
        config = {}
        devices = []

        def add_dev_callback(devs):
            """ callback to add device """
            for dev in devs:
                devices.append(dev)

        self.assertFalse(command_sensor.setup_platform(
            self.hass, config, add_dev_callback))
        self.assertEqual(0, len(devices))

    def test_template(self):
        """ Test command sensor with a value template applied to the output. """
        data = command_sensor.CommandSensorData('echo 50')
        entity = command_sensor.CommandSensor(
            self.hass, data, 'test', 'in', '{{ value | multiply(0.1) }}')
        self.assertEqual(5, float(entity.state))

    def test_bad_command(self):
        """ Test bad command: a failing command leaves value as None. """
        data = command_sensor.CommandSensorData('asdfasdf')
        data.update()
        self.assertEqual(None, data.value)
| mit |
N-Parsons/exercism-python | exercises/acronym/acronym_test.py | 1 | 1345 | import unittest
from acronym import abbreviate
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.7.0
class AcronymTest(unittest.TestCase):
    """Unit tests for the ``abbreviate`` acronym generator."""

    def test_basic(self):
        self.assertEqual(abbreviate('Portable Network Graphics'), 'PNG')

    def test_lowercase_words(self):
        self.assertEqual(abbreviate('Ruby on Rails'), 'ROR')

    def test_punctuation(self):
        self.assertEqual(abbreviate('First In, First Out'), 'FIFO')

    def test_all_caps_words(self):
        # Only the first letter of an all-caps word is kept.
        self.assertEqual(abbreviate('GNU Image Manipulation Program'), 'GIMP')

    def test_punctuation_without_whitespace(self):
        # Hyphens split words just like spaces do.
        self.assertEqual(
            abbreviate('Complementary metal-oxide semiconductor'), 'CMOS')

    def test_very_long_abbreviation(self):
        self.assertEqual(
            abbreviate("Rolling On The Floor Laughing So Hard That "
                       "My Dogs Came Over And Licked Me"), "ROTFLSHTMDCOALM")

    def test_consecutive_delimiters(self):
        self.assertEqual(
            abbreviate('Something - I made up from thin air'), 'SIMUFTA')

    def test_apostrophes(self):
        # An apostrophe does not start a new word.
        self.assertEqual(abbreviate("Halley's Comet"), 'HC')

    def test_underscore_emphasis(self):
        self.assertEqual(abbreviate("The Road _Not_ Taken"), 'TRNT')
if __name__ == '__main__':
unittest.main()
| mit |
nicholaslemay/python_koans | python3/libs/colorama/winterm.py | 523 | 4206 | # Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from . import win32
# from wincon.h
class WinColor(object):
    """Win32 console color indices (the low three bits of an attribute)."""
    BLACK = 0
    BLUE = 1
    GREEN = 2
    CYAN = 3
    RED = 4
    MAGENTA = 5
    YELLOW = 6
    GREY = 7
# from wincon.h
class WinStyle(object):
    """Text intensity bit for the Win32 console attribute byte."""
    NORMAL = 0x00 # dim text, dim background
    BRIGHT = 0x08 # bright text, dim background
class WinTerm(object):
    """Tracks and mutates the Win32 console's color/style/cursor state.

    Wraps the ``win32`` console API: remembers the console's startup
    attribute byte so it can be restored, and exposes foreground /
    background / style setters plus cursor movement and screen clearing.
    """

    def __init__(self):
        # Capture the console's attribute byte at startup so reset_all()
        # can restore it later.
        self._default = win32.GetConsoleScreenBufferInfo(win32.STDOUT).wAttributes
        self.set_attrs(self._default)
        self._default_fore = self._fore
        self._default_back = self._back
        self._default_style = self._style

    def get_attrs(self):
        """Pack fore/back/style back into a Win32 attribute byte."""
        return self._fore + self._back * 16 + self._style

    def set_attrs(self, value):
        """Unpack a Win32 attribute byte into fore/back/style fields."""
        self._fore = value & 7
        self._back = (value >> 4) & 7
        self._style = value & WinStyle.BRIGHT

    def reset_all(self, on_stderr=None):
        """Restore the console to its startup attributes."""
        # NOTE: on_stderr is accepted for call-site symmetry but the reset
        # is applied via the default handle.
        self.set_attrs(self._default)
        self.set_console(attrs=self._default)

    def fore(self, fore=None, on_stderr=False):
        """Set the foreground color (None restores the startup value)."""
        if fore is None:
            fore = self._default_fore
        self._fore = fore
        self.set_console(on_stderr=on_stderr)

    def back(self, back=None, on_stderr=False):
        """Set the background color (None restores the startup value)."""
        if back is None:
            back = self._default_back
        self._back = back
        self.set_console(on_stderr=on_stderr)

    def style(self, style=None, on_stderr=False):
        """Set the intensity bit (None restores the startup value)."""
        if style is None:
            style = self._default_style
        self._style = style
        self.set_console(on_stderr=on_stderr)

    def set_console(self, attrs=None, on_stderr=False):
        """Push the current (or given) attribute byte to the console."""
        if attrs is None:
            attrs = self.get_attrs()
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleTextAttribute(handle, attrs)

    def get_position(self, handle):
        """Return the cursor position, converted to 1-based coordinates."""
        position = win32.GetConsoleScreenBufferInfo(handle).dwCursorPosition
        # Because Windows coordinates are 0-based,
        # and win32.SetConsoleCursorPosition expects 1-based.
        position.X += 1
        position.Y += 1
        return position

    def set_cursor_position(self, position=None, on_stderr=False):
        """Move the cursor to ``position`` (a 1-based (Y, X)/(X, Y) pair)."""
        if position is None:
            # I'm not currently tracking the position, so there is no default.
            # position = self.get_position()
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        win32.SetConsoleCursorPosition(handle, position)

    def cursor_up(self, num_rows=0, on_stderr=False):
        """Move the cursor up by ``num_rows`` lines, keeping the column."""
        if num_rows == 0:
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        position = self.get_position(handle)
        adjusted_position = (position.Y - num_rows, position.X)
        self.set_cursor_position(adjusted_position, on_stderr)

    def erase_data(self, mode=0, on_stderr=False):
        """Clear the screen (ANSI "Erase in Display").

        Only mode 2 (clear the entire screen and home the cursor) is
        implemented; modes 0 (cursor to end) and 1 (cursor to start) are
        accepted but ignored.
        """
        # BUG FIX: callers pass the ANSI params sequence (e.g. ``(2,)``)
        # while the documented default is the plain int 0; the old code did
        # ``mode[0]`` unconditionally and raised TypeError for int/None
        # input. Normalize both spellings here.
        if mode is None:
            mode = 0
        elif isinstance(mode, (list, tuple)):
            mode = mode[0] if mode else 0
        if mode != 2:
            return
        handle = win32.STDOUT
        if on_stderr:
            handle = win32.STDERR
        # here's where we'll home the cursor
        coord_screen = win32.COORD(0,0)
        csbi = win32.GetConsoleScreenBufferInfo(handle)
        # get the number of character cells in the current buffer
        dw_con_size = csbi.dwSize.X * csbi.dwSize.Y
        # fill the entire screen with blanks
        win32.FillConsoleOutputCharacter(handle, ' ', dw_con_size, coord_screen)
        # now set the buffer's attributes accordingly
        win32.FillConsoleOutputAttribute(handle, self.get_attrs(), dw_con_size, coord_screen );
        # put the cursor at (0, 0)
        win32.SetConsoleCursorPosition(handle, (coord_screen.X, coord_screen.Y))
| mit |
valdecdev/odoo | addons/pos_mercury/__openerp__.py | 2 | 1290 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
{
'name': 'Mercury Payment Services',
'version': '1.0',
'category': 'Point of Sale',
'sequence': 6,
'summary': 'Credit card support for Point Of Sale',
'description': """
Allow credit card POS payments
==============================
This module allows customers to pay for their orders with credit
cards. The transactions are processed by Mercury (developed by Wells
Fargo Bank). A Mercury merchant account is necessary. It allows the
following:
* Fast payment by just swiping a credit card while on the payment screen
* Combining of cash payments and credit card payments
* Cashback
* Supported cards: Visa, MasterCard, American Express, Discover
""",
'author': 'Odoo SA',
'depends': ['web', 'barcodes', 'point_of_sale'],
'website': '',
'data': [
'data/pos_mercury_data.xml',
'security/ir.model.access.csv',
'views/pos_mercury_templates.xml',
'views/pos_mercury_views.xml',
'views/pos_mercury_transaction_templates.xml',
],
'demo': [
'data/pos_mercury_demo.xml',
],
'qweb': [
'static/src/xml/pos_mercury.xml',
],
'installable': True,
'auto_install': False,
}
| agpl-3.0 |
denisff/python-for-android | python-modules/zope/zope/interface/common/tests/test_idatetime.py | 50 | 1837 | ##############################################################################
#
# Copyright (c) 2003 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Test for datetime interfaces
$Id: test_idatetime.py 110536 2010-04-06 02:59:44Z tseaver $
"""
import unittest
from zope.interface.verify import verifyObject, verifyClass
from zope.interface.common.idatetime import ITimeDelta, ITimeDeltaClass
from zope.interface.common.idatetime import IDate, IDateClass
from zope.interface.common.idatetime import IDateTime, IDateTimeClass
from zope.interface.common.idatetime import ITime, ITimeClass, ITZInfo
from datetime import timedelta, date, datetime, time, tzinfo
class TestDateTimeInterfaces(unittest.TestCase):
    """Verify that the stdlib datetime types provide the declared interfaces."""

    def test_interfaces(self):
        # Instance-level interface checks, in the same order as before.
        instance_checks = (
            (ITimeDelta, timedelta(minutes=20)),
            (IDate, date(2000, 1, 2)),
            (IDateTime, datetime(2000, 1, 2, 10, 20)),
            (ITime, time(20, 30, 15, 1234)),
            (ITZInfo, tzinfo()),
        )
        for iface, obj in instance_checks:
            verifyObject(iface, obj)
        # Class-level interface checks.
        class_checks = (
            (ITimeDeltaClass, timedelta),
            (IDateClass, date),
            (IDateTimeClass, datetime),
            (ITimeClass, time),
        )
        for iface, klass in class_checks:
            verifyClass(iface, klass)
def test_suite():
    """Return a TestSuite containing all datetime-interface tests.

    Uses ``TestLoader.loadTestsFromTestCase`` instead of the deprecated
    ``unittest.makeSuite`` (removed in Python 3.13); behavior is identical.
    """
    suite = unittest.TestSuite()
    suite.addTest(
        unittest.TestLoader().loadTestsFromTestCase(TestDateTimeInterfaces))
    return suite
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
ujac81/PiBlaster | Pi/PyBlaster/testing/led_button_test.py | 1 | 6265 | """ led_button_test.py -- test suite for LED push buttons
@Author Ulrich Jansen <ulrich.jansen@rwth-aachen.de>
"""
import Queue
import RPi.GPIO as GPIO
import threading
import time
# BCM GPIO pin numbers (GPIO.setmode(GPIO.BCM) is used below) for the five
# status LEDs and their matching push buttons.
LED_GREEN = 2
LED_YELLOW = 3
LED_RED = 4
LED_BLUE = 17
LED_WHITE = 27
BUTTON_GREEN = 14
BUTTON_YELLOW = 15
BUTTON_RED = 18
BUTTON_BLUE = 23
BUTTON_WHITE = 24
class LED:
    """Driver for the five status LEDs, one GPIO output pin each.

    Tracks the last commanded state per LED so a call with state=-1 (or any
    value other than 0/1) toggles the LED.
    """

    def __init__(self):
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        self.state_green = False
        self.state_yellow = False
        self.state_red = False
        self.state_blue = False
        self.state_white = False

    def reset_leds(self):
        """Assign GPIO ports and turn off all LEDs."""
        for pin in (LED_GREEN, LED_YELLOW, LED_RED, LED_BLUE, LED_WHITE):
            GPIO.setup(pin, GPIO.OUT)
        self.set_leds(0)

    def set_led(self, num, state):
        """Set LED *num* (0=green .. 4=white) to *state*; ignore bad nums."""
        setters = (self.set_led_green, self.set_led_yellow, self.set_led_red,
                   self.set_led_blue, self.set_led_white)
        if 0 <= num < len(setters):
            setters[num](state)

    def set_leds(self, state):
        """Apply *state* to all five LEDs."""
        for led in range(5):
            self.set_led(led, state)

    def _switch(self, attr, pin, state):
        # Shared implementation for the per-color setters below (previously
        # five copy-pasted bodies): 1 -> on, 0 -> off, else toggle.
        if state == 1:
            do_state = True
        elif state == 0:
            do_state = False
        else:
            do_state = not getattr(self, attr)
        setattr(self, attr, do_state)
        GPIO.output(pin, do_state)

    def set_led_green(self, state=-1):
        self._switch('state_green', LED_GREEN, state)

    def set_led_yellow(self, state=-1):
        self._switch('state_yellow', LED_YELLOW, state)

    def set_led_red(self, state=-1):
        self._switch('state_red', LED_RED, state)

    def set_led_blue(self, state=-1):
        self._switch('state_blue', LED_BLUE, state)

    def set_led_white(self, state=-1):
        self._switch('state_white', LED_WHITE, state)

    def cleanup(self):
        """Turn all LEDs off and release the GPIO ports."""
        self.set_leds(0)
        GPIO.cleanup()
class ButtonThread(threading.Thread):
    """Poll one GPIO button pin and enqueue rising-edge press events.

    Each event is a ``[pin, name]`` list pushed onto the shared *queue*
    under *queue_lock*.
    """

    def __init__(self, root, pin, name, queue, queue_lock):
        """Remember pin/queue config; thread is started by the caller.

        :param root: main object (unused for now)  # TODO: need main
        """
        threading.Thread.__init__(self)
        self.root = root  # TODO: need main
        self.pin = pin
        self.name = name
        self.queue = queue
        self.queue_lock = queue_lock
        self.keep_run = 1  # TODO: clear via main to stop the thread

    def run(self):
        """Poll the pin every 10 ms; enqueue [pin, name] on a rising edge."""
        GPIO.setup(self.pin, GPIO.IN)
        prev_in = 0
        while self.keep_run:  # TODO: via main
            time.sleep(0.01)
            inpt = GPIO.input(self.pin)
            if (not prev_in) and inpt:
                self.queue_lock.acquire()
                self.queue.put([self.pin, self.name])
                self.queue_lock.release()
            prev_in = inpt
        # end run()

    def queue_not_empty(self):
        """Return True if at least one button event is waiting."""
        return not self.queue.empty()

    def read_queue(self):
        """Pop and return the oldest [pin, name] event, or None if empty.

        The original left this as an unfinished stub (a bare
        ``result = None`` with no return); it now drains one event under
        the queue lock.
        """
        result = None
        self.queue_lock.acquire()
        if not self.queue.empty():
            result = self.queue.get()
        self.queue_lock.release()
        return result
class Buttons:
    """Owns one ButtonThread per push button plus the shared event queue."""

    def __init__(self, root):
        """Create (but do not start) one polling thread per button."""
        self.root = root
        self.queue = Queue.Queue()
        self.queue_lock = threading.Lock()
        self.btn_threads = []
        # One thread per (pin, color) pair; previously five copy-pasted
        # constructor calls.
        for pin, name in ((BUTTON_GREEN, "green"),
                          (BUTTON_YELLOW, "yellow"),
                          (BUTTON_RED, "red"),
                          (BUTTON_BLUE, "blue"),
                          (BUTTON_WHITE, "white")):
            self.btn_threads.append(
                ButtonThread(root, pin, name, self.queue, self.queue_lock))

    def start_threads(self):
        """Start all button polling threads."""
        for t in self.btn_threads:
            t.start()

    def get_last_button_pushed(self):
        """Return the newest [pin, name] event, or None if nothing queued.

        Drains the queue so stale events are discarded.  The original left
        this method as an empty stub.
        """
        result = None
        self.queue_lock.acquire()
        while not self.queue.empty():
            result = self.queue.get()
        self.queue_lock.release()
        return result
def main():
    """Standalone LED/button test: toggle each LED on its button's rising edge."""
    led = LED()
    led.reset_leds()

    buttons = Buttons(root=None)
    buttons.start_threads()  # threads also GPIO.setup() the button pins

    # BUG FIX: the previous-input flags must survive across polling
    # iterations.  They were re-initialized to 0 *inside* the while loop,
    # so a held button re-triggered on every 50 ms poll instead of only
    # on the rising edge.
    prev_in_green = 0
    prev_in_yellow = 0
    prev_in_red = 0
    prev_in_blue = 0
    prev_in_white = 0

    try:
        while 1:
            time.sleep(50. / 1000.)  # 50 ms poll interval

            inpt = GPIO.input(BUTTON_GREEN)
            if (not prev_in_green) and inpt:
                print("Green button pressed")
                led.set_led_green()
            prev_in_green = inpt

            inpt = GPIO.input(BUTTON_YELLOW)
            if (not prev_in_yellow) and inpt:
                print("Yellow button pressed")
                led.set_led_yellow()
            prev_in_yellow = inpt

            inpt = GPIO.input(BUTTON_RED)
            if (not prev_in_red) and inpt:
                print("Red button pressed")
                led.set_led_red()
            prev_in_red = inpt

            inpt = GPIO.input(BUTTON_BLUE)
            if (not prev_in_blue) and inpt:
                print("Blue button pressed")
                led.set_led_blue()
            prev_in_blue = inpt

            inpt = GPIO.input(BUTTON_WHITE)
            if (not prev_in_white) and inpt:
                print("White button pressed")
                led.set_led_white()
            prev_in_white = inpt
    except KeyboardInterrupt:
        # Previously led.cleanup() sat unreachable after `while 1`; run it
        # on Ctrl-C so the GPIO ports are released.
        pass
    led.cleanup()
# Script entry point: run the interactive LED/button hardware test.
if __name__ == '__main__':
    main()
| gpl-3.0 |
chaffra/sympy | sympy/core/tests/test_expr.py | 5 | 58924 | from __future__ import division
from sympy import (Add, Basic, S, Symbol, Wild, Float, Integer, Rational, I,
sin, cos, tan, exp, log, nan, oo, sqrt, symbols, Integral, sympify,
WildFunction, Poly, Function, Derivative, Number, pi, NumberSymbol, zoo,
Piecewise, Mul, Pow, nsimplify, ratsimp, trigsimp, radsimp, powsimp,
simplify, together, collect, factorial, apart, combsimp, factor, refine,
cancel, Tuple, default_sort_key, DiracDelta, gamma, Dummy, Sum, E,
exp_polar, expand, diff, O, Heaviside, Si, Max, UnevaluatedExpr,
integrate)
from sympy.core.function import AppliedUndef
from sympy.core.compatibility import range
from sympy.physics.secondquant import FockState
from sympy.physics.units import meter
from sympy.series.formal import FormalPowerSeries
from sympy.utilities.pytest import raises, XFAIL
from sympy.abc import a, b, c, n, t, u, x, y, z
class DummyNumber(object):
    """
    Minimal implementation of a number that works with SymPy.

    If one has a Number class (e.g. Sage Integer, or some other custom class)
    that one wants to work well with SymPy, one has to implement at least the
    methods of this class DummyNumber, resp. its subclasses I5 and F1_1.

    Basically, one just needs to implement either __int__() or __float__() and
    then one needs to make sure that the class works with Python integers and
    with itself.
    """

    def __radd__(self, other):
        if not isinstance(other, (int, float)):
            return NotImplemented
        return other + self.number

    def __truediv__(self, other):
        return self.__div__(other)

    def __rtruediv__(self, other):
        return self.__rdiv__(other)

    def __add__(self, other):
        if not isinstance(other, (int, float, DummyNumber)):
            return NotImplemented
        return self.number + other

    def __rsub__(self, other):
        if not isinstance(other, (int, float)):
            return NotImplemented
        return other - self.number

    def __sub__(self, other):
        if not isinstance(other, (int, float, DummyNumber)):
            return NotImplemented
        return self.number - other

    def __rmul__(self, other):
        if not isinstance(other, (int, float)):
            return NotImplemented
        return other * self.number

    def __mul__(self, other):
        if not isinstance(other, (int, float, DummyNumber)):
            return NotImplemented
        return self.number * other

    def __rdiv__(self, other):
        if not isinstance(other, (int, float)):
            return NotImplemented
        return other / self.number

    def __div__(self, other):
        if not isinstance(other, (int, float, DummyNumber)):
            return NotImplemented
        return self.number / other

    def __rpow__(self, other):
        if not isinstance(other, (int, float)):
            return NotImplemented
        return other ** self.number

    def __pow__(self, other):
        if not isinstance(other, (int, float, DummyNumber)):
            return NotImplemented
        return self.number ** other

    def __pos__(self):
        return self.number

    def __neg__(self):
        return -self.number
class I5(DummyNumber):
    """DummyNumber that converts to a Python int (implements __int__ only)."""
    number = 5

    def __int__(self):
        return self.number
class F1_1(DummyNumber):
    """DummyNumber that converts to a Python float (implements __float__ only)."""
    number = 1.1

    def __float__(self):
        return self.number
# Shared fixture instances for the operator smoke tests below.
i5 = I5()
f1_1 = F1_1()

# basic sympy objects
basic_objs = [
    Rational(2),
    Float("1.3"),
    x,
    y,
    pow(x, y)*y,
]

# all supported objects
all_objs = basic_objs + [
    5,
    5.5,
    i5,
    f1_1
]
def dotest(s):
    """Apply the binary callable *s* to every ordered pair of test objects.

    Returns True so callers can simply ``assert dotest(...)``.
    """
    for lhs in all_objs:
        for rhs in all_objs:
            s(lhs, rhs)
    return True
def test_basic():
    """Smoke-test all arithmetic operators over every pair of test objects."""
    def j(a, b):
        # Results are deliberately discarded and x is rebound each time --
        # the test only checks that no operator combination raises.
        x = a
        x = +a
        x = -a
        x = a + b
        x = a - b
        x = a*b
        x = a/b
        x = a**b
    assert dotest(j)
def test_ibasic():
    """Smoke-test the in-place arithmetic operators over every object pair."""
    def s(a, b):
        # x is reset to a before each augmented assignment so every operator
        # starts from the same left-hand value; results are discarded.
        x = a
        x += b
        x = a
        x -= b
        x = a
        x *= b
        x = a
        x /= b
    assert dotest(s)
def test_relational():
from sympy import Lt
assert (pi < 3) is S.false
assert (pi <= 3) is S.false
assert (pi > 3) is S.true
assert (pi >= 3) is S.true
assert (-pi < 3) is S.true
assert (-pi <= 3) is S.true
assert (-pi > 3) is S.false
assert (-pi >= 3) is S.false
r = Symbol('r', real=True)
assert (r - 2 < r - 3) is S.false
assert Lt(x + I, x + I + 2).func == Lt # issue 8288
def test_relational_assumptions():
from sympy import Lt, Gt, Le, Ge
m1 = Symbol("m1", nonnegative=False)
m2 = Symbol("m2", positive=False)
m3 = Symbol("m3", nonpositive=False)
m4 = Symbol("m4", negative=False)
assert (m1 < 0) == Lt(m1, 0)
assert (m2 <= 0) == Le(m2, 0)
assert (m3 > 0) == Gt(m3, 0)
assert (m4 >= 0) == Ge(m4, 0)
m1 = Symbol("m1", nonnegative=False, real=True)
m2 = Symbol("m2", positive=False, real=True)
m3 = Symbol("m3", nonpositive=False, real=True)
m4 = Symbol("m4", negative=False, real=True)
assert (m1 < 0) is S.true
assert (m2 <= 0) is S.true
assert (m3 > 0) is S.true
assert (m4 >= 0) is S.true
m1 = Symbol("m1", negative=True)
m2 = Symbol("m2", nonpositive=True)
m3 = Symbol("m3", positive=True)
m4 = Symbol("m4", nonnegative=True)
assert (m1 < 0) is S.true
assert (m2 <= 0) is S.true
assert (m3 > 0) is S.true
assert (m4 >= 0) is S.true
m1 = Symbol("m1", negative=False, real=True)
m2 = Symbol("m2", nonpositive=False, real=True)
m3 = Symbol("m3", positive=False, real=True)
m4 = Symbol("m4", nonnegative=False, real=True)
assert (m1 < 0) is S.false
assert (m2 <= 0) is S.false
assert (m3 > 0) is S.false
assert (m4 >= 0) is S.false
def test_relational_noncommutative():
from sympy import Lt, Gt, Le, Ge
A, B = symbols('A,B', commutative=False)
assert (A < B) == Lt(A, B)
assert (A <= B) == Le(A, B)
assert (A > B) == Gt(A, B)
assert (A >= B) == Ge(A, B)
def test_basic_nostr():
for obj in basic_objs:
raises(TypeError, lambda: obj + '1')
raises(TypeError, lambda: obj - '1')
if obj == 2:
assert obj * '1' == '11'
else:
raises(TypeError, lambda: obj * '1')
raises(TypeError, lambda: obj / '1')
raises(TypeError, lambda: obj ** '1')
def test_series_expansion_for_uniform_order():
assert (1/x + y + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + y + x).series(x, 0, 1) == 1/x + y + O(x)
assert (1/x + 1 + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + 1 + x).series(x, 0, 1) == 1/x + 1 + O(x)
assert (1/x + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + y + y*x + x).series(x, 0, 0) == 1/x + O(1, x)
assert (1/x + y + y*x + x).series(x, 0, 1) == 1/x + y + O(x)
def test_leadterm():
assert (3 + 2*x**(log(3)/log(2) - 1)).leadterm(x) == (3, 0)
assert (1/x**2 + 1 + x + x**2).leadterm(x)[1] == -2
assert (1/x + 1 + x + x**2).leadterm(x)[1] == -1
assert (x**2 + 1/x).leadterm(x)[1] == -1
assert (1 + x**2).leadterm(x)[1] == 0
assert (x + 1).leadterm(x)[1] == 0
assert (x + x**2).leadterm(x)[1] == 1
assert (x**2).leadterm(x)[1] == 2
def test_as_leading_term():
assert (3 + 2*x**(log(3)/log(2) - 1)).as_leading_term(x) == 3
assert (1/x**2 + 1 + x + x**2).as_leading_term(x) == 1/x**2
assert (1/x + 1 + x + x**2).as_leading_term(x) == 1/x
assert (x**2 + 1/x).as_leading_term(x) == 1/x
assert (1 + x**2).as_leading_term(x) == 1
assert (x + 1).as_leading_term(x) == 1
assert (x + x**2).as_leading_term(x) == x
assert (x**2).as_leading_term(x) == x**2
assert (x + oo).as_leading_term(x) == oo
def test_leadterm2():
assert (x*cos(1)*cos(1 + sin(1)) + sin(1 + sin(1))).leadterm(x) == \
(sin(1 + sin(1)), 0)
def test_leadterm3():
assert (y + z + x).leadterm(x) == (y + z, 0)
def test_as_leading_term2():
assert (x*cos(1)*cos(1 + sin(1)) + sin(1 + sin(1))).as_leading_term(x) == \
sin(1 + sin(1))
def test_as_leading_term3():
assert (2 + pi + x).as_leading_term(x) == 2 + pi
assert (2*x + pi*x + x**2).as_leading_term(x) == (2 + pi)*x
def test_as_leading_term4():
# see issue 6843
n = Symbol('n', integer=True, positive=True)
r = -n**3/(2*n**2 + 4*n + 2) - n**2/(n**2 + 2*n + 1) + \
n**2/(n + 1) - n/(2*n**2 + 4*n + 2) + n/(n*x + x) + 2*n/(n + 1) - \
1 + 1/(n*x + x) + 1/(n + 1) - 1/x
assert r.as_leading_term(x).cancel() == n/2
def test_as_leading_term_stub():
class foo(Function):
pass
assert foo(1/x).as_leading_term(x) == foo(1/x)
assert foo(1).as_leading_term(x) == foo(1)
raises(NotImplementedError, lambda: foo(x).as_leading_term(x))
def test_as_leading_term_deriv_integral():
# related to issue 11313
assert Derivative(x ** 3, x).as_leading_term(x) == 3*x**2
assert Derivative(x ** 3, y).as_leading_term(x) == 0
assert Integral(x ** 3, x).as_leading_term(x) == x**4/4
assert Integral(x ** 3, y).as_leading_term(x) == y*x**3
assert Derivative(exp(x), x).as_leading_term(x) == 1
assert Derivative(log(x), x).as_leading_term(x) == (1/x).as_leading_term(x)
def test_atoms():
assert x.atoms() == {x}
assert (1 + x).atoms() == {x, S(1)}
assert (1 + 2*cos(x)).atoms(Symbol) == {x}
assert (1 + 2*cos(x)).atoms(Symbol, Number) == {S(1), S(2), x}
assert (2*(x**(y**x))).atoms() == {S(2), x, y}
assert Rational(1, 2).atoms() == {S.Half}
assert Rational(1, 2).atoms(Symbol) == set([])
assert sin(oo).atoms(oo) == set()
assert Poly(0, x).atoms() == {S.Zero}
assert Poly(1, x).atoms() == {S.One}
assert Poly(x, x).atoms() == {x}
assert Poly(x, x, y).atoms() == {x}
assert Poly(x + y, x, y).atoms() == {x, y}
assert Poly(x + y, x, y, z).atoms() == {x, y}
assert Poly(x + y*t, x, y, z).atoms() == {t, x, y}
assert (I*pi).atoms(NumberSymbol) == {pi}
assert (I*pi).atoms(NumberSymbol, I) == \
(I*pi).atoms(I, NumberSymbol) == {pi, I}
assert exp(exp(x)).atoms(exp) == {exp(exp(x)), exp(x)}
assert (1 + x*(2 + y) + exp(3 + z)).atoms(Add) == \
{1 + x*(2 + y) + exp(3 + z), 2 + y, 3 + z}
# issue 6132
f = Function('f')
e = (f(x) + sin(x) + 2)
assert e.atoms(AppliedUndef) == \
{f(x)}
assert e.atoms(AppliedUndef, Function) == \
{f(x), sin(x)}
assert e.atoms(Function) == \
{f(x), sin(x)}
assert e.atoms(AppliedUndef, Number) == \
{f(x), S(2)}
assert e.atoms(Function, Number) == \
{S(2), sin(x), f(x)}
def test_is_polynomial():
k = Symbol('k', nonnegative=True, integer=True)
assert Rational(2).is_polynomial(x, y, z) is True
assert (S.Pi).is_polynomial(x, y, z) is True
assert x.is_polynomial(x) is True
assert x.is_polynomial(y) is True
assert (x**2).is_polynomial(x) is True
assert (x**2).is_polynomial(y) is True
assert (x**(-2)).is_polynomial(x) is False
assert (x**(-2)).is_polynomial(y) is True
assert (2**x).is_polynomial(x) is False
assert (2**x).is_polynomial(y) is True
assert (x**k).is_polynomial(x) is False
assert (x**k).is_polynomial(k) is False
assert (x**x).is_polynomial(x) is False
assert (k**k).is_polynomial(k) is False
assert (k**x).is_polynomial(k) is False
assert (x**(-k)).is_polynomial(x) is False
assert ((2*x)**k).is_polynomial(x) is False
assert (x**2 + 3*x - 8).is_polynomial(x) is True
assert (x**2 + 3*x - 8).is_polynomial(y) is True
assert (x**2 + 3*x - 8).is_polynomial() is True
assert sqrt(x).is_polynomial(x) is False
assert (sqrt(x)**3).is_polynomial(x) is False
assert (x**2 + 3*x*sqrt(y) - 8).is_polynomial(x) is True
assert (x**2 + 3*x*sqrt(y) - 8).is_polynomial(y) is False
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(2)).is_polynomial() is True
assert ((x**2)*(y**2) + x*(y**2) + y*x + exp(x)).is_polynomial() is False
assert (
(x**2)*(y**2) + x*(y**2) + y*x + exp(2)).is_polynomial(x, y) is True
assert (
(x**2)*(y**2) + x*(y**2) + y*x + exp(x)).is_polynomial(x, y) is False
def test_is_rational_function():
assert Integer(1).is_rational_function() is True
assert Integer(1).is_rational_function(x) is True
assert Rational(17, 54).is_rational_function() is True
assert Rational(17, 54).is_rational_function(x) is True
assert (12/x).is_rational_function() is True
assert (12/x).is_rational_function(x) is True
assert (x/y).is_rational_function() is True
assert (x/y).is_rational_function(x) is True
assert (x/y).is_rational_function(x, y) is True
assert (x**2 + 1/x/y).is_rational_function() is True
assert (x**2 + 1/x/y).is_rational_function(x) is True
assert (x**2 + 1/x/y).is_rational_function(x, y) is True
assert (sin(y)/x).is_rational_function() is False
assert (sin(y)/x).is_rational_function(y) is False
assert (sin(y)/x).is_rational_function(x) is True
assert (sin(y)/x).is_rational_function(x, y) is False
assert (S.NaN).is_rational_function() is False
assert (S.Infinity).is_rational_function() is False
assert (-S.Infinity).is_rational_function() is False
assert (S.ComplexInfinity).is_rational_function() is False
def test_is_algebraic_expr():
assert sqrt(3).is_algebraic_expr(x) is True
assert sqrt(3).is_algebraic_expr() is True
eq = ((1 + x**2)/(1 - y**2))**(S(1)/3)
assert eq.is_algebraic_expr(x) is True
assert eq.is_algebraic_expr(y) is True
assert (sqrt(x) + y**(S(2)/3)).is_algebraic_expr(x) is True
assert (sqrt(x) + y**(S(2)/3)).is_algebraic_expr(y) is True
assert (sqrt(x) + y**(S(2)/3)).is_algebraic_expr() is True
assert (cos(y)/sqrt(x)).is_algebraic_expr() is False
assert (cos(y)/sqrt(x)).is_algebraic_expr(x) is True
assert (cos(y)/sqrt(x)).is_algebraic_expr(y) is False
assert (cos(y)/sqrt(x)).is_algebraic_expr(x, y) is False
def test_SAGE1():
#see https://github.com/sympy/sympy/issues/3346
class MyInt:
def _sympy_(self):
return Integer(5)
m = MyInt()
e = Rational(2)*m
assert e == 10
raises(TypeError, lambda: Rational(2)*MyInt)
def test_SAGE2():
class MyInt(object):
def __int__(self):
return 5
assert sympify(MyInt()) == 5
e = Rational(2)*MyInt()
assert e == 10
raises(TypeError, lambda: Rational(2)*MyInt)
def test_SAGE3():
class MySymbol:
def __rmul__(self, other):
return ('mys', other, self)
o = MySymbol()
e = x*o
assert e == ('mys', x, o)
def test_len():
e = x*y
assert len(e.args) == 2
e = x + y + z
assert len(e.args) == 3
def test_doit():
a = Integral(x**2, x)
assert isinstance(a.doit(), Integral) is False
assert isinstance(a.doit(integrals=True), Integral) is False
assert isinstance(a.doit(integrals=False), Integral) is True
assert (2*Integral(x, x)).doit() == x**2
def test_attribute_error():
raises(AttributeError, lambda: x.cos())
raises(AttributeError, lambda: x.sin())
raises(AttributeError, lambda: x.exp())
def test_args():
assert (x*y).args in ((x, y), (y, x))
assert (x + y).args in ((x, y), (y, x))
assert (x*y + 1).args in ((x*y, 1), (1, x*y))
assert sin(x*y).args == (x*y,)
assert sin(x*y).args[0] == x*y
assert (x**y).args == (x, y)
assert (x**y).args[0] == x
assert (x**y).args[1] == y
def test_noncommutative_expand_issue_3757():
A, B, C = symbols('A,B,C', commutative=False)
assert A*B - B*A != 0
assert (A*(A + B)*B).expand() == A**2*B + A*B**2
assert (A*(A + B + C)*B).expand() == A**2*B + A*B**2 + A*C*B
def test_as_numer_denom():
a, b, c = symbols('a, b, c')
assert nan.as_numer_denom() == (nan, 1)
assert oo.as_numer_denom() == (oo, 1)
assert (-oo).as_numer_denom() == (-oo, 1)
assert zoo.as_numer_denom() == (zoo, 1)
assert (-zoo).as_numer_denom() == (zoo, 1)
assert x.as_numer_denom() == (x, 1)
assert (1/x).as_numer_denom() == (1, x)
assert (x/y).as_numer_denom() == (x, y)
assert (x/2).as_numer_denom() == (x, 2)
assert (x*y/z).as_numer_denom() == (x*y, z)
assert (x/(y*z)).as_numer_denom() == (x, y*z)
assert Rational(1, 2).as_numer_denom() == (1, 2)
assert (1/y**2).as_numer_denom() == (1, y**2)
assert (x/y**2).as_numer_denom() == (x, y**2)
assert ((x**2 + 1)/y).as_numer_denom() == (x**2 + 1, y)
assert (x*(y + 1)/y**7).as_numer_denom() == (x*(y + 1), y**7)
assert (x**-2).as_numer_denom() == (1, x**2)
assert (a/x + b/2/x + c/3/x).as_numer_denom() == \
(6*a + 3*b + 2*c, 6*x)
assert (a/x + b/2/x + c/3/y).as_numer_denom() == \
(2*c*x + y*(6*a + 3*b), 6*x*y)
assert (a/x + b/2/x + c/.5/x).as_numer_denom() == \
(2*a + b + 4.0*c, 2*x)
# this should take no more than a few seconds
assert int(log(Add(*[Dummy()/i/x for i in range(1, 705)]
).as_numer_denom()[1]/x).n(4)) == 705
for i in [S.Infinity, S.NegativeInfinity, S.ComplexInfinity]:
assert (i + x/3).as_numer_denom() == \
(x + i, 3)
assert (S.Infinity + x/3 + y/4).as_numer_denom() == \
(4*x + 3*y + S.Infinity, 12)
assert (oo*x + zoo*y).as_numer_denom() == \
(zoo*y + oo*x, 1)
A, B, C = symbols('A,B,C', commutative=False)
assert (A*B*C**-1).as_numer_denom() == (A*B*C**-1, 1)
assert (A*B*C**-1/x).as_numer_denom() == (A*B*C**-1, x)
assert (C**-1*A*B).as_numer_denom() == (C**-1*A*B, 1)
assert (C**-1*A*B/x).as_numer_denom() == (C**-1*A*B, x)
assert ((A*B*C)**-1).as_numer_denom() == ((A*B*C)**-1, 1)
assert ((A*B*C)**-1/x).as_numer_denom() == ((A*B*C)**-1, x)
def test_as_independent():
    """Tests for Expr.as_independent: splitting off the part free of a symbol."""
    assert S.Zero.as_independent(x, as_Add=True) == (0, 0)
    assert S.Zero.as_independent(x, as_Add=False) == (0, 0)
    assert (2*x*sin(x) + y + x).as_independent(x) == (y, x + 2*x*sin(x))
    assert (2*x*sin(x) + y + x).as_independent(y) == (x + 2*x*sin(x), y)
    assert (2*x*sin(x) + y + x).as_independent(x, y) == (0, y + x + 2*x*sin(x))

    assert (x*sin(x)*cos(y)).as_independent(x) == (cos(y), x*sin(x))
    assert (x*sin(x)*cos(y)).as_independent(y) == (x*sin(x), cos(y))
    assert (x*sin(x)*cos(y)).as_independent(x, y) == (1, x*sin(x)*cos(y))

    assert (sin(x)).as_independent(x) == (1, sin(x))
    assert (sin(x)).as_independent(y) == (sin(x), 1)
    assert (2*sin(x)).as_independent(x) == (2, sin(x))
    assert (2*sin(x)).as_independent(y) == (2*sin(x), 1)

    # issue 4903 = 1766b
    n1, n2, n3 = symbols('n1 n2 n3', commutative=False)
    assert (n1 + n1*n2).as_independent(n2) == (n1, n1*n2)
    assert (n2*n1 + n1*n2).as_independent(n2) == (0, n1*n2 + n2*n1)
    assert (n1*n2*n1).as_independent(n2) == (n1, n2*n1)
    assert (n1*n2*n1).as_independent(n1) == (1, n1*n2*n1)

    assert (3*x).as_independent(x, as_Add=True) == (0, 3*x)
    assert (3*x).as_independent(x, as_Add=False) == (3, x)
    assert (3 + x).as_independent(x, as_Add=True) == (3, x)
    assert (3 + x).as_independent(x, as_Add=False) == (1, 3 + x)

    # issue 5479
    assert (3*x).as_independent(Symbol) == (3, x)

    # issue 5648
    assert (n1*x*y).as_independent(x) == (n1*y, x)
    assert ((x + n1)*(x - y)).as_independent(x) == (1, (x + n1)*(x - y))
    assert ((x + n1)*(x - y)).as_independent(y) == (x + n1, x - y)
    assert (DiracDelta(x - n1)*DiracDelta(x - y)).as_independent(x) \
        == (1, DiracDelta(x - n1)*DiracDelta(x - y))
    assert (x*y*n1*n2*n3).as_independent(n2) == (x*y*n1, n2*n3)
    assert (x*y*n1*n2*n3).as_independent(n1) == (x*y, n1*n2*n3)
    assert (x*y*n1*n2*n3).as_independent(n3) == (x*y*n1*n2, n3)
    assert (DiracDelta(x - n1)*DiracDelta(y - n1)*DiracDelta(x - n2)).as_independent(y) == \
        (DiracDelta(x - n1)*DiracDelta(x - n2), DiracDelta(y - n1))

    # issue 5784
    assert (x + Integral(x, (x, 1, 2))).as_independent(x, strict=True) == \
        (Integral(x, (x, 1, 2)), x)

    eq = Add(x, -x, 2, -3, evaluate=False)
    assert eq.as_independent(x) == (-1, Add(x, -x, evaluate=False))
    eq = Mul(x, 1/x, 2, -3, evaluate=False)
    # BUG FIX: this comparison was missing its `assert`, so the check was a
    # silent no-op.
    assert eq.as_independent(x) == (-6, Mul(x, 1/x, evaluate=False))

    assert (x*y).as_independent(z, as_Add=True) == (x*y, 0)
@XFAIL
def test_call_2():
# TODO UndefinedFunction does not subclass Expr
f = Function('f')
assert (2*f)(x) == 2*f(x)
def test_replace():
f = log(sin(x)) + tan(sin(x**2))
assert f.replace(sin, cos) == log(cos(x)) + tan(cos(x**2))
assert f.replace(
sin, lambda a: sin(2*a)) == log(sin(2*x)) + tan(sin(2*x**2))
a = Wild('a')
b = Wild('b')
assert f.replace(sin(a), cos(a)) == log(cos(x)) + tan(cos(x**2))
assert f.replace(
sin(a), lambda a: sin(2*a)) == log(sin(2*x)) + tan(sin(2*x**2))
# test exact
assert (2*x).replace(a*x + b, b - a, exact=True) == 2*x
assert (2*x).replace(a*x + b, b - a) == 2/x
assert (2*x).replace(a*x + b, lambda a, b: b - a, exact=True) == 2*x
assert (2*x).replace(a*x + b, lambda a, b: b - a) == 2/x
g = 2*sin(x**3)
assert g.replace(
lambda expr: expr.is_Number, lambda expr: expr**2) == 4*sin(x**9)
assert cos(x).replace(cos, sin, map=True) == (sin(x), {cos(x): sin(x)})
assert sin(x).replace(cos, sin) == sin(x)
cond, func = lambda x: x.is_Mul, lambda x: 2*x
assert (x*y).replace(cond, func, map=True) == (2*x*y, {x*y: 2*x*y})
assert (x*(1 + x*y)).replace(cond, func, map=True) == \
(2*x*(2*x*y + 1), {x*(2*x*y + 1): 2*x*(2*x*y + 1), x*y: 2*x*y})
assert (y*sin(x)).replace(sin, lambda expr: sin(expr)/y, map=True) == \
(sin(x), {sin(x): sin(x)/y})
# if not simultaneous then y*sin(x) -> y*sin(x)/y = sin(x) -> sin(x)/y
assert (y*sin(x)).replace(sin, lambda expr: sin(expr)/y,
simultaneous=False) == sin(x)/y
assert (x**2 + O(x**3)).replace(Pow, lambda b, e: b**e/e) == O(1, x)
assert (x**2 + O(x**3)).replace(Pow, lambda b, e: b**e/e,
simultaneous=False) == x**2/2 + O(x**3)
assert (x*(x*y + 3)).replace(lambda x: x.is_Mul, lambda x: 2 + x) == \
x*(x*y + 5) + 2
e = (x*y + 1)*(2*x*y + 1) + 1
assert e.replace(cond, func, map=True) == (
2*((2*x*y + 1)*(4*x*y + 1)) + 1,
{2*x*y: 4*x*y, x*y: 2*x*y, (2*x*y + 1)*(4*x*y + 1):
2*((2*x*y + 1)*(4*x*y + 1))})
assert x.replace(x, y) == y
assert (x + 1).replace(1, 2) == x + 2
# https://groups.google.com/forum/#!topic/sympy/8wCgeC95tz0
n1, n2, n3 = symbols('n1:4', commutative=False)
f = Function('f')
assert (n1*f(n2)).replace(f, lambda x: x) == n1*n2
assert (n3*f(n2)).replace(f, lambda x: x) == n3*n2
def test_find():
expr = (x + y + 2 + sin(3*x))
assert expr.find(lambda u: u.is_Integer) == {S(2), S(3)}
assert expr.find(lambda u: u.is_Symbol) == {x, y}
assert expr.find(lambda u: u.is_Integer, group=True) == {S(2): 1, S(3): 1}
assert expr.find(lambda u: u.is_Symbol, group=True) == {x: 2, y: 1}
assert expr.find(Integer) == {S(2), S(3)}
assert expr.find(Symbol) == {x, y}
assert expr.find(Integer, group=True) == {S(2): 1, S(3): 1}
assert expr.find(Symbol, group=True) == {x: 2, y: 1}
a = Wild('a')
expr = sin(sin(x)) + sin(x) + cos(x) + x
assert expr.find(lambda u: type(u) is sin) == {sin(x), sin(sin(x))}
assert expr.find(
lambda u: type(u) is sin, group=True) == {sin(x): 2, sin(sin(x)): 1}
assert expr.find(sin(a)) == {sin(x), sin(sin(x))}
assert expr.find(sin(a), group=True) == {sin(x): 2, sin(sin(x)): 1}
assert expr.find(sin) == {sin(x), sin(sin(x))}
assert expr.find(sin, group=True) == {sin(x): 2, sin(sin(x)): 1}
def test_count():
expr = (x + y + 2 + sin(3*x))
assert expr.count(lambda u: u.is_Integer) == 2
assert expr.count(lambda u: u.is_Symbol) == 3
assert expr.count(Integer) == 2
assert expr.count(Symbol) == 3
assert expr.count(2) == 1
a = Wild('a')
assert expr.count(sin) == 1
assert expr.count(sin(a)) == 1
assert expr.count(lambda u: type(u) is sin) == 1
def test_has_basics():
f = Function('f')
g = Function('g')
p = Wild('p')
assert sin(x).has(x)
assert sin(x).has(sin)
assert not sin(x).has(y)
assert not sin(x).has(cos)
assert f(x).has(x)
assert f(x).has(f)
assert not f(x).has(y)
assert not f(x).has(g)
assert f(x).diff(x).has(x)
assert f(x).diff(x).has(f)
assert f(x).diff(x).has(Derivative)
assert not f(x).diff(x).has(y)
assert not f(x).diff(x).has(g)
assert not f(x).diff(x).has(sin)
assert (x**2).has(Symbol)
assert not (x**2).has(Wild)
assert (2*p).has(Wild)
assert not x.has()
def test_has_multiple():
f = x**2*y + sin(2**t + log(z))
assert f.has(x)
assert f.has(y)
assert f.has(z)
assert f.has(t)
assert not f.has(u)
assert f.has(x, y, z, t)
assert f.has(x, y, z, t, u)
i = Integer(4400)
assert not i.has(x)
assert (i*x**i).has(x)
assert not (i*y**i).has(x)
assert (i*y**i).has(x, y)
assert not (i*y**i).has(x, z)
def test_has_piecewise():
f = (x*y + 3/y)**(3 + 2)
g = Function('g')
h = Function('h')
p = Piecewise((g(x), x < -1), (1, x <= 1), (f, True))
assert p.has(x)
assert p.has(y)
assert not p.has(z)
assert p.has(1)
assert p.has(3)
assert not p.has(4)
assert p.has(f)
assert p.has(g)
assert not p.has(h)
def test_has_iterative():
A, B, C = symbols('A,B,C', commutative=False)
f = x*gamma(x)*sin(x)*exp(x*y)*A*B*C*cos(x*A*B)
assert f.has(x)
assert f.has(x*y)
assert f.has(x*sin(x))
assert not f.has(x*sin(y))
assert f.has(x*A)
assert f.has(x*A*B)
assert not f.has(x*A*C)
assert f.has(x*A*B*C)
assert not f.has(x*A*C*B)
assert f.has(x*sin(x)*A*B*C)
assert not f.has(x*sin(x)*A*C*B)
assert not f.has(x*sin(y)*A*B*C)
assert f.has(x*gamma(x))
assert not f.has(x + sin(x))
assert (x & y & z).has(x & z)
def test_has_integrals():
f = Integral(x**2 + sin(x*y*z), (x, 0, x + y + z))
assert f.has(x + y)
assert f.has(x + z)
assert f.has(y + z)
assert f.has(x*y)
assert f.has(x*z)
assert f.has(y*z)
assert not f.has(2*x + y)
assert not f.has(2*x*y)
def test_has_tuple():
f = Function('f')
g = Function('g')
h = Function('h')
assert Tuple(x, y).has(x)
assert not Tuple(x, y).has(z)
assert Tuple(f(x), g(x)).has(x)
assert not Tuple(f(x), g(x)).has(y)
assert Tuple(f(x), g(x)).has(f)
assert Tuple(f(x), g(x)).has(f(x))
assert not Tuple(f, g).has(x)
assert Tuple(f, g).has(f)
assert not Tuple(f, g).has(h)
assert Tuple(True).has(True) is True # .has(1) will also be True
def test_has_units():
from sympy.physics.units import m, s
assert (x*m/s).has(x)
assert (x*m/s).has(y, z) is False
def test_has_polys():
poly = Poly(x**2 + x*y*sin(z), x, y, t)
assert poly.has(x)
assert poly.has(x, y, z)
assert poly.has(x, y, z, t)
def test_has_physics():
assert FockState((x, y)).has(x)
def test_as_poly_as_expr():
f = x**2 + 2*x*y
assert f.as_poly().as_expr() == f
assert f.as_poly(x, y).as_expr() == f
assert (f + sin(x)).as_poly(x, y) is None
p = Poly(f, x, y)
assert p.as_poly() == p
def test_nonzero():
assert bool(S.Zero) is False
assert bool(S.One) is True
assert bool(x) is True
assert bool(x + y) is True
assert bool(x - x) is False
assert bool(x*y) is True
assert bool(x*1) is True
assert bool(x*0) is False
def test_is_number():
assert Float(3.14).is_number is True
assert Integer(737).is_number is True
assert Rational(3, 2).is_number is True
assert Rational(8).is_number is True
assert x.is_number is False
assert (2*x).is_number is False
assert (x + y).is_number is False
assert log(2).is_number is True
assert log(x).is_number is False
assert (2 + log(2)).is_number is True
assert (8 + log(2)).is_number is True
assert (2 + log(x)).is_number is False
assert (8 + log(2) + x).is_number is False
assert (1 + x**2/x - x).is_number is True
assert Tuple(Integer(1)).is_number is False
assert Add(2, x).is_number is False
assert Mul(3, 4).is_number is True
assert Pow(log(2), 2).is_number is True
assert oo.is_number is True
g = WildFunction('g')
assert g.is_number is False
assert (2*g).is_number is False
assert (x**2).subs(x, 3).is_number is True
# test extensibility of .is_number
# on subinstances of Basic
class A(Basic):
pass
a = A()
assert a.is_number is False
def test_as_coeff_add():
assert S(2).as_coeff_add() == (2, ())
assert S(3.0).as_coeff_add() == (0, (S(3.0),))
assert S(-3.0).as_coeff_add() == (0, (S(-3.0),))
assert x.as_coeff_add() == (0, (x,))
assert (x - 1).as_coeff_add() == (-1, (x,))
assert (x + 1).as_coeff_add() == (1, (x,))
assert (x + 2).as_coeff_add() == (2, (x,))
assert (x + y).as_coeff_add(y) == (x, (y,))
assert (3*x).as_coeff_add(y) == (3*x, ())
# don't do expansion
e = (x + y)**2
assert e.as_coeff_add(y) == (0, (e,))
def test_as_coeff_mul():
assert S(2).as_coeff_mul() == (2, ())
assert S(3.0).as_coeff_mul() == (1, (S(3.0),))
assert S(-3.0).as_coeff_mul() == (-1, (S(3.0),))
assert S(-3.0).as_coeff_mul(rational=False) == (-S(3.0), ())
assert x.as_coeff_mul() == (1, (x,))
assert (-x).as_coeff_mul() == (-1, (x,))
assert (2*x).as_coeff_mul() == (2, (x,))
assert (x*y).as_coeff_mul(y) == (x, (y,))
assert (3 + x).as_coeff_mul() == (1, (3 + x,))
assert (3 + x).as_coeff_mul(y) == (3 + x, ())
# don't do expansion
e = exp(x + y)
assert e.as_coeff_mul(y) == (1, (e,))
e = 2**(x + y)
assert e.as_coeff_mul(y) == (1, (e,))
assert (1.1*x).as_coeff_mul(rational=False) == (1.1, (x,))
assert (1.1*x).as_coeff_mul() == (1, (1.1, x))
assert (-oo*x).as_coeff_mul(rational=True) == (-1, (oo, x))
def test_as_coeff_exponent():
    """as_coeff_exponent extracts (coefficient, exponent) with respect to x."""
    # simple monomials of the form c*x**e
    for c, e in [(3, 4), (2, 3), (4, 2), (6, 1), (3, 0), (2, 0),
                 (1, 0), (0, 0), (-1, 0), (-2, 0)]:
        assert (c*x**e).as_coeff_exponent(x) == (c, e)
    # like powers are collected before extraction
    assert (2*x**3 + pi*x**3).as_coeff_exponent(x) == (2 + pi, 3)
    assert (x*log(2)/(2*x + pi*x)).as_coeff_exponent(x) == \
        (log(2)/(2 + pi), 0)
    # issue 4784: a Derivative is treated as an opaque (exponent-0) factor
    D = Derivative
    f = Function('f')
    fx = D(f(x), x)
    assert fx.as_coeff_exponent(f(x)) == (fx, 0)
def test_extractions():
    """Test extract_multiplicatively, extract_additively and
    could_extract_minus_sign; None means no extraction is possible."""
    # multiplicative extraction
    assert ((x*y)**3).extract_multiplicatively(x**2 * y) == x*y**2
    assert ((x*y)**3).extract_multiplicatively(x**4 * y) is None
    assert (2*x).extract_multiplicatively(2) == x
    assert (2*x).extract_multiplicatively(3) is None
    assert (2*x).extract_multiplicatively(-1) is None
    assert (Rational(1, 2)*x).extract_multiplicatively(3) == x/6
    assert (sqrt(x)).extract_multiplicatively(x) is None
    assert (sqrt(x)).extract_multiplicatively(1/x) is None
    assert x.extract_multiplicatively(-x) is None

    # additive extraction
    assert ((x*y)**3).extract_additively(1) is None
    assert (x + 1).extract_additively(x) == 1
    assert (x + 1).extract_additively(2*x) is None
    assert (x + 1).extract_additively(-x) is None
    assert (-x + 1).extract_additively(2*x) is None
    assert (2*x + 3).extract_additively(x) == x + 3
    assert (2*x + 3).extract_additively(2) == 2*x + 1
    assert (2*x + 3).extract_additively(3) == 2*x
    assert (2*x + 3).extract_additively(-2) is None
    assert (2*x + 3).extract_additively(3*x) is None
    assert (2*x + 3).extract_additively(2*x) == 3
    assert x.extract_additively(0) == x
    assert S(2).extract_additively(x) is None
    assert S(2.).extract_additively(2) == S.Zero
    assert S(2*x + 3).extract_additively(x + 1) == x + 2
    assert S(2*x + 3).extract_additively(y + 1) is None
    assert S(2*x - 3).extract_additively(x + 1) is None
    assert S(2*x - 3).extract_additively(y + z) is None
    assert ((a + 1)*x*4 + y).extract_additively(x).expand() == \
        4*a*x + 3*x + y
    assert ((a + 1)*x*4 + 3*y).extract_additively(x + 2*y).expand() == \
        4*a*x + 3*x + y
    assert (y*(x + 1)).extract_additively(x + 1) is None
    assert ((y + 1)*(x + 1) + 3).extract_additively(x + 1) == \
        y*(x + 1) + 3
    assert ((x + y)*(x + 1) + x + y + 3).extract_additively(x + y) == \
        x*(x + y) + 3
    assert (x + y + 2*((x + y)*(x + 1)) + 3).extract_additively((x + y)*(x + 1)) == \
        x + y + (x + 1)*(x + y) + 3
    assert ((y + 1)*(x + 2*y + 1) + 3).extract_additively(y + 1) == \
        (x + 2*y)*(y + 1) + 3

    # could_extract_minus_sign picks a canonical sign: for a pair of
    # negated expressions exactly one should report True
    n = Symbol("n", integer=True)
    assert (Integer(-3)).could_extract_minus_sign() is True
    assert (-n*x + x).could_extract_minus_sign() != \
        (n*x - x).could_extract_minus_sign()
    assert (x - y).could_extract_minus_sign() != \
        (-x + y).could_extract_minus_sign()
    assert (1 - x - y).could_extract_minus_sign() is True
    assert (1 - x + y).could_extract_minus_sign() is False
    assert ((-x - x*y)/y).could_extract_minus_sign() is True
    assert (-(x + x*y)/y).could_extract_minus_sign() is True
    assert ((x + x*y)/(-y)).could_extract_minus_sign() is True
    assert ((x + x*y)/y).could_extract_minus_sign() is False
    assert (x*(-x - x**3)).could_extract_minus_sign() is True
    assert ((-x - y)/(x + y)).could_extract_minus_sign() is True
    # The results of each of these will vary on different machines, e.g.
    # the first one might be False and the other (then) is true or vice versa,
    # so both are included.
    assert ((-x - y)/(x - y)).could_extract_minus_sign() is False or \
        ((-x - y)/(y - x)).could_extract_minus_sign() is False
    assert (x - y).could_extract_minus_sign() is False
    assert (-x + y).could_extract_minus_sign() is True
def test_nan_extractions():
    """Nothing can be extracted from nan, additively or multiplicatively."""
    for candidate in (1, 0, I, nan):
        for extract in (nan.extract_additively, nan.extract_multiplicatively):
            assert extract(candidate) is None
def test_coeff():
    """coeff(term, n=1) returns the coefficient of term**n in an Add;
    absent terms give 0, and noncommutative factors are supported."""
    assert (x + 1).coeff(x + 1) == 1
    assert (3*x).coeff(0) == 0
    assert (z*(1 + x)*x**2).coeff(1 + x) == z*x**2
    assert (1 + 2*x*x**(1 + x)).coeff(x*x**(1 + x)) == 2
    assert (1 + 2*x**(y + z)).coeff(x**(y + z)) == 2
    assert (3 + 2*x + 4*x**2).coeff(1) == 0
    assert (3 + 2*x + 4*x**2).coeff(-1) == 0
    assert (3 + 2*x + 4*x**2).coeff(x) == 2
    assert (3 + 2*x + 4*x**2).coeff(x**2) == 4
    assert (3 + 2*x + 4*x**2).coeff(x**3) == 0

    assert (-x/8 + x*y).coeff(x) == -S(1)/8 + y
    assert (-x/8 + x*y).coeff(-x) == S(1)/8
    assert (4*x).coeff(2*x) == 0
    assert (2*x).coeff(2*x) == 1
    assert (-oo*x).coeff(x*oo) == -1
    assert (10*x).coeff(x, 0) == 0
    assert (10*x).coeff(10*x, 0) == 0

    # noncommutative symbols: order matters
    n1, n2 = symbols('n1 n2', commutative=False)
    assert (n1*n2).coeff(n1) == 1
    assert (n1*n2).coeff(n2) == n1
    assert (n1*n2 + x*n1).coeff(n1) == 1  # 1*n1*(n2+x)
    assert (n2*n1 + x*n1).coeff(n1) == n2 + x
    assert (n2*n1 + x*n1**2).coeff(n1) == n2
    assert (n1**x).coeff(n1) == 0
    assert (n1*n2 + n2*n1).coeff(n1) == 0
    # right= selects which side of the noncommutative product is returned
    assert (2*(n1 + n2)*n2).coeff(n1 + n2, right=1) == n2
    assert (2*(n1 + n2)*n2).coeff(n1 + n2, right=0) == 2

    f = Function('f')
    assert (2*f(x) + 3*f(x).diff(x)).coeff(f(x)) == 2

    expr = z*(x + y)**2
    expr2 = z*(x + y)**2 + z*(2*x + 2*y)**2
    assert expr.coeff(z) == (x + y)**2
    assert expr.coeff(x + y) == 0
    assert expr2.coeff(z) == (x + y)**2 + (2*x + 2*y)**2

    assert (x + y + 3*z).coeff(1) == x + y
    assert (-x + 2*y).coeff(-1) == x
    assert (x - 2*y).coeff(-1) == 2*y
    assert (3 + 2*x + 4*x**2).coeff(1) == 0
    assert (-x - 2*y).coeff(2) == -y
    assert (x + sqrt(2)*x).coeff(sqrt(2)) == x
    assert (3 + 2*x + 4*x**2).coeff(x) == 2
    assert (3 + 2*x + 4*x**2).coeff(x**2) == 4
    assert (3 + 2*x + 4*x**2).coeff(x**3) == 0
    assert (z*(x + y)**2).coeff((x + y)**2) == z
    assert (z*(x + y)**2).coeff(x + y) == 0
    assert (2 + 2*x + (x + 1)*y).coeff(x + 1) == y

    # n=0 asks for everything NOT containing the term
    assert (x + 2*y + 3).coeff(1) == x
    assert (x + 2*y + 3).coeff(x, 0) == 2*y + 3
    assert (x**2 + 2*y + 3*x).coeff(x**2, 0) == 2*y + 3*x
    assert x.coeff(0, 0) == 0
    assert x.coeff(x, 0) == 0

    n, m, o, l = symbols('n m o l', commutative=False)
    assert n.coeff(n) == 1
    assert y.coeff(n) == 0
    assert (3*n).coeff(n) == 3
    assert (2 + n).coeff(x*m) == 0
    assert (2*x*n*m).coeff(x) == 2*n*m
    assert (2 + n).coeff(x*m*n + y) == 0
    assert (2*x*n*m).coeff(3*n) == 0
    assert (n*m + m*n*m).coeff(n) == 1 + m
    assert (n*m + m*n*m).coeff(n, right=True) == m  # = (1 + m)*n*m
    assert (n*m + m*n).coeff(n) == 0
    assert (n*m + o*m*n).coeff(m*n) == o
    assert (n*m + o*m*n).coeff(m*n, right=1) == 1
    assert (n*m + n*m*n).coeff(n*m, right=1) == 1 + n  # = n*m*(n + 1)

    assert (x*y).coeff(z, 0) == x*y
def test_coeff2():
    """Coefficient of the first derivative in the expanded radial operator."""
    r, kappa = symbols('r, kappa')
    psi = Function("psi")
    # (1/r**2)*(2*r*psi' + r**2*psi''), expanded, then read off the psi' part
    operator = 1/r**2 * (2*r*psi(r).diff(r, 1) + r**2 * psi(r).diff(r, 2))
    assert operator.expand().coeff(psi(r).diff(r)) == 2/r
def test_coeff2_0():
    """Coefficient of the second derivative in the expanded radial operator."""
    r, kappa = symbols('r, kappa')
    psi = Function("psi")
    operator = 1/r**2 * (2*r*psi(r).diff(r, 1) + r**2 * psi(r).diff(r, 2))
    # after expansion the psi'' term appears with unit coefficient
    assert operator.expand().coeff(psi(r).diff(r, 2)) == 1
def test_coeff_expand():
    """coeff() must not expand either the expression or the returned value."""
    inner = (x + y)**2
    scaled = z*inner
    assert scaled.coeff(z) == inner
    assert (scaled + z*(2*x + 2*y)**2).coeff(z) == inner + (2*x + 2*y)**2
def test_integrate():
    """Expr.integrate accepts both a bare symbol and (symbol, lower, upper)."""
    indefinite = x.integrate(x)
    assert indefinite == x**2/2
    definite = x.integrate((x, 0, 1))
    assert definite == S(1)/2
def test_as_base_exp():
    """Non-powers report themselves with exponent one; powers split up."""
    for expr in (x, x*y*z, x + y + z):
        assert expr.as_base_exp() == (expr, S.One)
    assert ((x + y)**z).as_base_exp() == (x + y, z)
def test_issue_4963():
    """is_commutative must exist on both evaluated and held Mul/Pow objects."""
    held = dict(evaluate=False)
    samples = [
        Mul(x, y),
        Mul(x, y, **held),
        Pow(x, y),
        Pow(x, y, **held),
        # a held Mul nested inside an Add
        Mul(Pow(2, 2, **held), 3, **held) + 1,
    ]
    for expr in samples:
        assert hasattr(expr, "is_commutative")
def test_action_verbs():
    """Each free-function simplifier has an Expr method of the same name
    that produces the identical result."""
    assert nsimplify((1/(exp(3*pi*x/5) + 1))) == \
        (1/(exp(3*pi*x/5) + 1)).nsimplify()
    assert ratsimp(1/x + 1/y) == (1/x + 1/y).ratsimp()
    assert trigsimp(log(x), deep=True) == (log(x)).trigsimp(deep=True)
    assert radsimp(1/(2 + sqrt(2))) == (1/(2 + sqrt(2))).radsimp()
    assert radsimp(1/(a + b*sqrt(c)), symbolic=False) == \
        (1/(a + b*sqrt(c))).radsimp(symbolic=False)
    assert powsimp(x**y*x**z*y**z, combine='all') == \
        (x**y*x**z*y**z).powsimp(combine='all')
    assert (x**t*y**t).powsimp(force=True) == (x*y)**t
    assert simplify(x**y*x**z*y**z) == (x**y*x**z*y**z).simplify()
    assert together(1/x + 1/y) == (1/x + 1/y).together()
    assert collect(a*x**2 + b*x**2 + a*x - b*x + c, x) == \
        (a*x**2 + b*x**2 + a*x - b*x + c).collect(x)
    assert apart(y/(y + 2)/(y + 1), y) == (y/(y + 2)/(y + 1)).apart(y)
    assert combsimp(y/(x + 2)/(x + 1)) == (y/(x + 2)/(x + 1)).combsimp()
    assert factor(x**2 + 5*x + 6) == (x**2 + 5*x + 6).factor()
    assert refine(sqrt(x**2)) == sqrt(x**2).refine()
    assert cancel((x**2 + 5*x + 6)/(x + 2)) == ((x**2 + 5*x + 6)/(x + 2)).cancel()
def test_as_powers_dict():
    """as_powers_dict maps base -> exponent; absent bases default to 0."""
    assert x.as_powers_dict() == {x: 1}
    assert (x**y*z).as_powers_dict() == {x: y, z: 1}
    # a held Mul merges the repeated factor into one base with exponent 2
    assert Mul(2, 2, evaluate=False).as_powers_dict() == {S(2): S(2)}
    # the returned mapping behaves like a defaultdict for missing bases
    for expr in (x*y, x + y):
        assert expr.as_powers_dict()[z] == 0
def test_as_coefficients_dict():
    """as_coefficients_dict maps each monomial to its coefficient and
    returns 0 for absent monomials (defaultdict behavior)."""
    check = [S(1), x, y, x*y, 1]
    assert [Add(3*x, 2*x, y, 3).as_coefficients_dict()[i] for i in check] == \
        [3, 5, 1, 0, 3]
    assert [(3*x*y).as_coefficients_dict()[i] for i in check] == \
        [0, 0, 0, 3, 0]
    assert [(3.0*x*y).as_coefficients_dict()[i] for i in check] == \
        [0, 0, 0, 3.0, 0]
    # the Float coefficient is split off, so 3.0*x*y itself is not a key
    assert (3.0*x*y).as_coefficients_dict()[3.0*x*y] == 0
def test_args_cnc():
    """args_cnc splits args into [commutative, noncommutative] parts;
    cset=True returns the commutative part as a set."""
    A = symbols('A', commutative=False)
    assert (x + A).args_cnc() == \
        [[], [x + A]]
    assert (x + a).args_cnc() == \
        [[a + x], []]
    assert (x*a).args_cnc() == \
        [[a, x], []]
    assert (x*y*A*(A + 1)).args_cnc(cset=True) == \
        [{x, y}, [A, 1 + A]]
    assert Mul(x, x, evaluate=False).args_cnc(cset=True, warn=False) == \
        [{x}, []]
    assert Mul(x, x**2, evaluate=False).args_cnc(cset=True, warn=False) == \
        [{x, x**2}, []]
    # duplicate commutative args with cset=True raise unless warn=False
    raises(ValueError, lambda: Mul(x, x, evaluate=False).args_cnc(cset=True))
    assert Mul(x, y, x, evaluate=False).args_cnc() == \
        [[x, y, x], []]
    # always split -1 from leading number
    assert (-1.*x).args_cnc() == [[-1, 1.0, x], []]
def test_new_rawargs():
    """_new_rawargs rebuilds an Add/Mul from raw args, recomputing
    commutativity unless reeval=False is passed."""
    n = Symbol('n', commutative=False)
    a = x + n
    assert a.is_commutative is False
    assert a._new_rawargs(x).is_commutative
    assert a._new_rawargs(x, y).is_commutative
    assert a._new_rawargs(x, n).is_commutative is False
    assert a._new_rawargs(x, y, n).is_commutative is False
    m = x*n
    assert m.is_commutative is False
    assert m._new_rawargs(x).is_commutative
    assert m._new_rawargs(n).is_commutative is False
    assert m._new_rawargs(x, y).is_commutative
    assert m._new_rawargs(x, n).is_commutative is False
    assert m._new_rawargs(x, y, n).is_commutative is False
    # reeval=False keeps the original (noncommutative) flag
    assert m._new_rawargs(x, n, reeval=False).is_commutative is False
    # a single argument is returned as-is, not wrapped in a Mul
    assert m._new_rawargs(S.One) is S.One
def test_issue_5226():
    """Held argumentless Add/Mul collapse to their identity elements."""
    for ctor, identity in ((Add, 0), (Mul, 1)):
        assert ctor(evaluate=False) == identity
    # a held Mul of a single Add argument is still recognized as an Add
    assert Mul(x + y, evaluate=False).is_Add
def test_free_symbols():
    """free_symbols reports exactly the unbound symbols of an object."""
    # objects with no symbols at all
    for expr in (S(1), meter):
        assert expr.free_symbols == set()
    assert (x).free_symbols == {x}
    # integration binds x, leaving only the limit symbol y free
    for expr in (Integral(x, (x, 1, y)), -Integral(x, (x, 1, y))):
        assert expr.free_symbols == {y}
    assert (meter**x).free_symbols == {x}
def test_issue_5300():
    """Rational radical coefficients simplify on noncommutative symbols too."""
    nc = Symbol('x', commutative=False)
    assert nc*sqrt(2)/sqrt(6) == nc*sqrt(3)/3
def test_floordiv():
    """The // operator on expressions builds floor(lhs/rhs)."""
    from sympy.functions.elementary.integers import floor
    quotient = x // y
    assert quotient == floor(x / y)
def test_as_coeff_Mul():
    """as_coeff_Mul splits (numeric coefficient, remaining factor)."""
    assert S(0).as_coeff_Mul() == (S.One, S.Zero)
    coefficients = (Integer(3), Rational(3, 4), Float(5.0))
    # a bare number is its own coefficient with a unit remainder
    for c in coefficients:
        assert c.as_coeff_Mul() == (c, Integer(1))
    # the numeric factor is split off of any product
    for rest in (x, x*y):
        for c in coefficients:
            assert (c*rest).as_coeff_Mul() == (c, rest)
        # no numeric factor present -> coefficient One
        assert rest.as_coeff_Mul() == (S.One, rest)
    # with rational=True an infinite coefficient stays with the terms
    assert (-oo*x).as_coeff_Mul(rational=True) == (-1, oo*x)
def test_as_coeff_Add():
    """as_coeff_Add splits (numeric term, remaining summand)."""
    coefficients = (Integer(3), Rational(3, 4), Float(5.0))
    # a bare number is its own coefficient with a zero remainder
    for c in coefficients:
        assert c.as_coeff_Add() == (c, Integer(0))
    # the numeric term is split off of any sum
    for rest in (x, x + y):
        for c in coefficients:
            assert (c + rest).as_coeff_Add() == (c, rest)
    # rational=True refuses to split off a Float term
    assert (Float(5.0) + x).as_coeff_Add(rational=True) == (0, Float(5.0) + x)
    # expressions without a numeric term give a zero coefficient
    for expr in (x, x*y):
        assert expr.as_coeff_Add() == (S.Zero, expr)
def test_expr_sorting():
    """default_sort_key orders each list below exactly as written
    (each list is already in canonical sorted order)."""
    f, g = symbols('f,g', cls=Function)

    exprs = [1/x**2, 1/x, sqrt(sqrt(x)), sqrt(x), x, sqrt(x)**3, x**2]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [x, 2*x, 2*x**2, 2*x**3, x**n, 2*x**n, sin(x), sin(x)**n,
             sin(x**2), cos(x), cos(x**2), tan(x)]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [x + 1, x**2 + x + 1, x**3 + x**2 + x + 1]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [S(4), x - 3*I/2, x + 3*I/2, x - 4*I + 1, x + 4*I + 1]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [f(x), g(x), exp(x), sin(x), cos(x), factorial(x)]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [Tuple(x, y), Tuple(x, z), Tuple(x, y, z)]
    assert sorted(exprs, key=default_sort_key) == exprs

    # plain Python containers are supported as well
    exprs = [[3], [1, 2]]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [[1, 2], [2, 3]]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [[1, 2], [1, 2, 3]]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [{x: -y}, {x: y}]
    assert sorted(exprs, key=default_sort_key) == exprs

    exprs = [{1}, {1, 2}]
    assert sorted(exprs, key=default_sort_key) == exprs

    # two Dummies with the same name still sort deterministically
    a, b = exprs = [Dummy('x'), Dummy('x')]
    assert sorted([b, a], key=default_sort_key) == exprs
def test_as_ordered_factors():
    """as_ordered_factors returns factors in canonical sorted order,
    preserving the given order for noncommutative factors."""
    f, g = symbols('f,g', cls=Function)

    assert x.as_ordered_factors() == [x]
    assert (2*x*x**n*sin(x)*cos(x)).as_ordered_factors() \
        == [Integer(2), x, x**n, sin(x), cos(x)]

    args = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
    expr = Mul(*args)

    assert expr.as_ordered_factors() == args

    # noncommutative products keep their original factor order
    A, B = symbols('A,B', commutative=False)

    assert (A*B).as_ordered_factors() == [A, B]
    assert (B*A).as_ordered_factors() == [B, A]
def test_as_ordered_terms():
    """as_ordered_terms sorts Add args, optionally by a named monomial
    order (lex, grlex, rev-lex, rev-grlex)."""
    f, g = symbols('f,g', cls=Function)

    assert x.as_ordered_terms() == [x]
    assert (sin(x)**2*cos(x) + sin(x)*cos(x)**2 + 1).as_ordered_terms() \
        == [sin(x)**2*cos(x), sin(x)*cos(x)**2, 1]

    args = [f(1), f(2), f(3), f(1, 2, 3), g(1), g(2), g(3), g(1, 2, 3)]
    expr = Add(*args)

    assert expr.as_ordered_terms() == args

    assert (1 + 4*sqrt(3)*pi*x).as_ordered_terms() == [4*pi*x*sqrt(3), 1]

    # complex constants: real part first, then the imaginary term
    assert ( 2 + 3*I).as_ordered_terms() == [2, 3*I]
    assert (-2 + 3*I).as_ordered_terms() == [-2, 3*I]
    assert ( 2 - 3*I).as_ordered_terms() == [2, -3*I]
    assert (-2 - 3*I).as_ordered_terms() == [-2, -3*I]
    assert ( 4 + 3*I).as_ordered_terms() == [4, 3*I]
    assert (-4 + 3*I).as_ordered_terms() == [-4, 3*I]
    assert ( 4 - 3*I).as_ordered_terms() == [4, -3*I]
    assert (-4 - 3*I).as_ordered_terms() == [-4, -3*I]

    # explicit monomial orders
    f = x**2*y**2 + x*y**4 + y + 2

    assert f.as_ordered_terms(order="lex") == [x**2*y**2, x*y**4, y, 2]
    assert f.as_ordered_terms(order="grlex") == [x*y**4, x**2*y**2, y, 2]
    assert f.as_ordered_terms(order="rev-lex") == [2, y, x*y**4, x**2*y**2]
    assert f.as_ordered_terms(order="rev-grlex") == [2, y, x**2*y**2, x*y**4]
def test_sort_key_atomic_expr():
    """Atomic unit expressions sort deterministically via sort_key."""
    from sympy.physics.units import m, s
    ordered = [-m, s]
    assert sorted(ordered, key=lambda arg: arg.sort_key()) == ordered
def test_issue_4199():
    """_eval_interval must propagate NaN for indeterminate limits rather
    than cancelling, and reject open-ended calls."""
    # first subs and limit gives NaN
    a = x/y
    assert a._eval_interval(x, 0, oo)._eval_interval(y, oo, 0) is S.NaN
    # second subs and limit gives NaN
    assert a._eval_interval(x, 0, oo)._eval_interval(y, 0, oo) is S.NaN
    # difference gives S.NaN
    a = x - y
    assert a._eval_interval(x, 1, oo)._eval_interval(y, oo, 1) is S.NaN
    # both endpoints None is an error
    raises(ValueError, lambda: x._eval_interval(x, None, None))
    # reversing the interval flips the sign of the result
    a = -y*Heaviside(x - y)
    assert a._eval_interval(x, -oo, oo) == -y
    assert a._eval_interval(x, oo, -oo) == y
def test_eval_interval_zoo():
    """When an endpoint evaluates to zoo, _eval_interval falls back to a limit."""
    value = Si(1/x)._eval_interval(x, 0, 1)
    assert value == -pi/2 + Si(1)
def test_primitive():
    """primitive() factors the positive rational content out of a sum."""
    assert (3*(x + 1)**2).primitive() == (3, (x + 1)**2)
    assert (6*x + 2).primitive() == (2, 3*x + 1)
    assert (x/2 + 3).primitive() == (S(1)/2, x + 6)
    # the content of a product is not extracted recursively
    eq = (6*x + 2)*(x/2 + 3)
    assert eq.primitive()[0] == 1
    eq = (2 + 2*x)**2
    assert eq.primitive()[0] == 1
    assert (4.0*x).primitive() == (1, 4.0*x)
    assert (4.0*x + y/2).primitive() == (S.Half, 8.0*x + y)
    assert (-2*x).primitive() == (2, -x)
    assert Add(5*z/7, 0.5*x, 3*y/2, evaluate=False).primitive() == \
        (S(1)/14, 7.0*x + 21*y + 10*z)
    # infinite terms do not block extraction from the finite terms
    for i in [S.Infinity, S.NegativeInfinity, S.ComplexInfinity]:
        assert (i + x/3).primitive() == \
            (S(1)/3, i + x)
    assert (S.Infinity + 2*x/3 + 4*y/7).primitive() == \
        (S(1)/21, 14*x + 12*y + oo)
    assert S.Zero.primitive() == (S.One, S.Zero)
def test_issue_5843():
    """extract_multiplicatively handles composite factors built from 1 + x."""
    base = 1 + x
    assert (2*base).extract_multiplicatively(base) == 2
    assert (4*base).extract_multiplicatively(2*base) == 2
    assert ((3*base)*(2*base)).extract_multiplicatively(base) == 6*base
def test_is_constant():
    """is_constant: free-symbol dependence, optionally w.r.t. given symbols.

    Fix: the Sum/``eq`` comparisons below were bare expressions missing
    their ``assert`` keyword, so they were silent no-ops and never
    actually tested anything.
    """
    from sympy.solvers.solvers import checksol
    assert Sum(x, (x, 1, 10)).is_constant() is True
    assert Sum(x, (x, 1, n)).is_constant() is False
    assert Sum(x, (x, 1, n)).is_constant(y) is True
    assert Sum(x, (x, 1, n)).is_constant(n) is False
    # x is the bound (dummy) variable of the Sum, so it is constant in x
    assert Sum(x, (x, 1, n)).is_constant(x) is True
    eq = a*cos(x)**2 + a*sin(x)**2 - a
    # the expression simplifies to zero, hence it is constant
    assert eq.is_constant() is True
    assert eq.subs({x: pi, a: 2}) == eq.subs({x: pi, a: 3}) == 0

    assert x.is_constant() is False
    assert x.is_constant(y) is True

    assert checksol(x, x, Sum(x, (x, 1, n))) is False
    assert checksol(x, x, Sum(x, (x, 1, n))) is False
    f = Function('f')
    assert checksol(x, x, f(x)) is False

    p = symbols('p', positive=True)
    assert Pow(x, S(0), evaluate=False).is_constant() is True  # == 1
    assert Pow(S(0), x, evaluate=False).is_constant() is False  # == 0 or 1
    assert (2**x).is_constant() is False
    assert Pow(S(2), S(3), evaluate=False).is_constant() is True

    z1, z2 = symbols('z1 z2', zero=True)
    assert (z1 + 2*z2).is_constant() is True

    assert meter.is_constant() is True
    assert (3*meter).is_constant() is True
    assert (x*meter).is_constant() is False
def test_equals():
    """Expr.equals: numeric/structural equivalence testing that may fall
    back to random evaluation or minimal polynomials."""
    assert (-3 - sqrt(5) + (-sqrt(10)/2 - sqrt(2)/2)**2).equals(0)
    assert (x**2 - 1).equals((x + 1)*(x - 1))
    assert (cos(x)**2 + sin(x)**2).equals(1)
    assert (a*cos(x)**2 + a*sin(x)**2).equals(a)
    r = sqrt(2)
    assert (-1/(r + r*x) + 1/r/(1 + x)).equals(0)
    assert factorial(x + 1).equals((x + 1)*factorial(x))
    assert sqrt(3).equals(2*sqrt(3)) is False
    assert (sqrt(5)*sqrt(3)).equals(sqrt(3)) is False
    assert (sqrt(5) + sqrt(3)).equals(0) is False
    assert (sqrt(5) + pi).equals(0) is False
    assert meter.equals(0) is False
    assert (3*meter**2).equals(0) is False
    eq = -(-1)**(S(3)/4)*6**(S(1)/4) + (-6)**(S(1)/4)*I
    if eq != 0:  # if canonicalization makes this zero, skip the test
        assert eq.equals(0)
    assert sqrt(x).equals(0) is False

    # from integrate(x*sqrt(1 + 2*x), x);
    # diff is zero only when assumptions allow
    i = 2*sqrt(2)*x**(S(5)/2)*(1 + 1/(2*x))**(S(5)/2)/5 + \
        2*sqrt(2)*x**(S(3)/2)*(1 + 1/(2*x))**(S(5)/2)/(-6 - 3/x)
    ans = sqrt(2*x + 1)*(6*x**2 + x - 1)/15
    diff = i - ans
    assert diff.equals(0) is False
    assert diff.subs(x, -S.Half/2) == 7*sqrt(2)/120
    # there are regions for x for which the expression is True, for
    # example, when x < -1/2 or x > 0 the expression is zero
    p = Symbol('p', positive=True)
    assert diff.subs(x, p).equals(0) is True
    assert diff.subs(x, -1).equals(0) is True

    # prove via minimal_polynomial or self-consistency
    eq = sqrt(1 + sqrt(3)) + sqrt(3 + 3*sqrt(3)) - sqrt(10 + 6*sqrt(3))
    assert eq.equals(0)
    q = 3**Rational(1, 3) + 3
    p = expand(q**3)**Rational(1, 3)
    assert (p - q).equals(0)

    # issue 6829
    # eq = q*x + q/4 + x**4 + x**3 + 2*x**2 - S(1)/3
    # z = eq.subs(x, solve(eq, x)[0])
    q = symbols('q')
    z = (q*(-sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/12)/2 - sqrt((2*q - S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 -
        S(2197)/13824)**(S(1)/3) - S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 -
        S(2197)/13824)**(S(1)/3) - S(13)/6)/2 - S(1)/4) + q/4 + (-sqrt(-2*(-(q
        - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) - S(13)/12)/2 - sqrt((2*q
        - S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/6)/2 - S(1)/4)**4 + (-sqrt(-2*(-(q - S(7)/8)**S(2)/8 -
        S(2197)/13824)**(S(1)/3) - S(13)/12)/2 - sqrt((2*q -
        S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/6)/2 - S(1)/4)**3 + 2*(-sqrt(-2*(-(q - S(7)/8)**S(2)/8 -
        S(2197)/13824)**(S(1)/3) - S(13)/12)/2 - sqrt((2*q -
        S(7)/4)/sqrt(-2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/12) + 2*(-(q - S(7)/8)**S(2)/8 - S(2197)/13824)**(S(1)/3) -
        S(13)/6)/2 - S(1)/4)**2 - S(1)/3)
    assert z.equals(0)
def test_random():
    """_random returns a random numeric sample of the expression, or None
    when no valid sample can be produced."""
    from sympy import posify, lucas
    assert posify(x)[0]._random() is not None
    assert lucas(n)._random(2, -2, 0, -1, 1) is None

    # issue 8662
    assert Piecewise((Max(x, y), z))._random() is None
def test_round():
    """Expr.round: rounding to n digits yields a Float (complex values
    round the real and imaginary parts); non-numbers raise TypeError."""
    from sympy.abc import x

    assert Float('0.1249999').round(2) == 0.12
    d20 = 12345678901234567890
    ans = S(d20).round(2)
    assert ans.is_Float and ans == d20
    # negative n rounds to the left of the decimal point
    ans = S(d20).round(-2)
    assert ans.is_Float and ans == 12345678901234567900
    assert S('1/7').round(4) == 0.1429
    assert S('.[12345]').round(4) == 0.1235
    assert S('.1349').round(2) == 0.13
    n = S(12345)
    ans = n.round()
    assert ans.is_Float
    assert ans == n
    ans = n.round(1)
    assert ans.is_Float
    assert ans == n
    ans = n.round(4)
    assert ans.is_Float
    assert ans == n
    assert n.round(-1) == 12350

    r = n.round(-4)
    assert r == 10000
    # in fact, it should equal many values since __eq__
    # compares at equal precision
    assert all(r == i for i in range(9984, 10049))

    assert n.round(-5) == 0

    assert (pi + sqrt(2)).round(2) == 4.56
    assert (10*(pi + sqrt(2))).round(-1) == 50
    raises(TypeError, lambda: round(x + 2, 2))
    assert S(2.3).round(1) == 2.3
    e = S(12.345).round(2)
    assert e == round(12.345, 2)
    assert type(e) is Float

    assert (Float(.3, 3) + 2*pi).round() == 7
    assert (Float(.3, 3) + 2*pi*100).round() == 629
    assert (Float(.03, 3) + 2*pi/100).round(5) == 0.09283
    assert (Float(.03, 3) + 2*pi/100).round(4) == 0.0928
    assert (pi + 2*E*I).round() == 3 + 5*I

    assert S.Zero.round() == 0

    # a held sum rounds its value, not its arguments
    a = (Add(1, Float('1.' + '9'*27, ''), evaluate=0))
    assert a.round(10) == Float('3.0000000000', '')
    assert a.round(25) == Float('3.0000000000000000000000000', '')
    assert a.round(26) == Float('3.00000000000000000000000000', '')
    assert a.round(27) == Float('2.999999999999999999999999999', '')
    assert a.round(30) == Float('2.999999999999999999999999999', '')

    raises(TypeError, lambda: x.round())
    f = Function('f')
    raises(TypeError, lambda: f(1).round())

    # exact magnitude of 10
    assert str(S(1).round()) == '1.'
    assert str(S(100).round()) == '100.'

    # applied to real and imaginary portions
    assert (2*pi + E*I).round() == 6 + 3*I
    assert (2*pi + I/10).round() == 6
    assert (pi/10 + 2*I).round() == 2*I
    # the lhs re and im parts are Float with dps of 2
    # and those on the right have dps of 15 so they won't compare
    # equal unless we use string or compare components (which will
    # then coerce the floats to the same precision) or re-create
    # the floats
    assert str((pi/10 + E*I).round(2)) == '0.31 + 2.72*I'
    assert (pi/10 + E*I).round(2).as_real_imag() == (0.31, 2.72)
    assert (pi/10 + E*I).round(2) == Float(0.31, 2) + I*Float(2.72, 3)

    # issue 6914
    assert (I**(I + 3)).round(3) == Float('-0.208', '')*I

    # issue 8720
    assert S(-123.6).round() == -124.
    assert S(-1.5).round() == -2.
    assert S(-100.5).round() == -101.
    assert S(-1.5 - 10.5*I).round() == -2.0 - 11.0*I

    # issue 7961
    assert str(S(0.006).round(2)) == '0.01'
    assert str(S(0.00106).round(4)) == '0.0011'

    # issue 8147
    assert S.NaN.round() == S.NaN
    assert S.Infinity.round() == S.Infinity
    assert S.NegativeInfinity.round() == S.NegativeInfinity
    assert S.ComplexInfinity.round() == S.ComplexInfinity
def test_held_expression_UnevaluatedExpr():
    """UnevaluatedExpr keeps its argument unevaluated until doit()."""
    x = symbols("x")
    he = UnevaluatedExpr(1/x)
    e1 = x*he
    # the product with the held reciprocal does not cancel automatically
    assert isinstance(e1, Mul)
    assert e1.args == (x, he)
    assert e1.doit() == 1
    xx = Mul(x, x, evaluate=False)
    assert xx != x**2
    ue2 = UnevaluatedExpr(xx)
    assert isinstance(ue2, UnevaluatedExpr)
    assert ue2.args == (xx,)
    assert ue2.doit() == x**2
    # deep=False only unwraps the UnevaluatedExpr, not the held Mul inside
    assert ue2.doit(deep=False) == xx
def test_round_exception_nostr():
    """The TypeError raised by round() must not embed the expression's
    (potentially slow to compute) string form."""
    sym = Symbol('bad')
    try:
        sym.round()
    except TypeError as exc:
        # the symbol's name would only appear via str(expr), which must be avoided
        assert 'bad' not in str(exc)
    else:
        # Did not raise
        raise AssertionError("Did not raise")
def test_extract_branch_factor():
    """A full 2*pi*I polar turn extracts as (remaining factor 1, 1 turn)."""
    one_turn = exp_polar(2.0*I*pi)
    assert one_turn.extract_branch_factor() == (1, 1)
def test_identity_removal():
    """make_args drops additive/multiplicative identities from the args."""
    for klass, expr in ((Add, x + 0), (Mul, x*1)):
        assert klass.make_args(expr) == (x,)
def test_float_0():
    """Adding an integer to Float(0.0) produces the expected Float."""
    total = Float(0.0) + 1
    assert total == Float(1.0)
@XFAIL
def test_float_0_fail():
    """Known failures: Float zero should persist in products and sums."""
    assert Float(0.0)*x == Float(0.0)
    assert (x + Float(0.0)).is_Add
def test_issue_6325():
    """Second derivative of sqrt((a+b*t)**2 + (c+z*t)**2) w.r.t. t.

    Fix: the method-call form ``e.diff(t, 2) == ans`` was a bare
    expression missing its ``assert`` keyword, so it tested nothing.
    """
    ans = (b**2 + z**2 - (b*(a + b*t) + z*(c + t*z))**2/(
        (a + b*t)**2 + (c + t*z)**2))/sqrt((a + b*t)**2 + (c + t*z)**2)
    e = sqrt((a + b*t)**2 + (c + z*t)**2)
    assert diff(e, t, 2) == ans
    assert e.diff(t, 2) == ans
    # without automatic simplification the raw derivative differs from ans
    assert diff(e, t, 2, simplify=False) != ans
def test_issue_7426():
    """Mod expressions over distinct symbols must not compare equal."""
    lhs = a % c
    rhs = x % z
    assert lhs.equals(rhs) == False
def test_issue_1112():
    """A symbol assumed not positive makes (x > 0) evaluate to S.false."""
    nonpositive = Symbol('x', positive=False)
    assert (nonpositive > 0) is S.false
def test_issue_10161():
    """For real x, abs(x)**2 collapses so x*abs(x)*abs(x) equals x**3."""
    real_x = symbols('x', real=True)
    assert real_x*abs(real_x)*abs(real_x) == real_x**3
def test_issue_10755():
    """int() and round() on a symbolic log must raise TypeError."""
    sym = symbols('x')
    raises(TypeError, lambda: int(log(sym)))
    raises(TypeError, lambda: log(sym).round(2))
def test_issue_11877():
    """Definite integral of log(1/2 - x) over [0, 1/2]."""
    var = symbols('x')
    expected = -S(1)/2 - log(2)/2
    assert integrate(log(S(1)/2 - var), (var, 0, S(1)/2)) == expected
| bsd-3-clause |
N3uTr0nRom/N3uTr0n_kernel | tools/perf/scripts/python/syscall-counts-by-pid.py | 11180 | 1927 | # system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os, sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts-by-pid.py [comm]\n";

for_comm = None
for_pid = None

# Accept at most one optional argument: a numeric pid or a comm name.
if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    try:
        for_pid = int(sys.argv[1])
    except:
        # not an integer, so treat the argument as a command name
        for_comm = sys.argv[1]

# autodict allows syscalls[comm][pid][id] without explicit nesting setup
syscalls = autodict()
def trace_begin():
    # Called by perf once before event processing starts.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called by perf once after the trace ends; emit the aggregated counts.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
    common_secs, common_nsecs, common_pid, common_comm,
    id, args):
    # Per-event handler: count one syscall entry, honoring any
    # comm/pid filter given on the command line.
    if (for_comm and common_comm != for_comm) or \
       (for_pid and common_pid != for_pid ):
        return
    try:
        syscalls[common_comm][common_pid][id] += 1
    except TypeError:
        # first occurrence: the autodict leaf is not an int yet
        syscalls[common_comm][common_pid][id] = 1
def print_syscall_totals():
    # Emit a table of syscall counts grouped by comm and pid, sorted
    # within each pid by descending count.  (Python 2 print statements
    # with trailing commas suppress the automatic newline.)
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events by comm/pid:\n\n",

    print "%-40s %10s\n" % ("comm [pid]/syscalls", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
        "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
        pid_keys = syscalls[comm].keys()
        for pid in pid_keys:
            print "\n%s [%d]\n" % (comm, pid),
            id_keys = syscalls[comm][pid].keys()
            # Python 2 tuple-unpacking lambda: sort by count, descending
            for id, val in sorted(syscalls[comm][pid].iteritems(), \
                key = lambda(k, v): (v, k), reverse = True):
                print " %-38s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
pschmitt/home-assistant | tests/components/rflink/test_sensor.py | 6 | 6573 | """Test for RFlink sensor components.
Test setup of rflink sensor component/platform. Verify manual and
automatic sensor creation.
"""
from homeassistant.components.rflink import (
CONF_RECONNECT_INTERVAL,
DATA_ENTITY_LOOKUP,
EVENT_KEY_COMMAND,
EVENT_KEY_SENSOR,
TMP_ENTITY,
)
from homeassistant.const import STATE_UNKNOWN, TEMP_CELSIUS, UNIT_PERCENTAGE
from tests.components.rflink.test_init import mock_rflink
DOMAIN = "sensor"

# Base test configuration: one preconfigured temperature sensor plus
# ignore-list entries exercised by the setup test.
CONFIG = {
    "rflink": {
        "port": "/dev/ttyABC0",
        "ignore_devices": ["ignore_wildcard_*", "ignore_sensor"],
    },
    DOMAIN: {
        "platform": "rflink",
        "devices": {"test": {"name": "test", "sensor_type": "temperature"}},
    },
}
async def test_default_setup(hass, monkeypatch):
    """Test all basic functionality of the rflink sensor component.

    Covers the preconfigured sensor's initial state, state updates via
    incoming events, and automatic creation of unconfigured sensors.
    """
    # setup mocking rflink module
    event_callback, create, _, _ = await mock_rflink(hass, CONFIG, DOMAIN, monkeypatch)

    # make sure arguments are passed
    assert create.call_args_list[0][1]["ignore"]

    # test default state of sensor loaded from config
    config_sensor = hass.states.get("sensor.test")
    assert config_sensor
    assert config_sensor.state == "unknown"
    assert config_sensor.attributes["unit_of_measurement"] == TEMP_CELSIUS

    # test event for config sensor
    event_callback(
        {"id": "test", "sensor": "temperature", "value": 1, "unit": TEMP_CELSIUS}
    )
    await hass.async_block_till_done()

    assert hass.states.get("sensor.test").state == "1"

    # test event for new unconfigured sensor
    event_callback(
        {"id": "test2", "sensor": "temperature", "value": 0, "unit": TEMP_CELSIUS}
    )
    await hass.async_block_till_done()

    # test state of new sensor
    new_sensor = hass.states.get("sensor.test2")
    assert new_sensor
    assert new_sensor.state == "0"
    assert new_sensor.attributes["unit_of_measurement"] == TEMP_CELSIUS
    assert new_sensor.attributes["icon"] == "mdi:thermometer"
async def test_disable_automatic_add(hass, monkeypatch):
    """If disabled new devices should not be automatically added."""
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {"platform": "rflink", "automatic_add": False},
    }

    # setup mocking rflink module
    event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)

    # test event for new unconfigured sensor
    event_callback(
        {"id": "test2", "sensor": "temperature", "value": 0, "unit": TEMP_CELSIUS}
    )
    await hass.async_block_till_done()

    # make sure new device is not added
    assert not hass.states.get("sensor.test2")
async def test_entity_availability(hass, monkeypatch):
    """If Rflink device is disconnected, entities should become unavailable."""
    # Make sure Rflink mock does not 'recover' to quickly from the
    # disconnect or else the unavailability cannot be measured
    config = CONFIG
    failures = [True, True]
    config[CONF_RECONNECT_INTERVAL] = 60

    # Create platform and entities
    _, _, _, disconnect_callback = await mock_rflink(
        hass, config, DOMAIN, monkeypatch, failures=failures
    )

    # Entities are available by default
    assert hass.states.get("sensor.test").state == STATE_UNKNOWN

    # Mock a disconnect of the Rflink device
    disconnect_callback()

    # Wait for dispatch events to propagate
    await hass.async_block_till_done()

    # Entity should be unavailable
    assert hass.states.get("sensor.test").state == "unavailable"

    # Reconnect the Rflink device
    disconnect_callback()

    # Wait for dispatch events to propagate
    await hass.async_block_till_done()

    # Entities should be available again
    assert hass.states.get("sensor.test").state == STATE_UNKNOWN
async def test_aliases(hass, monkeypatch):
    """Validate the response to sensor's alias (with aliases).

    An event addressed to one of a device's aliases must update the
    state of the aliased entity.
    """
    config = {
        "rflink": {"port": "/dev/ttyABC0"},
        DOMAIN: {
            "platform": "rflink",
            "devices": {
                "test_02": {
                    "name": "test_02",
                    "sensor_type": "humidity",
                    "aliases": ["test_alias_02_0"],
                }
            },
        },
    }

    # setup mocking rflink module
    event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)

    # test default state of sensor loaded from config
    config_sensor = hass.states.get("sensor.test_02")
    assert config_sensor
    assert config_sensor.state == "unknown"

    # test event for config sensor, sent under the alias id
    event_callback(
        {
            "id": "test_alias_02_0",
            "sensor": "humidity",
            "value": 65,
            "unit": UNIT_PERCENTAGE,
        }
    )
    await hass.async_block_till_done()

    # test state of new sensor
    updated_sensor = hass.states.get("sensor.test_02")
    assert updated_sensor
    assert updated_sensor.state == "65"
    assert updated_sensor.attributes["unit_of_measurement"] == UNIT_PERCENTAGE
async def test_race_condition(hass, monkeypatch):
    """Test race condition for unknown components."""
    config = {"rflink": {"port": "/dev/ttyABC0"}, DOMAIN: {"platform": "rflink"}}
    tmp_entity = TMP_ENTITY.format("test3")

    # setup mocking rflink module
    event_callback, _, _, _ = await mock_rflink(hass, config, DOMAIN, monkeypatch)

    # Fire two events for the same new, unconfigured sensor back to back,
    # before the event loop gets a chance to create the entity.
    event_callback({"id": "test3", "sensor": "battery", "value": "ok", "unit": ""})
    event_callback({"id": "test3", "sensor": "battery", "value": "ko", "unit": ""})

    # tmp_entity added to EVENT_KEY_SENSOR
    assert tmp_entity in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR]["test3"]
    # tmp_entity must not be added to EVENT_KEY_COMMAND
    assert tmp_entity not in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_COMMAND]["test3"]

    await hass.async_block_till_done()

    # test state of new sensor
    updated_sensor = hass.states.get("sensor.test3")
    assert updated_sensor

    # Only the FIRST event's value ("ok") must have been applied while the
    # entity was still temporary; the second ("ko") event is dropped.
    new_sensor = hass.states.get(f"{DOMAIN}.test3")
    assert new_sensor
    assert new_sensor.state == "ok"

    # A new event after entity creation updates state normally.
    event_callback({"id": "test3", "sensor": "battery", "value": "ko", "unit": ""})
    await hass.async_block_till_done()

    # tmp_entity must be deleted from EVENT_KEY_SENSOR lookup as well
    assert tmp_entity not in hass.data[DATA_ENTITY_LOOKUP][EVENT_KEY_SENSOR]["test3"]

    # test state of new sensor
    new_sensor = hass.states.get(f"{DOMAIN}.test3")
    assert new_sensor
    assert new_sensor.state == "ko"
| apache-2.0 |
tkdchen/Nitrate | src/tcms/core/apps.py | 2 | 4387 | # -*- coding: utf-8 -*-
import logging
from datetime import datetime
from textwrap import dedent
from django.apps import AppConfig as DjangoAppConfig
from django.conf import settings
from django.db import connections
from django.db.models.signals import post_migrate
from django.utils.translation import gettext_lazy as _
logger = logging.getLogger(__name__)
SQLs = {
"postgresql": (
# Thanks for the answer: https://stackoverflow.com/q/2204058/968262
dedent(
"""\
select 'placeholder' as ph1, 'placeholder' as ph2, i.relname as Key_name
from pg_class t, pg_class i, pg_index ix, pg_attribute a
where
t.oid = ix.indrelid
and i.oid = ix.indexrelid
and a.attrelid = t.oid
and a.attnum = ANY(ix.indkey)
and t.relkind = 'r'
and t.relname = 'django_comments'
and a.attname = 'object_pk'
order by t.relname, i.relname;
"""
),
"DROP INDEX {}",
"SELECT data_type FROM information_schema.columns "
"WHERE table_name = 'django_comments' AND column_name = 'object_pk'",
"ALTER TABLE django_comments ALTER COLUMN object_pk TYPE INTEGER "
"USING object_pk::integer",
),
"mysql": (
# Key_name is the 3rd column
"SHOW INDEXES FROM django_comments WHERE Column_name = 'object_pk'",
"DROP INDEX {} ON django_comments",
"SELECT DATA_TYPE FROM information_schema.columns "
"WHERE table_schema = '{}' AND table_name = 'django_comments' "
"AND COLUMN_NAME = 'object_pk'",
"ALTER TABLE django_comments MODIFY object_pk INT",
),
}
def ensure_django_comment_object_pk_is_int(*args, **kwargs):
    """Ensure django_comments.object_pk has an integer column type.

    Runs as a ``post_migrate`` signal handler: for every configured
    database, check the current type of ``django_comments.object_pk`` and,
    if it is not already an integer, drop any indexes on the column, alter
    it to INTEGER, and recreate a plain index afterwards.
    """
    for db_key, db_info in settings.DATABASES.items():
        _, db_engine = db_info["ENGINE"].rsplit(".", 1)
        if db_engine not in SQLs:
            logger.warning(
                "Engine %s is not supported to modify data type of column "
                "django_comment.object_pk.",
                db_engine,
            )
            # NOTE(review): this returns and thereby skips all remaining
            # configured databases, not just this one — confirm whether a
            # `continue` was intended here.
            return
        sql_find_indexes, sql_drop_idx, query, alter = SQLs[db_engine]
        schema_name = db_info["NAME"]
        query = query.format(schema_name)
        with connections[db_key].cursor() as cursor:
            cursor.execute(query)
            (type_name,) = cursor.fetchone()
            need_modify = type_name.lower() not in ("int", "integer")
        if need_modify:
            # Before alter the column, find existing indexes and remove them.
            # Later, the indexes will be created back.
            # This is required since the version 2.1.0 of django-contrib-comments,
            # which adds indexes to the django_comments.object_pk column
            with connections[db_key].cursor() as cursor:
                cursor.execute(sql_find_indexes)
                rows = cursor.fetchall()
            had_index = False
            with connections[db_key].cursor() as cursor:
                for row in rows:
                    had_index = True
                    # 2: the column name
                    cursor.execute(sql_drop_idx.format(row[2]))
            logger.info(
                "Change django_comments.object_pk to INTEGER in database %s",
                schema_name,
            )
            with connections[db_key].cursor() as cursor:
                cursor.execute(alter)
            if had_index:
                # For this special case of django_comments, there is no special
                # index on the original django_comments.object_pk column. So,
                # after altering it to the integer type, a btree index is good
                # enough. Note that, btree is the default for PostgreSQL,
                # and the MySQL and MariaDB with InnoDB engine.
                # Timestamp suffix keeps the index name unique across runs.
                now = datetime.now().strftime("%Y%m%d%H%M%S")
                with connections[db_key].cursor() as cursor:
                    cursor.execute(
                        f"CREATE INDEX django_comments__object_pk__{now} "
                        f"ON django_comments (object_pk)"
                    )
class AppConfig(DjangoAppConfig):
    """Django application configuration for the Nitrate core app."""

    label = "core"
    name = "tcms.core"
    verbose_name = _("Core App")

    def ready(self):
        # Normalize django_comments.object_pk's column type after every
        # migration run (see ensure_django_comment_object_pk_is_int).
        post_migrate.connect(ensure_django_comment_object_pk_is_int)
| gpl-2.0 |
JIoJIaJIu/servo | tests/wpt/css-tests/tools/pywebsocket/src/mod_pywebsocket/stream.py | 673 | 2748 | # Copyright 2011, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This file exports public symbols.
"""
from mod_pywebsocket._stream_base import BadOperationException
from mod_pywebsocket._stream_base import ConnectionTerminatedException
from mod_pywebsocket._stream_base import InvalidFrameException
from mod_pywebsocket._stream_base import InvalidUTF8Exception
from mod_pywebsocket._stream_base import UnsupportedFrameException
from mod_pywebsocket._stream_hixie75 import StreamHixie75
from mod_pywebsocket._stream_hybi import Frame
from mod_pywebsocket._stream_hybi import Stream
from mod_pywebsocket._stream_hybi import StreamOptions
# These methods are intended to be used by WebSocket client developers to have
# their implementations receive broken data in tests.
from mod_pywebsocket._stream_hybi import create_close_frame
from mod_pywebsocket._stream_hybi import create_header
from mod_pywebsocket._stream_hybi import create_length_header
from mod_pywebsocket._stream_hybi import create_ping_frame
from mod_pywebsocket._stream_hybi import create_pong_frame
from mod_pywebsocket._stream_hybi import create_binary_frame
from mod_pywebsocket._stream_hybi import create_text_frame
from mod_pywebsocket._stream_hybi import create_closing_handshake_body
# vi:sts=4 sw=4 et
| mpl-2.0 |
MaDKaTZe/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/autoinstall.py | 115 | 17669 | # Copyright (c) 2009, Daniel Krech All rights reserved.
# Copyright (C) 2010 Chris Jerdonek (cjerdonek@webkit.org)
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of the Daniel Krech nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Support for automatically downloading Python packages from an URL."""
import codecs
import logging
import os
import shutil
import stat
import sys
import tarfile
import tempfile
import urllib2
import urlparse
import zipfile
_log = logging.getLogger(__name__)
class AutoInstaller(object):
    """Supports automatically installing Python packages from an URL.

    Supports uncompressed files, .tar.gz, and .zip formats.

    NOTE: this is Python 2 code (urllib2, ``except X, err`` syntax).

    Basic usage:

    installer = AutoInstaller()

    installer.install(url="http://pypi.python.org/packages/source/p/pep8/pep8-0.5.0.tar.gz#md5=512a818af9979290cd619cce8e9c2e2b",
                      url_subpath="pep8-0.5.0/pep8.py")
    installer.install(url="http://pypi.python.org/packages/source/m/mechanize/mechanize-0.2.4.zip",
                      url_subpath="mechanize")

    """

    def __init__(self, append_to_search_path=False, make_package=True,
                 target_dir=None, temp_dir=None):
        """Create an AutoInstaller instance, and set up the target directory.

        Args:
          append_to_search_path: A boolean value of whether to append the
                                 target directory to the sys.path search path.
          make_package: A boolean value of whether to make the target
                        directory a package.  This adds an __init__.py file
                        to the target directory -- allowing packages and
                        modules within the target directory to be imported
                        explicitly using dotted module names.
          target_dir: The directory path to which packages should be installed.
                      Defaults to a subdirectory of the folder containing
                      this module called "autoinstalled".
          temp_dir: The directory path to use for any temporary files
                    generated while downloading, unzipping, and extracting
                    packages to install.  Defaults to a standard temporary
                    location generated by the tempfile module.  This
                    parameter should normally be used only for development
                    testing.

        """
        if target_dir is None:
            this_dir = os.path.dirname(__file__)
            target_dir = os.path.join(this_dir, "autoinstalled")

        # Ensure that the target directory exists.
        self._set_up_target_dir(target_dir, append_to_search_path, make_package)

        self._target_dir = target_dir
        self._temp_dir = temp_dir

    def _write_file(self, path, text, encoding):
        # Write `text` to `path` using the given character encoding.
        with codecs.open(path, "w", encoding) as filehandle:
            filehandle.write(text)

    def _set_up_target_dir(self, target_dir, append_to_search_path,
                           make_package):
        """Set up a target directory.

        Args:
          target_dir: The path to the target directory to set up.
          append_to_search_path: A boolean value of whether to append the
                                 target directory to the sys.path search path.
          make_package: A boolean value of whether to make the target
                        directory a package.  This adds an __init__.py file
                        to the target directory -- allowing packages and
                        modules within the target directory to be imported
                        explicitly using dotted module names.

        """
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)

        if append_to_search_path:
            sys.path.append(target_dir)

        if make_package:
            self._make_package(target_dir)

    def _make_package(self, target_dir):
        # Create an (empty apart from a comment) __init__.py so the target
        # directory is importable as a package.  No-op if it already exists.
        init_path = os.path.join(target_dir, "__init__.py")
        if not os.path.exists(init_path):
            text = ("# This file is required for Python to search this "
                    "directory for modules.\n")
            self._write_file(init_path, text, "ascii")

    def _create_scratch_directory_inner(self, prefix):
        """Create a scratch directory without exception handling.

        Creates a scratch directory inside the AutoInstaller temp
        directory self._temp_dir, or inside a platform-dependent temp
        directory if self._temp_dir is None.  Returns the path to the
        created scratch directory.

        Raises:
          OSError: [Errno 2] if the containing temp directory self._temp_dir
                   is not None and does not exist.

        """
        # The tempfile.mkdtemp() method function requires that the
        # directory corresponding to the "dir" parameter already exist
        # if it is not None.
        scratch_dir = tempfile.mkdtemp(prefix=prefix.replace('/', '.'), dir=self._temp_dir)
        return scratch_dir

    def _create_scratch_directory(self, target_name):
        """Create a temporary scratch directory, and return its path.

        The scratch directory is generated inside the temp directory
        of this AutoInstaller instance.  This method also creates the
        temp directory if it does not already exist.

        """
        prefix = target_name.replace(os.sep, "_") + "_"
        try:
            scratch_dir = self._create_scratch_directory_inner(prefix)
        except OSError:
            # Handle case of containing temp directory not existing--
            # OSError: [Errno 2] No such file or directory:...
            temp_dir = self._temp_dir
            if temp_dir is None or os.path.exists(temp_dir):
                raise
            # Else try again after creating the temp directory.
            os.makedirs(temp_dir)
            scratch_dir = self._create_scratch_directory_inner(prefix)
        return scratch_dir

    def _url_downloaded_path(self, target_name):
        # Path of the hidden marker file recording which URL was installed
        # for the given target.
        return os.path.join(self._target_dir, ".%s.url" % target_name.replace('/', '_'))

    def _is_downloaded(self, target_name, url):
        # True if the marker file exists and records exactly this URL.
        version_path = self._url_downloaded_path(target_name)

        if not os.path.exists(version_path):
            return False

        with codecs.open(version_path, "r", "utf-8") as filehandle:
            return filehandle.read().strip() == url.strip()

    def _record_url_downloaded(self, target_name, url):
        # Persist the installed URL so _is_downloaded() can skip re-installs.
        version_path = self._url_downloaded_path(target_name)
        self._write_file(version_path, url, "utf-8")

    def _extract_targz(self, path, scratch_dir):
        # tarfile.extractall() extracts to a path without the trailing ".tar.gz".
        target_basename = os.path.basename(path[:-len(".tar.gz")])
        target_path = os.path.join(scratch_dir, target_basename)

        try:
            tar_file = tarfile.open(path)
        except tarfile.ReadError, err:
            # Append existing Error message to new Error.
            message = ("Could not open tar file: %s\n"
                       " The file probably does not have the correct format.\n"
                       " --> Inner message: %s"
                       % (path, err))
            raise Exception(message)

        try:
            tar_file.extractall(target_path)
        finally:
            tar_file.close()

        return target_path

    # This is a replacement for ZipFile.extractall(), which is
    # available in Python 2.6 but not in earlier versions.
    # NOTE: The version in 2.6.1 (which shipped on Snow Leopard) is broken!
    def _extract_all(self, zip_file, target_dir):
        for name in zip_file.namelist():
            path = os.path.join(target_dir, name)
            if not os.path.basename(path):
                # Then the path ends in a slash, so it is a directory.
                os.makedirs(path)
                continue

            try:
                # We open this file w/o encoding, as we're reading/writing
                # the raw byte-stream from the zip file.
                outfile = open(path, 'wb')
            except IOError:
                # Not all zip files seem to list the directories explicitly,
                # so try again after creating the containing directory.
                _log.debug("Got IOError: retrying after creating directory...")
                dirname = os.path.dirname(path)
                os.makedirs(dirname)
                outfile = open(path, 'wb')

            try:
                outfile.write(zip_file.read(name))
            finally:
                outfile.close()

    def _unzip(self, path, scratch_dir):
        # zipfile.extractall() extracts to a path without the trailing ".zip".
        target_basename = os.path.basename(path[:-len(".zip")])
        target_path = os.path.join(scratch_dir, target_basename)

        try:
            zip_file = zipfile.ZipFile(path, "r")
        except zipfile.BadZipfile, err:
            message = ("Could not open zip file: %s\n"
                       " --> Inner message: %s"
                       % (path, err))
            raise Exception(message)

        try:
            self._extract_all(zip_file, scratch_dir)
        finally:
            zip_file.close()

        return target_path

    def _prepare_package(self, path, scratch_dir):
        """Prepare a package for use, if necessary, and return the new path.

        For example, this method unzips zipped files and extracts
        tar files.

        Args:
          path: The path to the downloaded URL contents.
          scratch_dir: The scratch directory.  Note that the scratch
                       directory contains the file designated by the
                       path parameter.

        """
        # FIXME: Add other natural extensions.
        if path.endswith(".zip"):
            new_path = self._unzip(path, scratch_dir)
        elif path.endswith(".tar.gz"):
            new_path = self._extract_targz(path, scratch_dir)
        else:
            # No preparation is needed.
            new_path = path

        return new_path

    def _download_to_stream(self, url, stream):
        # Download `url` into the given writable byte stream, retrying up
        # to 5 times on IOError before giving up.
        failures = 0
        while True:
            try:
                netstream = urllib2.urlopen(url)
                break
            except IOError, err:
                # Try multiple times
                if failures < 5:
                    _log.warning("Failed to download %s, %s retrying" % (
                        url, err))
                    failures += 1
                    continue
                # Append existing Error message to new Error.
                message = ('Could not download Python modules from URL "%s".\n'
                           " Make sure you are connected to the internet.\n"
                           " You must be connected to the internet when "
                           "downloading needed modules for the first time.\n"
                           " --> Inner message: %s"
                           % (url, err))
                raise IOError(message)
        code = 200
        if hasattr(netstream, "getcode"):
            code = netstream.getcode()
        if not 200 <= code < 300:
            raise ValueError("HTTP Error code %s" % code)

        BUFSIZE = 2**13  # 8KB
        while True:
            data = netstream.read(BUFSIZE)
            if not data:
                break
            stream.write(data)
        netstream.close()

    def _download(self, url, scratch_dir):
        # Download `url` into the scratch directory, named after the last
        # component of the URL path; return the downloaded file's path.
        url_path = urlparse.urlsplit(url)[2]
        url_path = os.path.normpath(url_path)  # Removes trailing slash.
        target_filename = os.path.basename(url_path)
        target_path = os.path.join(scratch_dir, target_filename)

        with open(target_path, "wb") as stream:
            self._download_to_stream(url, stream)

        return target_path

    def _install(self, scratch_dir, package_name, target_path, url, url_subpath, files_to_remove):
        """Install a python package from an URL.

        This internal method overwrites the target path if the target
        path already exists.

        """
        path = self._download(url=url, scratch_dir=scratch_dir)
        path = self._prepare_package(path, scratch_dir)

        if url_subpath is None:
            source_path = path
        else:
            source_path = os.path.join(path, url_subpath)

        for filename in files_to_remove:
            path = os.path.join(source_path, filename.replace('/', os.sep))
            if os.path.exists(path):
                # Pre-emptively change the permissions to #0777 to try and work around win32 permissions issues.
                os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
                os.remove(path)

        if os.path.exists(target_path):
            if os.path.isdir(target_path):
                shutil.rmtree(target_path, ignore_errors=True)
            else:
                os.remove(target_path)

        # shutil.move() command creates intermediate directories if they do not exist.
        shutil.move(source_path, target_path)

        # ensure all the new directories are importable.
        intermediate_dirs = os.path.dirname(os.path.relpath(target_path, self._target_dir))
        parent_dirname = self._target_dir
        for dirname in intermediate_dirs.split(os.sep):
            parent_dirname = os.path.join(parent_dirname, dirname)
            self._make_package(parent_dirname)

        self._record_url_downloaded(package_name, url)

    def install(self, url, should_refresh=False, target_name=None,
                url_subpath=None, files_to_remove=None):
        """Install a python package from an URL.

        Returns True if an install was performed and False if the package
        was already present (and should_refresh is False).

        Args:
          url: The URL from which to download the package.

        Optional Args:
          should_refresh: A boolean value of whether the package should be
                          downloaded again if the package is already present.
          target_name: The name of the folder or file in the autoinstaller
                       target directory at which the package should be
                       installed.  Defaults to the base name of the
                       URL sub-path.  This parameter must be provided if
                       the URL sub-path is not specified.
          url_subpath: The relative path of the URL directory that should
                       be installed.  Defaults to the full directory, or
                       the entire URL contents.

        """
        if target_name is None:
            if not url_subpath:
                raise ValueError('The "target_name" parameter must be '
                                 'provided if the "url_subpath" parameter '
                                 "is not provided.")
            # Remove any trailing slashes.
            url_subpath = os.path.normpath(url_subpath)
            target_name = os.path.basename(url_subpath)

        target_path = os.path.join(self._target_dir, target_name.replace('/', os.sep))
        if not should_refresh and self._is_downloaded(target_name, url):
            return False

        files_to_remove = files_to_remove or []
        package_name = target_name.replace(os.sep, '.')
        _log.info("Auto-installing package: %s" % package_name)

        # The scratch directory is where we will download and prepare
        # files specific to this install until they are ready to move
        # into place.
        scratch_dir = self._create_scratch_directory(target_name)

        try:
            self._install(package_name=package_name,
                          target_path=target_path,
                          scratch_dir=scratch_dir,
                          url=url,
                          url_subpath=url_subpath,
                          files_to_remove=files_to_remove)
        except Exception, err:
            # Append existing Error message to new Error.
            message = ("Error auto-installing the %s package to:\n"
                       ' "%s"\n'
                       " --> Inner message: %s"
                       % (target_name, target_path, err))
            raise Exception(message)
        finally:
            shutil.rmtree(scratch_dir, ignore_errors=True)
        _log.debug('Auto-installed %s to:' % url)
        _log.debug('  "%s"' % target_path)
        return True
| bsd-3-clause |
MenZil/kuma | vendor/packages/logilab/astng/test/unittest_nodes.py | 24 | 13826 | # copyright 2003-2013 LOGILAB S.A. (Paris, FRANCE), all rights reserved.
# contact http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This file is part of logilab-astng.
#
# logilab-astng is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 2.1 of the License, or (at your
# option) any later version.
#
# logilab-astng is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with logilab-astng. If not, see <http://www.gnu.org/licenses/>.
"""tests for specific behaviour of astng nodes
"""
import sys
from logilab.common import testlib
from logilab.astng.node_classes import unpack_infer
from logilab.astng.bases import BUILTINS, YES, InferenceContext
from logilab.astng.exceptions import ASTNGBuildingException, NotFoundError
from logilab.astng import builder, nodes
from data import module as test_module
from os.path import join, abspath, dirname
# Directory holding the test data modules, and the shared builder used by
# all the test cases in this module.
DATA = join(dirname(abspath(__file__)), 'data')
abuilder = builder.ASTNGBuilder()
class AsString(testlib.TestCase):
    """Checks that nodes round-trip through as_string()."""

    def test_varargs_kwargs_as_string(self):
        ast = abuilder.string_build( 'raise_string(*args, **kwargs)').body[0]
        self.assertEqual(ast.as_string(), 'raise_string(*args, **kwargs)')

    def test_module_as_string(self):
        """check as_string on a whole module prepared to be returned identically
        """
        data = open(join(DATA, 'module.py')).read()
        self.assertMultiLineEqual(MODULE.as_string(), data)

    def test_module2_as_string(self):
        """check as_string on a whole module prepared to be returned identically
        """
        data = open(join(DATA, 'module2.py')).read()
        self.assertMultiLineEqual(MODULE2.as_string(), data)

    @testlib.require_version('2.7')
    def test_2_7_as_string(self):
        """check as_string for python syntax >= 2.7"""
        code = '''one_two = {1, 2}
b = {v: k for (k, v) in enumerate('string')}
cdd = {k for k in b}\n\n'''
        ast = abuilder.string_build(code)
        self.assertMultiLineEqual(ast.as_string(), code)

    @testlib.require_version('3.0')
    def test_3k_as_string(self):
        """check as_string for python 3k syntax"""
        code = '''print()

def function(var):
    nonlocal counter
    try:
        hello
    except NameError as nexc:
        (*hell, o) = b'hello'
        raise AttributeError from nexc
\n'''
        # TODO : annotations and keywords for class definition are not yet implemented
        _todo = '''
def function(var:int):
    nonlocal counter

class Language(metaclass=Natural):
    """natural language"""
'''
        ast = abuilder.string_build(code)
        self.assertEqual(ast.as_string(), code)
class _NodeTC(testlib.TestCase):
    """Base class caching the ASTNG tree built from the CODE attribute."""
    CODE = None

    @property
    def astng(self):
        # Build the tree lazily and cache it on the concrete subclass so
        # it is parsed at most once per test class.
        try:
            return self.__class__.__dict__['CODE_ASTNG']
        except KeyError:
            astng = abuilder.string_build(self.CODE)
            self.__class__.CODE_ASTNG = astng
            return astng
class IfNodeTC(_NodeTC):
    """test transformation of If Node"""
    CODE = """
if 0:
    print()

if True:
    print()
else:
    pass

if "":
    print()
elif []:
    raise

if 1:
    print()
elif True:
    print()
elif func():
    pass
else:
    raise
"""

    def test_if_elif_else_node(self):
        """test transformation for If node"""
        self.assertEqual(len(self.astng.body), 4)
        for stmt in self.astng.body:
            self.assertIsInstance( stmt, nodes.If)
        self.assertFalse(self.astng.body[0].orelse)  # simple If
        self.assertIsInstance(self.astng.body[1].orelse[0], nodes.Pass)  # If / else
        self.assertIsInstance(self.astng.body[2].orelse[0], nodes.If)  # If / elif
        self.assertIsInstance(self.astng.body[3].orelse[0].orelse[0], nodes.If)

    def test_block_range(self):
        # XXX ensure expected values
        self.assertEqual(self.astng.block_range(1), (0, 22))
        self.assertEqual(self.astng.block_range(10), (0, 22))  # XXX (10, 22) ?
        self.assertEqual(self.astng.body[1].block_range(5), (5, 6))
        self.assertEqual(self.astng.body[1].block_range(6), (6, 6))
        self.assertEqual(self.astng.body[1].orelse[0].block_range(7), (7, 8))
        self.assertEqual(self.astng.body[1].orelse[0].block_range(8), (8, 8))
class TryExceptNodeTC(_NodeTC):
CODE = """
try:
print ('pouet')
except IOError:
pass
except UnicodeError:
print()
else:
print()
"""
def test_block_range(self):
# XXX ensure expected values
self.assertEqual(self.astng.body[0].block_range(1), (1, 8))
self.assertEqual(self.astng.body[0].block_range(2), (2, 2))
self.assertEqual(self.astng.body[0].block_range(3), (3, 8))
self.assertEqual(self.astng.body[0].block_range(4), (4, 4))
self.assertEqual(self.astng.body[0].block_range(5), (5, 5))
self.assertEqual(self.astng.body[0].block_range(6), (6, 6))
self.assertEqual(self.astng.body[0].block_range(7), (7, 7))
self.assertEqual(self.astng.body[0].block_range(8), (8, 8))
class TryFinallyNodeTC(_NodeTC):
CODE = """
try:
print ('pouet')
finally:
print ('pouet')
"""
def test_block_range(self):
# XXX ensure expected values
self.assertEqual(self.astng.body[0].block_range(1), (1, 4))
self.assertEqual(self.astng.body[0].block_range(2), (2, 2))
self.assertEqual(self.astng.body[0].block_range(3), (3, 4))
self.assertEqual(self.astng.body[0].block_range(4), (4, 4))
class TryFinally25NodeTC(_NodeTC):
CODE = """
try:
print('pouet')
except Exception:
print ('oops')
finally:
print ('pouet')
"""
def test_block_range(self):
# XXX ensure expected values
self.assertEqual(self.astng.body[0].block_range(1), (1, 6))
self.assertEqual(self.astng.body[0].block_range(2), (2, 2))
self.assertEqual(self.astng.body[0].block_range(3), (3, 4))
self.assertEqual(self.astng.body[0].block_range(4), (4, 4))
self.assertEqual(self.astng.body[0].block_range(5), (5, 5))
self.assertEqual(self.astng.body[0].block_range(6), (6, 6))
class TryExcept2xNodeTC(_NodeTC):
CODE = """
try:
hello
except AttributeError, (retval, desc):
pass
"""
def test_tuple_attribute(self):
if sys.version_info >= (3, 0):
self.skipTest('syntax removed from py3.x')
handler = self.astng.body[0].handlers[0]
self.assertIsInstance(handler.name, nodes.Tuple)
# Prebuilt ASTNG trees for the test data modules, shared by the test
# classes below.
MODULE = abuilder.module_build(test_module)
MODULE2 = abuilder.file_build(join(DATA, 'module2.py'), 'data.module2')
class ImportNodeTC(testlib.TestCase):
    """Tests for Import / From node resolution and printing."""

    def test_import_self_resolve(self):
        myos = MODULE2.igetattr('myos').next()
        self.assertTrue(isinstance(myos, nodes.Module), myos)
        self.assertEqual(myos.name, 'os')
        self.assertEqual(myos.qname(), 'os')
        self.assertEqual(myos.pytype(), '%s.module' % BUILTINS)

    def test_from_self_resolve(self):
        spawn = MODULE.igetattr('spawn').next()
        self.assertTrue(isinstance(spawn, nodes.Class), spawn)
        self.assertEqual(spawn.root().name, 'logilab.common.shellutils')
        self.assertEqual(spawn.qname(), 'logilab.common.shellutils.Execute')
        self.assertEqual(spawn.pytype(), '%s.classobj' % BUILTINS)
        abspath = MODULE2.igetattr('abspath').next()
        self.assertTrue(isinstance(abspath, nodes.Function), abspath)
        self.assertEqual(abspath.root().name, 'os.path')
        self.assertEqual(abspath.qname(), 'os.path.abspath')
        self.assertEqual(abspath.pytype(), '%s.function' % BUILTINS)

    def test_real_name(self):
        from_ = MODULE['spawn']
        self.assertEqual(from_.real_name('spawn'), 'Execute')
        imp_ = MODULE['os']
        self.assertEqual(imp_.real_name('os'), 'os')
        self.assertRaises(NotFoundError, imp_.real_name, 'os.path')
        imp_ = MODULE['spawn']
        self.assertEqual(imp_.real_name('spawn'), 'Execute')
        self.assertRaises(NotFoundError, imp_.real_name, 'Execute')
        imp_ = MODULE2['YO']
        self.assertEqual(imp_.real_name('YO'), 'YO')
        self.assertRaises(NotFoundError, imp_.real_name, 'data')

    def test_as_string(self):
        ast = MODULE['modutils']
        self.assertEqual(ast.as_string(), "from logilab.common import modutils")
        ast = MODULE['spawn']
        self.assertEqual(ast.as_string(), "from logilab.common.shellutils import Execute as spawn")
        ast = MODULE['os']
        self.assertEqual(ast.as_string(), "import os.path")
        code = """from . import here
from .. import door
from .store import bread
from ..cave import wine\n\n"""
        ast = abuilder.string_build(code)
        self.assertMultiLineEqual(ast.as_string(), code)

    def test_bad_import_inference(self):
        # Explication of bug
        '''When we import PickleError from nonexistent, a call to the infer
        method of this From node will be made by unpack_infer.
        inference.infer_from will try to import this module, which will fail and
        raise a InferenceException (by mixins.do_import_module). The infer_name
        will catch this exception and yield and YES instead.
        '''
        code = '''try:
    from pickle import PickleError
except ImportError:
    from nonexistent import PickleError

try:
    pass
except PickleError:
    pass
'''
        astng = abuilder.string_build(code)
        from_node = astng.body[1].handlers[0].body[0]
        handler_type = astng.body[1].handlers[0].type
        # Must not raise even though 'nonexistent' cannot be imported.
        excs = list(unpack_infer(handler_type))

    def test_absolute_import(self):
        astng = abuilder.file_build(self.datapath('absimport.py'))
        ctx = InferenceContext()
        ctx.lookupname = 'message'
        # will fail if absolute import failed
        astng['message'].infer(ctx).next()
        ctx.lookupname = 'email'
        m = astng['email'].infer(ctx).next()
        self.assertFalse(m.file.startswith(self.datapath('email.py')))
class CmpNodeTC(testlib.TestCase):
    def test_as_string(self):
        """a comparison expression must survive an as_string round-trip"""
        source = "a == 2"
        node = abuilder.string_build(source).body[0]
        self.assertEqual(node.as_string(), source)
class ConstNodeTC(testlib.TestCase):
    """Checks const_factory() proxies constants to the right builtin class."""

    def _test(self, value):
        # Shared assertion helper: the Const node must proxy the builtin
        # class of `value` and keep the value's identity.
        node = nodes.const_factory(value)
        self.assertIsInstance(node._proxied, nodes.Class)
        self.assertEqual(node._proxied.name, value.__class__.__name__)
        self.assertIs(node.value, value)
        self.assertTrue(node._proxied.parent)
        self.assertEqual(node._proxied.root().name, value.__class__.__module__)

    def test_none(self):
        self._test(None)

    def test_bool(self):
        self._test(True)

    def test_int(self):
        self._test(1)

    def test_float(self):
        self._test(1.0)

    def test_complex(self):
        self._test(1.0j)

    def test_str(self):
        self._test('a')

    def test_unicode(self):
        self._test(u'a')
class NameNodeTC(testlib.TestCase):
    def test_assign_to_True(self):
        """test that True and False assignments don't crash"""
        code = """True = False
def hello(False):
    pass

del True
"""
        if sys.version_info >= (3, 0):
            # True/False are keywords on py3k, so building this must fail.
            self.assertRaises(SyntaxError,  # might become ASTNGBuildingException
                              abuilder.string_build, code)
        else:
            ast = abuilder.string_build(code)
            ass_true = ast['True']
            self.assertIsInstance(ass_true, nodes.AssName)
            self.assertEqual(ass_true.name, "True")
            del_true = ast.body[2].targets[0]
            self.assertIsInstance(del_true, nodes.DelName)
            self.assertEqual(del_true.name, "True")
class ArgumentsNodeTC(testlib.TestCase):
    """Tests for Arguments nodes (function / lambda parameter lists)."""

    def test_linenumbering(self):
        # Line positions inside this source string are what is asserted
        # below: 'def func' starts at line 2, the lambda at line 4.
        ast = abuilder.string_build('''
def func(a,
    b): pass
x = lambda x: None
''')
        self.assertEqual(ast['func'].args.fromlineno, 2)
        self.assertFalse(ast['func'].args.is_statement)
        xlambda = ast['x'].infer().next()
        self.assertEqual(xlambda.args.fromlineno, 4)
        self.assertEqual(xlambda.args.tolineno, 4)
        self.assertFalse(xlambda.args.is_statement)
        if sys.version_info < (3, 0):
            self.assertEqual(ast['func'].args.tolineno, 3)
        else:
            self.skipTest('FIXME http://bugs.python.org/issue10445 '
                          '(no line number on function args)')
class SliceNodeTC(testlib.TestCase):
    """Round-trip (parse + as_string) tests for slice/subscript syntax."""

    def test(self):
        # Each snippet must unparse back to exactly its source text.
        for code in ('a[0]', 'a[1:3]', 'a[:-1:step]', 'a[:,newaxis]',
                     'a[newaxis,:]', 'del L[::2]', 'del A[1]', 'del Br[:]'):
            ast = abuilder.string_build(code).body[0]
            self.assertEqual(ast.as_string(), code)

    def test_slice_and_subscripts(self):
        # A mix of extended slices, deletes and attribute targets; the
        # whole module must round-trip through as_string unchanged.
        code = """a[:1] = bord[2:]
a[:1] = bord[2:]
del bree[3:d]
bord[2:]
del av[d::f], a[df:]
a[:1] = bord[2:]
del SRC[::1,newaxis,1:]
tous[vals] = 1010
del thousand[key]
del a[::2], a[:-1:step]
del Fee.form[left:]
aout.vals = miles.of_stuff
del (ccok, (name.thing, foo.attrib.value)), Fee.form[left:]
if all[1] == bord[0:]:
    pass\n\n"""
        ast = abuilder.string_build(code)
        self.assertEqual(ast.as_string(), code)
class EllipsisNodeTC(testlib.TestCase):
    """Round-trip test for the Ellipsis subscript node."""

    def test(self):
        ast = abuilder.string_build('a[...]').body[0]
        self.assertEqual(ast.as_string(), 'a[...]')
if __name__ == '__main__':
    # Run this module's test cases via logilab's unittest wrapper.
    testlib.unittest_main()
| mpl-2.0 |
meyersj/geotweet | geotweet/geomongo/__init__.py | 1 | 1159 | import os
import shutil
import errno
import logging
import sys
from load import GeoJSONLoader
from mongo import MongoGeo
class GeoMongo(object):
    """Load a GeoJSON file into a MongoDB collection.

    Connection settings and the source file are taken from a parsed
    command-line ``args`` object (``file``, ``mongo``, ``db``,
    ``collection`` attributes).
    """

    def __init__(self, args):
        """Capture source path and Mongo connection settings from *args*."""
        self.source = args.file
        self.mongo = args.mongo
        self.db = args.db
        self.collection = args.collection

    def run(self):
        """ Top level runner to load State and County GeoJSON files into Mongo DB """
        logging.info("Starting GeoJSON MongoDB loading process.")
        target = dict(uri=self.mongo, db=self.db, collection=self.collection)
        self.load(self.source, **target)
        logging.info("Finished loading {0} into MongoDB".format(self.source))

    def load(self, geojson, uri=None, db=None, collection=None):
        """ Load geojson file into mongodb instance """
        logging.info("Mongo URI: {0}".format(uri))
        logging.info("Mongo DB: {0}".format(db))
        logging.info("Mongo Collection: {0}".format(collection))
        logging.info("Geojson File to be loaded: {0}".format(geojson))
        # Stream each GeoJSON feature straight into the collection.
        writer = MongoGeo(db=db, collection=collection, uri=uri)
        GeoJSONLoader().load(geojson, writer.insert)
| mit |
eeshangarg/oh-mainline | vendor/packages/docutils/docutils/languages/gl.py | 149 | 2040 | # -*- coding: utf-8 -*-
# Author: David Goodger
# Contact: goodger@users.sourceforge.net
# Revision: $Revision: 2224 $
# Date: $Date: 2004-06-05 21:40:46 +0200 (Sat, 05 Jun 2004) $
# Copyright: This module has been placed in the public domain.
# New language mappings are welcome. Before doing a new translation, please
# read <http://docutils.sf.net/docs/howto/i18n.html>. Two files must be
# translated for each language: one in docutils/languages, the other in
# docutils/parsers/rst/languages.
"""
Galician-language mappings for language-dependent features of Docutils.
"""
__docformat__ = 'reStructuredText'
# Translation tables consumed by docutils' language-dependent machinery.
labels = {
    # fixed: language-dependent
    'author': u'Autor',
    'authors': u'Autores',
    'organization': u'Organizaci\u00f3n',
    'address': u'Enderezo',
    'contact': u'Contacto',
    'version': u'Versi\u00f3n',
    'revision': u'Revisi\u00f3n',
    'status': u'Estado',
    'date': u'Data',
    'copyright': u'Dereitos de copia',
    'dedication': u'Dedicatoria',
    # NOTE(review): 'Abstract' appears untranslated compared with the
    # other entries -- confirm against upstream docutils before changing.
    'abstract': u'Abstract',
    'attention': u'Atenci\u00f3n!',
    'caution': u'Advertencia!',
    'danger': u'PERIGO!',
    'error': u'Erro',
    'hint': u'Consello',
    'important': u'Importante',
    'note': u'Nota',
    'tip': u'Suxesti\u00f3n',
    'warning': u'Aviso',
    'contents': u'Contido'}
"""Mapping of node class name to label text."""

bibliographic_fields = {
    # language-dependent: fixed
    u'autor': 'author',
    u'autores': 'authors',
    u'organizaci\u00f3n': 'organization',
    u'enderezo': 'address',
    u'contacto': 'contact',
    u'versi\u00f3n': 'version',
    u'revisi\u00f3n': 'revision',
    u'estado': 'status',
    u'data': 'date',
    u'dereitos de copia': 'copyright',
    u'dedicatoria': 'dedication',
    u'abstract': 'abstract'}
"""Galician (lowcased) to canonical name mapping for bibliographic fields."""

author_separators = [';', ',']
"""List of separator strings for the 'Authors' bibliographic field. Tried in
order."""
| agpl-3.0 |
marco-mariotti/selenoprofiles | libraries/SOAPpy/Types.py | 1 | 51871 | from __future__ import nested_scopes
"""
################################################################################
# Copyright (c) 2003, Pfizer
# Copyright (c) 2001, Cayce Ullman.
# Copyright (c) 2001, Brian Matthews.
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# Neither the name of actzero, inc. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
################################################################################
"""
ident = '$Id: Types.py,v 1.19 2005/02/22 04:29:43 warnes Exp $'
from version import __version__
import UserList
import base64
import cgi
import urllib
import copy
import re
import time
from types import *
# SOAPpy modules
from Errors import *
from NS import NS
from Utilities import encodeHexString, cleanDate
from Config import Config
###############################################################################
# Utility functions
###############################################################################
def isPrivate(name):
    """Return true if *name* is private by SOAP convention (leading '_').

    Uses str.startswith rather than name[0] so an empty name is treated
    as public instead of raising IndexError.
    """
    return name.startswith('_')

def isPublic(name):
    """Return true if *name* is public (does not start with '_')."""
    return not name.startswith('_')
###############################################################################
# Types and Wrappers
###############################################################################
class anyType:
    """Abstract base for all SOAP/XML-Schema typed values.

    Instances hold the validated python value (``_data``), an optional
    element (namespace, name), XML attributes (``_attrs``) and a cached
    marshalled string (``_cache``).  anyType itself may not be
    instantiated directly.
    """

    # Namespace URIs under which this type name is considered valid.
    _validURIs = (NS.XSD, NS.XSD2, NS.XSD3, NS.ENC)

    def __init__(self, data = None, name = None, typed = 1, attrs = None):
        """Validate *data* for this type; *name* is a string or (ns, name)."""
        if self.__class__ == anyType:
            raise Error, "anyType can't be instantiated directly"

        if type(name) in (ListType, TupleType):
            self._ns, self._name = name
        else:
            # Default the namespace to this type's first valid URI.
            self._ns = self._validURIs[0]
            self._name = name

        self._typed = typed
        self._attrs = {}

        self._cache = None          # cached marshalled text, if any
        self._type = self._typeName()

        # Subclass hook: validate/normalise the value for this type.
        self._data = self._checkValueSpace(data)

        if attrs != None:
            self._setAttrs(attrs)

    def __str__(self):
        if hasattr(self,'_name') and self._name:
            return "<%s %s at %d>" % (self.__class__, self._name, id(self))
        return "<%s at %d>" % (self.__class__, id(self))

    __repr__ = __str__

    def _checkValueSpace(self, data):
        # Default: accept anything; subclasses restrict the value space.
        return data

    def _marshalData(self):
        # Default lexical form is the str() of the python value.
        return str(self._data)

    def _marshalAttrs(self, ns_map, builder):
        """Serialise ``_attrs`` as an XML attribute string via *builder*."""
        a = ''

        for attr, value in self._attrs.items():
            ns, n = builder.genns(ns_map, attr[0])
            a += n + ' %s%s="%s"' % \
                (ns, attr[1], cgi.escape(str(value), 1))

        return a

    def _fixAttr(self, attr):
        """Normalise an attribute key to a (namespace-or-None, name) tuple."""
        if type(attr) in (StringType, UnicodeType):
            attr = (None, attr)
        elif type(attr) == ListType:
            attr = tuple(attr)
        elif type(attr) != TupleType:
            raise AttributeError, "invalid attribute type"

        if len(attr) != 2:
            raise AttributeError, "invalid attribute length"

        if type(attr[0]) not in (NoneType, StringType, UnicodeType):
            raise AttributeError, "invalid attribute namespace URI type"

        return attr

    def _getAttr(self, attr):
        # Returns None rather than raising when the attribute is absent.
        attr = self._fixAttr(attr)

        try:
            return self._attrs[attr]
        except:
            return None

    def _setAttr(self, attr, value):
        attr = self._fixAttr(attr)

        if type(value) is StringType:
            value = unicode(value)  # store attribute values as unicode

        self._attrs[attr] = value

    def _setAttrs(self, attrs):
        """Bulk-set attributes from a list/tuple of pairs, a dict, or
        another anyType instance."""
        if type(attrs) in (ListType, TupleType):
            for i in range(0, len(attrs), 2):
                self._setAttr(attrs[i], attrs[i + 1])

            return

        if type(attrs) == DictType:
            d = attrs
        elif isinstance(attrs, anyType):
            d = attrs._attrs
        else:
            raise AttributeError, "invalid attribute type"

        for attr, value in d.items():
            self._setAttr(attr, value)

    def _setMustUnderstand(self, val):
        # SOAP envelope mustUnderstand attribute.
        self._setAttr((NS.ENV, "mustUnderstand"), val)

    def _getMustUnderstand(self):
        return self._getAttr((NS.ENV, "mustUnderstand"))

    def _setActor(self, val):
        # SOAP envelope actor attribute.
        self._setAttr((NS.ENV, "actor"), val)

    def _getActor(self):
        return self._getAttr((NS.ENV, "actor"))

    def _typeName(self):
        # Class naming convention: strip the trailing 'Type' suffix.
        return self.__class__.__name__[:-4]

    def _validNamespaceURI(self, URI, strict):
        """Return the namespace URI to marshal under, or None if untyped.

        In strict mode an unknown URI raises; otherwise fall back to the
        instance's own namespace.
        """
        if not hasattr(self, '_typed') or not self._typed:
            return None
        if URI in self._validURIs:
            return URI
        if not strict:
            return self._ns
        raise AttributeError, \
            "not a valid namespace for type %s" % self._type
class voidType(anyType):
    # A value-less element (e.g. an empty SOAP response).
    pass

class stringType(anyType):
    """xsd:string -- accepts only str/unicode values."""

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type:" % self._type

        return data

class untypedType(stringType):
    # A string marshalled without an xsi:type attribute (typed = 0).
    def __init__(self, data = None, name = None, attrs = None):
        stringType.__init__(self, data, name, 0, attrs)

# XSD types derived from string: identical value space, different names.
class IDType(stringType): pass
class NCNameType(stringType): pass
class NameType(stringType): pass
class ENTITYType(stringType): pass
class IDREFType(stringType): pass
class languageType(stringType): pass
class NMTOKENType(stringType): pass
class QNameType(stringType): pass
class tokenType(anyType):
    """xsd:token -- a string with no tabs/newlines or edge whitespace."""

    _validURIs = (NS.XSD2, NS.XSD3)
    # Stored as a pattern string; lazily compiled on first use below.
    __invalidre = '[\n\t]|^ | $| '

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type" % self._type

        if type(self.__invalidre) == StringType:
            # Compile once and cache on the class (name-mangled attribute).
            self.__invalidre = re.compile(self.__invalidre)

            if self.__invalidre.search(data):
                raise ValueError, "invalid %s value" % self._type

        return data

class normalizedStringType(anyType):
    """xsd:normalizedString -- no newline, carriage return or tab."""

    _validURIs = (NS.XSD3,)
    __invalidre = '[\n\r\t]'

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type" % self._type

        if type(self.__invalidre) == StringType:
            self.__invalidre = re.compile(self.__invalidre)

            if self.__invalidre.search(data):
                raise ValueError, "invalid %s value" % self._type

        return data

class CDATAType(normalizedStringType):
    _validURIs = (NS.XSD2,)
class booleanType(anyType):
    """xsd:boolean -- stored internally as the integer 0 or 1."""

    def __int__(self):
        return self._data

    __nonzero__ = __int__  # Python 2 truth-value protocol

    def _marshalData(self):
        # XSD lexical forms are lowercase 'false'/'true'.
        return ['false', 'true'][self._data]

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if data in (0, '0', 'false', ''):
            return 0
        if data in (1, '1', 'true'):
            return 1
        raise ValueError, "invalid %s value" % self._type
class decimalType(anyType):
    """xsd:decimal -- any python numeric value."""

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType, FloatType):
            # NOTE(review): raises Error here where sibling types raise
            # ValueError -- confirm before unifying (callers may catch it).
            raise Error, "invalid %s value" % self._type

        return data

class floatType(anyType):
    """xsd:float -- numeric value bounded to IEEE single range."""

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType, FloatType) or \
                data < -3.4028234663852886E+38 or \
                data > 3.4028234663852886E+38:
            raise ValueError, "invalid %s value: %s" % (self._type, repr(data))

        return data

    def _marshalData(self):
        return "%.18g" % self._data # More precision

class doubleType(anyType):
    """xsd:double -- numeric value bounded to IEEE double range."""

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType, FloatType) or \
                data < -1.7976931348623158E+308 or \
                data > 1.7976931348623157E+308:
            raise ValueError, "invalid %s value: %s" % (self._type, repr(data))

        return data

    def _marshalData(self):
        return "%.18g" % self._data # More precision
class durationType(anyType):
    """xsd:duration -- up to six components (Y, M, D, H, M, S).

    The value is normalised to a 6-tuple; only the last nonzero component
    may carry a decimal fraction and only the first may be negative.
    """

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        try:
            # A tuple or a scalar is OK, but make them into a list
            if type(data) == TupleType:
                data = list(data)
            elif type(data) != ListType:
                data = [data]

            if len(data) > 6:
                raise Exception, "too many values"

            # Now check the types of all the components, and find
            # the first nonzero element along the way.
            f = -1

            for i in range(len(data)):
                if data[i] == None:
                    data[i] = 0
                    continue

                if type(data[i]) not in \
                        (IntType, LongType, FloatType):
                    raise Exception, "element %d a bad type" % i

                if data[i] and f == -1:
                    f = i

            # If they're all 0, just use zero seconds.
            if f == -1:
                self._cache = 'PT0S'
                return (0,) * 6

            # Make sure only the last nonzero element has a decimal fraction
            # and only the first element is negative.
            d = -1

            for i in range(f, len(data)):
                if data[i]:
                    if d != -1:
                        raise Exception, \
                            "all except the last nonzero element must be " \
                            "integers"
                    if data[i] < 0 and i > f:
                        raise Exception, \
                            "only the first nonzero element can be negative"
                    elif data[i] != long(data[i]):
                        d = i

            # Pad the list on the left if necessary.
            if len(data) < 6:
                n = 6 - len(data)
                f += n
                d += n
                data = [0] * n + data

            # Save index of the first nonzero element and the decimal
            # element for _marshalData.
            self.__firstnonzero = f
            self.__decimal = d

        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        """Render e.g. (1, 2, 0, 0, 0, 30) as 'P1Y2MT30S'."""
        if self._cache == None:
            d = self._data
            t = 0

            if d[self.__firstnonzero] < 0:
                s = '-P'
            else:
                s = 'P'

            t = 0

            for i in range(self.__firstnonzero, len(d)):
                if d[i]:
                    # 'T' separates the date part from the time part.
                    if i > 2 and not t:
                        s += 'T'
                        t = 1
                    if self.__decimal == i:
                        s += "%g" % abs(d[i])
                    else:
                        s += "%d" % long(abs(d[i]))
                    s += ['Y', 'M', 'D', 'H', 'M', 'S'][i]

            self._cache = s

        return self._cache

class timeDurationType(durationType):
    # Pre-2001 schema name for duration.
    _validURIs = (NS.XSD, NS.XSD2, NS.ENC)
class dateTimeType(anyType):
    """xsd:dateTime -- stored as a (Y, M, D, h, m, s) tuple in UTC.

    Accepts None (now), an epoch int/long/float, or a 6..9 element
    time-tuple; floats keep their fractional seconds.
    """

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.time()

            if (type(data) in (IntType, LongType)):
                data = list(time.gmtime(data)[:6])
            elif (type(data) == FloatType):
                # Preserve sub-second precision in the seconds slot.
                f = data - int(data)
                data = list(time.gmtime(int(data))[:6])
                data[5] += f
            elif type(data) in (ListType, TupleType):
                if len(data) < 6:
                    raise Exception, "not enough values"
                if len(data) > 9:
                    raise Exception, "too many values"

                data = list(data[:6])

                cleanDate(data)
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        """Render as ISO-8601, e.g. '2005-02-22T04:29:43Z'."""
        if self._cache == None:
            d = self._data
            s = "%04d-%02d-%02dT%02d:%02d:%02d" % ((abs(d[0]),) + d[1:])
            if d[0] < 0:
                s = '-' + s
            # Append fractional seconds without the leading '0'.
            f = d[5] - int(d[5])
            if f != 0:
                s += ("%g" % f)[1:]
            s += 'Z'

            self._cache = s

        return self._cache
class recurringInstantType(anyType):
    """xsd:recurringInstant -- like dateTime but leftmost fields may be
    None to express recurrence (e.g. 'every year on this date')."""

    _validURIs = (NS.XSD,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = list(time.gmtime(time.time())[:6])
            if (type(data) in (IntType, LongType)):
                data = list(time.gmtime(data)[:6])
            elif (type(data) == FloatType):
                f = data - int(data)
                data = list(time.gmtime(int(data))[:6])
                data[5] += f
            elif type(data) in (ListType, TupleType):
                if len(data) < 1:
                    raise Exception, "not enough values"
                if len(data) > 9:
                    raise Exception, "too many values"

                data = list(data[:6])

                if len(data) < 6:
                    data += [0] * (6 - len(data))

                # Only a leading run of None values is allowed; find where
                # the concrete fields start.
                f = len(data)

                for i in range(f):
                    if data[i] == None:
                        if f < i:
                            raise Exception, \
                                "only leftmost elements can be none"
                    else:
                        f = i
                        break

                cleanDate(data, f)
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        """Render with '-' placeholders for the unspecified fields."""
        if self._cache == None:
            d = self._data
            e = list(d)
            neg = ''

            if not e[0]:
                e[0] = '--'
            else:
                if e[0] < 0:
                    neg = '-'
                    e[0] = abs(e[0])
                if e[0] < 100:
                    e[0] = '-' + "%02d" % e[0]
                else:
                    e[0] = "%04d" % e[0]

            for i in range(1, len(e)):
                if e[i] == None or (i < 3 and e[i] == 0):
                    e[i] = '-'
                else:
                    if e[i] < 0:
                        neg = '-'
                        e[i] = abs(e[i])

                    e[i] = "%02d" % e[i]

            if d[5]:
                f = abs(d[5] - int(d[5]))

                if f:
                    e[5] += ("%g" % f)[1:]

            s = "%s%s-%s-%sT%s:%s:%sZ" % ((neg,) + tuple(e))

            self._cache = s

        return self._cache

class timeInstantType(dateTimeType):
    # Pre-2001 schema name for dateTime.
    _validURIs = (NS.XSD, NS.XSD2, NS.ENC)

class timePeriodType(dateTimeType):
    _validURIs = (NS.XSD2, NS.ENC)
class timeType(anyType):
    """xsd:time -- an (hour, minute, second) tuple in UTC."""

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[3:6]
            elif (type(data) == FloatType):
                f = data - int(data)
                data = list(time.gmtime(int(data))[3:6])
                data[2] += f
            elif type(data) in (IntType, LongType):
                data = time.gmtime(data)[3:6]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    # Full time-tuple: keep only hour/minute/second.
                    data = data[3:6]
                elif len(data) > 3:
                    raise Exception, "too many values"

                # Left-pad with date placeholders so cleanDate can
                # validate starting at index 3.
                data = [None, None, None] + list(data)

                if len(data) < 6:
                    data += [0] * (6 - len(data))

                cleanDate(data, 3)

                data = data[3:]
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        if self._cache == None:
            d = self._data
            s = ''
            # (The line above is a dead store; kept as in the original.)
            s = time.strftime("%H:%M:%S", (0, 0, 0) + d + (0, 0, -1))
            f = d[2] - int(d[2])
            if f != 0:
                s += ("%g" % f)[1:]
            s += 'Z'

            self._cache = s

        return self._cache
class dateType(anyType):
    """xsd:date -- a (year, month, day) tuple in UTC."""

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[0:3]
            elif type(data) in (IntType, LongType, FloatType):
                data = time.gmtime(data)[0:3]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    data = data[0:3]
                elif len(data) > 3:
                    raise Exception, "too many values"

                data = list(data)

                # Default missing fields to 1 and add zero time fields so
                # cleanDate can validate a full 6-tuple.
                if len(data) < 3:
                    data += [1, 1, 1][len(data):]

                data += [0, 0, 0]

                cleanDate(data)

                data = data[:3]
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        """Render as e.g. '2005-02-22Z' (leading '-' for negative years)."""
        if self._cache == None:
            d = self._data
            s = "%04d-%02d-%02dZ" % ((abs(d[0]),) + d[1:])
            if d[0] < 0:
                s = '-' + s

            self._cache = s

        return self._cache
class gYearMonthType(anyType):
    """xsd:gYearMonth -- a (year, month) tuple."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[0:2]
            elif type(data) in (IntType, LongType, FloatType):
                data = time.gmtime(data)[0:2]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    data = data[0:2]
                elif len(data) > 2:
                    raise Exception, "too many values"

                data = list(data)

                if len(data) < 2:
                    data += [1, 1][len(data):]

                # Pad to a full date/time 6-tuple for validation.
                data += [1, 0, 0, 0]

                cleanDate(data)

                data = data[:2]
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        """Render as e.g. '2005-02Z'."""
        if self._cache == None:
            d = self._data
            s = "%04d-%02dZ" % ((abs(d[0]),) + d[1:])
            if d[0] < 0:
                s = '-' + s

            self._cache = s

        return self._cache
class gYearType(anyType):
    """xsd:gYear -- a single integral year value."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[0:1]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]

            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    data = data[0:1]
                elif len(data) < 1:
                    raise Exception, "too few values"
                elif len(data) > 1:
                    raise Exception, "too many values"

                if type(data[0]) == FloatType:
                    # A float is only accepted if it is integral.
                    try: s = int(data[0])
                    except: s = long(data[0])

                    if s != data[0]:
                        raise Exception, "not integral"

                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception, "bad type"
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return data[0]

    def _marshalData(self):
        """Render as e.g. '2005Z' (leading '-' for negative years)."""
        if self._cache == None:
            d = self._data
            s = "%04dZ" % abs(d)
            if d < 0:
                s = '-' + s

            self._cache = s

        return self._cache
class centuryType(anyType):
_validURIs = (NS.XSD2, NS.ENC)
def _checkValueSpace(self, data):
try:
if data == None:
data = time.gmtime(time.time())[0:1] / 100
elif type(data) in (IntType, LongType, FloatType):
data = [data]
if type(data) in (ListType, TupleType):
if len(data) == 9:
data = data[0:1] / 100
elif len(data) < 1:
raise Exception, "too few values"
elif len(data) > 1:
raise Exception, "too many values"
if type(data[0]) == FloatType:
try: s = int(data[0])
except: s = long(data[0])
if s != data[0]:
raise Exception, "not integral"
data = [s]
elif type(data[0]) not in (IntType, LongType):
raise Exception, "bad type"
else:
raise Exception, "invalid type"
except Exception, e:
raise ValueError, "invalid %s value - %s" % (self._type, e)
return data[0]
def _marshalData(self):
if self._cache == None:
d = self._data
s = "%02dZ" % abs(d)
if d < 0:
s = '-' + s
self._cache = s
return self._cache
class yearType(gYearType):
    # Pre-2001 schema name for gYear.
    _validURIs = (NS.XSD2, NS.ENC)
class gMonthDayType(anyType):
    """xsd:gMonthDay -- a recurring (month, day) pair."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[1:3]
            elif type(data) in (IntType, LongType, FloatType):
                data = time.gmtime(data)[1:3]
            elif type(data) in (ListType, TupleType):
                if len(data) == 9:
                    data = data[0:2]
                elif len(data) > 2:
                    raise Exception, "too many values"

                data = list(data)

                if len(data) < 2:
                    data += [1, 1][len(data):]

                # Wrap with dummy year and time fields so cleanDate can
                # validate starting at the month slot.
                data = [0] + data + [0, 0, 0]

                cleanDate(data, 1)

                data = data[1:3]
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return tuple(data)

    def _marshalData(self):
        # XSD lexical form: '--MM-DD' plus the UTC designator.
        if self._cache == None:
            self._cache = "--%02d-%02dZ" % self._data

        return self._cache

class recurringDateType(gMonthDayType):
    # Pre-2001 schema name for gMonthDay.
    _validURIs = (NS.XSD2, NS.ENC)
class gMonthType(anyType):
    """xsd:gMonth -- a single month number in 1..12."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[1:2]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]

            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    data = data[1:2]
                elif len(data) < 1:
                    raise Exception, "too few values"
                elif len(data) > 1:
                    raise Exception, "too many values"

                if type(data[0]) == FloatType:
                    # A float is only accepted if it is integral.
                    try: s = int(data[0])
                    except: s = long(data[0])

                    if s != data[0]:
                        raise Exception, "not integral"

                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception, "bad type"

                if data[0] < 1 or data[0] > 12:
                    raise Exception, "bad value"
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return data[0]

    def _marshalData(self):
        # XSD lexical form: '--MM--' plus the UTC designator.
        if self._cache == None:
            self._cache = "--%02d--Z" % self._data

        return self._cache

class monthType(gMonthType):
    # Pre-2001 schema name for gMonth.
    _validURIs = (NS.XSD2, NS.ENC)
class gDayType(anyType):
    """xsd:gDay -- a single day-of-month number in 1..31."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        try:
            if data == None:
                data = time.gmtime(time.time())[2:3]
            elif type(data) in (IntType, LongType, FloatType):
                data = [data]

            if type(data) in (ListType, TupleType):
                if len(data) == 9:
                    data = data[2:3]
                elif len(data) < 1:
                    raise Exception, "too few values"
                elif len(data) > 1:
                    raise Exception, "too many values"

                if type(data[0]) == FloatType:
                    # A float is only accepted if it is integral.
                    try: s = int(data[0])
                    except: s = long(data[0])

                    if s != data[0]:
                        raise Exception, "not integral"

                    data = [s]
                elif type(data[0]) not in (IntType, LongType):
                    raise Exception, "bad type"

                if data[0] < 1 or data[0] > 31:
                    raise Exception, "bad value"
            else:
                raise Exception, "invalid type"
        except Exception, e:
            raise ValueError, "invalid %s value - %s" % (self._type, e)

        return data[0]

    def _marshalData(self):
        # XSD lexical form: '---DD' plus the UTC designator.
        if self._cache == None:
            self._cache = "---%02dZ" % self._data

        return self._cache

class recurringDayType(gDayType):
    # Pre-2001 schema name for gDay.
    _validURIs = (NS.XSD2, NS.ENC)
class hexBinaryType(anyType):
    """xsd:hexBinary -- a byte string marshalled as hex digits."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type" % self._type

        return data

    def _marshalData(self):
        if self._cache == None:
            self._cache = encodeHexString(self._data)

        return self._cache

class base64BinaryType(anyType):
    """xsd:base64Binary -- a byte string marshalled as base64."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type" % self._type

        return data

    def _marshalData(self):
        if self._cache == None:
            self._cache = base64.encodestring(self._data)

        return self._cache

class base64Type(base64BinaryType):
    # SOAP-ENC spelling of base64Binary.
    _validURIs = (NS.ENC,)
class binaryType(anyType):
    """Binary data with a selectable 'encoding' attribute: 'base64'
    (default) or 'hex'."""

    _validURIs = (NS.XSD, NS.ENC)

    def __init__(self, data, name = None, typed = 1, encoding = 'base64',
            attrs = None):
        anyType.__init__(self, data, name, typed, attrs)

        self._setAttr('encoding', encoding)

    def _marshalData(self):
        # Marshal according to the current 'encoding' attribute.
        if self._cache == None:
            if self._getAttr((None, 'encoding')) == 'base64':
                self._cache = base64.encodestring(self._data)
            else:
                self._cache = encodeHexString(self._data)

        return self._cache

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type" % self._type

        return data

    def _setAttr(self, attr, value):
        """Intercept 'encoding' changes: validate and drop the cached
        marshalled form since the lexical representation changes."""
        attr = self._fixAttr(attr)

        if attr[1] == 'encoding':
            if attr[0] != None or value not in ('base64', 'hex'):
                raise AttributeError, "invalid encoding"

            self._cache = None

        anyType._setAttr(self, attr, value)
class anyURIType(anyType):
    """xsd:anyURI -- a string URL-quoted when marshalled."""

    _validURIs = (NS.XSD3,)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (StringType, UnicodeType):
            raise AttributeError, "invalid %s type" % self._type

        return data

    def _marshalData(self):
        if self._cache == None:
            self._cache = urllib.quote(self._data)

        return self._cache

class uriType(anyURIType):
    # Pre-2001 schema name for anyURI.
    _validURIs = (NS.XSD,)

class uriReferenceType(anyURIType):
    _validURIs = (NS.XSD2,)
class NOTATIONType(anyType):
    """xsd:NOTATION -- abstract; must be subclassed before use."""

    def __init__(self, data, name = None, typed = 1, attrs = None):

        if self.__class__ == NOTATIONType:
            raise Error, "a NOTATION can't be instantiated directly"

        anyType.__init__(self, data, name, typed, attrs)

class ENTITIESType(anyType):
    """xsd:ENTITIES -- a sequence of strings, marshalled space-separated."""

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        # A single string is promoted to a one-element tuple.
        if type(data) in (StringType, UnicodeType):
            return (data,)

        if type(data) not in (ListType, TupleType) or \
                filter (lambda x: type(x) not in (StringType, UnicodeType), data):
            raise AttributeError, "invalid %s type" % self._type

        return data

    def _marshalData(self):
        return ' '.join(self._data)

# List types sharing the ENTITIES value space.
class IDREFSType(ENTITIESType): pass
class NMTOKENSType(ENTITIESType): pass
class integerType(anyType):
    """xsd:integer -- any python int/long."""

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType):
            raise ValueError, "invalid %s value" % self._type

        return data

class nonPositiveIntegerType(anyType):
    """xsd:nonPositiveInteger -- integer <= 0."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or data > 0:
            raise ValueError, "invalid %s value" % self._type

        return data

class non_Positive_IntegerType(nonPositiveIntegerType):
    # Pre-2001 hyphenated schema spelling.
    _validURIs = (NS.XSD,)

    def _typeName(self):
        return 'non-positive-integer'

class negativeIntegerType(anyType):
    """xsd:negativeInteger -- integer < 0."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or data >= 0:
            raise ValueError, "invalid %s value" % self._type

        return data

class negative_IntegerType(negativeIntegerType):
    # Pre-2001 hyphenated schema spelling.
    _validURIs = (NS.XSD,)

    def _typeName(self):
        return 'negative-integer'
# Bounded signed integer types; each checks its XSD range.
class longType(anyType):
    """xsd:long -- 64-bit signed range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < -9223372036854775808L or \
                data > 9223372036854775807L:
            raise ValueError, "invalid %s value" % self._type

        return data

class intType(anyType):
    """xsd:int -- 32-bit signed range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < -2147483648L or \
                data > 2147483647:
            raise ValueError, "invalid %s value" % self._type

        return data

class shortType(anyType):
    """xsd:short -- 16-bit signed range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < -32768 or \
                data > 32767:
            raise ValueError, "invalid %s value" % self._type

        return data

class byteType(anyType):
    """xsd:byte -- 8-bit signed range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < -128 or \
                data > 127:
            raise ValueError, "invalid %s value" % self._type

        return data
class nonNegativeIntegerType(anyType):
    """xsd:nonNegativeInteger -- integer >= 0."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or data < 0:
            raise ValueError, "invalid %s value" % self._type

        return data

class non_Negative_IntegerType(nonNegativeIntegerType):
    # Pre-2001 hyphenated schema spelling.
    _validURIs = (NS.XSD,)

    def _typeName(self):
        return 'non-negative-integer'

# Bounded unsigned integer types; each checks its XSD range.
class unsignedLongType(anyType):
    """xsd:unsignedLong -- 64-bit unsigned range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < 0 or \
                data > 18446744073709551615L:
            raise ValueError, "invalid %s value" % self._type

        return data

class unsignedIntType(anyType):
    """xsd:unsignedInt -- 32-bit unsigned range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < 0 or \
                data > 4294967295L:
            raise ValueError, "invalid %s value" % self._type

        return data

class unsignedShortType(anyType):
    """xsd:unsignedShort -- 16-bit unsigned range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < 0 or \
                data > 65535:
            raise ValueError, "invalid %s value" % self._type

        return data

class unsignedByteType(anyType):
    """xsd:unsignedByte -- 8-bit unsigned range."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or \
                data < 0 or \
                data > 255:
            raise ValueError, "invalid %s value" % self._type

        return data

class positiveIntegerType(anyType):
    """xsd:positiveInteger -- integer > 0."""

    _validURIs = (NS.XSD2, NS.XSD3, NS.ENC)

    def _checkValueSpace(self, data):
        if data == None:
            raise ValueError, "must supply initial %s value" % self._type

        if type(data) not in (IntType, LongType) or data <= 0:
            raise ValueError, "invalid %s value" % self._type

        return data

class positive_IntegerType(positiveIntegerType):
    # Pre-2001 hyphenated schema spelling.
    _validURIs = (NS.XSD,)

    def _typeName(self):
        return 'positive-integer'
# Now compound types
class compoundType(anyType):
    """Abstract base for SOAP compound values (structs, headers, bodies).

    Sub-elements are stored as instance attributes; ``_keyord`` records the
    order in which they were added so members can be replayed in sequence.
    Instantiating compoundType directly is an error -- use a subclass.
    """
    def __init__(self, data = None, name = None, typed = 1, attrs = None):
        if self.__class__ == compoundType:
            raise Error, "a compound can't be instantiated directly"
        anyType.__init__(self, data, name, typed, attrs)
        self._keyord = []
        # A dict seeds the members directly (keyword-style construction).
        if type(data) == DictType:
            self.__dict__.update(data)
    def _aslist(self, item=None):
        # One member by insertion index, or all members in insertion order.
        if item is not None:
            return self.__dict__[self._keyord[item]]
        else:
            return map( lambda x: self.__dict__[x], self._keyord)
    def _asdict(self, item=None, encoding=Config.dict_encoding):
        # One member by name, or a dict of all members with byte-encoded keys.
        if item is not None:
            if type(item) in (UnicodeType,StringType):
                item = item.encode(encoding)
            return self.__dict__[item]
        else:
            retval = {}
            def fun(x): retval[x.encode(encoding)] = self.__dict__[x]
            if hasattr(self, '_keyord'):
                map( fun, self._keyord)
            else:
                # Fallback for instances built without __init__ having run.
                for name in dir(self):
                    if isPublic(name):
                        retval[name] = getattr(self,name)
            return retval
    def __getitem__(self, item):
        # Integer -> positional lookup; anything else -> attribute lookup.
        if type(item) == IntType:
            return self.__dict__[self._keyord[item]]
        else:
            return getattr(self, item)
    def __len__(self):
        return len(self._keyord)
    def __nonzero__(self):
        # Always truthy, even when empty (represents a present element).
        return 1
    def _keys(self):
        # Public member names only (underscore-prefixed names are internal).
        return filter(lambda x: x[0] != '_', self.__dict__.keys())
    def _addItem(self, name, value, attrs = None):
        # A repeated name collapses its values into a list; a new name is
        # appended to the ordering list.
        if name in self._keyord:
            if type(self.__dict__[name]) != ListType:
                self.__dict__[name] = [self.__dict__[name]]
            self.__dict__[name].append(value)
        else:
            self.__dict__[name] = value
            self._keyord.append(name)
    def _placeItem(self, name, value, pos, subpos = 0, attrs = None):
        # Overwrite the value (or one element of a repeated value) and pin
        # *name* at ordering position *pos*.
        if subpos == 0 and type(self.__dict__[name]) != ListType:
            self.__dict__[name] = value
        else:
            self.__dict__[name][subpos] = value
        self._keyord[pos] = name
    def _getItemAsList(self, name, default = []):
        # NOTE(review): mutable default argument -- safe only while callers
        # never mutate the returned default list.
        try:
            d = self.__dict__[name]
        except:
            return default
        if type(d) == ListType:
            return d
        return [d]
    def __str__(self):
        return anyType.__str__(self) + ": " + str(self._asdict())
    def __repr__(self):
        return self.__str__()
class structType(compoundType):
    """A SOAP struct: a compound whose members are accessed by name."""
    pass
class headerType(structType):
    """The SOAP Envelope Header element (always named "Header")."""
    _validURIs = (NS.ENV,)
    def __init__(self, data = None, typed = 1, attrs = None):
        structType.__init__(self, data, "Header", typed, attrs)
class bodyType(structType):
    """The SOAP Envelope Body element (always named "Body")."""
    _validURIs = (NS.ENV,)
    def __init__(self, data = None, typed = 1, attrs = None):
        structType.__init__(self, data, "Body", typed, attrs)
class arrayType(UserList.UserList, compoundType):
    """A SOAP-encoded array, possibly multi-dimensional and/or sparse.

    When constructed without data, internal bookkeeping is set up from the
    declared array size (``asize``, a comma-separated dimension string) and
    ``offset``; ``_dims``/``_poss`` are kept innermost-first (reversed) to
    simplify index arithmetic.  Elements then arrive one at a time through
    ``_addItem``/``_placeItem`` during deserialization.
    """
    def __init__(self, data = None, name = None, attrs = None,
        offset = 0, rank = None, asize = 0, elemsname = None):
        if data:
            if type(data) not in (ListType, TupleType):
                raise Error, "Data must be a sequence"
        UserList.UserList.__init__(self, data)
        compoundType.__init__(self, data, name, 0, attrs)
        self._elemsname = elemsname or "item"
        # Only set up dimension bookkeeping when deserializing (no data yet).
        if data == None:
            self._rank = rank
            # According to 5.4.2.2 in the SOAP spec, each element in a
            # sparse array must have a position. _posstate keeps track of
            # whether we've seen a position or not. It's possible values
            # are:
            # -1 No elements have been added, so the state is indeterminate
            # 0 An element without a position has been added, so no
            # elements can have positions
            # 1 An element with a position has been added, so all elements
            # must have positions
            self._posstate = -1
            self._full = 0
            if asize in ('', None):
                asize = '0'
            self._dims = map (lambda x: int(x), str(asize).split(','))
            self._dims.reverse()  # It's easier to work with this way
            self._poss = [0] * len(self._dims)  # This will end up
                                                # reversed too
            for i in range(len(self._dims)):
                # A 0 dimension is only legal for a one-dimensional
                # (unbounded) array.
                if self._dims[i] < 0 or \
                   self._dims[i] == 0 and len(self._dims) > 1:
                    raise TypeError, "invalid Array dimensions"
                # Decompose the flat offset into per-dimension coordinates.
                if offset > 0:
                    self._poss[i] = offset % self._dims[i]
                    offset = int(offset / self._dims[i])
                # Don't break out of the loop if offset is 0 so we test all the
                # dimensions for > 0.
            if offset:
                raise AttributeError, "invalid Array offset"
            # Build the (nested) storage: innermost lists sized by _dims[0].
            a = [None] * self._dims[0]
            for i in range(1, len(self._dims)):
                b = []
                for j in range(self._dims[i]):
                    b.append(copy.deepcopy(a))
                a = b
            self.data = a
    def _aslist(self, item=None):
        # One element by index, or the whole (possibly nested) list.
        if item is not None:
            return self.data[int(item)]
        else:
            return self.data
    def _asdict(self, item=None, encoding=Config.dict_encoding):
        # Dict view keyed by the stringified element index.
        if item is not None:
            if type(item) in (UnicodeType,StringType):
                item = item.encode(encoding)
            return self.data[int(item)]
        else:
            retval = {}
            def fun(x): retval[str(x).encode(encoding)] = self.data[x]
            map( fun, range(len(self.data)) )
            return retval
    def __getitem__(self, item):
        # Numeric (or numeric-string) index -> element; otherwise attribute.
        try:
            return self.data[int(item)]
        except ValueError:
            return getattr(self, item)
    def __len__(self):
        return len(self.data)
    def __nonzero__(self):
        # Always truthy, even when empty (represents a present element).
        return 1
    def __str__(self):
        return anyType.__str__(self) + ": " + str(self._aslist())
    def _keys(self):
        return filter(lambda x: x[0] != '_', self.__dict__.keys())
    def _addItem(self, name, value, attrs):
        """Append one deserialized element, honouring sparse positions."""
        if self._full:
            raise ValueError, "Array is full"
        pos = attrs.get((NS.ENC, 'position'))
        if pos != None:
            # Sparse mode: every element must carry a position attribute.
            if self._posstate == 0:
                raise AttributeError, \
                    "all elements in a sparse Array must have a " \
                    "position attribute"
            self._posstate = 1
            try:
                if pos[0] == '[' and pos[-1] == ']':
                    pos = map (lambda x: int(x), pos[1:-1].split(','))
                    pos.reverse()
                    if len(pos) == 1:
                        # A single flat index: decompose into coordinates.
                        pos = pos[0]
                        curpos = [0] * len(self._dims)
                        for i in range(len(self._dims)):
                            curpos[i] = pos % self._dims[i]
                            pos = int(pos / self._dims[i])
                            if pos == 0:
                                break
                        if pos:
                            raise Exception
                    elif len(pos) != len(self._dims):
                        raise Exception
                    else:
                        # Full coordinate list: bounds-check each dimension.
                        for i in range(len(self._dims)):
                            if pos[i] >= self._dims[i]:
                                raise Exception
                        curpos = pos
                else:
                    raise Exception
            except:
                raise AttributeError, \
                    "invalid Array element position %s" % str(pos)
        else:
            # Dense mode: once one element lacks a position, all must.
            if self._posstate == 1:
                raise AttributeError, \
                    "only elements in a sparse Array may have a " \
                    "position attribute"
            self._posstate = 0
            curpos = self._poss
        # Walk down to the innermost list for this coordinate.
        a = self.data
        for i in range(len(self._dims) - 1, 0, -1):
            a = a[curpos[i]]
        if curpos[0] >= len(a):
            # NOTE(review): grows by len(a) - curpos[0] + 1, which looks
            # inverted (curpos[0] - len(a) + 1?) -- confirm against upstream.
            a += [None] * (len(a) - curpos[0] + 1)
        a[curpos[0]] = value
        if pos == None:
            # Dense append: advance the running position with carry.
            self._poss[0] += 1
            for i in range(len(self._dims) - 1):
                if self._poss[i] < self._dims[i]:
                    break
                self._poss[i] = 0
                self._poss[i + 1] += 1
        if self._dims[-1] and self._poss[-1] >= self._dims[-1]:
            #self._full = 1
            #FIXME: why is this occuring?
            pass
    def _placeItem(self, name, value, pos, subpos, attrs = None):
        """Store *value* at flat index *pos*, decomposed per dimension."""
        curpos = [0] * len(self._dims)
        for i in range(len(self._dims)):
            if self._dims[i] == 0:
                # Unbounded dimension: the flat index is used directly.
                curpos[0] = pos
                break
            curpos[i] = pos % self._dims[i]
            pos = int(pos / self._dims[i])
            if pos == 0:
                break
        if self._dims[i] != 0 and pos:
            raise Error, "array index out of range"
        a = self.data
        for i in range(len(self._dims) - 1, 0, -1):
            a = a[curpos[i]]
        if curpos[0] >= len(a):
            # NOTE(review): same growth arithmetic as _addItem -- verify.
            a += [None] * (len(a) - curpos[0] + 1)
        a[curpos[0]] = value
class typedArrayType(arrayType):
    """arrayType whose elements all share one declared (possibly complex) type."""
    def __init__(self, data = None, name = None, typed = None, attrs = None,
        offset = 0, rank = None, asize = 0, elemsname = None, complexType = 0):
        arrayType.__init__(self, data, name, attrs, offset, rank, asize,
            elemsname)
        # Force typed serialization and remember the per-element type.
        self._typed = 1
        self._type = typed
        self._complexType = complexType
class faultType(structType, Error):
    """A SOAP Fault element; also usable as an exception (inherits Error).

    Carries ``faultcode``, ``faultstring`` and an optional ``detail``
    member (the attribute is absent, not None, when no detail was given).
    """
    def __init__(self, faultcode = "", faultstring = "", detail = None):
        self.faultcode = faultcode
        self.faultstring = faultstring
        if detail != None:
            self.detail = detail
        structType.__init__(self, None, 0)
    def _setDetail(self, detail = None):
        # Set or remove the detail member; absence means "no detail".
        if detail != None:
            self.detail = detail
        else:
            try: del self.detail
            except AttributeError: pass
    def __repr__(self):
        if getattr(self, 'detail', None) != None:
            return "<Fault %s: %s: %s>" % (self.faultcode,
                self.faultstring,
                self.detail)
        else:
            return "<Fault %s: %s>" % (self.faultcode, self.faultstring)
    __str__ = __repr__
    def __call__(self):
        # Convenience: unpack the fault as a (code, string, detail) tuple.
        return (self.faultcode, self.faultstring, self.detail)
class SOAPException(Exception):
    """Generic SOAP fault raised when no more specific exception applies.

    Exposes the fault pieces both as named attributes (``code``, ``string``,
    ``detail``) and bundled into the ``value`` tuple used by ``str()``.
    """
    def __init__(self, code="", string="", detail=None):
        self.code, self.string, self.detail = code, string, detail
        self.value = ("SOAPpy SOAP Exception", code, string, detail)

    def __str__(self):
        # Render the whole fault tuple, matching historical output.
        return "%r" % (self.value,)
class RequiredHeaderMismatch(Exception):
    """Raised when a mustUnderstand SOAP header was not understood."""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        # repr() of the stored value, matching historical output.
        return "%r" % (self.value,)
class MethodNotFound(Exception):
    """Raised for a 'Method Not Found' SOAP fault.

    The fault detail is expected to look like ``"<value>:<detail>"``; the
    two halves are stored on ``value`` and ``detail``.
    """
    def __init__(self, value):
        # Split on the first ':' only, so the detail text may itself
        # contain colons (the old split(":") raised ValueError then).
        (val, detail) = value.split(":", 1)
        self.value = val
        self.detail = detail

    def __str__(self):
        # Bug fix: repr() takes one argument; the original
        # repr(self.value, self.detail) always raised TypeError.
        return repr((self.value, self.detail))
class AuthorizationFailed(Exception):
    """Raised for an 'Authorization Failed' SOAP fault."""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        # repr() of the stored value, matching historical output.
        return "%r" % (self.value,)
class MethodFailed(Exception):
    """Raised for a 'Method Failed' SOAP fault."""
    def __init__(self, value):
        self.value = value

    def __str__(self):
        # repr() of the stored value, matching historical output.
        return "%r" % (self.value,)
#######
# Convert complex SOAPpy objects to native python equivalents
#######
def simplify(object, level=0):
    """
    Convert the SOAPpy objects and their contents to simple python types.

    This function recursively converts the passed 'container' object,
    and all public subobjects. (Private subobjects have names that
    start with '_'.)

    Conversions:
    - faultType --> raise python exception
    - arrayType --> array
    - compoundType --> dictionary
    """
    # Depth guard against cyclic / pathologically deep structures.
    if level > 10:
        return object
    if isinstance(object, faultType):
        # Well-known fault strings map to dedicated exception classes.
        if object.faultstring == "Required Header Misunderstood":
            raise RequiredHeaderMismatch(object.detail)
        elif object.faultstring == "Method Not Found":
            raise MethodNotFound(object.detail)
        elif object.faultstring == "Authorization Failed":
            raise AuthorizationFailed(object.detail)
        elif object.faultstring == "Method Failed":
            raise MethodFailed(object.detail)
        else:
            se = SOAPException(object.faultcode, object.faultstring,
                               object.detail)
            raise se
    elif isinstance(object, arrayType):
        data = object._aslist()
        for k in range(len(data)):
            data[k] = simplify(data[k], level=level + 1)
        return data
    elif isinstance(object, compoundType):
        # structType subclasses compoundType, so this branch covers both
        # (the old redundant "or isinstance(object, structType)" dropped).
        data = object._asdict()
        for k in data.keys():
            if isPublic(k):
                data[k] = simplify(data[k], level=level + 1)
        return data
    elif type(object) == DictType:
        for k in object.keys():
            if isPublic(k):
                # Bug fix: propagate the recursion depth (it was silently
                # reset to 0 here, defeating the level > 10 guard).
                object[k] = simplify(object[k], level=level + 1)
        return object
    elif type(object) == list:
        for k in range(len(object)):
            # Bug fix: propagate the recursion depth here as well.
            object[k] = simplify(object[k], level=level + 1)
        return object
    else:
        return object
def simplify_contents(object, level=0):
    """
    Convert the contents of SOAPpy objects to simple python types.

    This function recursively converts the sub-objects contained in a
    'container' object to simple python types, mutating the container
    in place (the container itself keeps its SOAPpy type).

    Conversions:
    - faultType --> raise python exception
    - arrayType --> array
    - compoundType --> dictionary
    """
    # Depth guard against cyclic / pathologically deep structures.
    if level > 10:
        return object
    if isinstance(object, faultType):
        # Simplify the fault's public members, then raise the fault itself.
        for k in object._keys():
            if isPublic(k):
                setattr(object, k, simplify(object[k], level=level + 1))
        raise object
    elif isinstance(object, arrayType):
        data = object._aslist()
        for k in range(len(data)):
            object[k] = simplify(data[k], level=level + 1)
    elif isinstance(object, structType):
        # Checked before compoundType on purpose: structs update via setattr.
        data = object._asdict()
        for k in data.keys():
            if isPublic(k):
                setattr(object, k, simplify(data[k], level=level + 1))
    elif isinstance(object, compoundType):
        data = object._asdict()
        for k in data.keys():
            if isPublic(k):
                object[k] = simplify(data[k], level=level + 1)
    elif type(object) == DictType:
        for k in object.keys():
            if isPublic(k):
                # Bug fix: propagate the recursion depth (was reset to 0).
                object[k] = simplify(object[k], level=level + 1)
    elif type(object) == list:
        for k in range(len(object)):
            # Bug fix: propagate the recursion depth here as well.
            object[k] = simplify(object[k], level=level + 1)
    return object
| gpl-2.0 |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/django_extensions/utils/dia2django.py | 5 | 10265 | # -*- coding: UTF-8 -*-
##Author: Igor Támara igor@tamarapatino.org
##Use this little program as you wish. If you
#include it in your work, let others know you
#are using it by preserving this note; you have
#the right to make derivative works. Use it
#at your own risk.
#Tested to work on(etch testing 13-08-2007):
# Python 2.4.4 (#2, Jul 17 2007, 11:56:54)
# [GCC 4.1.3 20070629 (prerelease) (Debian 4.1.2-13)] on linux2
# Class names supplied by django.contrib.auth and friends; fields that refer
# to these are treated as external and no local model is generated for them.
# NOTE(review): dia2django() appends to this list at runtime, so this is
# mutable module state rather than a constant.
dependclasses = ["User", "Group", "Permission", "Message"]
import re
import six
import sys
import gzip
import codecs
from xml.dom.minidom import * # NOQA
# Type dictionary: translation of SQL column types -> Django model fields.
# (The original dict repeated the "date" and "int" keys with identical
# values; the duplicates have been removed -- lookup behavior is unchanged.)
tsd = {
    "text": "TextField",
    "date": "DateField",
    "varchar": "CharField",
    "int": "IntegerField",
    "float": "FloatField",
    "serial": "AutoField",
    "boolean": "BooleanField",
    "numeric": "FloatField",
    "timestamp": "DateTimeField",
    "bigint": "IntegerField",
    "datetime": "DateTimeField",
    "time": "TimeField",
    "bool": "BooleanField",
}

# Matches "varchar(N)" and captures N, for conversion to CharField(max_length=N).
v2c = re.compile(r'varchar\((\d+)\)')
def index(fks, id):
    """Return the key in *fks* whose entry carries dia object id *id*.

    *fks* maps class names to list entries whose element [1] is the id of
    the class in a dia diagram.  Returns None when no entry matches.
    """
    # Use the unpacked entry directly instead of re-indexing fks[i]
    # (the original ignored the unpacked value and did a second lookup).
    for name, entry in fks.items():
        if entry[1] == id:
            return name
    return None
def addparentstofks(rels, fks):
    """Fold inheritance relations into the class table.

    *rels* is a list of [parent_id, child_id] pairs taken from the diagram
    and *fks* maps class names to their bookkeeping entries.  For each pair,
    the child's generated source gets its ``models.Model`` base replaced by
    the parent's name, and the parent is recorded as a dependency of the
    child so classes can later be emitted in a valid order.
    """
    for rel in rels:
        child = index(fks, rel[1])
        parent = index(fks, rel[0])
        entry = fks[child]
        entry[2] = entry[2].replace("models.Model", parent)
        if parent not in entry[0]:
            entry[0].append(parent)
def dia2django(archivo):
    """Parse a gzipped .dia UML diagram file and return Django model source.

    Walks the dia XML: "UML - Class" objects become model classes,
    "UML - Generalization" links become inheritance, and "UML - SmallPackage"
    objects become ``from <pkg>.models import *`` imports.  Classes are
    topologically reordered so ForeignKey targets are defined first.
    """
    models_txt = ''
    # NOTE(review): the file handle is never explicitly closed.
    f = codecs.open(archivo, "rb")
    #dia files are gzipped
    data = gzip.GzipFile(fileobj=f).read()
    ppal = parseString(data)
    #diagram -> layer -> object -> UML - Class -> name, (attribs : composite -> name,type)
    datos = ppal.getElementsByTagName("dia:diagram")[0].getElementsByTagName("dia:layer")[0].getElementsByTagName("dia:object")
    clases = {}   # class name -> [deps, dia id, generated source, refcount]
    herit = []    # [parent id, child id] generalization pairs
    imports = six.u("")
    for i in datos:
        #Look for the classes
        if i.getAttribute("type") == "UML - Class":
            myid = i.getAttribute("id")
            for j in i.childNodes:
                if j.nodeType == Node.ELEMENT_NODE and j.hasAttributes():
                    if j.getAttribute("name") == "name":
                        # dia strings are quoted with '#'; strip one from each end.
                        actclas = j.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1]
                        myname = "\nclass %s(models.Model) :\n" % actclas
                        clases[actclas] = [[], myid, myname, 0]
                    if j.getAttribute("name") == "attributes":
                        for l in j.getElementsByTagName("dia:composite"):
                            if l.getAttribute("type") == "umlattribute":
                                #Look for the attribute name and type
                                for k in l.getElementsByTagName("dia:attribute"):
                                    if k.getAttribute("name") == "name":
                                        nc = k.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1]
                                    elif k.getAttribute("name") == "type":
                                        tc = k.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1]
                                    elif k.getAttribute("name") == "value":
                                        val = k.getElementsByTagName("dia:string")[0].childNodes[0].data[1:-1]
                                        # '##' is dia's empty string marker.
                                        if val == '##':
                                            val = ''
                                    elif k.getAttribute("name") == "visibility" and k.getElementsByTagName("dia:enum")[0].getAttribute("val") == "2":
                                        # "Protected" visibility marks a relation to an external model.
                                        if tc.replace(" ", "").lower().startswith("manytomanyfield("):
                                            #If we find a class not in our model that is marked as being to another model
                                            newc = tc.replace(" ", "")[16:-1]
                                            if dependclasses.count(newc) == 0:
                                                dependclasses.append(newc)
                                        if tc.replace(" ", "").lower().startswith("foreignkey("):
                                            #If we find a class not in our model that is marked as being to another model
                                            newc = tc.replace(" ", "")[11:-1]
                                            if dependclasses.count(newc) == 0:
                                                dependclasses.append(newc)
                                #Mapping SQL types to Django
                                varch = v2c.search(tc)
                                if tc.replace(" ", "").startswith("ManyToManyField("):
                                    myfor = tc.replace(" ", "")[16:-1]
                                    if actclas == myfor:
                                        #In case of a recursive type, we use 'self'
                                        tc = tc.replace(myfor, "'self'")
                                    elif clases[actclas][0].count(myfor) == 0:
                                        #Adding related class
                                        if myfor not in dependclasses:
                                            #In case we are using Auth classes or external via protected dia visibility
                                            clases[actclas][0].append(myfor)
                                    tc = "models." + tc
                                    if len(val) > 0:
                                        tc = tc.replace(")", "," + val + ")")
                                elif tc.find("Field") != -1:
                                    # The diagram already names a Django field type.
                                    if tc.count("()") > 0 and len(val) > 0:
                                        tc = "models.%s" % tc.replace(")", "," + val + ")")
                                    else:
                                        tc = "models.%s(%s)" % (tc, val)
                                elif tc.replace(" ", "").startswith("ForeignKey("):
                                    myfor = tc.replace(" ", "")[11:-1]
                                    if actclas == myfor:
                                        #In case of a recursive type, we use 'self'
                                        tc = tc.replace(myfor, "'self'")
                                    elif clases[actclas][0].count(myfor) == 0:
                                        #Adding foreign classes
                                        if myfor not in dependclasses:
                                            #In case we are using Auth classes
                                            clases[actclas][0].append(myfor)
                                    tc = "models." + tc
                                    if len(val) > 0:
                                        tc = tc.replace(")", "," + val + ")")
                                elif varch is None:
                                    # Plain SQL type: translate through the tsd table.
                                    tc = "models." + tsd[tc.strip().lower()] + "(" + val + ")"
                                else:
                                    # varchar(N) -> CharField(max_length=N).
                                    tc = "models.CharField(max_length=" + varch.group(1) + ")"
                                    if len(val) > 0:
                                        tc = tc.replace(")", ", " + val + " )")
                                # Django adds its own AutoField 'id'; skip an explicit one.
                                if not (nc == "id" and tc == "AutoField()"):
                                    clases[actclas][2] = clases[actclas][2] + (" %s = %s\n" % (nc, tc))
        elif i.getAttribute("type") == "UML - Generalization":
            # Collect the two connection endpoints: handle 0 = parent, 1 = child.
            mycons = ['A', 'A']
            a = i.getElementsByTagName("dia:connection")
            for j in a:
                if len(j.getAttribute("to")):
                    mycons[int(j.getAttribute("handle"))] = j.getAttribute("to")
            # NOTE(review): debug print left in by upstream.
            print(mycons)
            if 'A' not in mycons:
                herit.append(mycons)
        elif i.getAttribute("type") == "UML - SmallPackage":
            a = i.getElementsByTagName("dia:string")
            for j in a:
                if len(j.childNodes[0].data[1:-1]):
                    imports += six.u("from %s.models import *" % j.childNodes[0].data[1:-1])
    addparentstofks(herit, clases)
    #Ordering the appearance of classes
    #First we make a list of the classes each class is related to.
    ordered = []
    for j, k in six.iteritems(clases):
        k[2] = k[2] + "\n def __unicode__(self):\n return u\"\"\n"
        for fk in k[0]:
            if fk not in dependclasses:
                clases[fk][3] += 1
        ordered.append([j] + k)
    # Bubble classes so every class ends up after the classes it references.
    i = 0
    while i < len(ordered):
        mark = i
        j = i + 1
        while j < len(ordered):
            if ordered[i][0] in ordered[j][1]:
                mark = j
            j += 1
        if mark == i:
            i += 1
        else:
            # swap %s in %s" % ( ordered[i] , ordered[mark]) to make ordered[i] to be at the end
            if ordered[i][0] in ordered[mark][1] and ordered[mark][0] in ordered[i][1]:
                #Resolving simplistic circular ForeignKeys
                print("Not able to resolve circular ForeignKeys between %s and %s" % (ordered[i][1], ordered[mark][0]))
                break
            a = ordered[i]
            ordered[i] = ordered[mark]
            ordered[mark] = a
        if i == len(ordered) - 1:
            break
    ordered.reverse()
    if imports:
        models_txt = str(imports)
    # Each ordered entry is [name, deps, dia id, source, refcount]; emit source.
    for i in ordered:
        models_txt += '%s\n' % str(i[3])
    return models_txt
if __name__ == '__main__':
    # Expect exactly one argument: the .dia diagram file to convert.
    if len(sys.argv) == 2:
        dia2django(sys.argv[1])
    else:
        print(" Use:\n \n " + sys.argv[0] + " diagram.dia\n\n")
| bsd-3-clause |
Danfocus/Flexget | flexget/components/sites/sites/fuzer.py | 4 | 6243 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.moves.urllib.parse import quote_plus
import logging
import re
from requests.exceptions import RequestException
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.entry import Entry
from flexget.event import event
from flexget.plugin import PluginError
from flexget.utils.requests import Session as RequestSession
from flexget.components.sites.utils import torrent_availability, normalize_scene
from flexget.utils.soup import get_soup
from flexget.utils.tools import parse_filesize
log = logging.getLogger('fuzer')
requests = RequestSession()
# Human-readable Fuzer category name -> numeric site category id,
# grouped by media kind.  Used to translate the plugin's 'category'
# config values into the c[]=<id> query fragments the site expects.
CATEGORIES = {
    # Movies
    'HD Movies': 9,
    'XviD': 7,
    'BRRip': 59,
    'Israeli HD Movies': 61,
    'Israeli Movies': 60,
    'DVDR': 58,
    'Dubbed Movies': 83,
    # TV
    'HD Shows': 10,
    'Shows': 8,
    'Israeli HD Shows': 63,
    'Israeli Shows': 62,
    'Dubbed Shows': 84,
    # Anime
    'Anime': 65,
    # FuzePacks
    'Movie Packs': 73,
    'Shows Packs': 76,
}
class UrlRewriteFuzer(object):
    """FlexGet search plugin for the Fuzer private tracker.

    ``search()`` stores the configured credentials on the instance
    (``self.cookies``, ``self.user_id``, ``self.rss_key``) before calling the
    helper methods below, which rely on that state having been set.
    """
    # JSON schema validating this plugin's configuration block.
    schema = {
        'type': 'object',
        'properties': {
            'cookie_password': {'type': 'string'},
            'user_id': {'type': 'integer'},
            'rss_key': {'type': 'string'},
            'category': one_or_more(
                {'oneOf': [{'type': 'string', 'enum': list(CATEGORIES)}, {'type': 'integer'}]}
            ),
        },
        'required': ['user_id', 'cookie_password', 'rss_key'],
        'additionalProperties': False,
    }

    def get_fuzer_soup(self, search_term, categories_list):
        # Fetch one search-results page and return it parsed with BeautifulSoup.
        # *categories_list* holds pre-formatted "c[]=<id>" query fragments.
        params = {'matchquery': 'any', 'ref_': 'advanced'}
        query = '{}&{}'.format(search_term, '&'.join(categories_list))
        try:
            page = requests.get(
                'https://www.fuzer.me/browse.php?query={}'.format(query),
                params=params,
                cookies=self.cookies,
            )
        except RequestException as e:
            raise PluginError('Could not connect to Fuzer: {}'.format(e))
        # Being redirected to the login page means the cookies were rejected.
        if 'login' in page.url:
            raise PluginError('Could not fetch results from Fuzer. Check config')
        log.debug('Using %s as fuzer search url', page.url)
        return get_soup(page.content)

    def extract_entry_from_soup(self, soup):
        # Convert a parsed results page into a list of FlexGet Entry objects.
        table = soup.find('div', {'id': 'main_table'})
        if table is None:
            raise PluginError('Could not fetch results table from Fuzer, aborting')
        log.trace('fuzer results table: %s', table)
        table = table.find('table', {'class': 'table_info'})
        # A single row is just the header -- no results.
        if len(table.find_all('tr')) == 1:
            log.debug('No search results were returned from Fuzer, continuing')
            return []
        entries = []
        for tr in table.find_all("tr"):
            # Skip header rows.
            if not tr.get('class') or 'colhead_dark' in tr.get('class'):
                continue
            name = tr.find('div', {'class': 'main_title'}).find('a').text
            # The torrent file name follows the newline in the link's title.
            torrent_name = re.search(
                '\\n(.*)', tr.find('div', {'style': 'float: right;'}).find('a')['title']
            ).group(1)
            attachment_link = tr.find('div', {'style': 'float: right;'}).find('a')['href']
            attachment_id = re.search(r'attachmentid=(\d+)', attachment_link).group(1)
            raw_size = tr.find_all('td', {'class': 'inline_info'})[0].text.strip()
            seeders = int(tr.find_all('td', {'class': 'inline_info'})[2].text)
            leechers = int(tr.find_all('td', {'class': 'inline_info'})[3].text)
            e = Entry()
            e['title'] = name
            # Build an RSS-style download URL embedding the user's RSS key.
            final_url = 'https://www.fuzer.me/rss/torrent.php/{}/{}/{}/{}'.format(
                attachment_id, self.user_id, self.rss_key, torrent_name
            )
            log.debug('RSS-ified download link: %s', final_url)
            e['url'] = final_url
            e['torrent_seeds'] = seeders
            e['torrent_leeches'] = leechers
            e['torrent_availability'] = torrent_availability(
                e['torrent_seeds'], e['torrent_leeches']
            )
            size = re.search(r'(\d+(?:[.,]\d+)*)\s?([KMGTP]B)', raw_size)
            e['content_size'] = parse_filesize(size.group(0))
            entries.append(e)
        return entries

    @plugin.internet(log)
    def search(self, task, entry, config=None):
        """
        Search for name from fuzer.
        """
        self.rss_key = config['rss_key']
        self.user_id = config['user_id']
        # Pre-baked session cookies; fzr2password is the hashed cookie password.
        self.cookies = {
            'fzr2lastactivity': '0',
            'fzr2lastvisit': '',
            'fzr2password': config['cookie_password'],
            'fzr2sessionhash': '',
            'fzr2userid': str(self.user_id),
        }
        category = config.get('category', [0])
        # Make sure categories is a list
        if not isinstance(category, list):
            category = [category]
        # If there are any text categories, turn them into their id number
        categories = [c if isinstance(c, int) else CATEGORIES[c] for c in category]
        c_list = ['c{}={}'.format(quote_plus('[]'), c) for c in categories]
        entries = []
        if entry.get('imdb_id'):
            # An IMDB id gives a much more precise match than the title.
            log.debug("imdb_id '%s' detected, using in search.", entry['imdb_id'])
            soup = self.get_fuzer_soup(entry['imdb_id'], c_list)
            entries = self.extract_entry_from_soup(soup)
            if entries:
                for e in list(entries):
                    e['imdb_id'] = entry.get('imdb_id')
        else:
            for search_string in entry.get('search_strings', [entry['title']]):
                query = normalize_scene(search_string)
                # The site expects Hebrew-capable windows-1255 URL encoding.
                text = quote_plus(query.encode('windows-1255'))
                soup = self.get_fuzer_soup(text, c_list)
                entries += self.extract_entry_from_soup(soup)
        # Best-seeded results first.
        return (
            sorted(entries, reverse=True, key=lambda x: x.get('torrent_availability'))
            if entries
            else []
        )
@event('plugin.register')
def register_plugin():
    # Expose UrlRewriteFuzer to FlexGet as the 'fuzer' search plugin (API v2).
    plugin.register(UrlRewriteFuzer, 'fuzer', interfaces=['search'], api_ver=2)
| mit |
rdo-management/neutron | neutron/db/migration/alembic_migrations/versions/236b90af57ab_ml2_refactor_for_dynamic_segments.py | 17 | 1170 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""ml2_type_driver_refactor_dynamic_segments
Revision ID: 236b90af57ab
Revises: 58fe87a01143
Create Date: 2014-08-14 16:22:14.293788
"""
# revision identifiers, used by Alembic.
revision = '236b90af57ab'        # this migration
down_revision = '58fe87a01143'   # parent revision in the migration chain
from alembic import op
import sqlalchemy as sa
def upgrade():
    # Add the 'is_dynamic' flag to ML2 network segments.  server_default
    # backfills existing rows with false so nullable=False is satisfied.
    op.add_column('ml2_network_segments',
                  sa.Column('is_dynamic', sa.Boolean(), nullable=False,
                            server_default=sa.sql.false()))
def downgrade():
    # Reverse upgrade(): drop the 'is_dynamic' column.
    op.drop_column('ml2_network_segments', 'is_dynamic')
| apache-2.0 |
lmazuel/azure-sdk-for-python | azure-mgmt-network/azure/mgmt/network/v2017_10_01/models/application_gateway_firewall_disabled_rule_group_py3.py | 7 | 1464 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class ApplicationGatewayFirewallDisabledRuleGroup(Model):
    """Allows to disable rules within a rule group or an entire rule group.

    All required parameters must be populated in order to send to Azure.

    :param rule_group_name: Required. The name of the rule group that will be
     disabled.
    :type rule_group_name: str
    :param rules: The list of rules that will be disabled. If null, all rules
     of the rule group will be disabled.
    :type rules: list[int]
    """

    # msrest validation metadata: enforced before the payload is serialized.
    _validation = {
        'rule_group_name': {'required': True},
    }

    # msrest (de)serialization map: attribute -> wire key and type.
    _attribute_map = {
        'rule_group_name': {'key': 'ruleGroupName', 'type': 'str'},
        'rules': {'key': 'rules', 'type': '[int]'},
    }

    def __init__(self, *, rule_group_name: str, rules=None, **kwargs) -> None:
        super(ApplicationGatewayFirewallDisabledRuleGroup, self).__init__(**kwargs)
        self.rule_group_name = rule_group_name
        self.rules = rules
| mit |
amyvmiwei/kbengine | kbe/res/scripts/common/Lib/test/test_urllib.py | 12 | 59717 | """Regresssion tests for urllib"""
import urllib.parse
import urllib.request
import urllib.error
import http.client
import email.message
import io
import unittest
from test import support
import os
import sys
import tempfile
from nturl2path import url2pathname, pathname2url
from base64 import b64encode
import collections
def hexescape(char):
    """Return the RFC 2396 %XX escape sequence for *char*."""
    code = "%X" % ord(char)
    # Pad single-digit code points to the two-digit %XX form.
    if len(code) == 1:
        code = "0" + code
    return "%" + code
# Shortcut for testing FancyURLopener
# Cached opener instance shared by all urlopen() calls below.
_urlopener = None


def urlopen(url, data=None, proxies=None):
    """urlopen(url [, data]) -> open file-like object

    Test helper that routes requests through FancyURLopener so the
    deprecated API keeps being exercised.  The opener is cached in the
    module-global _urlopener and reused on later calls; passing *proxies*
    always constructs a fresh (uncached) opener.
    """
    global _urlopener
    if proxies is not None:
        opener = urllib.request.FancyURLopener(proxies=proxies)
    elif not _urlopener:
        # First use: silence the expected DeprecationWarning while the
        # cached opener is constructed.
        with support.check_warnings(
                ('FancyURLopener style of invoking requests is deprecated.',
                 DeprecationWarning)):
            opener = urllib.request.FancyURLopener()
        _urlopener = opener
    else:
        opener = _urlopener
    if data is None:
        return opener.open(url)
    else:
        return opener.open(url, data)
class FakeHTTPMixin(object):
    """Test mixin that swaps http.client.HTTPConnection for a canned fake.

    fakehttp() installs a connection class whose socket replays *fakedata*
    and records outgoing bytes on FakeHTTPConnection.buf; unfakehttp()
    restores the real class.  Always pair the two calls (try/finally).
    """
    def fakehttp(self, fakedata):
        class FakeSocket(io.BytesIO):
            # Reference count of makefile() handles; the buffer only really
            # closes once every handle has been closed.
            io_refs = 1

            def sendall(self, data):
                # Capture the request bytes for later inspection by tests.
                FakeHTTPConnection.buf = data

            def makefile(self, *args, **kwds):
                self.io_refs += 1
                return self

            def read(self, amt=None):
                # A closed socket yields EOF rather than raising.
                if self.closed:
                    return b""
                return io.BytesIO.read(self, amt)

            def readline(self, length=None):
                if self.closed:
                    return b""
                return io.BytesIO.readline(self, length)

            def close(self):
                self.io_refs -= 1
                if self.io_refs == 0:
                    io.BytesIO.close(self)

        class FakeHTTPConnection(http.client.HTTPConnection):
            # buffer to store data for verification in urlopen tests.
            buf = None

            def connect(self):
                # Never touch the network: hand back the canned socket.
                self.sock = FakeSocket(fakedata)

        # Remember the real class so unfakehttp() can restore it.
        self._connection_class = http.client.HTTPConnection
        http.client.HTTPConnection = FakeHTTPConnection

    def unfakehttp(self):
        http.client.HTTPConnection = self._connection_class
class urlopen_FileTests(unittest.TestCase):
    """Test urlopen() opening a temporary file.

    Try to test as much functionality as possible so as to cut down on reliance
    on connecting to the Net for testing.
    """

    def setUp(self):
        # Create a temp file to use for testing
        self.text = bytes("test_urllib: %s\n" % self.__class__.__name__,
                          "ascii")
        f = open(support.TESTFN, 'wb')
        try:
            f.write(self.text)
        finally:
            f.close()
        self.pathname = support.TESTFN
        # Every test operates on this already-open file: URL.
        self.returned_obj = urlopen("file:%s" % self.pathname)

    def tearDown(self):
        """Shut down the open object"""
        self.returned_obj.close()
        os.remove(support.TESTFN)

    def test_interface(self):
        # Make sure object returned by urlopen() has the specified methods
        for attr in ("read", "readline", "readlines", "fileno",
                     "close", "info", "geturl", "getcode", "__iter__"):
            self.assertTrue(hasattr(self.returned_obj, attr),
                            "object returned by urlopen() lacks %s attribute" %
                            attr)

    def test_read(self):
        self.assertEqual(self.text, self.returned_obj.read())

    def test_readline(self):
        self.assertEqual(self.text, self.returned_obj.readline())
        # A second readline() past EOF must return an empty byte string.
        self.assertEqual(b'', self.returned_obj.readline(),
                         "calling readline() after exhausting the file did not"
                         " return an empty string")

    def test_readlines(self):
        lines_list = self.returned_obj.readlines()
        self.assertEqual(len(lines_list), 1,
                         "readlines() returned the wrong number of lines")
        self.assertEqual(lines_list[0], self.text,
                         "readlines() returned improper text")

    def test_fileno(self):
        file_num = self.returned_obj.fileno()
        self.assertIsInstance(file_num, int, "fileno() did not return an int")
        # Reading straight from the descriptor must yield the same bytes.
        self.assertEqual(os.read(file_num, len(self.text)), self.text,
                         "Reading on the file descriptor returned by fileno() "
                         "did not return the expected text")

    def test_close(self):
        # Test close() by calling it here and then having it be called again
        # by the tearDown() method for the test
        self.returned_obj.close()

    def test_info(self):
        self.assertIsInstance(self.returned_obj.info(), email.message.Message)

    def test_geturl(self):
        self.assertEqual(self.returned_obj.geturl(), self.pathname)

    def test_getcode(self):
        # file: URLs have no HTTP status; getcode() must be None.
        self.assertIsNone(self.returned_obj.getcode())

    def test_iter(self):
        # Test iterator
        # Don't need to count number of iterations since test would fail the
        # instant it returned anything beyond the first line from the
        # comparison.
        # Use the iterator in the usual implicit way to test for ticket #4608.
        for line in self.returned_obj:
            self.assertEqual(line, self.text)

    def test_relativelocalfile(self):
        self.assertRaises(ValueError, urllib.request.urlopen, './' + self.pathname)
class ProxyTests(unittest.TestCase):
    """Tests for proxy configuration read from environment variables."""

    def setUp(self):
        # Records changes to env vars
        self.env = support.EnvironmentVarGuard()
        # Delete all proxy related env vars
        for k in list(os.environ):
            if 'proxy' in k.lower():
                self.env.unset(k)

    def tearDown(self):
        # Restore all proxy related env vars
        self.env.__exit__()
        del self.env

    def test_getproxies_environment_keep_no_proxies(self):
        self.env.set('NO_PROXY', 'localhost')
        proxies = urllib.request.getproxies_environment()
        # getproxies_environment use lowered case truncated (no '_proxy') keys
        self.assertEqual('localhost', proxies['no'])
        # List of no_proxies with space.
        self.env.set('NO_PROXY', 'localhost, anotherdomain.com, newdomain.com')
        self.assertTrue(urllib.request.proxy_bypass_environment('anotherdomain.com'))
class urlopen_HttpTests(unittest.TestCase, FakeHTTPMixin):
    """Test urlopen() opening a fake http connection."""
    def check_read(self, ver):
        # Serve "Hello!" over the given HTTP version (bytes, e.g. b"1.1")
        # and check that body, final URL and status code are all correct.
        self.fakehttp(b"HTTP/" + ver + b" 200 OK\r\n\r\nHello!")
        try:
            fp = urlopen("http://python.org/")
            self.assertEqual(fp.readline(), b"Hello!")
            self.assertEqual(fp.readline(), b"")
            self.assertEqual(fp.geturl(), 'http://python.org/')
            self.assertEqual(fp.getcode(), 200)
        finally:
            # Always restore the real HTTP connection class.
            self.unfakehttp()
    def test_url_fragment(self):
        # Issue #11703: geturl() omits fragments in the original URL.
        url = 'http://docs.python.org/library/urllib.html#OK'
        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!")
        try:
            fp = urllib.request.urlopen(url)
            self.assertEqual(fp.geturl(), url)
        finally:
            self.unfakehttp()
    def test_willclose(self):
        # No Content-Length and no keep-alive in the fake response, so the
        # connection must be flagged to close after the body is read.
        self.fakehttp(b"HTTP/1.1 200 OK\r\n\r\nHello!")
        try:
            resp = urlopen("http://www.python.org")
            self.assertTrue(resp.fp.will_close)
        finally:
            self.unfakehttp()
    def test_read_0_9(self):
        # "0.9" response accepted (but not "simple responses" without
        # a status line)
        self.check_read(b"0.9")
    def test_read_1_0(self):
        self.check_read(b"1.0")
    def test_read_1_1(self):
        self.check_read(b"1.1")
    def test_read_bogus(self):
        # urlopen() should raise OSError for many error codes.
        self.fakehttp(b'''HTTP/1.1 401 Authentication Required
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Type: text/html; charset=iso-8859-1
''')
        try:
            self.assertRaises(OSError, urlopen, "http://python.org/")
        finally:
            self.unfakehttp()
    def test_invalid_redirect(self):
        # urlopen() should raise OSError for many error codes.
        self.fakehttp(b'''HTTP/1.1 302 Found
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Location: file://guidocomputer.athome.com:/python/license
Connection: close
Content-Type: text/html; charset=iso-8859-1
''')
        try:
            # A redirect to a file:// URL must be refused.
            self.assertRaises(urllib.error.HTTPError, urlopen,
                              "http://python.org/")
        finally:
            self.unfakehttp()
    def test_empty_socket(self):
        # urlopen() raises OSError if the underlying socket does not send any
        # data. (#1680230)
        self.fakehttp(b'')
        try:
            self.assertRaises(OSError, urlopen, "http://something")
        finally:
            self.unfakehttp()
    def test_missing_localfile(self):
        # Test for #10836
        with self.assertRaises(urllib.error.URLError) as e:
            urlopen('file://localhost/a/file/which/doesnot/exists.py')
        # The error must carry both the offending filename and a reason.
        self.assertTrue(e.exception.filename)
        self.assertTrue(e.exception.reason)
    def test_file_notexists(self):
        fd, tmp_file = tempfile.mkstemp()
        tmp_fileurl = 'file://localhost/' + tmp_file.replace(os.path.sep, '/')
        try:
            self.assertTrue(os.path.exists(tmp_file))
            with urlopen(tmp_fileurl) as fobj:
                self.assertTrue(fobj)
        finally:
            os.close(fd)
            os.unlink(tmp_file)
        # Once the file is gone, opening its URL must fail with URLError.
        self.assertFalse(os.path.exists(tmp_file))
        with self.assertRaises(urllib.error.URLError):
            urlopen(tmp_fileurl)
    def test_ftp_nohost(self):
        test_ftp_url = 'ftp:///path'
        with self.assertRaises(urllib.error.URLError) as e:
            urlopen(test_ftp_url)
        # No host in the URL, so no filename should be attached to the error.
        self.assertFalse(e.exception.filename)
        self.assertTrue(e.exception.reason)
    def test_ftp_nonexisting(self):
        with self.assertRaises(urllib.error.URLError) as e:
            urlopen('ftp://localhost/a/file/which/doesnot/exists.py')
        self.assertFalse(e.exception.filename)
        self.assertTrue(e.exception.reason)
    def test_userpass_inurl(self):
        self.fakehttp(b"HTTP/1.0 200 OK\r\n\r\nHello!")
        try:
            fp = urlopen("http://user:pass@python.org/")
            self.assertEqual(fp.readline(), b"Hello!")
            self.assertEqual(fp.readline(), b"")
            # Credentials must survive in the URL reported back.
            self.assertEqual(fp.geturl(), 'http://user:pass@python.org/')
            self.assertEqual(fp.getcode(), 200)
        finally:
            self.unfakehttp()
    def test_userpass_inurl_w_spaces(self):
        self.fakehttp(b"HTTP/1.0 200 OK\r\n\r\nHello!")
        try:
            userpass = "a b:c d"
            url = "http://{}@python.org/".format(userpass)
            fakehttp_wrapper = http.client.HTTPConnection
            authorization = ("Authorization: Basic %s\r\n" %
                             b64encode(userpass.encode("ASCII")).decode("ASCII"))
            fp = urlopen(url)
            # The authorization header must be in place
            self.assertIn(authorization, fakehttp_wrapper.buf.decode("UTF-8"))
            self.assertEqual(fp.readline(), b"Hello!")
            self.assertEqual(fp.readline(), b"")
            # the spaces are quoted in URL so no match
            self.assertNotEqual(fp.geturl(), url)
            self.assertEqual(fp.getcode(), 200)
        finally:
            self.unfakehttp()
    def test_URLopener_deprecation(self):
        # The legacy URLopener class must emit DeprecationWarning on use.
        with support.check_warnings(('',DeprecationWarning)):
            urllib.request.URLopener()
class urlopen_DataTests(unittest.TestCase):
    """Test urlopen() opening a data URL."""

    def setUp(self):
        # text containing URL special- and unicode-characters
        self.text = "test data URLs :;,%=& \u00f6 \u00c4 "
        # 2x1 pixel RGB PNG image with one black and one white pixel
        self.image = (
            b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x02\x00\x00\x00'
            b'\x01\x08\x02\x00\x00\x00{@\xe8\xdd\x00\x00\x00\x01sRGB\x00\xae'
            b'\xce\x1c\xe9\x00\x00\x00\x0fIDAT\x08\xd7c```\xf8\xff\xff?\x00'
            b'\x06\x01\x02\xfe\no/\x1e\x00\x00\x00\x00IEND\xaeB`\x82')
        # Percent-encoded form of self.text.
        self.text_url = (
            "data:text/plain;charset=UTF-8,test%20data%20URLs%20%3A%3B%2C%25%3"
            "D%26%20%C3%B6%20%C3%84%20")
        # Base64-encoded form of self.text (Latin-1 charset).
        self.text_url_base64 = (
            "data:text/plain;charset=ISO-8859-1;base64,dGVzdCBkYXRhIFVSTHMgOjs"
            "sJT0mIPYgxCA%3D")
        # base64 encoded data URL that contains ignorable spaces,
        # such as "\n", " ", "%0A", and "%20".
        self.image_url = (
            "data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAIAAAABCAIAAAB7\n"
            "QOjdAAAAAXNSR0IArs4c6QAAAA9JREFUCNdj%0AYGBg%2BP//PwAGAQL%2BCm8 "
            "vHgAAAABJRU5ErkJggg%3D%3D%0A%20")
        self.text_url_resp = urllib.request.urlopen(self.text_url)
        self.text_url_base64_resp = urllib.request.urlopen(
            self.text_url_base64)
        self.image_url_resp = urllib.request.urlopen(self.image_url)

    def tearDown(self):
        # Fix: the responses opened in setUp() were never closed, leaking
        # file-like objects (ResourceWarning under -W error).
        self.text_url_resp.close()
        self.text_url_base64_resp.close()
        self.image_url_resp.close()

    def test_interface(self):
        # Make sure object returned by urlopen() has the specified methods
        for attr in ("read", "readline", "readlines",
                     "close", "info", "geturl", "getcode", "__iter__"):
            self.assertTrue(hasattr(self.text_url_resp, attr),
                            "object returned by urlopen() lacks %s attribute" %
                            attr)

    def test_info(self):
        # Headers are synthesized from the data URL's media type.
        self.assertIsInstance(self.text_url_resp.info(), email.message.Message)
        self.assertEqual(self.text_url_base64_resp.info().get_params(),
                         [('text/plain', ''), ('charset', 'ISO-8859-1')])
        self.assertEqual(self.image_url_resp.info()['content-length'],
                         str(len(self.image)))
        # A bare "data:," URL defaults to text/plain;charset=US-ASCII.
        self.assertEqual(urllib.request.urlopen("data:,").info().get_params(),
                         [('text/plain', ''), ('charset', 'US-ASCII')])

    def test_geturl(self):
        self.assertEqual(self.text_url_resp.geturl(), self.text_url)
        self.assertEqual(self.text_url_base64_resp.geturl(),
                         self.text_url_base64)
        self.assertEqual(self.image_url_resp.geturl(), self.image_url)

    def test_read_text(self):
        # Decode using the charset the response itself advertises.
        self.assertEqual(self.text_url_resp.read().decode(
            dict(self.text_url_resp.info().get_params())['charset']), self.text)

    def test_read_text_base64(self):
        self.assertEqual(self.text_url_base64_resp.read().decode(
            dict(self.text_url_base64_resp.info().get_params())['charset']),
            self.text)

    def test_read_image(self):
        self.assertEqual(self.image_url_resp.read(), self.image)

    def test_missing_comma(self):
        # A data URL without a comma separator is malformed.
        self.assertRaises(ValueError,urllib.request.urlopen,'data:text/plain')

    def test_invalid_base64_data(self):
        # missing padding character
        self.assertRaises(ValueError,urllib.request.urlopen,'data:;base64,Cg=')
class urlretrieve_FileTests(unittest.TestCase):
    """Test urllib.urlretrieve() on local files"""

    def setUp(self):
        # Create a list of temporary files. Each item in the list is a file
        # name (absolute path or relative to the current working directory).
        # All files in this list will be deleted in the tearDown method.
        self.tempFiles = []
        # Create a temporary file with known content.
        self.registerFileForCleanUp(support.TESTFN)
        self.text = b'testing urllib.urlretrieve'
        # Fix: the original try/finally double-close with a bare `except:`
        # could mask real errors (and raised NameError internally when
        # open() itself failed).  A with-statement closes exactly once.
        with open(support.TESTFN, 'wb') as fileobj:
            fileobj.write(self.text)

    def tearDown(self):
        # Delete the temporary files.  A file may legitimately already be
        # gone, so only OSError is ignored (the bare `except:` also hid
        # KeyboardInterrupt and programming errors).
        for each in self.tempFiles:
            try:
                os.remove(each)
            except OSError:
                pass

    def constructLocalFileUrl(self, filePath):
        """Return a file:// URL for *filePath*.

        Skips the test if the path cannot be encoded as UTF-8.
        """
        filePath = os.path.abspath(filePath)
        try:
            filePath.encode("utf-8")
        except UnicodeEncodeError:
            raise unittest.SkipTest("filePath is not encodable to utf8")
        return "file://%s" % urllib.request.pathname2url(filePath)

    def createNewTempFile(self, data=b""):
        """Creates a new temporary file containing the specified data,
        registers the file for deletion during the test fixture tear down, and
        returns the absolute path of the file."""
        newFd, newFilePath = tempfile.mkstemp()
        self.registerFileForCleanUp(newFilePath)
        # os.fdopen takes ownership of the descriptor; the with-block
        # guarantees it is closed exactly once.
        with os.fdopen(newFd, "wb") as newFile:
            newFile.write(data)
        return newFilePath

    def registerFileForCleanUp(self, fileName):
        # Queue *fileName* for removal in tearDown().
        self.tempFiles.append(fileName)

    def test_basic(self):
        # Make sure that a local file just gets its own location returned and
        # a headers value is returned.
        result = urllib.request.urlretrieve("file:%s" % support.TESTFN)
        self.assertEqual(result[0], support.TESTFN)
        self.assertIsInstance(result[1], email.message.Message,
                              "did not get a email.message.Message instance "
                              "as second returned value")

    def test_copy(self):
        # Test that setting the filename argument works.
        second_temp = "%s.2" % support.TESTFN
        self.registerFileForCleanUp(second_temp)
        result = urllib.request.urlretrieve(self.constructLocalFileUrl(
            support.TESTFN), second_temp)
        self.assertEqual(second_temp, result[0])
        self.assertTrue(os.path.exists(second_temp), "copy of the file was not "
                                                     "made")
        # Verify the copied content byte-for-byte.
        with open(second_temp, 'rb') as fileobj:
            text = fileobj.read()
        self.assertEqual(self.text, text)

    def test_reporthook(self):
        # Make sure that the reporthook works.
        def hooktester(block_count, block_read_size, file_size, count_holder=[0]):
            # Each call must see an incrementing block count.
            self.assertIsInstance(block_count, int)
            self.assertIsInstance(block_read_size, int)
            self.assertIsInstance(file_size, int)
            self.assertEqual(block_count, count_holder[0])
            count_holder[0] = count_holder[0] + 1
        second_temp = "%s.2" % support.TESTFN
        self.registerFileForCleanUp(second_temp)
        urllib.request.urlretrieve(
            self.constructLocalFileUrl(support.TESTFN),
            second_temp, hooktester)

    def test_reporthook_0_bytes(self):
        # Test on zero length file. Should call reporthook only 1 time.
        report = []
        def hooktester(block_count, block_read_size, file_size, _report=report):
            _report.append((block_count, block_read_size, file_size))
        srcFileName = self.createNewTempFile()
        urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName),
            support.TESTFN, hooktester)
        self.assertEqual(len(report), 1)
        self.assertEqual(report[0][2], 0)

    def test_reporthook_5_bytes(self):
        # Test on 5 byte file. Should call reporthook only 2 times (once when
        # the "network connection" is established and once when the block is
        # read).
        report = []
        def hooktester(block_count, block_read_size, file_size, _report=report):
            _report.append((block_count, block_read_size, file_size))
        srcFileName = self.createNewTempFile(b"x" * 5)
        urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName),
            support.TESTFN, hooktester)
        self.assertEqual(len(report), 2)
        self.assertEqual(report[0][2], 5)
        self.assertEqual(report[1][2], 5)

    def test_reporthook_8193_bytes(self):
        # Test on 8193 byte file. Should call reporthook only 3 times (once
        # when the "network connection" is established, once for the next 8192
        # bytes, and once for the last byte).
        report = []
        def hooktester(block_count, block_read_size, file_size, _report=report):
            _report.append((block_count, block_read_size, file_size))
        srcFileName = self.createNewTempFile(b"x" * 8193)
        urllib.request.urlretrieve(self.constructLocalFileUrl(srcFileName),
            support.TESTFN, hooktester)
        self.assertEqual(len(report), 3)
        self.assertEqual(report[0][2], 8193)
        self.assertEqual(report[0][1], 8192)
        self.assertEqual(report[1][1], 8192)
        self.assertEqual(report[2][1], 8192)
class urlretrieve_HttpTests(unittest.TestCase, FakeHTTPMixin):
    """Test urllib.urlretrieve() using fake http connections"""
    def test_short_content_raises_ContentTooShortError(self):
        # Headers promise Content-Length: 100 but the body is far shorter,
        # so retrieval must abort with ContentTooShortError.
        self.fakehttp(b'''HTTP/1.1 200 OK
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Length: 100
Content-Type: text/html; charset=iso-8859-1
FF
''')
        def _reporthook(par1, par2, par3):
            # No-op hook: exercising the reporthook code path is the point.
            pass
        with self.assertRaises(urllib.error.ContentTooShortError):
            try:
                urllib.request.urlretrieve('http://example.com/',
                                           reporthook=_reporthook)
            finally:
                # Restore the real HTTP connection class either way.
                self.unfakehttp()
    def test_short_content_raises_ContentTooShortError_without_reporthook(self):
        # Same as above, but without a reporthook installed.
        self.fakehttp(b'''HTTP/1.1 200 OK
Date: Wed, 02 Jan 2008 03:03:54 GMT
Server: Apache/1.3.33 (Debian GNU/Linux) mod_ssl/2.8.22 OpenSSL/0.9.7e
Connection: close
Content-Length: 100
Content-Type: text/html; charset=iso-8859-1
FF
''')
        with self.assertRaises(urllib.error.ContentTooShortError):
            try:
                urllib.request.urlretrieve('http://example.com/')
            finally:
                self.unfakehttp()
class QuotingTests(unittest.TestCase):
"""Tests for urllib.quote() and urllib.quote_plus()
According to RFC 2396 (Uniform Resource Identifiers), to escape a
character you write it as '%' + <2 character US-ASCII hex value>.
The Python code of ``'%' + hex(ord(<character>))[2:]`` escapes a
character properly. Case does not matter on the hex letters.
The various character sets specified are:
Reserved characters : ";/?:@&=+$,"
Have special meaning in URIs and must be escaped if not being used for
their special meaning
Data characters : letters, digits, and "-_.!~*'()"
Unreserved and do not need to be escaped; can be, though, if desired
Control characters : 0x00 - 0x1F, 0x7F
Have no use in URIs so must be escaped
space : 0x20
Must be escaped
Delimiters : '<>#%"'
Must be escaped
Unwise : "{}|\^[]`"
Must be escaped
"""
def test_never_quote(self):
# Make sure quote() does not quote letters, digits, and "_,.-"
do_not_quote = '' .join(["ABCDEFGHIJKLMNOPQRSTUVWXYZ",
"abcdefghijklmnopqrstuvwxyz",
"0123456789",
"_.-"])
result = urllib.parse.quote(do_not_quote)
self.assertEqual(do_not_quote, result,
"using quote(): %r != %r" % (do_not_quote, result))
result = urllib.parse.quote_plus(do_not_quote)
self.assertEqual(do_not_quote, result,
"using quote_plus(): %r != %r" % (do_not_quote, result))
def test_default_safe(self):
# Test '/' is default value for 'safe' parameter
self.assertEqual(urllib.parse.quote.__defaults__[0], '/')
def test_safe(self):
# Test setting 'safe' parameter does what it should do
quote_by_default = "<>"
result = urllib.parse.quote(quote_by_default, safe=quote_by_default)
self.assertEqual(quote_by_default, result,
"using quote(): %r != %r" % (quote_by_default, result))
result = urllib.parse.quote_plus(quote_by_default,
safe=quote_by_default)
self.assertEqual(quote_by_default, result,
"using quote_plus(): %r != %r" %
(quote_by_default, result))
# Safe expressed as bytes rather than str
result = urllib.parse.quote(quote_by_default, safe=b"<>")
self.assertEqual(quote_by_default, result,
"using quote(): %r != %r" % (quote_by_default, result))
# "Safe" non-ASCII characters should have no effect
# (Since URIs are not allowed to have non-ASCII characters)
result = urllib.parse.quote("a\xfcb", encoding="latin-1", safe="\xfc")
expect = urllib.parse.quote("a\xfcb", encoding="latin-1", safe="")
self.assertEqual(expect, result,
"using quote(): %r != %r" %
(expect, result))
# Same as above, but using a bytes rather than str
result = urllib.parse.quote("a\xfcb", encoding="latin-1", safe=b"\xfc")
expect = urllib.parse.quote("a\xfcb", encoding="latin-1", safe="")
self.assertEqual(expect, result,
"using quote(): %r != %r" %
(expect, result))
def test_default_quoting(self):
# Make sure all characters that should be quoted are by default sans
# space (separate test for that).
should_quote = [chr(num) for num in range(32)] # For 0x00 - 0x1F
should_quote.append('<>#%"{}|\^[]`')
should_quote.append(chr(127)) # For 0x7F
should_quote = ''.join(should_quote)
for char in should_quote:
result = urllib.parse.quote(char)
self.assertEqual(hexescape(char), result,
"using quote(): "
"%s should be escaped to %s, not %s" %
(char, hexescape(char), result))
result = urllib.parse.quote_plus(char)
self.assertEqual(hexescape(char), result,
"using quote_plus(): "
"%s should be escapes to %s, not %s" %
(char, hexescape(char), result))
del should_quote
partial_quote = "ab[]cd"
expected = "ab%5B%5Dcd"
result = urllib.parse.quote(partial_quote)
self.assertEqual(expected, result,
"using quote(): %r != %r" % (expected, result))
result = urllib.parse.quote_plus(partial_quote)
self.assertEqual(expected, result,
"using quote_plus(): %r != %r" % (expected, result))
def test_quoting_space(self):
# Make sure quote() and quote_plus() handle spaces as specified in
# their unique way
result = urllib.parse.quote(' ')
self.assertEqual(result, hexescape(' '),
"using quote(): %r != %r" % (result, hexescape(' ')))
result = urllib.parse.quote_plus(' ')
self.assertEqual(result, '+',
"using quote_plus(): %r != +" % result)
given = "a b cd e f"
expect = given.replace(' ', hexescape(' '))
result = urllib.parse.quote(given)
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
expect = given.replace(' ', '+')
result = urllib.parse.quote_plus(given)
self.assertEqual(expect, result,
"using quote_plus(): %r != %r" % (expect, result))
def test_quoting_plus(self):
self.assertEqual(urllib.parse.quote_plus('alpha+beta gamma'),
'alpha%2Bbeta+gamma')
self.assertEqual(urllib.parse.quote_plus('alpha+beta gamma', '+'),
'alpha+beta+gamma')
# Test with bytes
self.assertEqual(urllib.parse.quote_plus(b'alpha+beta gamma'),
'alpha%2Bbeta+gamma')
# Test with safe bytes
self.assertEqual(urllib.parse.quote_plus('alpha+beta gamma', b'+'),
'alpha+beta+gamma')
def test_quote_bytes(self):
# Bytes should quote directly to percent-encoded values
given = b"\xa2\xd8ab\xff"
expect = "%A2%D8ab%FF"
result = urllib.parse.quote(given)
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
# Encoding argument should raise type error on bytes input
self.assertRaises(TypeError, urllib.parse.quote, given,
encoding="latin-1")
# quote_from_bytes should work the same
result = urllib.parse.quote_from_bytes(given)
self.assertEqual(expect, result,
"using quote_from_bytes(): %r != %r"
% (expect, result))
def test_quote_with_unicode(self):
# Characters in Latin-1 range, encoded by default in UTF-8
given = "\xa2\xd8ab\xff"
expect = "%C2%A2%C3%98ab%C3%BF"
result = urllib.parse.quote(given)
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
# Characters in Latin-1 range, encoded by with None (default)
result = urllib.parse.quote(given, encoding=None, errors=None)
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
# Characters in Latin-1 range, encoded with Latin-1
given = "\xa2\xd8ab\xff"
expect = "%A2%D8ab%FF"
result = urllib.parse.quote(given, encoding="latin-1")
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
# Characters in BMP, encoded by default in UTF-8
given = "\u6f22\u5b57" # "Kanji"
expect = "%E6%BC%A2%E5%AD%97"
result = urllib.parse.quote(given)
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
# Characters in BMP, encoded with Latin-1
given = "\u6f22\u5b57"
self.assertRaises(UnicodeEncodeError, urllib.parse.quote, given,
encoding="latin-1")
# Characters in BMP, encoded with Latin-1, with replace error handling
given = "\u6f22\u5b57"
expect = "%3F%3F" # "??"
result = urllib.parse.quote(given, encoding="latin-1",
errors="replace")
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
# Characters in BMP, Latin-1, with xmlcharref error handling
given = "\u6f22\u5b57"
expect = "%26%2328450%3B%26%2323383%3B" # "漢字"
result = urllib.parse.quote(given, encoding="latin-1",
errors="xmlcharrefreplace")
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
def test_quote_plus_with_unicode(self):
# Encoding (latin-1) test for quote_plus
given = "\xa2\xd8 \xff"
expect = "%A2%D8+%FF"
result = urllib.parse.quote_plus(given, encoding="latin-1")
self.assertEqual(expect, result,
"using quote_plus(): %r != %r" % (expect, result))
# Errors test for quote_plus
given = "ab\u6f22\u5b57 cd"
expect = "ab%3F%3F+cd"
result = urllib.parse.quote_plus(given, encoding="latin-1",
errors="replace")
self.assertEqual(expect, result,
"using quote_plus(): %r != %r" % (expect, result))
class UnquotingTests(unittest.TestCase):
"""Tests for unquote() and unquote_plus()
See the doc string for quoting_Tests for details on quoting and such.
"""
def test_unquoting(self):
# Make sure unquoting of all ASCII values works
escape_list = []
for num in range(128):
given = hexescape(chr(num))
expect = chr(num)
result = urllib.parse.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
result = urllib.parse.unquote_plus(given)
self.assertEqual(expect, result,
"using unquote_plus(): %r != %r" %
(expect, result))
escape_list.append(given)
escape_string = ''.join(escape_list)
del escape_list
result = urllib.parse.unquote(escape_string)
self.assertEqual(result.count('%'), 1,
"using unquote(): not all characters escaped: "
"%s" % result)
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, None)
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, ())
with support.check_warnings(('', BytesWarning), quiet=True):
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote, b'')
def test_unquoting_badpercent(self):
# Test unquoting on bad percent-escapes
given = '%xab'
expect = given
result = urllib.parse.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
given = '%x'
expect = given
result = urllib.parse.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
given = '%'
expect = given
result = urllib.parse.unquote(given)
self.assertEqual(expect, result, "using unquote(): %r != %r"
% (expect, result))
# unquote_to_bytes
given = '%xab'
expect = bytes(given, 'ascii')
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result, "using unquote_to_bytes(): %r != %r"
% (expect, result))
given = '%x'
expect = bytes(given, 'ascii')
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result, "using unquote_to_bytes(): %r != %r"
% (expect, result))
given = '%'
expect = bytes(given, 'ascii')
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result, "using unquote_to_bytes(): %r != %r"
% (expect, result))
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote_to_bytes, None)
self.assertRaises((TypeError, AttributeError), urllib.parse.unquote_to_bytes, ())
def test_unquoting_mixed_case(self):
# Test unquoting on mixed-case hex digits in the percent-escapes
given = '%Ab%eA'
expect = b'\xab\xea'
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result,
"using unquote_to_bytes(): %r != %r"
% (expect, result))
def test_unquoting_parts(self):
# Make sure unquoting works when have non-quoted characters
# interspersed
given = 'ab%sd' % hexescape('c')
expect = "abcd"
result = urllib.parse.unquote(given)
self.assertEqual(expect, result,
"using quote(): %r != %r" % (expect, result))
result = urllib.parse.unquote_plus(given)
self.assertEqual(expect, result,
"using unquote_plus(): %r != %r" % (expect, result))
def test_unquoting_plus(self):
# Test difference between unquote() and unquote_plus()
given = "are+there+spaces..."
expect = given
result = urllib.parse.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
expect = given.replace('+', ' ')
result = urllib.parse.unquote_plus(given)
self.assertEqual(expect, result,
"using unquote_plus(): %r != %r" % (expect, result))
def test_unquote_to_bytes(self):
given = 'br%C3%BCckner_sapporo_20050930.doc'
expect = b'br\xc3\xbcckner_sapporo_20050930.doc'
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result,
"using unquote_to_bytes(): %r != %r"
% (expect, result))
# Test on a string with unescaped non-ASCII characters
# (Technically an invalid URI; expect those characters to be UTF-8
# encoded).
result = urllib.parse.unquote_to_bytes("\u6f22%C3%BC")
expect = b'\xe6\xbc\xa2\xc3\xbc' # UTF-8 for "\u6f22\u00fc"
self.assertEqual(expect, result,
"using unquote_to_bytes(): %r != %r"
% (expect, result))
# Test with a bytes as input
given = b'%A2%D8ab%FF'
expect = b'\xa2\xd8ab\xff'
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result,
"using unquote_to_bytes(): %r != %r"
% (expect, result))
# Test with a bytes as input, with unescaped non-ASCII bytes
# (Technically an invalid URI; expect those bytes to be preserved)
given = b'%A2\xd8ab%FF'
expect = b'\xa2\xd8ab\xff'
result = urllib.parse.unquote_to_bytes(given)
self.assertEqual(expect, result,
"using unquote_to_bytes(): %r != %r"
% (expect, result))
def test_unquote_with_unicode(self):
# Characters in the Latin-1 range, encoded with UTF-8
given = 'br%C3%BCckner_sapporo_20050930.doc'
expect = 'br\u00fcckner_sapporo_20050930.doc'
result = urllib.parse.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# Characters in the Latin-1 range, encoded with None (default)
result = urllib.parse.unquote(given, encoding=None, errors=None)
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# Characters in the Latin-1 range, encoded with Latin-1
result = urllib.parse.unquote('br%FCckner_sapporo_20050930.doc',
encoding="latin-1")
expect = 'br\u00fcckner_sapporo_20050930.doc'
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# Characters in BMP, encoded with UTF-8
given = "%E6%BC%A2%E5%AD%97"
expect = "\u6f22\u5b57" # "Kanji"
result = urllib.parse.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# Decode with UTF-8, invalid sequence
given = "%F3%B1"
expect = "\ufffd" # Replacement character
result = urllib.parse.unquote(given)
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# Decode with UTF-8, invalid sequence, replace errors
result = urllib.parse.unquote(given, errors="replace")
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# Decode with UTF-8, invalid sequence, ignoring errors
given = "%F3%B1"
expect = ""
result = urllib.parse.unquote(given, errors="ignore")
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# A mix of non-ASCII and percent-encoded characters, UTF-8
result = urllib.parse.unquote("\u6f22%C3%BC")
expect = '\u6f22\u00fc'
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
# A mix of non-ASCII and percent-encoded characters, Latin-1
# (Note, the string contains non-Latin-1-representable characters)
result = urllib.parse.unquote("\u6f22%FC", encoding="latin-1")
expect = '\u6f22\u00fc'
self.assertEqual(expect, result,
"using unquote(): %r != %r" % (expect, result))
class urlencode_Tests(unittest.TestCase):
"""Tests for urlencode()"""
def help_inputtype(self, given, test_type):
"""Helper method for testing different input types.
'given' must lead to only the pairs:
* 1st, 1
* 2nd, 2
* 3rd, 3
Test cannot assume anything about order. Docs make no guarantee and
have possible dictionary input.
"""
expect_somewhere = ["1st=1", "2nd=2", "3rd=3"]
result = urllib.parse.urlencode(given)
for expected in expect_somewhere:
self.assertIn(expected, result,
"testing %s: %s not found in %s" %
(test_type, expected, result))
self.assertEqual(result.count('&'), 2,
"testing %s: expected 2 '&'s; got %s" %
(test_type, result.count('&')))
amp_location = result.index('&')
on_amp_left = result[amp_location - 1]
on_amp_right = result[amp_location + 1]
self.assertTrue(on_amp_left.isdigit() and on_amp_right.isdigit(),
"testing %s: '&' not located in proper place in %s" %
(test_type, result))
self.assertEqual(len(result), (5 * 3) + 2, #5 chars per thing and amps
"testing %s: "
"unexpected number of characters: %s != %s" %
(test_type, len(result), (5 * 3) + 2))
def test_using_mapping(self):
# Test passing in a mapping object as an argument.
self.help_inputtype({"1st":'1', "2nd":'2', "3rd":'3'},
"using dict as input type")
def test_using_sequence(self):
# Test passing in a sequence of two-item sequences as an argument.
self.help_inputtype([('1st', '1'), ('2nd', '2'), ('3rd', '3')],
"using sequence of two-item tuples as input")
def test_quoting(self):
# Make sure keys and values are quoted using quote_plus()
given = {"&":"="}
expect = "%s=%s" % (hexescape('&'), hexescape('='))
result = urllib.parse.urlencode(given)
self.assertEqual(expect, result)
given = {"key name":"A bunch of pluses"}
expect = "key+name=A+bunch+of+pluses"
result = urllib.parse.urlencode(given)
self.assertEqual(expect, result)
def test_doseq(self):
# Test that passing True for 'doseq' parameter works correctly
given = {'sequence':['1', '2', '3']}
expect = "sequence=%s" % urllib.parse.quote_plus(str(['1', '2', '3']))
result = urllib.parse.urlencode(given)
self.assertEqual(expect, result)
result = urllib.parse.urlencode(given, True)
for value in given["sequence"]:
expect = "sequence=%s" % value
self.assertIn(expect, result)
self.assertEqual(result.count('&'), 2,
"Expected 2 '&'s, got %s" % result.count('&'))
def test_empty_sequence(self):
self.assertEqual("", urllib.parse.urlencode({}))
self.assertEqual("", urllib.parse.urlencode([]))
def test_nonstring_values(self):
self.assertEqual("a=1", urllib.parse.urlencode({"a": 1}))
self.assertEqual("a=None", urllib.parse.urlencode({"a": None}))
def test_nonstring_seq_values(self):
self.assertEqual("a=1&a=2", urllib.parse.urlencode({"a": [1, 2]}, True))
self.assertEqual("a=None&a=a",
urllib.parse.urlencode({"a": [None, "a"]}, True))
data = collections.OrderedDict([("a", 1), ("b", 1)])
self.assertEqual("a=a&a=b",
urllib.parse.urlencode({"a": data}, True))
def test_urlencode_encoding(self):
# ASCII encoding. Expect %3F with errors="replace'
given = (('\u00a0', '\u00c1'),)
expect = '%3F=%3F'
result = urllib.parse.urlencode(given, encoding="ASCII", errors="replace")
self.assertEqual(expect, result)
# Default is UTF-8 encoding.
given = (('\u00a0', '\u00c1'),)
expect = '%C2%A0=%C3%81'
result = urllib.parse.urlencode(given)
self.assertEqual(expect, result)
# Latin-1 encoding.
given = (('\u00a0', '\u00c1'),)
expect = '%A0=%C1'
result = urllib.parse.urlencode(given, encoding="latin-1")
self.assertEqual(expect, result)
def test_urlencode_encoding_doseq(self):
# ASCII Encoding. Expect %3F with errors="replace'
given = (('\u00a0', '\u00c1'),)
expect = '%3F=%3F'
result = urllib.parse.urlencode(given, doseq=True,
encoding="ASCII", errors="replace")
self.assertEqual(expect, result)
# ASCII Encoding. On a sequence of values.
given = (("\u00a0", (1, "\u00c1")),)
expect = '%3F=1&%3F=%3F'
result = urllib.parse.urlencode(given, True,
encoding="ASCII", errors="replace")
self.assertEqual(expect, result)
# Utf-8
given = (("\u00a0", "\u00c1"),)
expect = '%C2%A0=%C3%81'
result = urllib.parse.urlencode(given, True)
self.assertEqual(expect, result)
given = (("\u00a0", (42, "\u00c1")),)
expect = '%C2%A0=42&%C2%A0=%C3%81'
result = urllib.parse.urlencode(given, True)
self.assertEqual(expect, result)
# latin-1
given = (("\u00a0", "\u00c1"),)
expect = '%A0=%C1'
result = urllib.parse.urlencode(given, True, encoding="latin-1")
self.assertEqual(expect, result)
given = (("\u00a0", (42, "\u00c1")),)
expect = '%A0=42&%A0=%C1'
result = urllib.parse.urlencode(given, True, encoding="latin-1")
self.assertEqual(expect, result)
def test_urlencode_bytes(self):
given = ((b'\xa0\x24', b'\xc1\x24'),)
expect = '%A0%24=%C1%24'
result = urllib.parse.urlencode(given)
self.assertEqual(expect, result)
result = urllib.parse.urlencode(given, True)
self.assertEqual(expect, result)
# Sequence of values
given = ((b'\xa0\x24', (42, b'\xc1\x24')),)
expect = '%A0%24=42&%A0%24=%C1%24'
result = urllib.parse.urlencode(given, True)
self.assertEqual(expect, result)
def test_urlencode_encoding_safe_parameter(self):
# Send '$' (\x24) as safe character
# Default utf-8 encoding
given = ((b'\xa0\x24', b'\xc1\x24'),)
result = urllib.parse.urlencode(given, safe=":$")
expect = '%A0$=%C1$'
self.assertEqual(expect, result)
given = ((b'\xa0\x24', b'\xc1\x24'),)
result = urllib.parse.urlencode(given, doseq=True, safe=":$")
expect = '%A0$=%C1$'
self.assertEqual(expect, result)
# Safe parameter in sequence
given = ((b'\xa0\x24', (b'\xc1\x24', 0xd, 42)),)
expect = '%A0$=%C1$&%A0$=13&%A0$=42'
result = urllib.parse.urlencode(given, True, safe=":$")
self.assertEqual(expect, result)
# Test all above in latin-1 encoding
given = ((b'\xa0\x24', b'\xc1\x24'),)
result = urllib.parse.urlencode(given, safe=":$",
encoding="latin-1")
expect = '%A0$=%C1$'
self.assertEqual(expect, result)
given = ((b'\xa0\x24', b'\xc1\x24'),)
expect = '%A0$=%C1$'
result = urllib.parse.urlencode(given, doseq=True, safe=":$",
encoding="latin-1")
given = ((b'\xa0\x24', (b'\xc1\x24', 0xd, 42)),)
expect = '%A0$=%C1$&%A0$=13&%A0$=42'
result = urllib.parse.urlencode(given, True, safe=":$",
encoding="latin-1")
self.assertEqual(expect, result)
class Pathname_Tests(unittest.TestCase):
    """Test pathname2url() and url2pathname()"""

    def test_basic(self):
        # Make sure simple tests pass
        # A relative OS path and its URL form must round-trip unchanged.
        expected_path = os.path.join("parts", "of", "a", "path")
        expected_url = "parts/of/a/path"
        result = urllib.request.pathname2url(expected_path)
        self.assertEqual(expected_url, result,
                         "pathname2url() failed; %s != %s" %
                         (result, expected_url))
        result = urllib.request.url2pathname(expected_url)
        self.assertEqual(expected_path, result,
                         "url2pathame() failed; %s != %s" %
                         (result, expected_path))

    def test_quoting(self):
        # Test automatic quoting and unquoting works for pathnam2url() and
        # url2pathname() respectively
        # '=' is not safe in a URL path segment, so it must be quoted ...
        given = os.path.join("needs", "quot=ing", "here")
        expect = "needs/%s/here" % urllib.parse.quote("quot=ing")
        result = urllib.request.pathname2url(given)
        self.assertEqual(expect, result,
                         "pathname2url() failed; %s != %s" %
                         (expect, result))
        # ... and url2pathname() must reverse that quoting exactly.
        expect = given
        result = urllib.request.url2pathname(result)
        self.assertEqual(expect, result,
                         "url2pathname() failed; %s != %s" %
                         (expect, result))
        # Spaces are quoted on the way to a URL ...
        given = os.path.join("make sure", "using_quote")
        expect = "%s/using_quote" % urllib.parse.quote("make sure")
        result = urllib.request.pathname2url(given)
        self.assertEqual(expect, result,
                         "pathname2url() failed; %s != %s" %
                         (expect, result))
        # ... but '+' must NOT be treated as an encoded space on the way
        # back (unquote, not unquote_plus, semantics).
        given = "make+sure/using_unquote"
        expect = os.path.join("make+sure", "using_unquote")
        result = urllib.request.url2pathname(given)
        self.assertEqual(expect, result,
                         "url2pathname() failed; %s != %s" %
                         (expect, result))

    @unittest.skipUnless(sys.platform == 'win32',
                         'test specific to the urllib.url2path function.')
    def test_ntpath(self):
        # Windows drive-letter URLs: both the '|' and ':' spellings of the
        # drive separator must map to a 'C:\' style path.
        given = ('/C:/', '///C:/', '/C|//')
        expect = 'C:\\'
        for url in given:
            result = urllib.request.url2pathname(url)
            self.assertEqual(expect, result,
                             'urllib.request..url2pathname() failed; %s != %s' %
                             (expect, result))
        given = '///C|/path'
        expect = 'C:\\path'
        result = urllib.request.url2pathname(given)
        self.assertEqual(expect, result,
                         'urllib.request.url2pathname() failed; %s != %s' %
                         (expect, result))
class Utility_Tests(unittest.TestCase):
    """Testcase to test the various utility functions in the urllib."""

    def test_splitpasswd(self):
        """Some of password examples are not sensible, but it is added to
        confirming to RFC2617 and addressing issue4675.
        """
        # splitpasswd() splits only on the FIRST ':'; everything after it,
        # including further ':' characters and control characters, belongs
        # to the password part.
        # NOTE(review): urllib.parse.splitpasswd is undocumented and
        # deprecated in modern Python -- confirm availability before reuse.
        self.assertEqual(('user', 'ab'),urllib.parse.splitpasswd('user:ab'))
        self.assertEqual(('user', 'a\nb'),urllib.parse.splitpasswd('user:a\nb'))
        self.assertEqual(('user', 'a\tb'),urllib.parse.splitpasswd('user:a\tb'))
        self.assertEqual(('user', 'a\rb'),urllib.parse.splitpasswd('user:a\rb'))
        self.assertEqual(('user', 'a\fb'),urllib.parse.splitpasswd('user:a\fb'))
        self.assertEqual(('user', 'a\vb'),urllib.parse.splitpasswd('user:a\vb'))
        self.assertEqual(('user', 'a:b'),urllib.parse.splitpasswd('user:a:b'))
        self.assertEqual(('user', 'a b'),urllib.parse.splitpasswd('user:a b'))
        self.assertEqual(('user 2', 'ab'),urllib.parse.splitpasswd('user 2:ab'))
        self.assertEqual(('user+1', 'a+b'),urllib.parse.splitpasswd('user+1:a+b'))

    def test_thishost(self):
        """Test the urllib.request.thishost utility function returns a tuple"""
        # Only the type is asserted: the host name/addresses themselves are
        # machine-specific.
        self.assertIsInstance(urllib.request.thishost(), tuple)
class URLopener_Tests(unittest.TestCase):
    """Testcase to test the open method of URLopener class."""

    def test_quoted_open(self):
        # Subclass with an open_spam() handler so that open() of a
        # 'spam://' URL simply returns the processed URL string, letting us
        # inspect the quoting that open() applied.
        class DummyURLopener(urllib.request.URLopener):
            def open_spam(self, url):
                return url
        # URLopener is deprecated; the DeprecationWarning is expected and
        # asserted here rather than suppressed.
        with support.check_warnings(
                ('DummyURLopener style of invoking requests is deprecated.',
                DeprecationWarning)):
            # The space in the path must be quoted to %20.
            self.assertEqual(DummyURLopener().open(
                'spam://example/ /'),'//example/%20/')

            # test the safe characters are not quoted by urlopen
            self.assertEqual(DummyURLopener().open(
                "spam://c:|windows%/:=&?~#+!$,;'@()*[]|/path/"),
                "//c:|windows%/:=&?~#+!$,;'@()*[]|/path/")
# Just commented them out.
# Can't really tell why keep failing in windows and sparc.
# Everywhere else they work ok, but on those machines, sometimes
# fail in one of the tests, sometimes in other. I have a linux, and
# the tests go ok.
# If anybody has one of the problematic environments, please help!
# . Facundo
#
# def server(evt):
# import socket, time
# serv = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# serv.settimeout(3)
# serv.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# serv.bind(("", 9093))
# serv.listen(5)
# try:
# conn, addr = serv.accept()
# conn.send("1 Hola mundo\n")
# cantdata = 0
# while cantdata < 13:
# data = conn.recv(13-cantdata)
# cantdata += len(data)
# time.sleep(.3)
# conn.send("2 No more lines\n")
# conn.close()
# except socket.timeout:
# pass
# finally:
# serv.close()
# evt.set()
#
# class FTPWrapperTests(unittest.TestCase):
#
# def setUp(self):
# import ftplib, time, threading
# ftplib.FTP.port = 9093
# self.evt = threading.Event()
# threading.Thread(target=server, args=(self.evt,)).start()
# time.sleep(.1)
#
# def tearDown(self):
# self.evt.wait()
#
# def testBasic(self):
# # connects
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
# ftp.close()
#
# def testTimeoutNone(self):
# # global default timeout is ignored
# import socket
# self.assertIsNone(socket.getdefaulttimeout())
# socket.setdefaulttimeout(30)
# try:
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
# finally:
# socket.setdefaulttimeout(None)
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
#
# def testTimeoutDefault(self):
# # global default timeout is used
# import socket
# self.assertIsNone(socket.getdefaulttimeout())
# socket.setdefaulttimeout(30)
# try:
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [])
# finally:
# socket.setdefaulttimeout(None)
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
#
# def testTimeoutValue(self):
# ftp = urllib.ftpwrapper("myuser", "mypass", "localhost", 9093, [],
# timeout=30)
# self.assertEqual(ftp.ftp.sock.gettimeout(), 30)
# ftp.close()
class RequestTests(unittest.TestCase):
    """Unit tests for urllib.request.Request."""

    def test_default_values(self):
        # Without a body the default method is GET; supplying data (even an
        # empty mapping) flips the default to POST.
        request = urllib.request.Request("http://www.python.org")
        self.assertEqual(request.get_method(), 'GET')
        request = urllib.request.Request("http://www.python.org", {})
        self.assertEqual(request.get_method(), 'POST')

    def test_with_method_arg(self):
        # An explicit method= argument wins over the data-based default,
        # both with and without a request body.
        make = urllib.request.Request
        for args in (("http://www.python.org",),
                     ("http://www.python.org", {})):
            request = make(*args, method='HEAD')
            self.assertEqual(request.method, 'HEAD')
            self.assertEqual(request.get_method(), 'HEAD')
        # Assigning .method after construction also overrides get_method().
        request = make("http://www.python.org", method='GET')
        self.assertEqual(request.get_method(), 'GET')
        request.method = 'HEAD'
        self.assertEqual(request.get_method(), 'HEAD')
class URL2PathNameTests(unittest.TestCase):
    """url2pathname(): URL -> Windows path conversions."""

    def test_converting_drive_letter(self):
        # Both the '|' and ':' spellings of the drive separator map to 'C:'.
        for url, path in (("///C|", 'C:'),
                          ("///C:", 'C:'),
                          ("///C|/", 'C:\\')):
            self.assertEqual(url2pathname(url), path)

    def test_converting_when_no_drive_letter(self):
        # cannot end a raw string in \
        # Each leading URL slash becomes a leading backslash in the path.
        self.assertEqual(url2pathname("///C/test/"), r'\\\C\test' '\\')
        self.assertEqual(url2pathname("////C/test/"), r'\\C\test' '\\')

    def test_simple_compare(self):
        self.assertEqual(url2pathname("///C|/foo/bar/spam.foo"),
                         r'C:\foo\bar\spam.foo')

    def test_non_ascii_drive_letter(self):
        # Only a single ASCII letter is a valid drive; anything else raises.
        self.assertRaises(IOError, url2pathname, "///\u00e8|/")

    def test_roundtrip_url2pathname(self):
        # Converting to a URL and back must reproduce the original path.
        for path in ('C:',
                     r'\\\C\test\\',
                     r'C:\foo\bar\spam.foo'):
            self.assertEqual(url2pathname(pathname2url(path)), path)
class PathName2URLTests(unittest.TestCase):
    """pathname2url(): Windows path -> URL conversions."""

    def test_converting_drive_letter(self):
        # With or without a trailing backslash a bare drive maps to ///C:.
        self.assertEqual(pathname2url("C:"), '///C:')
        self.assertEqual(pathname2url("C:\\"), '///C:')

    def test_converting_when_no_drive_letter(self):
        # Each leading backslash becomes a leading slash in the URL.
        cases = ((r"\\\folder\test" "\\", '/////folder/test/'),
                 (r"\\folder\test" "\\", '////folder/test/'),
                 (r"\folder\test" "\\", '/folder/test/'))
        for pathname, url in cases:
            self.assertEqual(pathname2url(pathname), url)

    def test_simple_compare(self):
        self.assertEqual(pathname2url(r'C:\foo\bar\spam.foo'),
                         "///C:/foo/bar/spam.foo" )

    def test_long_drive_letter(self):
        # A multi-letter "drive" is not a valid DOS drive and must raise.
        self.assertRaises(IOError, pathname2url, "XX:\\")

    def test_roundtrip_pathname2url(self):
        # Converting to a path and back must reproduce the original URL.
        for url in ('///C:',
                    '/////folder/test/',
                    '///C:/foo/bar/spam.foo'):
            self.assertEqual(pathname2url(url2pathname(url)), url)
if __name__ == '__main__':
unittest.main()
| lgpl-3.0 |
ict-felix/stack | vt_manager_kvm/src/python/vt_manager_kvm/communication/gapi3communication.py | 1 | 2666 | from django.http import *
import os, sys, logging
from vt_manager_kvm.common.rpc4django import rpcmethod
from vt_manager_kvm.common.rpc4django import *
from vt_manager_kvm.communication.geni.v3.configurators.handlerconfigurator import HandlerConfigurator
#URL_NAME
GAPI3_URL = "gapi"
# Parameter Types
CREDENTIALS_TYPE = 'array' # of strings
OPTIONS_TYPE = 'struct'
RSPEC_TYPE = 'string'
VERSION_TYPE = 'struct'
URN_TYPE = 'string'
SUCCESS_TYPE = 'boolean'
STATUS_TYPE = 'struct'
TIME_TYPE = 'string'
URNS_TYPE = 'string'
ACTION_TYPE = 'string'
handler = HandlerConfigurator.configure_handler()
@rpcmethod(signature=['string', 'string'], url_name=GAPI3_URL)
def ping(challenge):
    """Liveness check: echo the caller's challenge string back unchanged."""
    return challenge
@rpcmethod(signature=[VERSION_TYPE], url_name=GAPI3_URL)
def GetVersion(options={}):
    """GENI AM API v3 GetVersion: delegate to the configured handler."""
    # NOTE(review): mutable default argument; harmless here since the dict
    # is only forwarded, but confirm the handler never mutates it.
    return handler.GetVersion(options)
@rpcmethod(signature=[RSPEC_TYPE, CREDENTIALS_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def ListResources(credentials, options):
    """GENI AM API v3 ListResources: delegate to the configured handler."""
    return handler.ListResources(credentials, options)
@rpcmethod(signature=[RSPEC_TYPE, URNS_TYPE, CREDENTIALS_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def Describe(urns, credentials, options):
    """GENI AM API v3 Describe for the given URNs: delegate to the handler."""
    return handler.Describe(urns, credentials, options)
@rpcmethod(signature=[SUCCESS_TYPE, URN_TYPE, CREDENTIALS_TYPE, RSPEC_TYPE ,OPTIONS_TYPE], url_name=GAPI3_URL)
def Allocate(slice_urn, credentials, rspec, options):
    """GENI AM API v3 Allocate for one slice URN: delegate to the handler."""
    return handler.Allocate(slice_urn, credentials, rspec, options)
@rpcmethod(signature=[RSPEC_TYPE, URNS_TYPE, CREDENTIALS_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def Provision(urns, creds, options):
    """GENI AM API v3 Provision for the given URNs: delegate to the handler."""
    return handler.Provision(urns, creds, options)
@rpcmethod(signature=[SUCCESS_TYPE, URNS_TYPE, CREDENTIALS_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def Delete(urns, creds, options):
    """GENI AM API v3 Delete for the given URNs: delegate to the handler.

    Fix: the advertised XML-RPC signature omitted OPTIONS_TYPE even though
    the function takes an options struct and every sibling method declares
    one; the introspection signature now matches the actual parameters.
    """
    return handler.Delete(urns, creds, options)
@rpcmethod(signature=[SUCCESS_TYPE, URNS_TYPE, CREDENTIALS_TYPE, ACTION_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def PerformOperationalAction(urns, creds, action, options):
    """GENI AM API v3 PerformOperationalAction: delegate to the handler."""
    return handler.PerformOperationalAction(urns, creds, action, options)
@rpcmethod(signature=[STATUS_TYPE, URNS_TYPE, CREDENTIALS_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def Status(urns, creds, options):
    """GENI AM API v3 Status for the given URNs: delegate to the handler."""
    return handler.Status(urns, creds, options)
@rpcmethod(signature=[SUCCESS_TYPE, URNS_TYPE, CREDENTIALS_TYPE, TIME_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def Renew(urns, creds, expiration_time, options):
    """GENI AM API v3 Renew until *expiration_time*: delegate to the handler."""
    return handler.Renew(urns, creds, expiration_time, options)
@rpcmethod(signature=[SUCCESS_TYPE, URN_TYPE, CREDENTIALS_TYPE, OPTIONS_TYPE], url_name=GAPI3_URL)
def Shutdown(slice_urn, credentials, options):
    """GENI AM API v3 Shutdown of one slice: delegate to the handler."""
    return handler.Shutdown(slice_urn, credentials, options)
| apache-2.0 |
eEcoLiDAR/eEcoLiDAR | laserchicken/feature_extractor/range_feature_extractor.py | 1 | 1277 | import numpy as np
from laserchicken.feature_extractor.base_feature_extractor import FeatureExtractor
from laserchicken.keys import point
class RangeFeatureExtractor(FeatureExtractor):
    """Calculates the max, min and range on the z axis."""

    # Fallback statistics when a neighborhood exists but selects no data.
    DEFAULT_MAX = float('NaN')
    DEFAULT_MIN = float('NaN')

    def __init__(self, data_key='z'):
        # Attribute of the point cloud to aggregate (e.g. 'z').
        self.data_key = data_key

    @classmethod
    def requires(cls):
        """This extractor depends on no previously computed features."""
        return []

    def provides(self):
        """Return the generated feature names, e.g. ['max_z', 'min_z', 'range_z']."""
        base_names = ['max_', 'min_', 'range_']
        return [base + str(self.data_key) for base in base_names]

    def extract(self, point_cloud, neighborhoods, target_point_cloud, target_indices, volume_description):
        """Compute (max, min, range) per neighborhood; result is transposed
        so each of the three features forms one row."""
        return np.array([self._extract_one(point_cloud, neighborhood)
                         for neighborhood in neighborhoods]).T

    def _extract_one(self, source_point_cloud, neighborhood):
        """Return (max, min, range) of ``data_key`` over one neighborhood."""
        if not neighborhood:
            # No neighbor indices at all: every statistic is undefined.
            # Fix: np.nan instead of np.NaN -- the upper-case alias was
            # removed in NumPy 2.0.
            return np.nan, np.nan, np.nan
        source_data = source_point_cloud[point][self.data_key]['data'][neighborhood]
        if len(source_data) > 0:
            max_z = np.max(source_data)
            min_z = np.min(source_data)
        else:
            max_z = self.DEFAULT_MAX
            min_z = self.DEFAULT_MIN
        # NaN - NaN propagates NaN, so the empty case stays consistent.
        range_z = max_z - min_z
        return max_z, min_z, range_z
| apache-2.0 |
cherusk/ansible | lib/ansible/modules/cloud/google/gcspanner.py | 66 | 10214 | #!/usr/bin/python
# Copyright 2017 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcspanner
version_added: "2.3"
short_description: Create and Delete Instances/Databases on Spanner.
description:
- Create and Delete Instances/Databases on Spanner.
See U(https://cloud.google.com/spanner/docs) for an overview.
requirements:
- "python >= 2.6"
- "google-auth >= 0.5.0"
- "google-cloud-spanner >= 0.23.0"
notes:
- Changing the configuration on an existing instance is not supported.
author:
- "Tom Melendez (@supertom) <tom@supertom.com>"
options:
configuration:
description:
- Configuration the instance should use. Examples are us-central1, asia-east1 and europe-west1.
required: True
instance_id:
description:
- GCP spanner instance name.
required: True
database_name:
description:
- Name of database contained on the instance.
required: False
force_instance_delete:
description:
- To delete an instance, this argument must exist and be true (along with state being equal to absent).
required: False
default: False
instance_display_name:
description:
- Name of Instance to display. If not specified, instance_id will be used instead.
required: False
node_count:
description:
- Number of nodes in the instance. If not specified while creating an instance,
node_count will be set to 1.
required: False
state:
description: State of the instance or database (absent, present). Applies to the most granular
resource. If a database_name is specified we remove it. If only instance_id
is specified, that is what is removed.
required: False
default: "present"
'''
EXAMPLES = '''
# Create instance.
gcspanner:
instance_id: "{{ instance_id }}"
configuration: "{{ configuration }}"
state: present
node_count: 1
# Create database.
gcspanner:
instance_id: "{{ instance_id }}"
configuration: "{{ configuration }}"
database_name: "{{ database_name }}"
state: present
# Delete instance (and all databases)
gcspanner:
instance_id: "{{ instance_id }}"
configuration: "{{ configuration }}"
state: absent
force_instance_delete: yes
'''
RETURN = '''
state:
description: The state of the instance or database. Value will be either 'absent' or 'present'.
returned: Always
type: str
sample: "present"
database_name:
description: Name of database.
returned: When database name is specified
type: str
sample: "mydatabase"
instance_id:
description: Name of instance.
returned: Always
type: str
sample: "myinstance"
previous_values:
description: List of dictionaries containing previous values prior to update.
returned: When an instance update has occurred and a field has been modified.
type: dict
sample: "'previous_values': { 'instance': { 'instance_display_name': 'my-instance', 'node_count': 1 } }"
updated:
description: Boolean field to denote an update has occurred.
returned: When an update has occurred.
type: bool
sample: True
'''
try:
from ast import literal_eval
HAS_PYTHON26 = True
except ImportError:
HAS_PYTHON26 = False
try:
from google.cloud import spanner
from google.gax.errors import GaxError
HAS_GOOGLE_CLOUD_SPANNER = True
except ImportError as e:
HAS_GOOGLE_CLOUD_SPANNER = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.gcp import check_min_pkg_version, get_google_cloud_credentials
CLOUD_CLIENT = 'google-cloud-spanner'
CLOUD_CLIENT_MINIMUM_VERSION = '0.23.0'
CLOUD_CLIENT_USER_AGENT = 'ansible-spanner-0.1'
def get_spanner_configuration_name(config_name, project_name):
    """Build the fully-qualified Spanner instance-config resource name.

    Example: ('us-central1', 'my-proj') ->
    'projects/my-proj/instanceConfigs/regional-us-central1'
    """
    return 'projects/%s/instanceConfigs/regional-%s' % (project_name,
                                                        config_name)
def instance_update(instance):
    """
    Call update method on spanner client.

    Note: A ValueError exception is thrown despite the client succeeding.
    So, we validate the node_count and instance_display_name parameters and then
    ignore the ValueError exception.

    :param instance: a Spanner instance object
    :type instance: class `google.cloud.spanner.Instance`
    :returns True on success, raises ValueError on type error.
    :rtype ``bool``
    """
    errmsg = ''
    if not isinstance(instance.node_count, int):
        errmsg = 'node_count must be an integer %s (%s)' % (
            instance.node_count, type(instance.node_count))
    # BUG FIX: the original tested isinstance(..., basestring), a Python 2
    # only name that raises NameError on Python 3; use str instead.
    if instance.display_name and not isinstance(instance.display_name, str):
        errmsg = 'instance_display_name must be an string %s (%s)' % (
            instance.display_name, type(instance.display_name))
    if errmsg:
        raise ValueError(errmsg)
    try:
        instance.update()
    except ValueError:
        # The ValueError here is the one we 'expect'; see docstring.
        pass
    return True
def main():
    """Ansible entry point: reconcile Spanner instance/database state.

    State handling is hierarchical: when ``database_name`` is given it is
    the most granular resource and is what gets created or dropped;
    otherwise the instance itself is created, updated or deleted.
    """
    module = AnsibleModule(argument_spec=dict(
        instance_id=dict(type='str', required=True),
        state=dict(choices=['absent', 'present'], default='present'),
        database_name=dict(type='str', default=None),
        configuration=dict(type='str', required=True),
        node_count=dict(type='int'),
        instance_display_name=dict(type='str', default=None),
        force_instance_delete=dict(type='bool', default=False),
        service_account_email=dict(),
        credentials_file=dict(),
        project_id=dict(), ), )
    # Dependency checks: fail the task early with an actionable message.
    if not HAS_PYTHON26:
        module.fail_json(
            msg="GCE module requires python's 'ast' module, python v2.6+")
    if not HAS_GOOGLE_CLOUD_SPANNER:
        module.fail_json(msg="Please install google-cloud-spanner.")
    if not check_min_pkg_version(CLOUD_CLIENT, CLOUD_CLIENT_MINIMUM_VERSION):
        module.fail_json(msg="Please install %s client version %s" %
                         (CLOUD_CLIENT, CLOUD_CLIENT_MINIMUM_VERSION))
    # Copy the parameters we act on (also echoed back in the result).
    mod_params = {}
    mod_params['state'] = module.params.get('state')
    mod_params['instance_id'] = module.params.get('instance_id')
    mod_params['database_name'] = module.params.get('database_name')
    mod_params['configuration'] = module.params.get('configuration')
    mod_params['node_count'] = module.params.get('node_count', None)
    mod_params['instance_display_name'] = module.params.get('instance_display_name')
    mod_params['force_instance_delete'] = module.params.get('force_instance_delete')
    creds, params = get_google_cloud_credentials(module)
    spanner_client = spanner.Client(project=params['project_id'],
                                    credentials=creds,
                                    user_agent=CLOUD_CLIENT_USER_AGENT)
    changed = False
    json_output = {}
    # Bind local handles; no API calls are made until exists()/create() etc.
    i = None
    if mod_params['instance_id']:
        config_name = get_spanner_configuration_name(
            mod_params['configuration'], params['project_id'])
        i = spanner_client.instance(mod_params['instance_id'],
                                    configuration_name=config_name)
    d = None
    if mod_params['database_name']:
        # TODO(supertom): support DDL
        ddl_statements = ''
        d = i.database(mod_params['database_name'], ddl_statements)
    if mod_params['state'] == 'absent':
        # Remove the most granular resource. If database is specified
        # we remove it. If only instance is specified, that is what is removed.
        if d is not None and d.exists():
            d.drop()
            changed = True
        else:
            if i.exists():
                # Deleting an instance is destructive, so it must be
                # explicitly confirmed via force_instance_delete.
                if mod_params['force_instance_delete']:
                    i.delete()
                else:
                    module.fail_json(
                        msg=(("Cannot delete Spanner instance: "
                              "'force_instance_delete' argument not specified")))
                changed = True
    elif mod_params['state'] == 'present':
        if not i.exists():
            # Instance missing: create it (node_count defaults to 1).
            i = spanner_client.instance(mod_params['instance_id'],
                                        configuration_name=config_name,
                                        display_name=mod_params['instance_display_name'],
                                        node_count=mod_params['node_count'] or 1)
            i.create()
            changed = True
        else:
            # update instance
            # Reload current state, diff the mutable fields and remember the
            # previous values for the module's return payload.
            i.reload()
            inst_prev_vals = {}
            if i.display_name != mod_params['instance_display_name']:
                inst_prev_vals['instance_display_name'] = i.display_name
                i.display_name = mod_params['instance_display_name']
            if mod_params['node_count']:
                if i.node_count != mod_params['node_count']:
                    inst_prev_vals['node_count'] = i.node_count
                    i.node_count = mod_params['node_count']
            if inst_prev_vals:
                changed = instance_update(i)
                json_output['updated'] = changed
                json_output['previous_values'] = {'instance': inst_prev_vals}
        if d:
            # Ensure the requested database exists on the instance.
            if not d.exists():
                d.create()
                d.reload()
                changed = True
    json_output['changed'] = changed
    json_output.update(mod_params)
    module.exit_json(**json_output)
| gpl-3.0 |
hbrunn/OpenUpgrade | addons/resource/tests/__init__.py | 121 | 1146 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2013-TODAY OpenERP S.A. <http://www.openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.addons.resource.tests import test_resource
checks = [
test_resource,
]
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
aronsky/home-assistant | homeassistant/components/sensor/octoprint.py | 4 | 4343 | """
Support for monitoring OctoPrint sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.octoprint/
"""
import logging
import requests
from homeassistant.components.octoprint import (SENSOR_TYPES,
DOMAIN as COMPONENT_DOMAIN)
from homeassistant.const import (TEMP_CELSIUS)
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['octoprint']
NOTIFICATION_ID = 'octoprint_notification'
NOTIFICATION_TITLE = 'OctoPrint sensor setup error'
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the available OctoPrint sensors."""
    # This platform is only set up via discovery from the octoprint
    # component; direct configuration is ignored.
    if discovery_info is None:
        return
    name = discovery_info['name']
    base_url = discovery_info['base_url']
    monitored_conditions = discovery_info['sensors']
    octoprint_api = hass.data[COMPONENT_DOMAIN][base_url]
    tools = octoprint_api.get_tools()
    # Temperature sensors need the printer's tool list; warn the user when
    # the printer is offline and no tools could be queried.
    if "Temperatures" in monitored_conditions:
        if not tools:
            hass.components.persistent_notification.create(
                'Your printer appears to be offline.<br />'
                'If you do not want to have your printer on <br />'
                ' at all times, and you would like to monitor <br /> '
                'temperatures, please add <br />'
                'bed and/or number_of_tools to your config <br />'
                'and restart.',
                title=NOTIFICATION_TITLE,
                notification_id=NOTIFICATION_ID)
    devices = []
    # Each tool gets one sensor for its current ("actual") and one for its
    # requested ("target") temperature.
    types = ["actual", "target"]
    for octo_type in monitored_conditions:
        if octo_type == "Temperatures":
            for tool in tools:
                for temp_type in types:
                    new_sensor = OctoPrintSensor(
                        octoprint_api, temp_type, temp_type, name,
                        SENSOR_TYPES[octo_type][3], SENSOR_TYPES[octo_type][0],
                        SENSOR_TYPES[octo_type][1], tool)
                    devices.append(new_sensor)
        else:
            # SENSOR_TYPES row indices map, per OctoPrintSensor's signature,
            # to: 0=endpoint, 1=group, 2=sensor key, 3=unit, 4=icon.
            new_sensor = OctoPrintSensor(
                octoprint_api, octo_type, SENSOR_TYPES[octo_type][2],
                name, SENSOR_TYPES[octo_type][3], SENSOR_TYPES[octo_type][0],
                SENSOR_TYPES[octo_type][1], None, SENSOR_TYPES[octo_type][4])
            devices.append(new_sensor)
    add_entities(devices, True)
class OctoPrintSensor(Entity):
    """Representation of an OctoPrint sensor."""

    def __init__(self, api, condition, sensor_type, sensor_name, unit,
                 endpoint, group, tool=None, icon=None):
        """Initialize a new OctoPrint sensor."""
        self.sensor_name = sensor_name
        # Tool-bound sensors (temperatures) embed the tool name so each
        # tool/condition pair gets a unique entity name.
        if tool is None:
            self._name = '{} {}'.format(sensor_name, condition)
        else:
            self._name = '{} {} {} {}'.format(
                sensor_name, condition, tool, 'temp')
        self.sensor_type = sensor_type
        self.api = api
        self._state = None
        self._unit_of_measurement = unit
        # endpoint/group/tool select which API value update() reads.
        self.api_endpoint = endpoint
        self.api_group = group
        self.api_tool = tool
        self._icon = icon
        _LOGGER.debug("Created OctoPrint sensor %r", self)

    @property
    def name(self):
        """Return the name of the sensor."""
        return self._name

    @property
    def state(self):
        """Return the state of the sensor."""
        sensor_unit = self.unit_of_measurement
        # Numeric readings (temperatures, percentages) are normalized and
        # rounded for display.
        if sensor_unit in (TEMP_CELSIUS, "%"):
            # API sometimes returns null and not 0
            if self._state is None:
                self._state = 0
            return round(self._state, 2)
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._unit_of_measurement

    def update(self):
        """Update state of sensor."""
        try:
            self._state = self.api.update(
                self.sensor_type, self.api_endpoint, self.api_group,
                self.api_tool)
        except requests.exceptions.ConnectionError:
            # Error calling the api, already logged in api.update()
            return

    @property
    def icon(self):
        """Icon to use in the frontend."""
        return self._icon
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.