repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
lgeiger/ide-python | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/jedi/__main__.py | 28 | 1427 | import sys
from os.path import join, dirname, abspath, isdir
def _start_linter():
    """Run jedi's static analysis over the paths given on the command line.

    This is a pre-alpha API. You're not supposed to use it at all, except for
    testing. It will very likely change.

    Flags recognised in ``sys.argv``: ``--debug`` turns on jedi's debug
    output, ``--pdb`` drops into a post-mortem debugger when the analysis
    of a path raises.  All other arguments after the sub-command are
    treated as files or directories to lint.
    """
    import jedi

    if '--debug' in sys.argv:
        jedi.set_debug_function()

    for path in sys.argv[2:]:
        if path.startswith('--'):
            continue
        if isdir(path):
            import fnmatch
            import os

            # Recursively collect every *.py file below the directory.
            paths = []
            for root, _dirnames, filenames in os.walk(path):
                for filename in fnmatch.filter(filenames, '*.py'):
                    paths.append(os.path.join(root, filename))
        else:
            paths = [path]

        try:
            for path in paths:
                # _analysis() is a private jedi API (hence the pre-alpha
                # warning above); each error is printed one per line.
                for error in jedi.Script(path=path)._analysis():
                    print(error)
        except Exception:
            if '--pdb' in sys.argv:
                import traceback
                traceback.print_exc()
                import pdb
                pdb.post_mortem()
            else:
                raise
# Sub-command dispatch for ``python -m jedi``.
if len(sys.argv) == 2 and sys.argv[1] == 'repl':
    # don't want to use __main__ only for repl yet, maybe we want to use it for
    # something else. So just use the keyword ``repl`` for now.
    # Prints the path of the REPL startup script -- presumably so a caller
    # can point PYTHONSTARTUP at it; confirm against the callers.
    print(join(dirname(abspath(__file__)), 'api', 'replstartup.py'))
elif len(sys.argv) > 1 and sys.argv[1] == 'linter':
    _start_linter()
| mit |
netman92/coala | tests/misc/ShellTest.py | 34 | 3511 | from contextlib import ExitStack
import os
import sys
from tempfile import NamedTemporaryFile
import unittest
from coalib.misc.Shell import run_interactive_shell_command, run_shell_command
class RunShellCommandTest(unittest.TestCase):
    """Integration tests for ``run_shell_command`` and
    ``run_interactive_shell_command`` from ``coalib.misc.Shell``.

    Each test launches a small helper script from the
    ``run_shell_command_testfiles`` directory next to this module, using
    the same interpreter that runs the test suite.
    """

    @staticmethod
    def construct_testscript_command(scriptname):
        # Build the (interpreter, script-path) command tuple so the tests
        # do not depend on the current working directory.
        return (sys.executable,
                os.path.join(os.path.dirname(os.path.realpath(__file__)),
                             'run_shell_command_testfiles',
                             scriptname))

    def test_run_interactive_shell_command(self):
        # Default pipe-backed streams: talk to the child over
        # p.stdin/p.stdout/p.stderr line by line.
        command = RunShellCommandTest.construct_testscript_command(
            'test_interactive_program.py')

        with run_interactive_shell_command(command) as p:
            self.assertEqual(p.stdout.readline(), 'test_program X\n')
            self.assertEqual(p.stdout.readline(), 'Type in a number:\n')
            p.stdin.write('33\n')
            p.stdin.flush()
            self.assertEqual(p.stdout.readline(), '33\n')
            self.assertEqual(p.stdout.readline(), 'Exiting program.\n')
            # Program has finished: nothing may be left on either stream.
            self.assertEqual(p.stdout.read(), '')
            self.assertEqual(p.stderr.read(), '')

    def test_run_interactive_shell_command_custom_streams(self):
        command = RunShellCommandTest.construct_testscript_command(
            'test_interactive_program.py')

        with ExitStack() as stack:
            # Substitute all three standard streams with temporary files.
            streams = {s: stack.enter_context(NamedTemporaryFile(mode='w+'))
                       for s in ['stdout', 'stderr', 'stdin']}

            with run_interactive_shell_command(command, **streams) as p:
                streams['stdin'].write('712\n')
                streams['stdin'].flush()
                streams['stdin'].seek(0)

            # Caller-supplied streams must not be closed by the context
            # manager when the process terminates.
            self.assertFalse(streams['stdout'].closed)
            self.assertFalse(streams['stderr'].closed)
            self.assertFalse(streams['stdin'].closed)

            streams['stdout'].seek(0)
            self.assertEqual(streams['stdout'].read(),
                             'test_program X\nType in a number:\n712\n'
                             'Exiting program.\n')

            streams['stderr'].seek(0)
            self.assertEqual(streams['stderr'].read(), '')

    def test_run_interactive_shell_command_kwargs_delegation(self):
        # Unknown keyword arguments are delegated to Popen, which rejects
        # them with a TypeError.
        with self.assertRaises(TypeError):
            with run_interactive_shell_command('some_command',
                                               weird_parameter=30):
                pass

    def test_run_shell_command_without_stdin(self):
        command = RunShellCommandTest.construct_testscript_command(
            'test_program.py')

        stdout, stderr = run_shell_command(command)

        expected = ('test_program Z\n'
                    'non-interactive mode.\n'
                    'Exiting...\n')
        self.assertEqual(stdout, expected)
        self.assertEqual(stderr, '')

    def test_run_shell_command_with_stdin(self):
        # The helper script sums the numbers it reads from stdin, or
        # reports invalid input on stderr.
        command = RunShellCommandTest.construct_testscript_command(
            'test_input_program.py')

        stdout, stderr = run_shell_command(command, '1 4 10 22')
        self.assertEqual(stdout, '37\n')
        self.assertEqual(stderr, '')

        stdout, stderr = run_shell_command(command, '1 p 5')
        self.assertEqual(stdout, '')
        self.assertEqual(stderr, 'INVALID INPUT\n')

    def test_run_shell_command_kwargs_delegation(self):
        # As above: surplus kwargs go straight to Popen and raise.
        with self.assertRaises(TypeError):
            run_shell_command('super-cool-command', weird_parameter2='abc')
| agpl-3.0 |
alb-i986/selenium | py/selenium/webdriver/ie/webdriver.py | 25 | 2186 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common import utils
from selenium.webdriver.remote.webdriver import WebDriver as RemoteWebDriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from .service import Service
DEFAULT_TIMEOUT = 30
DEFAULT_PORT = 0
DEFAULT_HOST = None
DEFAULT_LOG_LEVEL = None
DEFAULT_LOG_FILE = None
class WebDriver(RemoteWebDriver):
    """Drives Internet Explorer through a locally started IEDriverServer.

    A ``Service`` wrapping the IEDriverServer executable is started in the
    constructor and stopped again in :meth:`quit`.
    """

    def __init__(self, executable_path='IEDriverServer.exe', capabilities=None,
                 port=DEFAULT_PORT, timeout=DEFAULT_TIMEOUT, host=DEFAULT_HOST,
                 log_level=DEFAULT_LOG_LEVEL, log_file=DEFAULT_LOG_FILE):
        """Start the driver service and open a new IE session.

        :Args:
         - executable_path: path to the IEDriverServer binary.
         - capabilities: desired capabilities dict; defaults to
           ``DesiredCapabilities.INTERNETEXPLORER`` when None.
         - port: port for the service; 0 means pick a free port.
         - timeout: accepted for signature compatibility but not used in
           this constructor.
         - host, log_level, log_file: forwarded to the driver ``Service``.
        """
        self.port = port
        if self.port == 0:
            # Ask the OS for an unused port.
            self.port = utils.free_port()
        self.host = host
        self.log_level = log_level
        self.log_file = log_file

        self.iedriver = Service(
            executable_path,
            port=self.port,
            host=self.host,
            log_level=self.log_level,
            log_file=self.log_file)

        self.iedriver.start()

        if capabilities is None:
            capabilities = DesiredCapabilities.INTERNETEXPLORER

        RemoteWebDriver.__init__(
            self,
            command_executor='http://localhost:%d' % self.port,
            desired_capabilities=capabilities)
        self._is_remote = False

    def quit(self):
        """Quit the session and always stop the local driver service.

        ``iedriver.stop()`` runs in a ``finally`` block so the
        IEDriverServer process is not leaked when the remote quit call
        raises (previously an exception left the server running).
        """
        try:
            RemoteWebDriver.quit(self)
        finally:
            self.iedriver.stop()
| apache-2.0 |
ogenstad/ansible | lib/ansible/modules/cloud/amazon/iam_role.py | 17 | 19354 | #!/usr/bin/python
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Standard Ansible module metadata: schema version, maturity and who
# supports the module.
ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}
DOCUMENTATION = '''
---
module: iam_role
short_description: Manage AWS IAM roles
description:
- Manage AWS IAM roles
version_added: "2.3"
author: "Rob White (@wimnat)"
options:
path:
description:
- The path to the role. For more information about paths, see U(http://docs.aws.amazon.com/IAM/latest/UserGuide/reference_identifiers.html).
default: "/"
name:
description:
- The name of the role to create.
required: true
description:
description:
- Provide a description of the new role
version_added: "2.5"
assume_role_policy_document:
description:
- The trust relationship policy document that grants an entity permission to assume the role.
- "This parameter is required when C(state=present)."
managed_policy:
description:
- A list of managed policy ARNs or, since Ansible 2.4, a list of either managed policy ARNs or friendly names.
To embed an inline policy, use M(iam_policy). To remove existing policies, use an empty list item.
aliases: [ managed_policies ]
purge_policies:
description:
- Detaches any managed policies not listed in the "managed_policy" option. Set to false if you want to attach policies elsewhere.
type: bool
default: true
version_added: "2.5"
state:
description:
- Create or remove the IAM role
default: present
choices: [ present, absent ]
create_instance_profile:
description:
- Creates an IAM instance profile along with the role
type: bool
default: yes
version_added: "2.5"
requirements: [ botocore, boto3 ]
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
- name: Create a role with description
iam_role:
name: mynewrole
assume_role_policy_document: "{{ lookup('file','policy.json') }}"
description: This is My New Role
- name: "Create a role and attach a managed policy called 'PowerUserAccess'"
iam_role:
name: mynewrole
assume_role_policy_document: "{{ lookup('file','policy.json') }}"
managed_policy:
- arn:aws:iam::aws:policy/PowerUserAccess
- name: Keep the role created above but remove all managed policies
iam_role:
name: mynewrole
assume_role_policy_document: "{{ lookup('file','policy.json') }}"
managed_policy:
-
- name: Delete the role
iam_role:
name: mynewrole
assume_role_policy_document: "{{ lookup('file', 'policy.json') }}"
state: absent
'''
RETURN = '''
iam_role:
description: dictionary containing the IAM Role data
returned: success
type: complex
contains:
path:
description: the path to the role
type: string
returned: always
sample: /
role_name:
description: the friendly name that identifies the role
type: string
returned: always
sample: myrole
role_id:
description: the stable and unique string identifying the role
type: string
returned: always
sample: ABCDEFF4EZ4ABCDEFV4ZC
arn:
description: the Amazon Resource Name (ARN) specifying the role
type: string
returned: always
sample: "arn:aws:iam::1234567890:role/mynewrole"
create_date:
description: the date and time, in ISO 8601 date-time format, when the role was created
type: string
returned: always
sample: "2016-08-14T04:36:28+00:00"
assume_role_policy_document:
description: the policy that grants an entity permission to assume the role
type: string
returned: always
sample: {
'statement': [
{
'action': 'sts:AssumeRole',
'effect': 'Allow',
'principal': {
'service': 'ec2.amazonaws.com'
},
'sid': ''
}
],
'version': '2012-10-17'
}
attached_policies:
description: a list of dicts containing the name and ARN of the managed IAM policies attached to the role
type: list
returned: always
sample: [
{
'policy_arn': 'arn:aws:iam::aws:policy/PowerUserAccess',
'policy_name': 'PowerUserAccess'
}
]
'''
from ansible.module_utils._text import to_native
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import camel_dict_to_snake_dict, ec2_argument_spec, get_aws_connection_info, boto3_conn, sort_json_policy_dict
from ansible.module_utils.ec2 import HAS_BOTO3
import json
import traceback
try:
from botocore.exceptions import ClientError, BotoCoreError
except ImportError:
pass # caught by imported HAS_BOTO3
def compare_assume_role_policy_doc(current_policy_doc, new_policy_doc):
    """Return True if the role's current trust policy matches the requested one.

    ``current_policy_doc`` is the already-parsed document returned by IAM,
    ``new_policy_doc`` the JSON string supplied by the user.  Both are
    canonicalised with ``sort_json_policy_dict`` first, so key ordering
    differences do not register as a change.
    """
    # Direct comparison instead of the redundant if/return True/False form.
    return (sort_json_policy_dict(current_policy_doc) ==
            sort_json_policy_dict(json.loads(new_policy_doc)))
def compare_attached_role_policies(current_attached_policies, new_attached_policies):
    """Return True if the attached managed-policy ARNs already match.

    ``current_attached_policies`` is the list of policy dicts from IAM
    (each with a ``PolicyArn`` key).  ``new_attached_policies`` may be
    None, meaning "remove all policies"; in that case the comparison only
    succeeds when nothing is attached.

    Fixes a crash in the previous implementation: with an empty current
    list and ``new_attached_policies is None`` it fell through to
    ``set(None)`` and raised TypeError.
    """
    if new_attached_policies is None:
        return not current_attached_policies

    current_arns = [policy['PolicyArn'] for policy in current_attached_policies]
    return set(current_arns) == set(new_attached_policies)
def convert_friendly_names_to_arns(connection, module, policy_names):
    """Resolve a mixed list of policy ARNs/friendly names into ARNs.

    If every entry already looks like an ARN, the list is returned without
    any API call.  Otherwise all account policies are listed once and each
    name is mapped to its ARN; an unknown name fails the module.
    """
    # Fast path (clearer than the old ``not any([not ...])`` double
    # negation): nothing to resolve when everything is already an ARN.
    if all(policy.startswith('arn:') for policy in policy_names):
        return policy_names

    allpolicies = {}
    paginator = connection.get_paginator('list_policies')
    policies = paginator.paginate().build_full_result()['Policies']

    for policy in policies:
        # Index by both name and ARN so either form resolves.
        allpolicies[policy['PolicyName']] = policy['Arn']
        allpolicies[policy['Arn']] = policy['Arn']
    try:
        return [allpolicies[policy] for policy in policy_names]
    except KeyError as e:
        module.fail_json(msg="Couldn't find policy: " + str(e))
def remove_policies(connection, module, policies_to_remove, params):
    """Detach every given managed policy ARN from the role named in params.

    Fails the module on any API error; returns True once all detachments
    have been issued.
    """
    role_name = params['RoleName']
    for policy_arn in policies_to_remove:
        try:
            connection.detach_role_policy(RoleName=role_name, PolicyArn=policy_arn)
        except ClientError as e:
            module.fail_json(msg="Unable to detach policy {0} from {1}: {2}".format(policy_arn, role_name, to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        except BotoCoreError as e:
            module.fail_json(msg="Unable to detach policy {0} from {1}: {2}".format(policy_arn, role_name, to_native(e)),
                             exception=traceback.format_exc())
    return True
def create_or_update_role(connection, module):
    """Ensure the IAM role described by the module params exists and matches.

    Creates the role if missing, reconciles the assume-role (trust) policy,
    attaches/detaches managed policies, and - if requested - ensures an
    instance profile with the role's name exists and contains the role.
    Always exits the module via ``module.exit_json`` with the role data.
    """
    params = dict()
    params['Path'] = module.params.get('path')
    params['RoleName'] = module.params.get('name')
    params['AssumeRolePolicyDocument'] = module.params.get('assume_role_policy_document')
    if module.params.get('description') is not None:
        params['Description'] = module.params.get('description')
    managed_policies = module.params.get('managed_policy')
    create_instance_profile = module.params.get('create_instance_profile')
    if managed_policies:
        # Friendly policy names are allowed; resolve them to ARNs up front.
        managed_policies = convert_friendly_names_to_arns(connection, module, managed_policies)

    changed = False

    # Get role
    role = get_role(connection, module, params['RoleName'])

    # If role is None, create it
    if role is None:
        try:
            role = connection.create_role(**params)
            changed = True
        except ClientError as e:
            module.fail_json(msg="Unable to create role: {0}".format(to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        except BotoCoreError as e:
            module.fail_json(msg="Unable to create role: {0}".format(to_native(e)),
                             exception=traceback.format_exc())
    else:
        # Check Assumed Policy document
        if not compare_assume_role_policy_doc(role['AssumeRolePolicyDocument'], params['AssumeRolePolicyDocument']):
            try:
                # json round-trip normalises the user-supplied document.
                connection.update_assume_role_policy(RoleName=params['RoleName'], PolicyDocument=json.dumps(json.loads(params['AssumeRolePolicyDocument'])))
                changed = True
            except ClientError as e:
                module.fail_json(msg="Unable to update assume role policy for role {0}: {1}".format(params['RoleName'], to_native(e)),
                                 exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
            except BotoCoreError as e:
                module.fail_json(msg="Unable to update assume role policy for role {0}: {1}".format(params['RoleName'], to_native(e)),
                                 exception=traceback.format_exc())

    # managed_policies is None when the option was omitted entirely; in
    # that case attached policies are left alone.
    if managed_policies is not None:
        # Get list of current attached managed policies
        current_attached_policies = get_attached_policy_list(connection, module, params['RoleName'])
        current_attached_policies_arn_list = [policy['PolicyArn'] for policy in current_attached_policies]

        # If a single empty list item then all managed policies to be removed
        if len(managed_policies) == 1 and not managed_policies[0] and module.params.get('purge_policies'):
            # Detach policies not present
            if remove_policies(connection, module, set(current_attached_policies_arn_list) - set(managed_policies), params):
                changed = True
        else:
            # Make a list of the ARNs from the attached policies
            # Detach roles not defined in task
            if module.params.get('purge_policies'):
                if remove_policies(connection, module, set(current_attached_policies_arn_list) - set(managed_policies), params):
                    changed = True

            # Attach roles not already attached
            for policy_arn in set(managed_policies) - set(current_attached_policies_arn_list):
                try:
                    connection.attach_role_policy(RoleName=params['RoleName'], PolicyArn=policy_arn)
                except ClientError as e:
                    module.fail_json(msg="Unable to attach policy {0} to role {1}: {2}".format(policy_arn, params['RoleName'], to_native(e)),
                                     exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
                except BotoCoreError as e:
                    module.fail_json(msg="Unable to attach policy {0} to role {1}: {2}".format(policy_arn, params['RoleName'], to_native(e)),
                                     exception=traceback.format_exc())
                changed = True

    # Instance profile
    if create_instance_profile:
        try:
            instance_profiles = connection.list_instance_profiles_for_role(RoleName=params['RoleName'])['InstanceProfiles']
        except ClientError as e:
            module.fail_json(msg="Unable to list instance profiles for role {0}: {1}".format(params['RoleName'], to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        except BotoCoreError as e:
            module.fail_json(msg="Unable to list instance profiles for role {0}: {1}".format(params['RoleName'], to_native(e)),
                             exception=traceback.format_exc())

        if not any(p['InstanceProfileName'] == params['RoleName'] for p in instance_profiles):
            # Make sure an instance profile is attached
            try:
                connection.create_instance_profile(InstanceProfileName=params['RoleName'], Path=params['Path'])
                changed = True
            except ClientError as e:
                # If the profile already exists, no problem, move on
                if e.response['Error']['Code'] == 'EntityAlreadyExists':
                    pass
                else:
                    module.fail_json(msg="Unable to create instance profile for role {0}: {1}".format(params['RoleName'], to_native(e)),
                                     exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
            except BotoCoreError as e:
                module.fail_json(msg="Unable to create instance profile for role {0}: {1}".format(params['RoleName'], to_native(e)),
                                 exception=traceback.format_exc())
            connection.add_role_to_instance_profile(InstanceProfileName=params['RoleName'], RoleName=params['RoleName'])

    # Get the role again
    role = get_role(connection, module, params['RoleName'])
    role['attached_policies'] = get_attached_policy_list(connection, module, params['RoleName'])
    # NOTE: the role data is emitted both under the ``iam_role`` key and
    # splatted at the top level - this duplication looks intentional for
    # backwards compatibility of the module's return shape.
    module.exit_json(changed=changed, iam_role=camel_dict_to_snake_dict(role), **camel_dict_to_snake_dict(role))
def destroy_role(connection, module):
    """Delete the named IAM role, detaching it from instance profiles and
    managed policies first (IAM refuses to delete a role that still has
    either).  Exits the module with changed=False when the role is absent.
    """
    params = dict()
    params['RoleName'] = module.params.get('name')

    if get_role(connection, module, params['RoleName']):
        # We need to remove any instance profiles from the role before we delete it
        try:
            instance_profiles = connection.list_instance_profiles_for_role(RoleName=params['RoleName'])['InstanceProfiles']
        except ClientError as e:
            module.fail_json(msg="Unable to list instance profiles for role {0}: {1}".format(params['RoleName'], to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        except BotoCoreError as e:
            module.fail_json(msg="Unable to list instance profiles for role {0}: {1}".format(params['RoleName'], to_native(e)),
                             exception=traceback.format_exc())

        # Now remove the role from the instance profile(s)
        for profile in instance_profiles:
            try:
                connection.remove_role_from_instance_profile(InstanceProfileName=profile['InstanceProfileName'], RoleName=params['RoleName'])
            except ClientError as e:
                module.fail_json(msg="Unable to remove role {0} from instance profile {1}: {2}".format(
                    params['RoleName'], profile['InstanceProfileName'], to_native(e)),
                    exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
            except BotoCoreError as e:
                module.fail_json(msg="Unable to remove role {0} from instance profile {1}: {2}".format(
                    params['RoleName'], profile['InstanceProfileName'], to_native(e)),
                    exception=traceback.format_exc())

        # Now remove any attached policies otherwise deletion fails
        try:
            for policy in get_attached_policy_list(connection, module, params['RoleName']):
                connection.detach_role_policy(RoleName=params['RoleName'], PolicyArn=policy['PolicyArn'])
        except ClientError as e:
            module.fail_json(msg="Unable to detach policy {0} from role {1}: {2}".format(policy['PolicyArn'], params['RoleName'], to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        except BotoCoreError as e:
            module.fail_json(msg="Unable to detach policy {0} from role {1}: {2}".format(policy['PolicyArn'], params['RoleName'], to_native(e)),
                             exception=traceback.format_exc())

        try:
            connection.delete_role(**params)
        except ClientError as e:
            module.fail_json(msg="Unable to delete role: {0}".format(to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        except BotoCoreError as e:
            module.fail_json(msg="Unable to delete role: {0}".format(to_native(e)), exception=traceback.format_exc())
    else:
        # Role already absent: idempotent no-op.
        module.exit_json(changed=False)

    module.exit_json(changed=True)
def get_role(connection, module, name):
    """Fetch the IAM role called *name*; return None if it does not exist.

    Any other API failure fails the module with the error details attached.
    """
    try:
        role_data = connection.get_role(RoleName=name)
    except ClientError as e:
        if e.response['Error']['Code'] != 'NoSuchEntity':
            module.fail_json(msg="Unable to get role {0}: {1}".format(name, to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        return None
    except BotoCoreError as e:
        module.fail_json(msg="Unable to get role {0}: {1}".format(name, to_native(e)), exception=traceback.format_exc())
    else:
        return role_data['Role']
def get_attached_policy_list(connection, module, name):
    """Return the managed policies attached to role *name*.

    A missing role yields an empty list; any other API failure fails the
    module with the error details attached.
    """
    try:
        result = connection.list_attached_role_policies(RoleName=name)
    except ClientError as e:
        if e.response['Error']['Code'] != 'NoSuchEntity':
            module.fail_json(msg="Unable to list attached policies for role {0}: {1}".format(name, to_native(e)),
                             exception=traceback.format_exc(), **camel_dict_to_snake_dict(e.response))
        return []
    except BotoCoreError as e:
        module.fail_json(msg="Unable to list attached policies for role {0}: {1}".format(name, to_native(e)),
                         exception=traceback.format_exc())
    else:
        return result['AttachedPolicies']
def main():
    """Module entry point: build the argument spec, connect to IAM and
    dispatch to create/update or destroy based on ``state``."""
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        name=dict(type='str', required=True),
        path=dict(type='str', default="/"),
        assume_role_policy_document=dict(type='json'),
        managed_policy=dict(type='list', aliases=['managed_policies']),
        state=dict(type='str', choices=['present', 'absent'], default='present'),
        description=dict(type='str'),
        create_instance_profile=dict(type='bool', default=True),
        purge_policies=dict(type='bool', default=True),
    )

    # The trust policy is only mandatory when the role should exist.
    module = AnsibleModule(argument_spec=argument_spec,
                           required_if=[('state', 'present', ['assume_role_policy_document'])])

    if not HAS_BOTO3:
        module.fail_json(msg='boto3 required for this module')

    region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
    connection = boto3_conn(module, conn_type='client', resource='iam', region=region, endpoint=ec2_url, **aws_connect_params)

    if module.params.get("state") == 'present':
        create_or_update_role(connection, module)
    else:
        destroy_role(connection, module)
# Standard Ansible module entry guard.
if __name__ == '__main__':
    main()
| gpl-3.0 |
sometallgit/AutoUploader | Python27/Lib/bsddb/dbobj.py | 100 | 11331 | #-------------------------------------------------------------------------
# This file contains real Python object wrappers for DB and DBEnv
# C "objects" that can be usefully subclassed. The previous SWIG
# based interface allowed this thanks to SWIG's shadow classes.
# -- Gregory P. Smith
#-------------------------------------------------------------------------
#
# (C) Copyright 2001 Autonomous Zone Industries
#
# License: This is free software. You may use this software for any
# purpose including modification/redistribution, so long as
# this header remains intact and that you do not claim any
# rights of ownership or authorship of this software. This
# software has been tested, but no warranty is expressed or
# implied.
#
#
# TODO it would be *really nice* to have an automatic shadow class populator
# so that new methods don't need to be added here manually after being
# added to _bsddb.c.
#
import sys
# Python 2/3 compatibility shims for the rest of this module.
absolute_import = (sys.version_info[0] >= 3)
if absolute_import :
    # The explicit-relative import syntax is invalid before Python 2.5, so
    # it has to be hidden inside exec() to keep this file importable there.
    exec("from . import db")
else :
    import db

# Pick a dict-mixin base class for DB: the legacy UserDict.DictMixin on
# very old Pythons, collections.MutableMapping from 2.6 onwards.
if sys.version_info < (2, 6) :
    from UserDict import DictMixin as MutableMapping
else :
    import collections
    MutableMapping = collections.MutableMapping
class DBEnv:
    """Subclassable pure-Python wrapper around the C ``db.DBEnv`` object.

    Every method simply delegates to the wrapped ``self._cobj``; methods
    whose availability depends on the Berkeley DB library version are only
    defined when ``db.version()`` says the library provides them.
    """
    def __init__(self, *args, **kwargs):
        self._cobj = db.DBEnv(*args, **kwargs)

    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def set_shm_key(self, *args, **kwargs):
        return self._cobj.set_shm_key(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_data_dir(self, *args, **kwargs):
        return self._cobj.set_data_dir(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_lg_bsize(self, *args, **kwargs):
        return self._cobj.set_lg_bsize(*args, **kwargs)
    def set_lg_dir(self, *args, **kwargs):
        return self._cobj.set_lg_dir(*args, **kwargs)
    def set_lg_max(self, *args, **kwargs):
        return self._cobj.set_lg_max(*args, **kwargs)
    def set_lk_detect(self, *args, **kwargs):
        return self._cobj.set_lk_detect(*args, **kwargs)
    # set_lk_max only exists in Berkeley DB releases before 4.5.
    if db.version() < (4,5):
        def set_lk_max(self, *args, **kwargs):
            return self._cobj.set_lk_max(*args, **kwargs)
    def set_lk_max_locks(self, *args, **kwargs):
        return self._cobj.set_lk_max_locks(*args, **kwargs)
    def set_lk_max_lockers(self, *args, **kwargs):
        return self._cobj.set_lk_max_lockers(*args, **kwargs)
    def set_lk_max_objects(self, *args, **kwargs):
        return self._cobj.set_lk_max_objects(*args, **kwargs)
    def set_mp_mmapsize(self, *args, **kwargs):
        return self._cobj.set_mp_mmapsize(*args, **kwargs)
    def set_timeout(self, *args, **kwargs):
        return self._cobj.set_timeout(*args, **kwargs)
    def set_tmp_dir(self, *args, **kwargs):
        return self._cobj.set_tmp_dir(*args, **kwargs)
    def txn_begin(self, *args, **kwargs):
        return self._cobj.txn_begin(*args, **kwargs)
    def txn_checkpoint(self, *args, **kwargs):
        return self._cobj.txn_checkpoint(*args, **kwargs)
    def txn_stat(self, *args, **kwargs):
        return self._cobj.txn_stat(*args, **kwargs)
    def set_tx_max(self, *args, **kwargs):
        return self._cobj.set_tx_max(*args, **kwargs)
    def set_tx_timestamp(self, *args, **kwargs):
        return self._cobj.set_tx_timestamp(*args, **kwargs)
    def lock_detect(self, *args, **kwargs):
        return self._cobj.lock_detect(*args, **kwargs)
    def lock_get(self, *args, **kwargs):
        return self._cobj.lock_get(*args, **kwargs)
    def lock_id(self, *args, **kwargs):
        return self._cobj.lock_id(*args, **kwargs)
    def lock_put(self, *args, **kwargs):
        return self._cobj.lock_put(*args, **kwargs)
    def lock_stat(self, *args, **kwargs):
        return self._cobj.lock_stat(*args, **kwargs)
    def log_archive(self, *args, **kwargs):
        return self._cobj.log_archive(*args, **kwargs)
    def set_get_returns_none(self, *args, **kwargs):
        return self._cobj.set_get_returns_none(*args, **kwargs)
    def log_stat(self, *args, **kwargs):
        return self._cobj.log_stat(*args, **kwargs)
    def dbremove(self, *args, **kwargs):
        return self._cobj.dbremove(*args, **kwargs)
    def dbrename(self, *args, **kwargs):
        return self._cobj.dbrename(*args, **kwargs)
    def set_encrypt(self, *args, **kwargs):
        return self._cobj.set_encrypt(*args, **kwargs)
    # fileid_reset/lsn_reset were added in Berkeley DB 4.4.
    if db.version() >= (4,4):
        def fileid_reset(self, *args, **kwargs):
            return self._cobj.fileid_reset(*args, **kwargs)
        def lsn_reset(self, *args, **kwargs):
            return self._cobj.lsn_reset(*args, **kwargs)
class DB(MutableMapping):
    """Subclassable pure-Python wrapper around the C ``db.DB`` object.

    Mixes in ``MutableMapping`` so the database supports the standard
    dictionary protocol; everything else delegates to ``self._cobj``.
    """
    def __init__(self, dbenv, *args, **kwargs):
        # give it the proper DBEnv C object that its expecting
        self._cobj = db.DB(*((dbenv._cobj,) + args), **kwargs)

    # TODO are there other dict methods that need to be overridden?
    def __len__(self):
        return len(self._cobj)
    def __getitem__(self, arg):
        return self._cobj[arg]
    def __setitem__(self, key, value):
        self._cobj[key] = value
    def __delitem__(self, arg):
        del self._cobj[arg]

    # __iter__ is only needed (and only supported by _bsddb) on 2.6+,
    # where MutableMapping requires it.
    if sys.version_info >= (2, 6) :
        def __iter__(self) :
            return self._cobj.__iter__()

    def append(self, *args, **kwargs):
        return self._cobj.append(*args, **kwargs)
    def associate(self, *args, **kwargs):
        return self._cobj.associate(*args, **kwargs)
    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def consume(self, *args, **kwargs):
        return self._cobj.consume(*args, **kwargs)
    def consume_wait(self, *args, **kwargs):
        return self._cobj.consume_wait(*args, **kwargs)
    def cursor(self, *args, **kwargs):
        return self._cobj.cursor(*args, **kwargs)
    def delete(self, *args, **kwargs):
        return self._cobj.delete(*args, **kwargs)
    def fd(self, *args, **kwargs):
        return self._cobj.fd(*args, **kwargs)
    def get(self, *args, **kwargs):
        return self._cobj.get(*args, **kwargs)
    def pget(self, *args, **kwargs):
        return self._cobj.pget(*args, **kwargs)
    def get_both(self, *args, **kwargs):
        return self._cobj.get_both(*args, **kwargs)
    def get_byteswapped(self, *args, **kwargs):
        return self._cobj.get_byteswapped(*args, **kwargs)
    def get_size(self, *args, **kwargs):
        return self._cobj.get_size(*args, **kwargs)
    def get_type(self, *args, **kwargs):
        return self._cobj.get_type(*args, **kwargs)
    def join(self, *args, **kwargs):
        return self._cobj.join(*args, **kwargs)
    def key_range(self, *args, **kwargs):
        return self._cobj.key_range(*args, **kwargs)
    def has_key(self, *args, **kwargs):
        return self._cobj.has_key(*args, **kwargs)
    def items(self, *args, **kwargs):
        return self._cobj.items(*args, **kwargs)
    def keys(self, *args, **kwargs):
        return self._cobj.keys(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def put(self, *args, **kwargs):
        return self._cobj.put(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def rename(self, *args, **kwargs):
        return self._cobj.rename(*args, **kwargs)
    def set_bt_minkey(self, *args, **kwargs):
        return self._cobj.set_bt_minkey(*args, **kwargs)
    def set_bt_compare(self, *args, **kwargs):
        return self._cobj.set_bt_compare(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_dup_compare(self, *args, **kwargs) :
        return self._cobj.set_dup_compare(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_h_ffactor(self, *args, **kwargs):
        return self._cobj.set_h_ffactor(*args, **kwargs)
    def set_h_nelem(self, *args, **kwargs):
        return self._cobj.set_h_nelem(*args, **kwargs)
    def set_lorder(self, *args, **kwargs):
        return self._cobj.set_lorder(*args, **kwargs)
    def set_pagesize(self, *args, **kwargs):
        return self._cobj.set_pagesize(*args, **kwargs)
    def set_re_delim(self, *args, **kwargs):
        return self._cobj.set_re_delim(*args, **kwargs)
    def set_re_len(self, *args, **kwargs):
        return self._cobj.set_re_len(*args, **kwargs)
    def set_re_pad(self, *args, **kwargs):
        return self._cobj.set_re_pad(*args, **kwargs)
    def set_re_source(self, *args, **kwargs):
        return self._cobj.set_re_source(*args, **kwargs)
    def set_q_extentsize(self, *args, **kwargs):
        return self._cobj.set_q_extentsize(*args, **kwargs)
    def stat(self, *args, **kwargs):
        return self._cobj.stat(*args, **kwargs)
    def sync(self, *args, **kwargs):
        return self._cobj.sync(*args, **kwargs)
    def type(self, *args, **kwargs):
        return self._cobj.type(*args, **kwargs)
    def upgrade(self, *args, **kwargs):
        return self._cobj.upgrade(*args, **kwargs)
    def values(self, *args, **kwargs):
        return self._cobj.values(*args, **kwargs)
    def verify(self, *args, **kwargs):
        return self._cobj.verify(*args, **kwargs)
    def set_get_returns_none(self, *args, **kwargs):
        return self._cobj.set_get_returns_none(*args, **kwargs)
    def set_encrypt(self, *args, **kwargs):
        return self._cobj.set_encrypt(*args, **kwargs)
class DBSequence:
    """Subclassable pure-Python wrapper around the C ``db.DBSequence``
    object.  All methods delegate to the wrapped ``self._cobj``.
    """
    def __init__(self, *args, **kwargs):
        self._cobj = db.DBSequence(*args, **kwargs)

    def close(self, *args, **kwargs):
        return self._cobj.close(*args, **kwargs)
    def get(self, *args, **kwargs):
        return self._cobj.get(*args, **kwargs)
    def get_dbp(self, *args, **kwargs):
        return self._cobj.get_dbp(*args, **kwargs)
    def get_key(self, *args, **kwargs):
        return self._cobj.get_key(*args, **kwargs)
    def init_value(self, *args, **kwargs):
        return self._cobj.init_value(*args, **kwargs)
    def open(self, *args, **kwargs):
        return self._cobj.open(*args, **kwargs)
    def remove(self, *args, **kwargs):
        return self._cobj.remove(*args, **kwargs)
    def stat(self, *args, **kwargs):
        return self._cobj.stat(*args, **kwargs)
    def set_cachesize(self, *args, **kwargs):
        return self._cobj.set_cachesize(*args, **kwargs)
    def set_flags(self, *args, **kwargs):
        return self._cobj.set_flags(*args, **kwargs)
    def set_range(self, *args, **kwargs):
        return self._cobj.set_range(*args, **kwargs)
    def get_cachesize(self, *args, **kwargs):
        return self._cobj.get_cachesize(*args, **kwargs)
    def get_flags(self, *args, **kwargs):
        return self._cobj.get_flags(*args, **kwargs)
    def get_range(self, *args, **kwargs):
        return self._cobj.get_range(*args, **kwargs)
| mit |
afdelgado/askbot | askbot/migrations/0063_transplant_question_closed_datas.py | 18 | 28030 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from askbot.utils.console import ProgressBar
class Migration(DataMigration):
    def forwards(self, orm):
        """Copy each Question's close-state fields onto its Thread, then verify.

        The four columns (closed, closed_by, closed_at, close_reason) are being
        transplanted from Question to Thread; after the copy, two sanity passes
        raise ValueError if any row disagrees between the two tables.
        """
        message = "Marking closed threads"
        num_questions = orm.Question.objects.count()
        # Transplant the close-related columns question -> thread, one row at a
        # time (ProgressBar only wraps the iterator to print progress).
        for question in ProgressBar(orm.Question.objects.iterator(), num_questions, message):
            thread = question.thread
            thread.closed = question.closed
            thread.closed_by = question.closed_by
            thread.closed_at = question.closed_at
            thread.close_reason = question.close_reason
            thread.save()
        # Sanity check 1: the `closed` flag must now agree for every question.
        if orm.Question.objects.exclude(closed=models.F('thread__closed')).exists():
            raise ValueError("There are Thread instances for which data doesn't match Question!")
        # Sanity check 2: for closed questions, the remaining three fields must
        # match as well (open questions may legitimately hold NULLs).
        for field_name in ['closed_by', 'closed_at', 'close_reason']:
            kwargs = {field_name: models.F('thread__%s' % field_name)}
            if orm.Question.objects.filter(closed=True).exclude(**kwargs).exists():
                raise ValueError("There are Thread instances for which data doesn't match Question!")
    def backwards(self, orm):
        """Intentional no-op: forwards() only copies data onto Thread and leaves
        the Question columns untouched, so there is nothing to undo."""
models = {
'askbot.activity': {
'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True'}),
'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}),
'summary': ('django.db.models.fields.TextField', [], {'default': "''"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.activityauditstatus': {
'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Activity']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.anonymousanswer': {
'Meta': {'object_name': 'AnonymousAnswer'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Question']"}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'text': ('django.db.models.fields.TextField', [], {}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.anonymousquestion': {
'Meta': {'object_name': 'AnonymousQuestion'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'askbot.answer': {
'Meta': {'object_name': 'Answer', 'db_table': "u'answer'"},
'accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_answers'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'answers'", 'to': "orm['askbot.Question']"}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.award': {
'Meta': {'object_name': 'Award', 'db_table': "u'award'"},
'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"})
},
'askbot.badgedata': {
'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'},
'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50', 'db_index': 'True'})
},
'askbot.comment': {
'Meta': {'ordering': "('-added_at',)", 'object_name': 'Comment', 'db_table': "u'comment'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'comment': ('django.db.models.fields.CharField', [], {'max_length': '2048'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'html': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '2048'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'offensive_flag_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'comments'", 'to': "orm['auth.User']"})
},
'askbot.emailfeedsetting': {
'Meta': {'object_name': 'EmailFeedSetting'},
'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"})
},
'askbot.favoritequestion': {
'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"})
},
'askbot.markedtag': {
'Meta': {'object_name': 'MarkedTag'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': "orm['askbot.Tag']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"})
},
'askbot.postrevision': {
'Meta': {'ordering': "('-revision',)", 'unique_together': "(('answer', 'revision'), ('question', 'revision'))", 'object_name': 'PostRevision'},
'answer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Answer']"}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Question']"}),
'revised_at': ('django.db.models.fields.DateTimeField', [], {}),
'revision': ('django.db.models.fields.PositiveIntegerField', [], {}),
'revision_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}),
'text': ('django.db.models.fields.TextField', [], {}),
'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'})
},
'askbot.question': {
'Meta': {'object_name': 'Question', 'db_table': "u'question'"},
'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'answer_accepted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'to': "orm['auth.User']"}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'closed_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'favorite_questions'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}),
'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_questions'", 'symmetrical': 'False', 'to': "orm['auth.User']"}),
'html': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'last_active_in_questions'", 'to': "orm['auth.User']"}),
'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_questions'", 'null': 'True', 'to': "orm['auth.User']"}),
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}),
'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}),
'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'questions'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}),
'text': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'thread': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'questions'", 'unique': 'True', 'to': "orm['askbot.Thread']"}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}),
'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'})
},
'askbot.questionview': {
'Meta': {'object_name': 'QuestionView'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Question']"}),
'when': ('django.db.models.fields.DateTimeField', [], {}),
'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"})
},
'askbot.repute': {
'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"},
'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Question']", 'null': 'True', 'blank': 'True'}),
'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}),
'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
},
'askbot.tag': {
'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"},
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.thread': {
'Meta': {'object_name': 'Thread'},
'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}),
'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
},
'askbot.vote': {
'Meta': {'unique_together': "(('content_type', 'object_id', 'user'),)", 'object_name': 'Vote', 'db_table': "u'vote'"},
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}),
'vote': ('django.db.models.fields.SmallIntegerField', [], {}),
'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}),
'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}),
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}),
'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}),
'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}),
'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}),
'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['askbot']
| gpl-3.0 |
SMMAR11/smmaranim | app/functions/form_init.py | 1 | 8799 | # coding: utf-8
'''
Mise en forme d'un gabarit normé pour chaque champ d'un formulaire
_form : Objet formulaire
Retourne un tableau associatif
'''
def sub(_form) :
    """Build a normalised HTML wrapper for every field of ``_form``.

    For each bound field, the widget is rendered, parsed with BeautifulSoup and
    re-wrapped in a widget-type-specific template (label, field, error-message
    placeholder).  Returns a dict mapping field names to safe HTML strings.
    Raises ValueError when a widget type has no template.
    """
    # Imports
    from bs4 import BeautifulSoup
    from django.template.defaultfilters import safe
    from smmaranim.custom_settings import ERROR_MESSAGES
    from smmaranim.custom_settings import MAY_BE_REQUIRED_FIELD
    from smmaranim.custom_settings import REQUIRED_FIELD
    output = {}
    # Default per-field template (used by most simple widget types)
    gabarit_defaut = '''
    <div class="field-wrapper" id="fw_{}">
        <span class="field-label">{}</span>
        <span class="field">{}</span>
        <span class="field-error-message"></span>
    </div>
    '''
    for champ in _form :
        # Override the widget's error messages with the project-wide ones
        for cle, val in ERROR_MESSAGES.items() : _form.fields[champ.name].error_messages[cle] = val
        # Render the field to HTML (i.e. a string) and parse it
        champ__str = BeautifulSoup('{}'.format(champ), 'html.parser')
        # Append a marker to the label of each (possibly) required field
        if champ.label :
            strs = champ.label.split('|')
            if _form.fields[champ.name].required == True :
                strs[0] += REQUIRED_FIELD
            else :
                # A custom "may-be-required" attribute on any rendered tag
                # flags a conditionally required field
                for elem in champ__str.find_all() :
                    if 'may-be-required' in elem.attrs.keys() : strs[0] += MAY_BE_REQUIRED_FIELD
            if champ.help_text : strs[0] += '<span class="help-icon" title="{}"></span>'.format(champ.help_text)
            champ.label = '|'.join(strs)
        # Compute the final value of the name attribute (honours the form prefix)
        attr_name = '{}-{}'.format(_form.prefix, champ.name) if _form.prefix else champ.name
        # Strip the required / may-be-required attributes (client-side
        # validation is handled elsewhere)
        for elem in champ__str.find_all() :
            if 'may-be-required' in elem.attrs.keys() : del elem['may-be-required']
            if 'required' in elem.attrs.keys() : del elem['required']
        # Get the widget type
        type_champ = champ.field.widget.__class__.__name__
        # Pick / build the template according to the widget type
        if type_champ == 'CheckboxInput' :
            # Checkbox: field first, label after it
            gabarit = '''
            <div class="field-wrapper" id="fw_{}">
                <span class="field">{}</span>
                <span class="field-label">{}</span>
                <span class="field-error-message"></span>
            </div>
            '''.format(attr_name, champ__str, champ.label)
        elif type_champ == 'ClearableFileInput' :
            # Keep the file and checkbox inputs
            input_checkbox = champ__str.find('input', { 'type' : 'checkbox' })
            input_file = champ__str.find('input', { 'type' : 'file' })
            # Initialise the "current file" info block
            infos = ''
            for a in champ__str.find_all('a') :
                # Show the "Effacer" (clear) option when the widget offers one
                if input_checkbox :
                    delete = '''
                    <span class="delete-file">
                        {}
                        <label for="{}-clear_id">Effacer</label>
                    </span>
                    '''.format(input_checkbox, attr_name)
                else :
                    delete = ''
                # NOTE(review): the raw href is inserted as text, not as a
                # link — presumably styled client-side; confirm before changing.
                infos = '''
                <div class="if-return">
                    <span class="file-infos">
                        {}
                    </span>
                    {}
                </div>
                '''.format(a['href'], delete)
            gabarit = '''
            <div class="field-wrapper" id="fw_{}">
                <span class="field-label">{}</span>
                <div class="if-container">
                    <span class="field">{}</span>
                    <span class="if-trigger">Parcourir</span>
                    {}
                </div>
                <span class="field-error-message"></span>
            </div>
            '''.format(attr_name, champ.label, input_file, infos)
        elif type_champ == 'DateInput' :
            # Date field: input group with a calendar icon / datepicker hook
            gabarit = '''
            <div class="field-wrapper" id="fw_{}">
                <span class="field-label">{}</span>
                <div class="form-group">
                    <span class="field">
                        <div class="input-group">
                            {}
                            <span class="date input-group-addon" style="cursor: pointer;">
                                <input name="{}__datepicker" type="hidden">
                                <span class="glyphicon glyphicon-calendar"></span>
                            </span>
                        </div>
                    </span>
                </div>
                <span class="field-error-message"></span>
            </div>
            '''.format(attr_name, champ.label, champ__str, attr_name)
        elif type_champ == 'DateTimeInput' :
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        elif type_champ == 'EmailInput' :
            # Grab the <input/> tag of type email
            champ__str = champ__str.find('input', { 'type' : 'email' })
            # Switch its type (email -> text) so browser validation stays off
            champ__str['type'] = 'text'
            gabarit = '''
            <div class="field-wrapper" id="fw_{}">
                <span class="field-label">{}</span>
                <div class="form-group">
                    <span class="field">
                        <div class="input-group">
                            {}
                            <span class="input-group-addon">
                                <span class="fa fa-at"></span>
                            </span>
                        </div>
                    </span>
                </div>
                <span class="field-error-message"></span>
            </div>
            '''.format(attr_name, champ.label, champ__str, attr_name)
        elif type_champ == 'NumberInput' :
            # Grab the <input/> tag of type number
            champ__str = champ__str.find('input', { 'type' : 'number' })
            # Switch its type (number -> text)
            champ__str['type'] = 'text'
            # Strip unwanted attributes
            for ta in ['min'] :
                if champ__str.has_attr(ta) : del champ__str[ta]
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        elif type_champ == 'PasswordInput' :
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        elif type_champ == 'RadioSelect' :
            # Determine the RadioSelect flavour: datatable only when every
            # rendered input carries the custom into-datatable attribute
            dtable = True
            for i in champ__str.find_all('input') :
                if not i.has_attr('into-datatable') : dtable = False
            # Pick the template
            if dtable == False :
                gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
            else :
                # Keep the labels; labels[0] is the field label, the rest are
                # column headers ('__rb__' marks the radio-button column)
                labels = champ.label.split('|')
                # Initialise the <tr/> tags of the <tbody/> tag
                trs = []
                for li in champ__str.find_all('li') :
                    # Get the label element (holds the data of one <tr/> tag)
                    label = li.find('label')
                    # Get the input element
                    i = label.find('input')
                    # Drop the into-datatable attribute (no longer useful)
                    del i['into-datatable']
                    # Push one <tr/> per non-empty choice value
                    if i['value'] :
                        trs.append('<tr>{}</tr>'.format(
                            ''.join(['<td>{}</td>'.format(
                                elem if elem != '__rb__' else i
                            ) for elem in label.text.split('|')])
                        ))
                gabarit = '''
                <div class="field-wrapper" id="fw_{}">
                    <span class="field-label">{}</span>
                    <div class="custom-table" id="dtable_{}">
                        <table border="1" bordercolor="#DDD">
                            <thead>
                                <tr>{}</tr>
                            </thead>
                            <tbody>{}</tbody>
                        </table>
                    </div>
                    <span class="field-error-message"></span>
                </div>
                '''.format(
                    attr_name,
                    labels[0],
                    attr_name,
                    ''.join(['<th>{}</th>'.format(elem if elem != '__rb__' else '') for elem in labels[1:]]),
                    ''.join(trs)
                )
        elif type_champ == 'Select' :
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        elif type_champ == 'SelectMultiple' :
            # Keep the labels; labels[0] is the field label, the rest are
            # column headers ('__zcc__' marks the checkbox column)
            labels = champ.label.split('|')
            # Initialise the <tr/> tags of the <tbody/> tag
            trs = []
            for option in champ__str.find_all('option') :
                tds = []
                for index, elem in enumerate(option.text.split('|')) :
                    td_content = elem
                    if elem == '__zcc__' :
                        # Replace the marker with a checkbox mirroring the
                        # <option/>'s value and selected state
                        kwargs = {
                            'id' : 'id_{}_{}'.format(attr_name, index),
                            'name' : attr_name,
                            'type' : 'checkbox',
                            'value' : option['value']
                        }
                        if option.has_attr('selected') : kwargs['checked'] = True
                        td_content = '<input {}>'.format(
                            ' '.join(['{}="{}"'.format(cle, val) for cle, val in kwargs.items()])
                        )
                    tds.append('<td>{}</td>'.format(td_content))
                trs.append('<tr>{}</tr>'.format(''.join(tds)))
            # Header row gets a "check all" checkbox in the '__zcc__' column
            gabarit = '''
            <div class="field-wrapper" id="fw_{}">
                <span class="field-label">{}</span>
                <div class="custom-table" id="dtable_{}">
                    <table border="1" bordercolor="#DDD">
                        <thead>
                            <tr>{}</tr>
                        </thead>
                        <tbody>{}</tbody>
                    </table>
                </div>
                <span class="field-error-message"></span>
            </div>
            '''.format(
                attr_name,
                labels[0],
                attr_name,
                ''.join(['<th>{}</th>'.format(
                    elem if elem != '__zcc__' else '<input type="checkbox" id="id_{}__all" value="__ALL__">' \
                    .format(attr_name)
                ) for elem in labels[1:]]),
                ''.join(trs)
            )
        elif type_champ == 'Textarea' :
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        elif type_champ == 'TextInput' :
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        elif type_champ == 'TimeInput' :
            gabarit = gabarit_defaut.format(attr_name, champ.label, champ__str)
        else :
            gabarit = None
        # Register the rendered field, unless no template is available
        if gabarit :
            output[champ.name] = safe(gabarit)
        else :
            raise ValueError('Aucun gabarit n\'est disponible pour un champ {}.'.format(type_champ))
    return output
chauhanhardik/populo | openedx/core/djangoapps/user_api/views.py | 31 | 32766 | """HTTP end-points for the User API. """
import copy
from opaque_keys import InvalidKeyError
from django.conf import settings
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.core.urlresolvers import reverse
from django.core.exceptions import ImproperlyConfigured, NON_FIELD_ERRORS, ValidationError
from django.utils.translation import ugettext as _
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie, csrf_protect, csrf_exempt
from opaque_keys.edx import locator
from rest_framework import authentication
from rest_framework import filters
from rest_framework import generics
from rest_framework import status
from rest_framework import viewsets
from rest_framework.views import APIView
from rest_framework.exceptions import ParseError
from django_countries import countries
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from openedx.core.lib.api.permissions import ApiKeyHeaderPermission
import third_party_auth
from django_comment_common.models import Role
from edxmako.shortcuts import marketing_link
from student.views import create_account_with_params
from student.cookies import set_logged_in_cookies
from openedx.core.lib.api.authentication import SessionAuthenticationAllowInactiveUser
from util.json_request import JsonResponse
from .preferences.api import update_email_opt_in
from .helpers import FormDescription, shim_student_view, require_post_params
from .models import UserPreference, UserProfile
from .accounts import (
NAME_MAX_LENGTH, EMAIL_MIN_LENGTH, EMAIL_MAX_LENGTH, PASSWORD_MIN_LENGTH, PASSWORD_MAX_LENGTH,
USERNAME_MIN_LENGTH, USERNAME_MAX_LENGTH
)
from .accounts.api import check_account_exists
from .serializers import UserSerializer, UserPreferenceSerializer
class LoginSessionView(APIView):
"""HTTP end-points for logging in users. """
# This end-point is available to anonymous users,
# so do not require authentication.
authentication_classes = []
@method_decorator(ensure_csrf_cookie)
def get(self, request): # pylint: disable=unused-argument
"""Return a description of the login form.
This decouples clients from the API definition:
if the API decides to modify the form, clients won't need
to be updated.
See `user_api.helpers.FormDescription` for examples
of the JSON-encoded form description.
Returns:
HttpResponse
"""
form_desc = FormDescription("post", reverse("user_api_login_session"))
# Translators: This label appears above a field on the login form
# meant to hold the user's email address.
email_label = _(u"Email")
# Translators: This example email address is used as a placeholder in
# a field on the login form meant to hold the user's email address.
email_placeholder = _(u"username@domain.com")
# Translators: These instructions appear on the login form, immediately
# below a field meant to hold the user's email address.
email_instructions = _(
u"The email address you used to register with {platform_name}"
).format(platform_name=settings.PLATFORM_NAME)
form_desc.add_field(
"email",
field_type="email",
label=email_label,
placeholder=email_placeholder,
instructions=email_instructions,
restrictions={
"min_length": EMAIL_MIN_LENGTH,
"max_length": EMAIL_MAX_LENGTH,
}
)
# Translators: This label appears above a field on the login form
# meant to hold the user's password.
password_label = _(u"Password")
form_desc.add_field(
"password",
label=password_label,
field_type="password",
restrictions={
"min_length": PASSWORD_MIN_LENGTH,
"max_length": PASSWORD_MAX_LENGTH,
}
)
form_desc.add_field(
"remember",
field_type="checkbox",
label=_("Remember me"),
default=False,
required=False,
)
return HttpResponse(form_desc.to_json(), content_type="application/json")
@method_decorator(require_post_params(["email", "password"]))
@method_decorator(csrf_protect)
def post(self, request):
"""Log in a user.
You must send all required form fields with the request.
You can optionally send an `analytics` param with a JSON-encoded
object with additional info to include in the login analytics event.
Currently, the only supported field is "enroll_course_id" to indicate
that the user logged in while enrolling in a particular course.
Arguments:
request (HttpRequest)
Returns:
HttpResponse: 200 on success
HttpResponse: 400 if the request is not valid.
HttpResponse: 403 if authentication failed.
403 with content "third-party-auth" if the user
has successfully authenticated with a third party provider
but does not have a linked account.
HttpResponse: 302 if redirecting to another page.
Example Usage:
POST /user_api/v1/login_session
with POST params `email`, `password`, and `remember`.
200 OK
"""
# For the initial implementation, shim the existing login view
# from the student Django app.
from student.views import login_user
return shim_student_view(login_user, check_logged_in=True)(request)
class RegistrationView(APIView):
"""HTTP end-points for creating a new user. """
DEFAULT_FIELDS = ["email", "name", "username", "password"]
EXTRA_FIELDS = [
"city",
"country",
"gender",
"year_of_birth",
"level_of_education",
"mailing_address",
"goals",
"honor_code",
"terms_of_service",
]
# This end-point is available to anonymous users,
# so do not require authentication.
authentication_classes = []
def _is_field_visible(self, field_name):
"""Check whether a field is visible based on Django settings. """
return self._extra_fields_setting.get(field_name) in ["required", "optional"]
def _is_field_required(self, field_name):
"""Check whether a field is required based on Django settings. """
return self._extra_fields_setting.get(field_name) == "required"
def __init__(self, *args, **kwargs):
super(RegistrationView, self).__init__(*args, **kwargs)
# Backwards compatibility: Honor code is required by default, unless
# explicitly set to "optional" in Django settings.
self._extra_fields_setting = copy.deepcopy(settings.REGISTRATION_EXTRA_FIELDS)
self._extra_fields_setting["honor_code"] = self._extra_fields_setting.get("honor_code", "required")
# Check that the setting is configured correctly
for field_name in self.EXTRA_FIELDS:
if self._extra_fields_setting.get(field_name, "hidden") not in ["required", "optional", "hidden"]:
msg = u"Setting REGISTRATION_EXTRA_FIELDS values must be either required, optional, or hidden."
raise ImproperlyConfigured(msg)
# Map field names to the instance method used to add the field to the form
self.field_handlers = {}
for field_name in self.DEFAULT_FIELDS + self.EXTRA_FIELDS:
handler = getattr(self, "_add_{field_name}_field".format(field_name=field_name))
self.field_handlers[field_name] = handler
@method_decorator(ensure_csrf_cookie)
def get(self, request):
"""Return a description of the registration form.
This decouples clients from the API definition:
if the API decides to modify the form, clients won't need
to be updated.
This is especially important for the registration form,
since different edx-platform installations might
collect different demographic information.
See `user_api.helpers.FormDescription` for examples
of the JSON-encoded form description.
Arguments:
request (HttpRequest)
Returns:
HttpResponse
"""
form_desc = FormDescription("post", reverse("user_api_registration"))
self._apply_third_party_auth_overrides(request, form_desc)
# Default fields are always required
for field_name in self.DEFAULT_FIELDS:
self.field_handlers[field_name](form_desc, required=True)
# Extra fields configured in Django settings
# may be required, optional, or hidden
for field_name in self.EXTRA_FIELDS:
if self._is_field_visible(field_name):
self.field_handlers[field_name](
form_desc,
required=self._is_field_required(field_name)
)
return HttpResponse(form_desc.to_json(), content_type="application/json")
@method_decorator(csrf_exempt)
def post(self, request):
"""Create the user's account.
You must send all required form fields with the request.
You can optionally send a "course_id" param to indicate in analytics
events that the user registered while enrolling in a particular course.
Arguments:
request (HTTPRequest)
Returns:
HttpResponse: 200 on success
HttpResponse: 400 if the request is not valid.
HttpResponse: 409 if an account with the given username or email
address already exists
"""
data = request.POST.copy()
email = data.get('email')
username = data.get('username')
# Handle duplicate email/username
conflicts = check_account_exists(email=email, username=username)
if conflicts:
conflict_messages = {
# Translators: This message is shown to users who attempt to create a new
# account using an email address associated with an existing account.
"email": _(
u"It looks like {email_address} belongs to an existing account. Try again with a different email address."
).format(email_address=email),
# Translators: This message is shown to users who attempt to create a new
# account using a username associated with an existing account.
"username": _(
u"It looks like {username} belongs to an existing account. Try again with a different username."
).format(username=username),
}
errors = {
field: [{"user_message": conflict_messages[field]}]
for field in conflicts
}
return JsonResponse(errors, status=409)
# Backwards compatibility: the student view expects both
# terms of service and honor code values. Since we're combining
# these into a single checkbox, the only value we may get
# from the new view is "honor_code".
# Longer term, we will need to make this more flexible to support
# open source installations that may have separate checkboxes
# for TOS, privacy policy, etc.
if data.get("honor_code") and "terms_of_service" not in data:
data["terms_of_service"] = data["honor_code"]
try:
user = create_account_with_params(request, data)
except ValidationError as err:
# Should only get non-field errors from this function
assert NON_FIELD_ERRORS not in err.message_dict
# Only return first error for each field
errors = {
field: [{"user_message": error} for error in error_list]
for field, error_list in err.message_dict.items()
}
return JsonResponse(errors, status=400)
response = JsonResponse({"success": True})
set_logged_in_cookies(request, response, user)
return response
def _add_email_field(self, form_desc, required=True):
"""Add an email field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a field on the registration form
# meant to hold the user's email address.
email_label = _(u"Email")
# Translators: This example email address is used as a placeholder in
# a field on the registration form meant to hold the user's email address.
email_placeholder = _(u"username@domain.com")
form_desc.add_field(
"email",
field_type="email",
label=email_label,
placeholder=email_placeholder,
restrictions={
"min_length": EMAIL_MIN_LENGTH,
"max_length": EMAIL_MAX_LENGTH,
},
required=required
)
def _add_name_field(self, form_desc, required=True):
"""Add a name field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a field on the registration form
# meant to hold the user's full name.
name_label = _(u"Full name")
# Translators: This example name is used as a placeholder in
# a field on the registration form meant to hold the user's name.
name_placeholder = _(u"Jane Doe")
# Translators: These instructions appear on the registration form, immediately
# below a field meant to hold the user's full name.
name_instructions = _(u"Needed for any certificates you may earn")
form_desc.add_field(
"name",
label=name_label,
placeholder=name_placeholder,
instructions=name_instructions,
restrictions={
"max_length": NAME_MAX_LENGTH,
},
required=required
)
def _add_username_field(self, form_desc, required=True):
"""Add a username field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a field on the registration form
# meant to hold the user's public username.
username_label = _(u"Public username")
# Translators: These instructions appear on the registration form, immediately
# below a field meant to hold the user's public username.
username_instructions = _(
u"The name that will identify you in your courses - "
"{bold_start}(cannot be changed later){bold_end}"
).format(bold_start=u'<strong>', bold_end=u'</strong>')
# Translators: This example username is used as a placeholder in
# a field on the registration form meant to hold the user's username.
username_placeholder = _(u"JaneDoe")
form_desc.add_field(
"username",
label=username_label,
instructions=username_instructions,
placeholder=username_placeholder,
restrictions={
"min_length": USERNAME_MIN_LENGTH,
"max_length": USERNAME_MAX_LENGTH,
},
required=required
)
def _add_password_field(self, form_desc, required=True):
"""Add a password field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a field on the registration form
# meant to hold the user's password.
password_label = _(u"Password")
form_desc.add_field(
"password",
label=password_label,
field_type="password",
restrictions={
"min_length": PASSWORD_MIN_LENGTH,
"max_length": PASSWORD_MAX_LENGTH,
},
required=required
)
def _add_level_of_education_field(self, form_desc, required=True):
"""Add a level of education field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a dropdown menu on the registration
# form used to select the user's highest completed level of education.
education_level_label = _(u"Highest level of education completed")
form_desc.add_field(
"level_of_education",
label=education_level_label,
field_type="select",
options=UserProfile.LEVEL_OF_EDUCATION_CHOICES,
include_default_option=True,
required=required
)
def _add_gender_field(self, form_desc, required=True):
"""Add a gender field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a dropdown menu on the registration
# form used to select the user's gender.
gender_label = _(u"Gender")
form_desc.add_field(
"gender",
label=gender_label,
field_type="select",
options=UserProfile.GENDER_CHOICES,
include_default_option=True,
required=required
)
def _add_year_of_birth_field(self, form_desc, required=True):
"""Add a year of birth field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a dropdown menu on the registration
# form used to select the user's year of birth.
yob_label = _(u"Year of birth")
options = [(unicode(year), unicode(year)) for year in UserProfile.VALID_YEARS]
form_desc.add_field(
"year_of_birth",
label=yob_label,
field_type="select",
options=options,
include_default_option=True,
required=required
)
def _add_mailing_address_field(self, form_desc, required=True):
"""Add a mailing address field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a field on the registration form
# meant to hold the user's mailing address.
mailing_address_label = _(u"Mailing address")
form_desc.add_field(
"mailing_address",
label=mailing_address_label,
field_type="textarea",
required=required
)
def _add_goals_field(self, form_desc, required=True):
"""Add a goals field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This phrase appears above a field on the registration form
# meant to hold the user's reasons for registering with edX.
goals_label = _(
u"Tell us why you're interested in {platform_name}"
).format(platform_name=settings.PLATFORM_NAME)
form_desc.add_field(
"goals",
label=goals_label,
field_type="textarea",
required=required
)
def _add_city_field(self, form_desc, required=True):
"""Add a city field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a field on the registration form
# which allows the user to input the city in which they live.
city_label = _(u"City")
form_desc.add_field(
"city",
label=city_label,
required=required
)
def _add_country_field(self, form_desc, required=True):
"""Add a country field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This label appears above a dropdown menu on the registration
# form used to select the country in which the user lives.
country_label = _(u"Country")
error_msg = _(u"Please select your Country.")
form_desc.add_field(
"country",
label=country_label,
field_type="select",
options=list(countries),
include_default_option=True,
required=required,
error_messages={
"required": error_msg
}
)
def _add_honor_code_field(self, form_desc, required=True):
"""Add an honor code field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Separate terms of service and honor code checkboxes
if self._is_field_visible("terms_of_service"):
terms_text = _(u"Honor Code")
# Combine terms of service and honor code checkboxes
else:
# Translators: This is a legal document users must agree to
# in order to register a new account.
terms_text = _(u"Terms of Service and Honor Code")
terms_link = u"<a href=\"{url}\">{terms_text}</a>".format(
url=marketing_link("HONOR"),
terms_text=terms_text
)
# Translators: "Terms of Service" is a legal document users must agree to
# in order to register a new account.
label = _(
u"I agree to the {platform_name} {terms_of_service}."
).format(
platform_name=settings.PLATFORM_NAME,
terms_of_service=terms_link
)
# Translators: "Terms of Service" is a legal document users must agree to
# in order to register a new account.
error_msg = _(
u"You must agree to the {platform_name} {terms_of_service}."
).format(
platform_name=settings.PLATFORM_NAME,
terms_of_service=terms_link
)
form_desc.add_field(
"honor_code",
label=label,
field_type="checkbox",
default=False,
required=required,
error_messages={
"required": error_msg
}
)
def _add_terms_of_service_field(self, form_desc, required=True):
"""Add a terms of service field to a form description.
Arguments:
form_desc: A form description
Keyword Arguments:
required (bool): Whether this field is required; defaults to True
"""
# Translators: This is a legal document users must agree to
# in order to register a new account.
terms_text = _(u"Terms of Service")
terms_link = u"<a href=\"{url}\">{terms_text}</a>".format(
url=marketing_link("TOS"),
terms_text=terms_text
)
# Translators: "Terms of service" is a legal document users must agree to
# in order to register a new account.
label = _(
u"I agree to the {platform_name} {terms_of_service}."
).format(
platform_name=settings.PLATFORM_NAME,
terms_of_service=terms_link
)
# Translators: "Terms of service" is a legal document users must agree to
# in order to register a new account.
error_msg = _(
u"You must agree to the {platform_name} {terms_of_service}."
).format(
platform_name=settings.PLATFORM_NAME,
terms_of_service=terms_link
)
form_desc.add_field(
"terms_of_service",
label=label,
field_type="checkbox",
default=False,
required=required,
error_messages={
"required": error_msg
}
)
def _apply_third_party_auth_overrides(self, request, form_desc):
"""Modify the registration form if the user has authenticated with a third-party provider.
If a user has successfully authenticated with a third-party provider,
but does not yet have an account with EdX, we want to fill in
the registration form with any info that we get from the
provider.
This will also hide the password field, since we assign users a default
(random) password on the assumption that they will be using
third-party auth to log in.
Arguments:
request (HttpRequest): The request for the registration form, used
to determine if the user has successfully authenticated
with a third-party provider.
form_desc (FormDescription): The registration form description
"""
if third_party_auth.is_enabled():
running_pipeline = third_party_auth.pipeline.get(request)
if running_pipeline:
current_provider = third_party_auth.provider.Registry.get_from_pipeline(running_pipeline)
# Override username / email / full name
field_overrides = current_provider.get_register_form_data(
running_pipeline.get('kwargs')
)
for field_name in self.DEFAULT_FIELDS:
if field_name in field_overrides:
form_desc.override_field_properties(
field_name, default=field_overrides[field_name]
)
# Hide the password field
form_desc.override_field_properties(
"password",
default="",
field_type="hidden",
required=False,
label="",
instructions="",
restrictions={}
)
class PasswordResetView(APIView):
"""HTTP end-point for GETting a description of the password reset form. """
# This end-point is available to anonymous users,
# so do not require authentication.
authentication_classes = []
@method_decorator(ensure_csrf_cookie)
def get(self, request): # pylint: disable=unused-argument
"""Return a description of the password reset form.
This decouples clients from the API definition:
if the API decides to modify the form, clients won't need
to be updated.
See `user_api.helpers.FormDescription` for examples
of the JSON-encoded form description.
Returns:
HttpResponse
"""
form_desc = FormDescription("post", reverse("password_change_request"))
# Translators: This label appears above a field on the password reset
# form meant to hold the user's email address.
email_label = _(u"Email")
# Translators: This example email address is used as a placeholder in
# a field on the password reset form meant to hold the user's email address.
email_placeholder = _(u"username@domain.com")
# Translators: These instructions appear on the password reset form,
# immediately below a field meant to hold the user's email address.
email_instructions = _(
u"The email address you used to register with {platform_name}"
).format(platform_name=settings.PLATFORM_NAME)
form_desc.add_field(
"email",
field_type="email",
label=email_label,
placeholder=email_placeholder,
instructions=email_instructions,
restrictions={
"min_length": EMAIL_MIN_LENGTH,
"max_length": EMAIL_MAX_LENGTH,
}
)
return HttpResponse(form_desc.to_json(), content_type="application/json")
class UserViewSet(viewsets.ReadOnlyModelViewSet):
authentication_classes = (authentication.SessionAuthentication,)
permission_classes = (ApiKeyHeaderPermission,)
queryset = User.objects.all().prefetch_related("preferences")
serializer_class = UserSerializer
paginate_by = 10
paginate_by_param = "page_size"
class ForumRoleUsersListView(generics.ListAPIView):
"""
Forum roles are represented by a list of user dicts
"""
authentication_classes = (authentication.SessionAuthentication,)
permission_classes = (ApiKeyHeaderPermission,)
serializer_class = UserSerializer
paginate_by = 10
paginate_by_param = "page_size"
def get_queryset(self):
"""
Return a list of users with the specified role/course pair
"""
name = self.kwargs['name']
course_id_string = self.request.QUERY_PARAMS.get('course_id')
if not course_id_string:
raise ParseError('course_id must be specified')
course_id = SlashSeparatedCourseKey.from_deprecated_string(course_id_string)
role = Role.objects.get_or_create(course_id=course_id, name=name)[0]
users = role.users.all()
return users
class UserPreferenceViewSet(viewsets.ReadOnlyModelViewSet):
authentication_classes = (authentication.SessionAuthentication,)
permission_classes = (ApiKeyHeaderPermission,)
queryset = UserPreference.objects.all()
filter_backends = (filters.DjangoFilterBackend,)
filter_fields = ("key", "user")
serializer_class = UserPreferenceSerializer
paginate_by = 10
paginate_by_param = "page_size"
class PreferenceUsersListView(generics.ListAPIView):
authentication_classes = (authentication.SessionAuthentication,)
permission_classes = (ApiKeyHeaderPermission,)
serializer_class = UserSerializer
paginate_by = 10
paginate_by_param = "page_size"
def get_queryset(self):
return User.objects.filter(preferences__key=self.kwargs["pref_key"]).prefetch_related("preferences")
class UpdateEmailOptInPreference(APIView):
"""View for updating the email opt in preference. """
authentication_classes = (SessionAuthenticationAllowInactiveUser,)
@method_decorator(require_post_params(["course_id", "email_opt_in"]))
@method_decorator(ensure_csrf_cookie)
def post(self, request):
""" Post function for updating the email opt in preference.
Allows the modification or creation of the email opt in preference at an
organizational level.
Args:
request (Request): The request should contain the following POST parameters:
* course_id: The slash separated course ID. Used to determine the organization
for this preference setting.
* email_opt_in: "True" or "False" to determine if the user is opting in for emails from
this organization. If the string does not match "True" (case insensitive) it will
assume False.
"""
course_id = request.DATA['course_id']
try:
org = locator.CourseLocator.from_string(course_id).org
except InvalidKeyError:
return HttpResponse(
status=400,
content="No course '{course_id}' found".format(course_id=course_id),
content_type="text/plain"
)
# Only check for true. All other values are False.
email_opt_in = request.DATA['email_opt_in'].lower() == 'true'
update_email_opt_in(request.user, org, email_opt_in)
return HttpResponse(status=status.HTTP_200_OK)
| agpl-3.0 |
cloudera/hue | desktop/core/ext-py/SQLAlchemy-1.3.17/test/orm/test_options.py | 2 | 83938 | import sqlalchemy as sa
from sqlalchemy import Column
from sqlalchemy import ForeignKey
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import String
from sqlalchemy import testing
from sqlalchemy.orm import aliased
from sqlalchemy.orm import class_mapper
from sqlalchemy.orm import column_property
from sqlalchemy.orm import create_session
from sqlalchemy.orm import defaultload
from sqlalchemy.orm import defer
from sqlalchemy.orm import exc as orm_exc
from sqlalchemy.orm import joinedload
from sqlalchemy.orm import lazyload
from sqlalchemy.orm import Load
from sqlalchemy.orm import load_only
from sqlalchemy.orm import mapper
from sqlalchemy.orm import relationship
from sqlalchemy.orm import Session
from sqlalchemy.orm import strategy_options
from sqlalchemy.orm import subqueryload
from sqlalchemy.orm import util as orm_util
from sqlalchemy.orm import with_polymorphic
from sqlalchemy.testing import fixtures
from sqlalchemy.testing.assertions import assert_raises_message
from sqlalchemy.testing.assertions import eq_
from test.orm import _fixtures
from .inheritance._poly_fixtures import _Polymorphic
from .inheritance._poly_fixtures import Company
from .inheritance._poly_fixtures import Engineer
from .inheritance._poly_fixtures import Manager
from .inheritance._poly_fixtures import Person
class QueryTest(_fixtures.FixtureTest):
run_setup_mappers = "once"
run_inserts = "once"
run_deletes = None
@classmethod
def setup_mappers(cls):
cls._setup_stock_mapping()
class SubItem(cls.classes.Item):
pass
mapper(
SubItem,
None,
inherits=cls.classes.Item,
properties={
"extra_keywords": relationship(
cls.classes.Keyword,
viewonly=True,
secondary=cls.tables.item_keywords,
)
},
)
class PathTest(object):
def _make_path(self, path):
r = []
for i, item in enumerate(path):
if i % 2 == 0:
if isinstance(item, type):
item = class_mapper(item)
else:
if isinstance(item, str):
item = inspect(r[-1]).mapper.attrs[item]
r.append(item)
return tuple(r)
def _make_path_registry(self, path):
return orm_util.PathRegistry.coerce(self._make_path(path))
def _assert_path_result(self, opt, q, paths):
q._attributes = q._attributes.copy()
attr = {}
if isinstance(opt, strategy_options._UnboundLoad):
for val in opt._to_bind:
val._bind_loader(
[ent.entity_zero for ent in q._mapper_entities],
q._current_path,
attr,
False,
)
else:
opt._process(q, True)
attr = q._attributes
assert_paths = [k[1] for k in attr]
eq_(
set([p for p in assert_paths]),
set([self._make_path(p) for p in paths]),
)
class LoadTest(PathTest, QueryTest):
def test_str(self):
User = self.classes.User
result = Load(User)
result.strategy = (("deferred", False), ("instrument", True))
eq_(
str(result),
"Load(strategy=(('deferred', False), ('instrument', True)))",
)
def test_gen_path_attr_entity(self):
User = self.classes.User
Address = self.classes.Address
result = Load(User)
eq_(
result._generate_path(
inspect(User)._path_registry,
User.addresses,
None,
"relationship",
),
self._make_path_registry([User, "addresses", Address]),
)
def test_gen_path_attr_column(self):
User = self.classes.User
result = Load(User)
eq_(
result._generate_path(
inspect(User)._path_registry, User.name, None, "column"
),
self._make_path_registry([User, "name"]),
)
def test_gen_path_string_entity(self):
User = self.classes.User
Address = self.classes.Address
result = Load(User)
eq_(
result._generate_path(
inspect(User)._path_registry, "addresses", None, "relationship"
),
self._make_path_registry([User, "addresses", Address]),
)
def test_gen_path_string_column(self):
User = self.classes.User
result = Load(User)
eq_(
result._generate_path(
inspect(User)._path_registry, "name", None, "column"
),
self._make_path_registry([User, "name"]),
)
def test_gen_path_invalid_from_col(self):
User = self.classes.User
result = Load(User)
result.path = self._make_path_registry([User, "name"])
assert_raises_message(
sa.exc.ArgumentError,
"Attribute 'name' of entity 'Mapper|User|users' does "
"not refer to a mapped entity",
result._generate_path,
result.path,
User.addresses,
None,
"relationship",
)
def test_gen_path_attr_entity_invalid_raiseerr(self):
User = self.classes.User
Order = self.classes.Order
result = Load(User)
assert_raises_message(
sa.exc.ArgumentError,
"Attribute 'Order.items' does not link from element "
"'Mapper|User|users'",
result._generate_path,
inspect(User)._path_registry,
Order.items,
None,
"relationship",
)
def test_gen_path_attr_entity_invalid_noraiseerr(self):
User = self.classes.User
Order = self.classes.Order
result = Load(User)
eq_(
result._generate_path(
inspect(User)._path_registry,
Order.items,
None,
"relationship",
False,
),
None,
)
def test_set_strat_ent(self):
User = self.classes.User
l1 = Load(User)
l2 = l1.joinedload("addresses")
to_bind = l2.context.values()[0]
eq_(
l1.context,
{("loader", self._make_path([User, "addresses"])): to_bind},
)
def test_set_strat_col(self):
User = self.classes.User
l1 = Load(User)
l2 = l1.defer("name")
l3 = list(l2.context.values())[0]
eq_(l1.context, {("loader", self._make_path([User, "name"])): l3})
class OfTypePathingTest(PathTest, QueryTest):
    """Test path generation for loader options traversing an
    ``of_type()`` endpoint, across all four combinations of
    bound/unbound options and string/attribute sub-paths, for both
    column and relationship targets on the subclass."""

    def _fixture(self):
        """Map a SubAddr subclass of Address with one extra column
        property ("sub_attr") and one extra relationship ("dings")."""
        User, Address = self.classes.User, self.classes.Address
        Dingaling = self.classes.Dingaling
        address_table = self.tables.addresses

        class SubAddr(Address):
            pass

        mapper(
            SubAddr,
            inherits=Address,
            properties={
                "sub_attr": column_property(address_table.c.email_address),
                "dings": relationship(Dingaling),
            },
        )
        return User, Address, SubAddr

    def test_oftype_only_col_attr_unbound(self):
        User, Address, SubAddr = self._fixture()
        l1 = defaultload(User.addresses.of_type(SubAddr)).defer(
            SubAddr.sub_attr
        )
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1,
            q,
            [(User, "addresses"), (User, "addresses", SubAddr, "sub_attr")],
        )

    def test_oftype_only_col_attr_bound(self):
        User, Address, SubAddr = self._fixture()
        l1 = (
            Load(User)
            .defaultload(User.addresses.of_type(SubAddr))
            .defer(SubAddr.sub_attr)
        )
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1,
            q,
            [(User, "addresses"), (User, "addresses", SubAddr, "sub_attr")],
        )

    def test_oftype_only_col_attr_string_unbound(self):
        User, Address, SubAddr = self._fixture()
        l1 = defaultload(User.addresses.of_type(SubAddr)).defer("sub_attr")
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1,
            q,
            [(User, "addresses"), (User, "addresses", SubAddr, "sub_attr")],
        )

    def test_oftype_only_col_attr_string_bound(self):
        User, Address, SubAddr = self._fixture()
        l1 = (
            Load(User)
            .defaultload(User.addresses.of_type(SubAddr))
            .defer("sub_attr")
        )
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1,
            q,
            [(User, "addresses"), (User, "addresses", SubAddr, "sub_attr")],
        )

    def test_oftype_only_rel_attr_unbound(self):
        User, Address, SubAddr = self._fixture()
        l1 = defaultload(User.addresses.of_type(SubAddr)).joinedload(
            SubAddr.dings
        )
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1, q, [(User, "addresses"), (User, "addresses", SubAddr, "dings")]
        )

    def test_oftype_only_rel_attr_bound(self):
        User, Address, SubAddr = self._fixture()
        l1 = (
            Load(User)
            .defaultload(User.addresses.of_type(SubAddr))
            .joinedload(SubAddr.dings)
        )
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1, q, [(User, "addresses"), (User, "addresses", SubAddr, "dings")]
        )

    def test_oftype_only_rel_attr_string_unbound(self):
        User, Address, SubAddr = self._fixture()
        l1 = defaultload(User.addresses.of_type(SubAddr)).joinedload("dings")
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1, q, [(User, "addresses"), (User, "addresses", SubAddr, "dings")]
        )

    def test_oftype_only_rel_attr_string_bound(self):
        User, Address, SubAddr = self._fixture()
        # fix: this previously duplicated the bound column-attr
        # defer("sub_attr") test and never covered the relationship-by-
        # string case its name claims; it now mirrors
        # test_oftype_only_rel_attr_string_unbound in bound form.
        l1 = (
            Load(User)
            .defaultload(User.addresses.of_type(SubAddr))
            .joinedload("dings")
        )
        sess = Session()
        q = sess.query(User)
        self._assert_path_result(
            l1, q, [(User, "addresses"), (User, "addresses", SubAddr, "dings")]
        )
class OptionsTest(PathTest, QueryTest):
    """Path-resolution tests for unbound loader options
    (``strategy_options._UnboundLoad``): one- and multi-level paths in
    string and attribute form, matching against a query's "current
    path", inheritance subclasses, ``of_type()``, aliased classes, and
    multi-entity queries.  Each test asserts the tuple paths the option
    resolves to (empty list = option does not apply to the query)."""

    def _option_fixture(self, *arg):
        # Build an unbound joinedload() option directly from raw keys,
        # bypassing the public joinedload() entry point.
        return strategy_options._UnboundLoad._from_keys(
            strategy_options._UnboundLoad.joinedload, arg, True, {}
        )

    def test_get_path_one_level_string(self):
        User = self.classes.User
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture("addresses")
        self._assert_path_result(opt, q, [(User, "addresses")])

    def test_get_path_one_level_attribute(self):
        User = self.classes.User
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture(User.addresses)
        self._assert_path_result(opt, q, [(User, "addresses")])

    def test_path_on_entity_but_doesnt_match_currentpath(self):
        User, Address = self.classes.User, self.classes.Address
        # ensure "current path" is fully consumed before
        # matching against current entities.
        # see [ticket:2098]
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture("email_address", "id")
        q = sess.query(Address)._with_current_path(
            orm_util.PathRegistry.coerce(
                [inspect(User), inspect(User).attrs.addresses]
            )
        )
        self._assert_path_result(opt, q, [])

    def test_get_path_one_level_with_unrelated(self):
        # option names an attribute that Order doesn't have: no match
        Order = self.classes.Order
        sess = Session()
        q = sess.query(Order)
        opt = self._option_fixture("addresses")
        self._assert_path_result(opt, q, [])

    def test_path_multilevel_string(self):
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture("orders.items.keywords")
        self._assert_path_result(
            opt,
            q,
            [
                (User, "orders"),
                (User, "orders", Order, "items"),
                (User, "orders", Order, "items", Item, "keywords"),
            ],
        )

    def test_path_multilevel_attribute(self):
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture(User.orders, Order.items, Item.keywords)
        self._assert_path_result(
            opt,
            q,
            [
                (User, "orders"),
                (User, "orders", Order, "items"),
                (User, "orders", Order, "items", Item, "keywords"),
            ],
        )

    def test_with_current_matching_string(self):
        # current path consumes the leading "orders.items" of the option
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([User, "orders", Order, "items"])
        )
        opt = self._option_fixture("orders.items.keywords")
        self._assert_path_result(opt, q, [(Item, "keywords")])

    def test_with_current_matching_attribute(self):
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([User, "orders", Order, "items"])
        )
        opt = self._option_fixture(User.orders, Order.items, Item.keywords)
        self._assert_path_result(opt, q, [(Item, "keywords")])

    def test_with_current_nonmatching_string(self):
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([User, "orders", Order, "items"])
        )
        opt = self._option_fixture("keywords")
        self._assert_path_result(opt, q, [])
        opt = self._option_fixture("items.keywords")
        self._assert_path_result(opt, q, [])

    def test_with_current_nonmatching_attribute(self):
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([User, "orders", Order, "items"])
        )
        opt = self._option_fixture(Item.keywords)
        self._assert_path_result(opt, q, [])
        opt = self._option_fixture(Order.items, Item.keywords)
        self._assert_path_result(opt, q, [])

    def test_with_current_nonmatching_entity(self):
        # aliased entities in either the current path or the option
        # must not match their plain-class counterparts
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry(
                [inspect(aliased(User)), "orders", Order, "items"]
            )
        )
        opt = self._option_fixture(User.orders)
        self._assert_path_result(opt, q, [])
        opt = self._option_fixture(User.orders, Order.items, Item.keywords)
        self._assert_path_result(opt, q, [])
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([User, "orders", Order, "items"])
        )
        ac = aliased(User)
        opt = self._option_fixture(ac.orders)
        self._assert_path_result(opt, q, [])
        opt = self._option_fixture(ac.orders, Order.items, Item.keywords)
        self._assert_path_result(opt, q, [])

    def test_with_current_match_aliased_classes(self):
        # the same aliased class on both sides does match; a partial
        # option path does not
        Item, User, Order = (
            self.classes.Item,
            self.classes.User,
            self.classes.Order,
        )
        ac = aliased(User)
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([inspect(ac), "orders", Order, "items"])
        )
        opt = self._option_fixture(ac.orders, Order.items, Item.keywords)
        self._assert_path_result(opt, q, [(Item, "keywords")])
        opt = self._option_fixture(ac.orders, Order.items)
        self._assert_path_result(opt, q, [])

    def test_from_base_to_subclass_attr(self):
        Dingaling, Address = self.classes.Dingaling, self.classes.Address
        sess = Session()

        class SubAddr(Address):
            pass

        mapper(
            SubAddr,
            inherits=Address,
            properties={"flub": relationship(Dingaling)},
        )
        q = sess.query(Address)
        opt = self._option_fixture(SubAddr.flub)
        self._assert_path_result(opt, q, [(SubAddr, "flub")])

    def test_from_subclass_to_subclass_attr(self):
        Dingaling, Address = self.classes.Dingaling, self.classes.Address
        sess = Session()

        class SubAddr(Address):
            pass

        mapper(
            SubAddr,
            inherits=Address,
            properties={"flub": relationship(Dingaling)},
        )
        q = sess.query(SubAddr)
        opt = self._option_fixture(SubAddr.flub)
        self._assert_path_result(opt, q, [(SubAddr, "flub")])

    def test_from_base_to_base_attr_via_subclass(self):
        # attribute accessed through the subclass resolves back to the
        # base-class property it belongs to
        Dingaling, Address = self.classes.Dingaling, self.classes.Address
        sess = Session()

        class SubAddr(Address):
            pass

        mapper(
            SubAddr,
            inherits=Address,
            properties={"flub": relationship(Dingaling)},
        )
        q = sess.query(Address)
        opt = self._option_fixture(SubAddr.user)
        self._assert_path_result(
            opt, q, [(Address, inspect(Address).attrs.user)]
        )

    def test_of_type(self):
        User, Address = self.classes.User, self.classes.Address
        sess = Session()

        class SubAddr(Address):
            pass

        mapper(SubAddr, inherits=Address)
        q = sess.query(User)
        opt = self._option_fixture(
            User.addresses.of_type(SubAddr), SubAddr.user
        )
        u_mapper = inspect(User)
        a_mapper = inspect(Address)
        self._assert_path_result(
            opt,
            q,
            [
                (u_mapper, u_mapper.attrs.addresses),
                (
                    u_mapper,
                    u_mapper.attrs.addresses,
                    a_mapper,
                    a_mapper.attrs.user,
                ),
            ],
        )

    def test_of_type_string_attr(self):
        User, Address = self.classes.User, self.classes.Address
        sess = Session()

        class SubAddr(Address):
            pass

        mapper(SubAddr, inherits=Address)
        q = sess.query(User)
        opt = self._option_fixture(User.addresses.of_type(SubAddr), "user")
        u_mapper = inspect(User)
        a_mapper = inspect(Address)
        self._assert_path_result(
            opt,
            q,
            [
                (u_mapper, u_mapper.attrs.addresses),
                (
                    u_mapper,
                    u_mapper.attrs.addresses,
                    a_mapper,
                    a_mapper.attrs.user,
                ),
            ],
        )

    def test_of_type_plus_level(self):
        Dingaling, User, Address = (
            self.classes.Dingaling,
            self.classes.User,
            self.classes.Address,
        )
        sess = Session()

        class SubAddr(Address):
            pass

        mapper(
            SubAddr,
            inherits=Address,
            properties={"flub": relationship(Dingaling)},
        )
        q = sess.query(User)
        opt = self._option_fixture(
            User.addresses.of_type(SubAddr), SubAddr.flub
        )
        u_mapper = inspect(User)
        sa_mapper = inspect(SubAddr)
        self._assert_path_result(
            opt,
            q,
            [
                (u_mapper, u_mapper.attrs.addresses),
                (
                    u_mapper,
                    u_mapper.attrs.addresses,
                    sa_mapper,
                    sa_mapper.attrs.flub,
                ),
            ],
        )

    def test_aliased_single(self):
        User = self.classes.User
        sess = Session()
        ualias = aliased(User)
        q = sess.query(ualias)
        opt = self._option_fixture(ualias.addresses)
        self._assert_path_result(opt, q, [(inspect(ualias), "addresses")])

    def test_with_current_aliased_single(self):
        User, Address = self.classes.User, self.classes.Address
        sess = Session()
        ualias = aliased(User)
        q = sess.query(ualias)._with_current_path(
            self._make_path_registry([Address, "user"])
        )
        opt = self._option_fixture(Address.user, ualias.addresses)
        self._assert_path_result(opt, q, [(inspect(ualias), "addresses")])

    def test_with_current_aliased_single_nonmatching_option(self):
        # alias in the option, plain class in the query: no match
        User, Address = self.classes.User, self.classes.Address
        sess = Session()
        ualias = aliased(User)
        q = sess.query(User)._with_current_path(
            self._make_path_registry([Address, "user"])
        )
        opt = self._option_fixture(Address.user, ualias.addresses)
        self._assert_path_result(opt, q, [])

    def test_with_current_aliased_single_nonmatching_entity(self):
        # plain class in the option, alias in the query: no match
        User, Address = self.classes.User, self.classes.Address
        sess = Session()
        ualias = aliased(User)
        q = sess.query(ualias)._with_current_path(
            self._make_path_registry([Address, "user"])
        )
        opt = self._option_fixture(Address.user, User.addresses)
        self._assert_path_result(opt, q, [])

    def test_multi_entity_opt_on_second(self):
        Item = self.classes.Item
        Order = self.classes.Order
        opt = self._option_fixture(Order.items)
        sess = Session()
        q = sess.query(Item, Order)
        self._assert_path_result(opt, q, [(Order, "items")])

    def test_multi_entity_opt_on_string(self):
        # string options are ambiguous across multiple entities: no match
        Item = self.classes.Item
        Order = self.classes.Order
        opt = self._option_fixture("items")
        sess = Session()
        q = sess.query(Item, Order)
        self._assert_path_result(opt, q, [])

    def test_multi_entity_no_mapped_entities(self):
        # column-only query: string option has nothing to attach to
        Item = self.classes.Item
        Order = self.classes.Order
        opt = self._option_fixture("items")
        sess = Session()
        q = sess.query(Item.id, Order.id)
        self._assert_path_result(opt, q, [])

    def test_path_exhausted(self):
        # option path is fully consumed by the current path: no residue
        User = self.classes.User
        Item = self.classes.Item
        Order = self.classes.Order
        opt = self._option_fixture(User.orders)
        sess = Session()
        q = sess.query(Item)._with_current_path(
            self._make_path_registry([User, "orders", Order, "items"])
        )
        self._assert_path_result(opt, q, [])

    def test_chained(self):
        User = self.classes.User
        Order = self.classes.Order
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture(User.orders).joinedload("items")
        self._assert_path_result(
            opt, q, [(User, "orders"), (User, "orders", Order, "items")]
        )

    def test_chained_plus_dotted(self):
        User = self.classes.User
        Order = self.classes.Order
        Item = self.classes.Item
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture("orders.items").joinedload("keywords")
        self._assert_path_result(
            opt,
            q,
            [
                (User, "orders"),
                (User, "orders", Order, "items"),
                (User, "orders", Order, "items", Item, "keywords"),
            ],
        )

    def test_chained_plus_multi(self):
        User = self.classes.User
        Order = self.classes.Order
        Item = self.classes.Item
        sess = Session()
        q = sess.query(User)
        opt = self._option_fixture(User.orders, Order.items).joinedload(
            "keywords"
        )
        self._assert_path_result(
            opt,
            q,
            [
                (User, "orders"),
                (User, "orders", Order, "items"),
                (User, "orders", Order, "items", Item, "keywords"),
            ],
        )
class FromSubclassOptionsTest(PathTest, fixtures.DeclarativeMappedTest):
    """Test that an option starting from a subclass, against a current
    path recorded with that subclass, still binds against the base-class
    relationship path."""

    # test for regression to #3963
    run_setup_mappers = "once"
    run_inserts = "once"
    run_deletes = None

    @classmethod
    def setup_mappers(cls):
        # BaseCls/SubClass single-table-joined inheritance, with a
        # Related -> SubRelated chain hanging off the base class.
        Base = cls.DeclarativeBasic

        class BaseCls(Base):
            __tablename__ = "basecls"
            id = Column(Integer, primary_key=True)
            type = Column(String(30))
            related_id = Column(ForeignKey("related.id"))
            related = relationship("Related")

        class SubClass(BaseCls):
            __tablename__ = "subcls"
            id = Column(ForeignKey("basecls.id"), primary_key=True)

        class Related(Base):
            __tablename__ = "related"
            id = Column(Integer, primary_key=True)
            sub_related_id = Column(ForeignKey("sub_related.id"))
            sub_related = relationship("SubRelated")

        class SubRelated(Base):
            __tablename__ = "sub_related"
            id = Column(Integer, primary_key=True)

    def test_with_current_nonmatching_entity_subclasses(self):
        BaseCls, SubClass, Related, SubRelated = self.classes(
            "BaseCls", "SubClass", "Related", "SubRelated"
        )
        sess = Session()
        q = sess.query(Related)._with_current_path(
            self._make_path_registry([inspect(SubClass), "related"])
        )
        opt = subqueryload(SubClass.related).subqueryload(Related.sub_related)
        # only the trailing sub_related portion remains after the
        # current path consumes SubClass.related
        self._assert_path_result(opt, q, [(Related, "sub_related")])
class OptionsNoPropTest(_fixtures.FixtureTest):
    """test the error messages emitted when using property
    options in conjunction with column-only entities, or
    for not existing options

    Each ``_assert_*`` helper either expects the option to apply
    cleanly or asserts the exact error message raised by
    ``Query.options()``.
    """

    run_create_tables = False
    run_inserts = None
    run_deletes = None

    def test_option_with_mapper_basestring(self):
        Item = self.classes.Item
        self._assert_option([Item], "keywords")

    def test_option_with_mapper_PropCompatator(self):
        Item = self.classes.Item
        self._assert_option([Item], Item.keywords)

    def test_option_with_mapper_then_column_basestring(self):
        Item = self.classes.Item
        self._assert_option([Item, Item.id], "keywords")

    def test_option_with_mapper_then_column_PropComparator(self):
        Item = self.classes.Item
        self._assert_option([Item, Item.id], Item.keywords)

    def test_option_with_column_then_mapper_basestring(self):
        Item = self.classes.Item
        self._assert_option([Item.id, Item], "keywords")

    def test_option_with_column_then_mapper_PropComparator(self):
        Item = self.classes.Item
        self._assert_option([Item.id, Item], Item.keywords)

    def test_option_with_column_basestring(self):
        Item = self.classes.Item
        message = (
            "Query has only expression-based entities - can't "
            'find property named "keywords".'
        )
        self._assert_eager_with_just_column_exception(
            Item.id, "keywords", message
        )

    def test_option_with_column_PropComparator(self):
        Item = self.classes.Item
        self._assert_eager_with_just_column_exception(
            Item.id,
            Item.keywords,
            "Query has only expression-based entities, which do not apply "
            'to relationship property "Item.keywords"',
        )

    def test_option_against_nonexistent_PropComparator(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Keyword],
            (joinedload(Item.keywords),),
            'Mapped attribute "Item.keywords" does not apply to any of the '
            "root entities in this query, e.g. mapped class "
            "Keyword->keywords. Please specify the full path from one of "
            "the root entities to the target attribute. ",
        )

    def test_option_against_nonexistent_basestring(self):
        Item = self.classes.Item
        self._assert_eager_with_entity_exception(
            [Item],
            (joinedload("foo"),),
            'Can\'t find property named "foo" on mapped class '
            "Item->items in this Query.",
        )

    def test_option_against_nonexistent_twolevel_basestring(self):
        Item = self.classes.Item
        self._assert_eager_with_entity_exception(
            [Item],
            (joinedload("keywords.foo"),),
            'Can\'t find property named "foo" on mapped class '
            "Keyword->keywords in this Query.",
        )

    def test_option_against_nonexistent_twolevel_chained(self):
        Item = self.classes.Item
        self._assert_eager_with_entity_exception(
            [Item],
            (joinedload("keywords").joinedload("foo"),),
            'Can\'t find property named "foo" on mapped class '
            "Keyword->keywords in this Query.",
        )

    @testing.fails_if(
        lambda: True,
        "PropertyOption doesn't yet check for relation/column on end result",
    )
    def test_option_against_non_relation_basestring(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Keyword, Item],
            (joinedload("keywords"),),
            r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
            "does not refer to a mapped entity",
        )

    @testing.fails_if(
        lambda: True,
        "PropertyOption doesn't yet check for relation/column on end result",
    )
    def test_option_against_multi_non_relation_basestring(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Keyword, Item],
            (joinedload("keywords"),),
            r"Attribute 'keywords' of entity 'Mapper\|Keyword\|keywords' "
            "does not refer to a mapped entity",
        )

    def test_option_against_wrong_entity_type_basestring(self):
        Item = self.classes.Item
        self._assert_loader_strategy_exception(
            [Item],
            (joinedload("id").joinedload("keywords"),),
            'Can\'t apply "joined loader" strategy to property "Item.id", '
            'which is a "column property"; this loader strategy is '
            'intended to be used with a "relationship property".',
        )

    def test_col_option_against_relationship_basestring(self):
        Item = self.classes.Item
        self._assert_loader_strategy_exception(
            [Item],
            (load_only("keywords"),),
            'Can\'t apply "column loader" strategy to property '
            '"Item.keywords", which is a "relationship property"; this '
            'loader strategy is intended to be used with a "column property".',
        )

    def test_load_only_against_multi_entity_attr(self):
        User = self.classes.User
        Item = self.classes.Item
        self._assert_eager_with_entity_exception(
            [User, Item],
            (load_only(User.id, Item.id),),
            r"Can't apply wildcard \('\*'\) or load_only\(\) loader option "
            "to multiple entities mapped class User->users, mapped class "
            "Item->items. Specify loader options for each entity "
            "individually, such as "
            r"Load\(mapped class User->users\).some_option\('\*'\), "
            r"Load\(mapped class Item->items\).some_option\('\*'\).",
        )

    def test_col_option_against_relationship_attr(self):
        Item = self.classes.Item
        self._assert_loader_strategy_exception(
            [Item],
            (load_only(Item.keywords),),
            'Can\'t apply "column loader" strategy to property '
            '"Item.keywords", which is a "relationship property"; this '
            'loader strategy is intended to be used with a "column property".',
        )

    def test_option_against_multi_non_relation_twolevel_basestring(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_loader_strategy_exception(
            [Keyword, Item],
            (joinedload("id").joinedload("keywords"),),
            'Can\'t apply "joined loader" strategy to property "Keyword.id", '
            'which is a "column property"; this loader strategy is intended '
            'to be used with a "relationship property".',
        )

    def test_option_against_multi_nonexistent_basestring(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Keyword, Item],
            (joinedload("description"),),
            'Can\'t find property named "description" on mapped class '
            "Keyword->keywords in this Query.",
        )

    def test_option_against_multi_no_entities_basestring(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Keyword.id, Item.id],
            (joinedload("keywords"),),
            r"Query has only expression-based entities - can't find property "
            'named "keywords".',
        )

    def test_option_against_wrong_multi_entity_type_attr_one(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_loader_strategy_exception(
            [Keyword, Item],
            (joinedload(Keyword.id).joinedload(Item.keywords),),
            'Can\'t apply "joined loader" strategy to property "Keyword.id", '
            'which is a "column property"; this loader strategy is intended '
            'to be used with a "relationship property".',
        )

    def test_option_against_wrong_multi_entity_type_attr_two(self):
        # note Keyword.keywords here is a column_property, not the
        # Item.keywords relationship -- see setup_mappers below
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_loader_strategy_exception(
            [Keyword, Item],
            (joinedload(Keyword.keywords).joinedload(Item.keywords),),
            'Can\'t apply "joined loader" strategy to property '
            '"Keyword.keywords", which is a "column property"; this loader '
            'strategy is intended to be used with a "relationship property".',
        )

    def test_option_against_wrong_multi_entity_type_attr_three(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Keyword.id, Item.id],
            (joinedload(Keyword.keywords).joinedload(Item.keywords),),
            "Query has only expression-based entities, which do not apply to "
            'column property "Keyword.keywords"',
        )

    def test_wrong_type_in_option(self):
        Item = self.classes.Item
        Keyword = self.classes.Keyword
        self._assert_eager_with_entity_exception(
            [Item],
            (joinedload(Keyword),),
            r"mapper option expects string key or list of attributes",
        )

    def test_non_contiguous_all_option(self):
        User = self.classes.User
        self._assert_eager_with_entity_exception(
            [User],
            (joinedload(User.addresses).joinedload(User.orders),),
            r"Attribute 'User.orders' does not link "
            "from element 'Mapper|Address|addresses'",
        )

    def test_non_contiguous_all_option_of_type(self):
        User = self.classes.User
        Order = self.classes.Order
        self._assert_eager_with_entity_exception(
            [User],
            (
                joinedload(User.addresses).joinedload(
                    User.orders.of_type(Order)
                ),
            ),
            r"Attribute 'User.orders' does not link "
            "from element 'Mapper|Address|addresses'",
        )

    @classmethod
    def setup_mappers(cls):
        # deliberately maps Keyword.keywords as a *column* property so
        # the column-vs-relationship strategy errors can be provoked
        users, User, addresses, Address, orders, Order = (
            cls.tables.users,
            cls.classes.User,
            cls.tables.addresses,
            cls.classes.Address,
            cls.tables.orders,
            cls.classes.Order,
        )
        mapper(
            User,
            users,
            properties={
                "addresses": relationship(Address),
                "orders": relationship(Order),
            },
        )
        mapper(Address, addresses)
        mapper(Order, orders)
        keywords, items, item_keywords, Keyword, Item = (
            cls.tables.keywords,
            cls.tables.items,
            cls.tables.item_keywords,
            cls.classes.Keyword,
            cls.classes.Item,
        )
        mapper(
            Keyword,
            keywords,
            properties={
                "keywords": column_property(keywords.c.name + "some keyword")
            },
        )
        mapper(
            Item,
            items,
            properties=dict(
                keywords=relationship(Keyword, secondary=item_keywords)
            ),
        )

    def _assert_option(self, entity_list, option):
        # option should apply: the loader key must appear in q._attributes
        Item = self.classes.Item
        q = create_session().query(*entity_list).options(joinedload(option))
        key = ("loader", (inspect(Item), inspect(Item).attrs.keywords))
        assert key in q._attributes

    def _assert_loader_strategy_exception(self, entity_list, options, message):
        # wrong-strategy errors raise LoaderStrategyException
        assert_raises_message(
            orm_exc.LoaderStrategyException,
            message,
            create_session().query(*entity_list).options,
            *options
        )

    def _assert_eager_with_entity_exception(
        self, entity_list, options, message
    ):
        # path/entity mismatch errors raise ArgumentError
        assert_raises_message(
            sa.exc.ArgumentError,
            message,
            create_session().query(*entity_list).options,
            *options
        )

    def _assert_eager_with_just_column_exception(
        self, column, eager_option, message
    ):
        assert_raises_message(
            sa.exc.ArgumentError,
            message,
            create_session().query(column).options,
            joinedload(eager_option),
        )
class OptionsNoPropTestInh(_Polymorphic):
    """Error-message tests for loader options against inheritance
    hierarchies: missing attributes on with_polymorphic() entities,
    of_type() targets, and aliased classes."""

    def test_missing_attr_wpoly_subclasss(self):
        s = Session()
        wp = with_polymorphic(Person, [Manager], flat=True)
        assert_raises_message(
            sa.exc.ArgumentError,
            r'Mapped attribute "Manager.status" does not apply to any of '
            r"the root entities in this query, e.g. "
            r"with_polymorphic\(Person, \[Manager\]\).",
            s.query(wp).options,
            load_only(Manager.status),
        )

    def test_missing_attr_of_type_subclass(self):
        s = Session()
        assert_raises_message(
            sa.exc.ArgumentError,
            r'Attribute "Manager.manager_name" does not link from element '
            r'"with_polymorphic\(Person, \[Engineer\]\)".$',
            s.query(Company).options,
            joinedload(Company.employees.of_type(Engineer)).load_only(
                Manager.manager_name
            ),
        )

    def test_missing_attr_of_type_subclass_name_matches(self):
        s = Session()
        # the name "status" is present on Engineer also, make sure
        # that doesn't get mixed up here
        assert_raises_message(
            sa.exc.ArgumentError,
            r'Attribute "Manager.status" does not link from element '
            r'"with_polymorphic\(Person, \[Engineer\]\)".$',
            s.query(Company).options,
            joinedload(Company.employees.of_type(Engineer)).load_only(
                Manager.status
            ),
        )

    def test_missing_str_attr_of_type_subclass(self):
        s = Session()
        assert_raises_message(
            sa.exc.ArgumentError,
            r'Can\'t find property named "manager_name" on '
            r"mapped class Engineer->engineers in this Query.$",
            s.query(Company).options,
            joinedload(Company.employees.of_type(Engineer)).load_only(
                "manager_name"
            ),
        )

    def test_missing_attr_of_type_wpoly_subclass(self):
        s = Session()
        wp = with_polymorphic(Person, [Manager], flat=True)
        assert_raises_message(
            sa.exc.ArgumentError,
            r'Attribute "Manager.manager_name" does not link from '
            r'element "with_polymorphic\(Person, \[Manager\]\)".$',
            s.query(Company).options,
            joinedload(Company.employees.of_type(wp)).load_only(
                Manager.manager_name
            ),
        )

    def test_missing_attr_is_missing_of_type_for_alias(self):
        s = Session()
        pa = aliased(Person)
        # without of_type(pa), the aliased attribute doesn't link and
        # the error message suggests adding of_type()
        assert_raises_message(
            sa.exc.ArgumentError,
            r'Attribute "AliasedClass_Person.name" does not link from '
            r'element "mapped class Person->people". Did you mean to use '
            r"Company.employees.of_type\(AliasedClass_Person\)\?",
            s.query(Company).options,
            joinedload(Company.employees).load_only(pa.name),
        )
        # with of_type(pa), the same option binds at the aliased path
        q = s.query(Company).options(
            joinedload(Company.employees.of_type(pa)).load_only(pa.name)
        )
        orig_path = inspect(Company)._path_registry[
            Company.employees.property
        ][inspect(pa)][pa.name.property]
        key = ("loader", orig_path.natural_path)
        loader = q._attributes[key]
        eq_(loader.path, orig_path)
class PickleTest(PathTest, QueryTest):
    """Test the pickling protocol (__getstate__/__setstate__) of
    _UnboundLoad, including restoring legacy pickled state that lacks
    the newer _of_type path element."""

    def _option_fixture(self, *arg):
        return strategy_options._UnboundLoad._from_keys(
            strategy_options._UnboundLoad.joinedload, arg, True, {}
        )

    def test_modern_opt_getstate(self):
        # __getstate__ reduces the path to (cls, key, of_type) tuples
        User = self.classes.User
        opt = self._option_fixture(User.addresses)
        to_bind = list(opt._to_bind)
        eq_(
            opt.__getstate__(),
            {
                "_is_chain_link": False,
                "local_opts": {},
                "is_class_strategy": False,
                "path": [(User, "addresses", None)],
                "propagate_to_loaders": True,
                "_of_type": None,
                "_to_bind": to_bind,
            },
        )

    def test_modern_opt_setstate(self):
        # round-trip: restore from modern three-tuple path state, then
        # confirm the option still binds to the correct loader path
        User = self.classes.User
        inner_opt = strategy_options._UnboundLoad.__new__(
            strategy_options._UnboundLoad
        )
        inner_state = {
            "_is_chain_link": False,
            "local_opts": {},
            "is_class_strategy": False,
            "path": [(User, "addresses", None)],
            "propagate_to_loaders": True,
            "_to_bind": None,
            "strategy": (("lazy", "joined"),),
        }
        inner_opt.__setstate__(inner_state)
        opt = strategy_options._UnboundLoad.__new__(
            strategy_options._UnboundLoad
        )
        state = {
            "_is_chain_link": False,
            "local_opts": {},
            "is_class_strategy": False,
            "path": [(User, "addresses", None)],
            "propagate_to_loaders": True,
            "_to_bind": [inner_opt],
        }
        opt.__setstate__(state)
        query = create_session().query(User)
        attr = {}
        load = opt._bind_loader(
            [ent.entity_zero for ent in query._mapper_entities],
            query._current_path,
            attr,
            False,
        )
        eq_(
            load.path,
            inspect(User)._path_registry[User.addresses.property][
                inspect(self.classes.Address)
            ],
        )

    def test_legacy_opt_setstate(self):
        # legacy state stores two-tuple path elements (no of_type slot);
        # __setstate__ must upgrade them transparently
        User = self.classes.User
        opt = strategy_options._UnboundLoad.__new__(
            strategy_options._UnboundLoad
        )
        state = {
            "_is_chain_link": False,
            "local_opts": {},
            "is_class_strategy": False,
            "path": [(User, "addresses")],
            "propagate_to_loaders": True,
            "_to_bind": [opt],
            "strategy": (("lazy", "joined"),),
        }
        opt.__setstate__(state)
        query = create_session().query(User)
        attr = {}
        load = opt._bind_loader(
            [ent.entity_zero for ent in query._mapper_entities],
            query._current_path,
            attr,
            False,
        )
        eq_(
            load.path,
            inspect(User)._path_registry[User.addresses.property][
                inspect(self.classes.Address)
            ],
        )
class LocalOptsTest(PathTest, QueryTest):
    """Test that local_opts dictionaries from multiple loader options
    targeting the same column path are merged, for both opts-only and
    strategy-setting custom options, bound and unbound."""

    @classmethod
    def setup_class(cls):
        super(LocalOptsTest, cls).setup_class()

        # register two ad-hoc loader options for the duration of the
        # class: one that only carries opts, one that also sets a
        # deferred column strategy
        @strategy_options.loader_option()
        def some_col_opt_only(loadopt, key, opts):
            return loadopt.set_column_strategy(
                (key,), None, opts, opts_only=True
            )

        @strategy_options.loader_option()
        def some_col_opt_strategy(loadopt, key, opts):
            return loadopt.set_column_strategy(
                (key,), {"deferred": True, "instrument": True}, opts
            )

        cls.some_col_opt_only = some_col_opt_only
        cls.some_col_opt_strategy = some_col_opt_strategy

    def _assert_attrs(self, opts, expected):
        # bind every option against a query of User, then check the
        # merged local_opts at the User.name loader path
        User = self.classes.User
        query = create_session().query(User)
        attr = {}
        for opt in opts:
            if isinstance(opt, strategy_options._UnboundLoad):
                for tb in opt._to_bind:
                    tb._bind_loader(
                        [ent.entity_zero for ent in query._mapper_entities],
                        query._current_path,
                        attr,
                        False,
                    )
            else:
                # bound Load: its context already holds the loaders
                attr.update(opt.context)
        key = (
            "loader",
            tuple(inspect(User)._path_registry[User.name.property]),
        )
        eq_(attr[key].local_opts, expected)

    def test_single_opt_only(self):
        opt = strategy_options._UnboundLoad().some_col_opt_only(
            "name", {"foo": "bar"}
        )
        self._assert_attrs([opt], {"foo": "bar"})

    def test_unbound_multiple_opt_only(self):
        opts = [
            strategy_options._UnboundLoad().some_col_opt_only(
                "name", {"foo": "bar"}
            ),
            strategy_options._UnboundLoad().some_col_opt_only(
                "name", {"bat": "hoho"}
            ),
        ]
        self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})

    def test_bound_multiple_opt_only(self):
        User = self.classes.User
        opts = [
            Load(User)
            .some_col_opt_only("name", {"foo": "bar"})
            .some_col_opt_only("name", {"bat": "hoho"})
        ]
        self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})

    def test_bound_strat_opt_recvs_from_optonly(self):
        User = self.classes.User
        opts = [
            Load(User)
            .some_col_opt_only("name", {"foo": "bar"})
            .some_col_opt_strategy("name", {"bat": "hoho"})
        ]
        self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})

    def test_unbound_strat_opt_recvs_from_optonly(self):
        opts = [
            strategy_options._UnboundLoad().some_col_opt_only(
                "name", {"foo": "bar"}
            ),
            strategy_options._UnboundLoad().some_col_opt_strategy(
                "name", {"bat": "hoho"}
            ),
        ]
        self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})

    def test_unbound_opt_only_adds_to_strat(self):
        # reverse order: strategy first, opts-only second
        opts = [
            strategy_options._UnboundLoad().some_col_opt_strategy(
                "name", {"bat": "hoho"}
            ),
            strategy_options._UnboundLoad().some_col_opt_only(
                "name", {"foo": "bar"}
            ),
        ]
        self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})

    def test_bound_opt_only_adds_to_strat(self):
        User = self.classes.User
        opts = [
            Load(User)
            .some_col_opt_strategy("name", {"bat": "hoho"})
            .some_col_opt_only("name", {"foo": "bar"})
        ]
        self._assert_attrs(opts, {"foo": "bar", "bat": "hoho"})
class SubOptionsTest(PathTest, QueryTest):
run_create_tables = False
run_inserts = None
run_deletes = None
def _assert_opts(self, q, sub_opt, non_sub_opts):
existing_attributes = q._attributes
q._attributes = q._attributes.copy()
attr_a = {}
for val in sub_opt._to_bind:
val._bind_loader(
[ent.entity_zero for ent in q._mapper_entities],
q._current_path,
attr_a,
False,
)
q._attributes = existing_attributes.copy()
attr_b = {}
for opt in non_sub_opts:
for val in opt._to_bind:
val._bind_loader(
[ent.entity_zero for ent in q._mapper_entities],
q._current_path,
attr_b,
False,
)
for k, l in attr_b.items():
if not l.strategy:
del attr_b[k]
def strat_as_tuple(strat):
return (
strat.strategy,
strat.local_opts,
strat.propagate_to_loaders,
strat._of_type,
strat.is_class_strategy,
strat.is_opts_only,
)
eq_(
{path: strat_as_tuple(load) for path, load in attr_a.items()},
{path: strat_as_tuple(load) for path, load in attr_b.items()},
)
def test_one(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
sub_opt = joinedload(User.orders).options(
joinedload(Order.items).options(defer(Item.description)),
defer(Order.description),
)
non_sub_opts = [
joinedload(User.orders),
defaultload(User.orders)
.joinedload(Order.items)
.defer(Item.description),
defaultload(User.orders).defer(Order.description),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_two(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
sub_opt = defaultload(User.orders).options(
joinedload(Order.items),
defaultload(Order.items).options(subqueryload(Item.keywords)),
defer(Order.description),
)
non_sub_opts = [
defaultload(User.orders)
.joinedload(Order.items)
.subqueryload(Item.keywords),
defaultload(User.orders).defer(Order.description),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_three(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
sub_opt = defaultload(User.orders).options(defer("*"))
non_sub_opts = [defaultload(User.orders).defer("*")]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_four(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
sub_opt = joinedload(User.orders).options(
defer(Order.description),
joinedload(Order.items).options(
joinedload(Item.keywords).options(defer(Keyword.name)),
defer(Item.description),
),
)
non_sub_opts = [
joinedload(User.orders),
defaultload(User.orders).defer(Order.description),
defaultload(User.orders).joinedload(Order.items),
defaultload(User.orders)
.defaultload(Order.items)
.joinedload(Item.keywords),
defaultload(User.orders)
.defaultload(Order.items)
.defer(Item.description),
defaultload(User.orders)
.defaultload(Order.items)
.defaultload(Item.keywords)
.defer(Keyword.name),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_four_strings(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
sub_opt = joinedload("orders").options(
defer("description"),
joinedload("items").options(
joinedload("keywords").options(defer("name")),
defer("description"),
),
)
non_sub_opts = [
joinedload(User.orders),
defaultload(User.orders).defer(Order.description),
defaultload(User.orders).joinedload(Order.items),
defaultload(User.orders)
.defaultload(Order.items)
.joinedload(Item.keywords),
defaultload(User.orders)
.defaultload(Order.items)
.defer(Item.description),
defaultload(User.orders)
.defaultload(Order.items)
.defaultload(Item.keywords)
.defer(Keyword.name),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_five(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
sub_opt = joinedload(User.orders).options(load_only(Order.description))
non_sub_opts = [
joinedload(User.orders),
defaultload(User.orders).load_only(Order.description),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_five_strings(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
sub_opt = joinedload("orders").options(load_only("description"))
non_sub_opts = [
joinedload(User.orders),
defaultload(User.orders).load_only(Order.description),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_invalid_one(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# these options are "invalid", in that User.orders -> Item.keywords
# is not a path. However, the "normal" option is not generating
# an error for now, which is bad, but we're testing here only that
# it works the same way, so there you go. If and when we make this
# case raise, then both cases should raise in the same way.
sub_opt = joinedload(User.orders).options(
joinedload(Item.keywords), joinedload(Order.items)
)
non_sub_opts = [
joinedload(User.orders).joinedload(Item.keywords),
defaultload(User.orders).joinedload(Order.items),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_invalid_two(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# these options are "invalid", in that User.orders -> Item.keywords
# is not a path. However, the "normal" option is not generating
# an error for now, which is bad, but we're testing here only that
# it works the same way, so there you go. If and when we make this
# case raise, then both cases should raise in the same way.
sub_opt = joinedload("orders").options(
joinedload("keywords"), joinedload("items")
)
non_sub_opts = [
joinedload(User.orders).joinedload(Item.keywords),
defaultload(User.orders).joinedload(Order.items),
]
sess = Session()
self._assert_opts(sess.query(User), sub_opt, non_sub_opts)
def test_not_implemented_fromload(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
assert_raises_message(
NotImplementedError,
r"The options\(\) method is currently only supported "
"for 'unbound' loader options",
Load(User).joinedload(User.orders).options,
joinedload(Order.items),
)
def test_not_implemented_toload(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
assert_raises_message(
NotImplementedError,
r"Only 'unbound' loader options may be used with the "
r"Load.options\(\) method",
joinedload(User.orders).options,
Load(Order).joinedload(Order.items),
)
class CacheKeyTest(PathTest, QueryTest):
run_create_tables = False
run_inserts = None
run_deletes = None
def test_unbound_cache_key_included_safe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
opt = joinedload(User.orders).joinedload(Order.items)
eq_(
opt._generate_cache_key(query_path),
(((Order, "items", Item, ("lazy", "joined")),)),
)
def test_unbound_cache_key_included_safe_multipath(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
base = joinedload(User.orders)
opt1 = base.joinedload(Order.items)
opt2 = base.joinedload(Order.address)
eq_(
opt1._generate_cache_key(query_path),
(((Order, "items", Item, ("lazy", "joined")),)),
)
eq_(
opt2._generate_cache_key(query_path),
(((Order, "address", Address, ("lazy", "joined")),)),
)
def test_bound_cache_key_included_safe_multipath(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
base = Load(User).joinedload(User.orders)
opt1 = base.joinedload(Order.items)
opt2 = base.joinedload(Order.address)
eq_(
opt1._generate_cache_key(query_path),
(((Order, "items", Item, ("lazy", "joined")),)),
)
eq_(
opt2._generate_cache_key(query_path),
(((Order, "address", Address, ("lazy", "joined")),)),
)
def test_bound_cache_key_included_safe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
opt = Load(User).joinedload(User.orders).joinedload(Order.items)
eq_(
opt._generate_cache_key(query_path),
(((Order, "items", Item, ("lazy", "joined")),)),
)
def test_unbound_cache_key_excluded_on_other(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = joinedload(User.orders).joinedload(Order.items)
eq_(opt._generate_cache_key(query_path), None)
def test_bound_cache_key_excluded_on_other(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = Load(User).joinedload(User.orders).joinedload(Order.items)
eq_(opt._generate_cache_key(query_path), None)
def test_unbound_cache_key_excluded_on_aliased(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# query of:
#
# query(User).options(
# joinedload(aliased(User).orders).joinedload(Order.items))
#
# we are lazy loading Order objects from User.orders
# the path excludes our option so cache key should
# be None
query_path = self._make_path_registry([User, "orders"])
opt = joinedload(aliased(User).orders).joinedload(Order.items)
eq_(opt._generate_cache_key(query_path), None)
def test_bound_cache_key_wildcard_one(self):
# do not change this test, it is testing
# a specific condition in Load._chop_path().
User, Address = self.classes("User", "Address")
query_path = self._make_path_registry([User, "addresses"])
opt = Load(User).lazyload("*")
eq_(opt._generate_cache_key(query_path), None)
def test_unbound_cache_key_wildcard_one(self):
User, Address = self.classes("User", "Address")
query_path = self._make_path_registry([User, "addresses"])
opt = lazyload("*")
eq_(
opt._generate_cache_key(query_path),
(("relationship:_sa_default", ("lazy", "select")),),
)
def test_bound_cache_key_wildcard_two(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
query_path = self._make_path_registry([User])
opt = Load(User).lazyload("orders").lazyload("*")
eq_(
opt._generate_cache_key(query_path),
(
("orders", Order, ("lazy", "select")),
("orders", Order, "relationship:*", ("lazy", "select")),
),
)
def test_unbound_cache_key_wildcard_two(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
query_path = self._make_path_registry([User])
opt = lazyload("orders").lazyload("*")
eq_(
opt._generate_cache_key(query_path),
(
("orders", Order, ("lazy", "select")),
("orders", Order, "relationship:*", ("lazy", "select")),
),
)
def test_unbound_cache_key_of_type_subclass_relationship(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
query_path = self._make_path_registry([Order, "items", Item])
opt = subqueryload(Order.items.of_type(SubItem)).subqueryload(
SubItem.extra_keywords
)
eq_(
opt._generate_cache_key(query_path),
(
(SubItem, ("lazy", "subquery")),
("extra_keywords", Keyword, ("lazy", "subquery")),
),
)
def test_unbound_cache_key_of_type_subclass_relationship_stringattr(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
query_path = self._make_path_registry([Order, "items", Item])
opt = subqueryload(Order.items.of_type(SubItem)).subqueryload(
"extra_keywords"
)
eq_(
opt._generate_cache_key(query_path),
(
(SubItem, ("lazy", "subquery")),
("extra_keywords", Keyword, ("lazy", "subquery")),
),
)
def test_bound_cache_key_of_type_subclass_relationship(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
query_path = self._make_path_registry([Order, "items", Item])
opt = (
Load(Order)
.subqueryload(Order.items.of_type(SubItem))
.subqueryload(SubItem.extra_keywords)
)
eq_(
opt._generate_cache_key(query_path),
(
(SubItem, ("lazy", "subquery")),
("extra_keywords", Keyword, ("lazy", "subquery")),
),
)
def test_bound_cache_key_of_type_subclass_string_relationship(self):
User, Address, Order, Item, SubItem, Keyword = self.classes(
"User", "Address", "Order", "Item", "SubItem", "Keyword"
)
query_path = self._make_path_registry([Order, "items", Item])
opt = (
Load(Order)
.subqueryload(Order.items.of_type(SubItem))
.subqueryload("extra_keywords")
)
eq_(
opt._generate_cache_key(query_path),
(
(SubItem, ("lazy", "subquery")),
("extra_keywords", Keyword, ("lazy", "subquery")),
),
)
def test_unbound_cache_key_excluded_of_type_safe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# query of:
#
# query(User).options(
# subqueryload(User.orders).
# subqueryload(Order.items.of_type(SubItem)))
#
#
# we are lazy loading Address objects from User.addresses
# the path excludes our option so cache key should
# be None
query_path = self._make_path_registry([User, "addresses"])
opt = subqueryload(User.orders).subqueryload(
Order.items.of_type(SubItem)
)
eq_(opt._generate_cache_key(query_path), None)
def test_unbound_cache_key_excluded_of_type_unsafe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# query of:
#
# query(User).options(
# subqueryload(User.orders).
# subqueryload(Order.items.of_type(aliased(SubItem))))
#
#
# we are lazy loading Address objects from User.addresses
# the path excludes our option so cache key should
# be None
query_path = self._make_path_registry([User, "addresses"])
opt = subqueryload(User.orders).subqueryload(
Order.items.of_type(aliased(SubItem))
)
eq_(opt._generate_cache_key(query_path), None)
def test_bound_cache_key_excluded_of_type_safe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# query of:
#
# query(User).options(
# subqueryload(User.orders).
# subqueryload(Order.items.of_type(SubItem)))
#
#
# we are lazy loading Address objects from User.addresses
# the path excludes our option so cache key should
# be None
query_path = self._make_path_registry([User, "addresses"])
opt = (
Load(User)
.subqueryload(User.orders)
.subqueryload(Order.items.of_type(SubItem))
)
eq_(opt._generate_cache_key(query_path), None)
def test_bound_cache_key_excluded_of_type_unsafe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
# query of:
#
# query(User).options(
# subqueryload(User.orders).
# subqueryload(Order.items.of_type(aliased(SubItem))))
#
#
# we are lazy loading Address objects from User.addresses
# the path excludes our option so cache key should
# be None
query_path = self._make_path_registry([User, "addresses"])
opt = (
Load(User)
.subqueryload(User.orders)
.subqueryload(Order.items.of_type(aliased(SubItem)))
)
eq_(opt._generate_cache_key(query_path), None)
def test_unbound_cache_key_included_of_type_safe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
opt = joinedload(User.orders).joinedload(Order.items.of_type(SubItem))
eq_(
opt._generate_cache_key(query_path),
((Order, "items", SubItem, ("lazy", "joined")),),
)
def test_bound_cache_key_included_of_type_safe(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
opt = (
Load(User)
.joinedload(User.orders)
.joinedload(Order.items.of_type(SubItem))
)
eq_(
opt._generate_cache_key(query_path),
((Order, "items", SubItem, ("lazy", "joined")),),
)
def test_unbound_cache_key_included_unsafe_option_one(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
opt = joinedload(User.orders).joinedload(
Order.items.of_type(aliased(SubItem))
)
eq_(opt._generate_cache_key(query_path), False)
def test_unbound_cache_key_included_unsafe_option_two(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders", Order])
opt = joinedload(User.orders).joinedload(
Order.items.of_type(aliased(SubItem))
)
eq_(opt._generate_cache_key(query_path), False)
def test_unbound_cache_key_included_unsafe_option_three(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders", Order, "items"])
opt = joinedload(User.orders).joinedload(
Order.items.of_type(aliased(SubItem))
)
eq_(opt._generate_cache_key(query_path), False)
def test_unbound_cache_key_included_unsafe_query(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
au = aliased(User)
query_path = self._make_path_registry([inspect(au), "orders"])
opt = joinedload(au.orders).joinedload(Order.items)
eq_(opt._generate_cache_key(query_path), False)
def test_unbound_cache_key_included_safe_w_deferred(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = (
joinedload(User.addresses)
.defer(Address.email_address)
.defer(Address.user_id)
)
eq_(
opt._generate_cache_key(query_path),
(
(
Address,
"email_address",
("deferred", True),
("instrument", True),
),
(Address, "user_id", ("deferred", True), ("instrument", True)),
),
)
def test_unbound_cache_key_included_safe_w_deferred_multipath(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
base = joinedload(User.orders)
opt1 = base.joinedload(Order.items)
opt2 = (
base.joinedload(Order.address)
.defer(Address.email_address)
.defer(Address.user_id)
)
eq_(
opt1._generate_cache_key(query_path),
((Order, "items", Item, ("lazy", "joined")),),
)
eq_(
opt2._generate_cache_key(query_path),
(
(Order, "address", Address, ("lazy", "joined")),
(
Order,
"address",
Address,
"email_address",
("deferred", True),
("instrument", True),
),
(
Order,
"address",
Address,
"user_id",
("deferred", True),
("instrument", True),
),
),
)
def test_bound_cache_key_included_safe_w_deferred(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = (
Load(User)
.joinedload(User.addresses)
.defer(Address.email_address)
.defer(Address.user_id)
)
eq_(
opt._generate_cache_key(query_path),
(
(
Address,
"email_address",
("deferred", True),
("instrument", True),
),
(Address, "user_id", ("deferred", True), ("instrument", True)),
),
)
def test_bound_cache_key_included_safe_w_deferred_multipath(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
base = Load(User).joinedload(User.orders)
opt1 = base.joinedload(Order.items)
opt2 = (
base.joinedload(Order.address)
.defer(Address.email_address)
.defer(Address.user_id)
)
eq_(
opt1._generate_cache_key(query_path),
((Order, "items", Item, ("lazy", "joined")),),
)
eq_(
opt2._generate_cache_key(query_path),
(
(Order, "address", Address, ("lazy", "joined")),
(
Order,
"address",
Address,
"email_address",
("deferred", True),
("instrument", True),
),
(
Order,
"address",
Address,
"user_id",
("deferred", True),
("instrument", True),
),
),
)
def test_unbound_cache_key_included_safe_w_option(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
opt = (
defaultload("orders")
.joinedload("items", innerjoin=True)
.defer("description")
)
query_path = self._make_path_registry([User, "orders"])
eq_(
opt._generate_cache_key(query_path),
(
(
Order,
"items",
Item,
("lazy", "joined"),
("innerjoin", True),
),
(
Order,
"items",
Item,
"description",
("deferred", True),
("instrument", True),
),
),
)
def test_bound_cache_key_excluded_on_aliased(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
au = aliased(User)
opt = Load(au).joinedload(au.orders).joinedload(Order.items)
eq_(opt._generate_cache_key(query_path), None)
def test_bound_cache_key_included_unsafe_option_one(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders"])
opt = (
Load(User)
.joinedload(User.orders)
.joinedload(Order.items.of_type(aliased(SubItem)))
)
eq_(opt._generate_cache_key(query_path), False)
def test_bound_cache_key_included_unsafe_option_two(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders", Order])
opt = (
Load(User)
.joinedload(User.orders)
.joinedload(Order.items.of_type(aliased(SubItem)))
)
eq_(opt._generate_cache_key(query_path), False)
def test_bound_cache_key_included_unsafe_option_three(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "orders", Order, "items"])
opt = (
Load(User)
.joinedload(User.orders)
.joinedload(Order.items.of_type(aliased(SubItem)))
)
eq_(opt._generate_cache_key(query_path), False)
def test_bound_cache_key_included_unsafe_query(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
au = aliased(User)
query_path = self._make_path_registry([inspect(au), "orders"])
opt = Load(au).joinedload(au.orders).joinedload(Order.items)
eq_(opt._generate_cache_key(query_path), False)
def test_bound_cache_key_included_safe_w_option(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
opt = (
Load(User)
.defaultload("orders")
.joinedload("items", innerjoin=True)
.defer("description")
)
query_path = self._make_path_registry([User, "orders"])
eq_(
opt._generate_cache_key(query_path),
(
(
Order,
"items",
Item,
("lazy", "joined"),
("innerjoin", True),
),
(
Order,
"items",
Item,
"description",
("deferred", True),
("instrument", True),
),
),
)
def test_unbound_cache_key_included_safe_w_loadonly_strs(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = defaultload(User.addresses).load_only("id", "email_address")
eq_(
opt._generate_cache_key(query_path),
(
(Address, "id", ("deferred", False), ("instrument", True)),
(
Address,
"email_address",
("deferred", False),
("instrument", True),
),
(
Address,
"column:*",
("deferred", True),
("instrument", True),
("undefer_pks", True),
),
),
)
def test_unbound_cache_key_included_safe_w_loadonly_props(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = defaultload(User.addresses).load_only(
Address.id, Address.email_address
)
eq_(
opt._generate_cache_key(query_path),
(
(Address, "id", ("deferred", False), ("instrument", True)),
(
Address,
"email_address",
("deferred", False),
("instrument", True),
),
(
Address,
"column:*",
("deferred", True),
("instrument", True),
("undefer_pks", True),
),
),
)
def test_bound_cache_key_included_safe_w_loadonly(self):
User, Address, Order, Item, SubItem = self.classes(
"User", "Address", "Order", "Item", "SubItem"
)
query_path = self._make_path_registry([User, "addresses"])
opt = (
Load(User)
.defaultload(User.addresses)
.load_only("id", "email_address")
)
eq_(
opt._generate_cache_key(query_path),
(
(Address, "id", ("deferred", False), ("instrument", True)),
(
Address,
"email_address",
("deferred", False),
("instrument", True),
),
(
Address,
"column:*",
("deferred", True),
("instrument", True),
("undefer_pks", True),
),
),
)
def test_unbound_cache_key_undefer_group(self):
User, Address = self.classes("User", "Address")
query_path = self._make_path_registry([User, "addresses"])
opt = defaultload(User.addresses).undefer_group("xyz")
eq_(
opt._generate_cache_key(query_path),
((Address, "column:*", ("undefer_group_xyz", True)),),
)
def test_bound_cache_key_undefer_group(self):
User, Address = self.classes("User", "Address")
query_path = self._make_path_registry([User, "addresses"])
opt = Load(User).defaultload(User.addresses).undefer_group("xyz")
eq_(
opt._generate_cache_key(query_path),
((Address, "column:*", ("undefer_group_xyz", True)),),
)
| apache-2.0 |
endolith/pyeq2 | Examples/Simple/FitOneNamedEquation_2D.py | 3 | 4299 | from __future__ import print_function
from __future__ import unicode_literals
from __future__ import absolute_import
import os, sys, inspect
# ensure pyeq2 can be imported
if -1 != sys.path[0].find('pyeq2-master'):raise Exception('Please rename git checkout directory from "pyeq2-master" to "pyeq2"')
importDir = os.path.join(os.path.join(sys.path[0][:sys.path[0].rfind(os.sep)], '..'), '..')
if importDir not in sys.path:
sys.path.append(importDir)
import pyeq2
# see IModel.fittingTargetDictionary
equation = pyeq2.Models_2D.BioScience.HyperbolicLogistic() # SSQABS by default
data = equation.exampleData
pyeq2.dataConvertorService().ConvertAndSortColumnarASCII(data, equation, False)
equation.Solve()
##########################################################
print("Equation:", equation.GetDisplayName(), str(equation.GetDimensionality()) + "D")
print("Fitting target of", equation.fittingTargetDictionary[equation.fittingTarget], '=', equation.CalculateAllDataFittingTarget(equation.solvedCoefficients))
print("Fitted Parameters:")
for i in range(len(equation.solvedCoefficients)):
print(" %s = %-.16E" % (equation.GetCoefficientDesignators()[i], equation.solvedCoefficients[i]))
equation.CalculateModelErrors(equation.solvedCoefficients, equation.dataCache.allDataCacheDictionary)
print()
for i in range(len(equation.dataCache.allDataCacheDictionary['DependentData'])):
print('X:', equation.dataCache.allDataCacheDictionary['IndependentData'][0][i],)
print('Y:', equation.dataCache.allDataCacheDictionary['DependentData'][i],)
print('Model:', equation.modelPredictions[i],)
print('Abs. Error:', equation.modelAbsoluteError[i],)
if not equation.dataCache.DependentDataContainsZeroFlag:
print('Rel. Error:', equation.modelRelativeError[i],)
print('Percent Error:', equation.modelPercentError[i])
else:
print()
print()
##########################################################
equation.CalculateCoefficientAndFitStatistics()
if equation.upperCoefficientBounds or equation.lowerCoefficientBounds:
print('You entered coefficient bounds. Parameter statistics may')
print('not be valid for parameter values at or near the bounds.')
print()
print('Degress of freedom error', equation.df_e)
print('Degress of freedom regression', equation.df_r)
if equation.rmse == None:
print('Root Mean Squared Error (RMSE): n/a')
else:
print('Root Mean Squared Error (RMSE):', equation.rmse)
if equation.r2 == None:
print('R-squared: n/a')
else:
print('R-squared:', equation.r2)
if equation.r2adj == None:
print('R-squared adjusted: n/a')
else:
print('R-squared adjusted:', equation.r2adj)
if equation.Fstat == None:
print('Model F-statistic: n/a')
else:
print('Model F-statistic:', equation.Fstat)
if equation.Fpv == None:
print('Model F-statistic p-value: n/a')
else:
print('Model F-statistic p-value:', equation.Fpv)
if equation.ll == None:
print('Model log-likelihood: n/a')
else:
print('Model log-likelihood:', equation.ll)
if equation.aic == None:
print('Model AIC: n/a')
else:
print('Model AIC:', equation.aic)
if equation.bic == None:
print('Model BIC: n/a')
else:
print('Model BIC:', equation.bic)
print()
print("Individual Parameter Statistics:")
for i in range(len(equation.solvedCoefficients)):
if equation.tstat_beta == None:
tstat = 'n/a'
else:
tstat = '%-.5E' % ( equation.tstat_beta[i])
if equation.pstat_beta == None:
pstat = 'n/a'
else:
pstat = '%-.5E' % ( equation.pstat_beta[i])
if equation.sd_beta != None:
print("Coefficient %s = %-.16E, std error: %-.5E" % (equation.GetCoefficientDesignators()[i], equation.solvedCoefficients[i], equation.sd_beta[i]))
else:
print("Coefficient %s = %-.16E, std error: n/a" % (equation.GetCoefficientDesignators()[i], equation.solvedCoefficients[i]))
print(" t-stat: %s, p-stat: %s, 95 percent confidence intervals: [%-.5E, %-.5E]" % (tstat, pstat, equation.ci[i][0], equation.ci[i][1]))
print()
print("Coefficient Covariance Matrix:")
for i in equation.cov_beta:
print(i)
print()
print('Java Source Code:')
print(pyeq2.outputSourceCodeService().GetOutputSourceCodeJAVA(equation))
| bsd-2-clause |
umitproject/openmonitor-desktop-agent | umit/icm/agent/gui/dashboard/timeline/TimeLine.py | 1 | 4235 | # Copyright (C) 2007 Adriano Monteiro Marques
#
# Authors: Tianwei Liu <liutianweidlut@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
"""
TimeLine Part in Dashboard Window
"""
import gtk
from umit.icm.agent.I18N import _
from umit.icm.agent.gui.dashboard.timeline.TimeLineGraph import InteractiveGraph
from umit.icm.agent.gui.dashboard.timeline.TimeLineGraphToolbar import TimeLineGraphToolbar
from umit.icm.agent.gui.dashboard.timeline.TimeLineGraphBase import TimeLineBase
from deps.higwidgets.higboxes import HIGHBox, HIGVBox,hig_box_space_holder
class TLHoder(gtk.VBox):
def __init__(self,dashboard,connector=None): #maybe import some kinds(report,task,connection,Throttled,Service)
"""
Load timeline for every report(sent or unsent), test successful or failed (website or service)
task (done or not), Throttled details(different charts)
"""
gtk.VBox.__init__(self)
self.connector = connector
self.dashboard = dashboard
self.base = TimeLineBase(self.connector,self.dashboard) #Maybe add some items
self.__create_widgets()
self.__packed_widgets()
self.__connect_widgets()
def __create_widgets(self):
"""
"""
# startup data
line_filter, start, evts = self.base.grab_data()
xlabel = self.base.xlabel
glabel = self.base.title_by_graphmode()
dlabel = self.base.descr_by_graphmode()
#Box
self.box = HIGVBox()
#graph
self.graph_box = gtk.HBox()
self.graph = InteractiveGraph(evts, start, x_label=xlabel,
y_label=_('Number of events'), graph_label=glabel,
descr_label=dlabel, vdiv_labels=self.base.labels,
line_filter=line_filter, connector=self.connector)
#graph toolbar
self.graphtb = TimeLineGraphToolbar(self.graph, self.connector,
self.base.graph_mode,self.base.graph_kind,
self.base)
#TODO: Add Display Bar in the further
def __packed_widgets(self):
"""
"""
self.graph_box.add(self.graph)
self.box._pack_noexpand_nofill(self.graphtb)
self.box._pack_expand_fill(self.graph_box)
self.add(self.box)
self.show_all()
def __connect_widgets(self):
"""
Handle the connector signals
"""
self.connector.connect('data_update',self._update_graph)
#TODO: we should add signals for the changes of left treeview
def _update_graph(self,obj,*args):
"""
New graph data arrived
"""
line_filter, start, evts, labels, xlabel, glabel, dlabel = args
# new graph data
self.graph.start_pts_data = start
self.graph.graph_data = evts
# find new max value
self.graph.find_max_value()
# update graph labels
self.graph.xlabel = xlabel
self.graph.graph_label = glabel
self.graph.descr_label = dlabel
self.graph.vdiv_labels = labels
# do graph animation with new data
self.graph.do_animation()
| gpl-2.0 |
polyanskiy/refractiveindex.info-scripts | scripts/Adachi 1989 - InGaAs.py | 1 | 3831 | # -*- coding: utf-8 -*-
# Author: Mikhail Polyanskiy
# Last modified: 2017-07-08
# Original data: Adachi 1989, https://doi.org/10.1063/1.343580
# In(1-x)Ga(x)As; x=0.48
import numpy as np
import matplotlib.pyplot as plt
π = np.pi

# Model parameters for In(0.52)Ga(0.48)As, taken from Adachi 1989
# (https://doi.org/10.1063/1.343580).
# E0/E1/E2: critical-point energies; Δ0/Δ1: spin-orbit split-off offsets
# (stored as the gap between the split-off transition and the main one).
E0 = 0.75 #eV
Δ0 = 1.04-E0 #eV
E1 = 2.57 #eV
Δ1 = 2.83-E1 #eV
E2 = 4.41 #eV
Eg = 1.20 #eV  # indirect-gap-like term used by Epsilon_D
# strength / broadening parameters of the individual oscillators
A = 1.20 #eV**1.5
B1 = 3.84
B2 = 1.48
B11 = 7.57 #eV**-0.5
B21 = 2.96 #eV**-0.5
Γ = 0.14 #eV   # broadening of the E1/E1+Δ1 transitions
C = 2.90       # E2 oscillator strength
γ = 0.225      # E2 damping (dimensionless)
D = 20.7       # strength of the Epsilon_D (Eg) term
εinf = 2.8     # constant high-frequency dielectric contribution
def H(x):
    """Heaviside step function: 0 for x < 0, 0.5 at x == 0, 1 for x > 0.

    Works elementwise on numpy arrays as well as on scalars.
    """
    return np.heaviside(x, 0.5)
def Epsilon_A(ħω): #E0
    """Dielectric contribution of the E0 and E0+Δ0 critical points.

    Returns ε1 + iε2 for photon energy ħω (scalar or numpy array, eV),
    using the module-level constants E0, Δ0, A.
    """
    χ0 = ħω/E0
    χso = ħω / (E0+Δ0)
    # Heaviside masks for the BELOW-gap region (ħω < E0, ħω < E0+Δ0),
    # used to zero the sqrt terms of f(χ) where they would go imaginary.
    H0 = H(1-χ0)
    Hso = H(1-χso)
    fχ0 = χ0**-2 * ( 2 -(1+χ0)**0.5 - ((1-χ0)*H0)**0.5 )
    fχso = χso**-2 * ( 2 - (1+χso)**0.5 - ((1-χso)*Hso)**0.5 )
    # NOTE: H0/Hso are intentionally reassigned here to the ABOVE-gap masks
    # (ħω > E0, ħω > E0+Δ0) for the absorption part ε2 — keep this order.
    H0 = H(χ0-1)
    Hso = H(χso-1)
    ε2 = A/(ħω)**2 * ( ((ħω-E0)*H0)**0.5 + 0.5*((ħω-E0-Δ0)*Hso)**0.5)
    ε1 = A*E0**-1.5 * (fχ0+0.5*(E0/(E0+Δ0))**1.5*fχso)
    return ε1 + 1j*ε2
def Epsilon_B(ħω): #E1
    """Dielectric contribution of the E1 and E1+Δ1 critical points.

    ε2 is computed with real, Heaviside-masked expressions; ε1 is then
    computed from the broadened (complex, via Γ) logarithmic form, which is
    why χ1/χ1s are reassigned to complex values halfway through.
    Returns ε1 + iε2 (real parts only of each intermediate).
    """
    χ1 = ħω/E1
    χ1s = ħω/(E1+Δ1)
    # masks for the below-E1 / below-E1+Δ1 regions
    H1 = H(1-χ1)
    H1s = H(1-χ1s)
    ε2 = ( π*χ1**-2*(B1-B11*((E1-ħω)*H1)**0.5)
           + π*χ1s**-2*(B2-B21*((E1+Δ1-ħω)*H1s)**0.5) )
    ε2 *= H(ε2) #undocumented trick: ignore negative ε2
    # reassign χ1/χ1s as broadened complex variables for the ε1 part
    χ1 = (ħω+1j*Γ)/E1
    χ1s = (ħω+1j*Γ)/(E1+Δ1)
    ε1 = -B1*χ1**-2*np.log(1-χ1**2) - B2*χ1s**-2*np.log(1-χ1s**2)
    return ε1.real + 1j*ε2.real
def Epsilon_C(ħω, E2=4.41, C=2.90, γ=0.225):
    """Dielectric contribution of the E2 critical point (damped harmonic
    oscillator).

    Parameters
    ----------
    ħω : float or numpy array
        Photon energy in eV.
    E2, C, γ : float, optional
        Oscillator energy (eV), strength and damping. Defaults are the
        Adachi 1989 In(0.52)Ga(0.48)As values, matching the former
        module-level constants, so existing calls are unchanged.

    Returns
    -------
    complex (or complex array): ε1 + iε2.
    """
    χ2 = ħω/E2
    # shared Lorentzian denominator, computed once
    denom = (1-χ2**2)**2 + (χ2*γ)**2
    ε2 = C*χ2*γ / denom
    ε1 = C*(1-χ2**2) / denom
    return ε1 + 1j*ε2
def Epsilon_D(ħω): #Eg
    """Absorption-only contribution of the Eg (indirect-gap-like) term.

    Nonzero only for Eg < ħω < Ech (here Ech = E1); contributes no ε1.
    Returns a purely imaginary iε2.
    """
    # ignoring ħωq - no data in the paper
    Ech = E1
    χg = Eg/ħω
    χch = ħω/Ech
    # window masks: ħω above Eg and below the E1 cut-off
    Hg = H(1-χg)
    Hch = H(1-χch)
    ε2 = D/ħω**2 * (ħω-Eg)**2 * Hg * Hch
    return 1j*ε2
# ------------------------- dispersion calculation -------------------------
ev_min=0.1        # photon-energy grid (eV)
ev_max=6
npoints=200
eV = np.linspace(ev_min, ev_max, npoints)
μm = 4.13566733e-1*2.99792458/eV  # hc/E: wavelength in μm (h in eV·fs, c in 1e8 m/s)
εA = Epsilon_A(eV)
εB = Epsilon_B(eV)
εC = Epsilon_C(eV)
εD = Epsilon_D(eV)
ε = εA + εB + εC + εD + εinf
# complex refractive index N = sqrt(ε); take the root once instead of twice
N = ε**.5
n = N.real
k = N.imag
α = 4*π*k/μm*1e4 #1/cm  (absorption coefficient)
#============================ DATA OUTPUT =================================
# Write the (wavelength, n, k) table in order of increasing wavelength
# (i.e. decreasing photon energy). A context manager guarantees the file
# is closed on any exit path and avoids shadowing the Python 2 builtin
# name 'file'.
with open('out.txt', 'w') as out:
    for i in range(npoints-1, -1, -1):
        out.write('\n {:.4e} {:.4e} {:.4e}'.format(μm[i],n[i],k[i]))
#=============================== PLOT =====================================
# NOTE(review): no plt.show() call here — figures only appear in interactive
# backends; confirm whether that is intentional.
plt.rc('font', family='Arial', size='14')
#plot ε1 vs eV
plt.figure(1)
plt.plot(eV, ε.real, label="ε1")
plt.plot(eV, εA.real, label="Re(εA)")
plt.plot(eV, εB.real, label="Re(εB)")
plt.plot(eV, εC.real, label="Re(εC)")
plt.xlabel('Photon energy (eV)')
plt.ylabel('ε1')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot ε2 vs eV
plt.figure(2)
plt.plot(eV, ε.imag, label="ε2")
plt.plot(eV, εA.imag, label="Im(εA)")
plt.plot(eV, εB.imag, label="Im(εB)")
plt.plot(eV, εC.imag, label="Im(εC)")
plt.plot(eV, εD.imag, label="Im(εD)")
plt.yscale('log')
plt.xlabel('Photon energy (eV)')
plt.ylabel('ε2')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
plt.ylim([1e-2,1e2])
#plot n,k vs eV
plt.figure(3)
plt.plot(eV, n, label="n")
plt.plot(eV, k, label="k")
plt.xlabel('Photon energy (eV)')
plt.ylabel('n, k')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot n,k vs μm
plt.figure(4)
plt.plot(μm, n, label="n")
plt.plot(μm, k, label="k")
plt.xlabel('Wavelength (μm)')
plt.ylabel('n, k')
plt.xscale('log')
plt.yscale('log')
plt.legend(bbox_to_anchor=(0,1.02,1,0),loc=3,ncol=2,borderaxespad=0)
#plot α vs eV
plt.figure(7)
plt.plot(eV,α)
plt.yscale('log')
plt.ylim([1e3,1e7])
plt.xlabel('Photon energy (eV)')
plt.ylabel('α (1/cm)')
timlinux/QGIS | tests/src/python/test_qgsvectorlayerutils.py | 12 | 31288 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsVectorLayerUtils.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '25/10/2016'
__copyright__ = 'Copyright 2016, The QGIS Project'
import os
import qgis # NOQA
import shutil
import tempfile
from qgis.PyQt.QtCore import QVariant, QDate
from qgis.core import (QgsProject,
QgsVectorLayer,
QgsVectorLayerUtils,
QgsFieldConstraints,
QgsFeature,
QgsFeatureIterator,
QgsGeometry,
QgsPointXY,
QgsDefaultValue,
QgsRelation,
QgsFields,
QgsField,
QgsMemoryProviderUtils,
QgsWkbTypes,
QgsCoordinateReferenceSystem,
QgsVectorLayerJoinInfo,
NULL
)
from qgis.testing import start_app, unittest
from utilities import unitTestDataPath
start_app()
def createLayerWithOnePoint():
    """Return a new in-memory point layer with fields (fldtxt, fldint)
    containing a single feature ("test", 123)."""
    layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer",
                           "addfeat", "memory")
    feat = QgsFeature()
    feat.setAttributes(["test", 123])
    provider = layer.dataProvider()
    assert provider.addFeatures([feat])
    assert layer.featureCount() == 1
    return layer
class TestQgsVectorLayerUtils(unittest.TestCase):
    """Unit tests for the QgsVectorLayerUtils helper functions."""

    def test_value_exists(self):
        """valueExists(): by field index, with bad inputs and ignore lists."""
        layer = createLayerWithOnePoint()
        # add some more features
        f1 = QgsFeature(2)
        f1.setAttributes(["test1", 124])
        f2 = QgsFeature(3)
        f2.setAttributes(["test2", 125])
        f3 = QgsFeature(4)
        f3.setAttributes(["test3", 126])
        f4 = QgsFeature(5)
        f4.setAttributes(["test4", 127])
        layer.dataProvider().addFeatures([f1, f2, f3, f4])
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 0, 'test'))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 0, 'test1'))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 0, 'test4'))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 0, 'not present!'))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 1, 123))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 1, 124))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 1, 127))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 1, 99))
        # no layer
        self.assertFalse(QgsVectorLayerUtils.valueExists(None, 1, 123))
        # bad field indexes
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, -1, 'test'))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 100, 'test'))
        # with ignore list (feature ids whose values are skipped)
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 0, 'test1', [3, 4, 5]))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 0, 'test1', [999999]))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 0, 'test1', [2]))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 0, 'test1', [99999, 2]))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 0, 'test1', [3, 4, 5, 2]))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 1, 125, [2, 4, 5]))
        self.assertTrue(QgsVectorLayerUtils.valueExists(layer, 1, 125, [999999]))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 1, 125, [3]))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 1, 125, [99999, 3]))
        self.assertFalse(QgsVectorLayerUtils.valueExists(layer, 1, 125, [2, 4, 5, 3]))
    def test_value_exists_joins(self):
        """Test that unique values in fields from joined layers, see GH #36167"""
        p = QgsProject()
        main_layer = QgsVectorLayer("Point?field=fid:integer",
                                    "main_layer", "memory")
        self.assertTrue(main_layer.isValid())
        # Attr layer is joined with layer on fk ->
        attr_layer = QgsVectorLayer("Point?field=id:integer&field=fk:integer",
                                    "attr_layer", "memory")
        self.assertTrue(attr_layer.isValid())
        p.addMapLayers([main_layer, attr_layer])
        # join attr_layer.fk -> main_layer.fid, cached in memory
        join_info = QgsVectorLayerJoinInfo()
        join_info.setJoinLayer(attr_layer)
        join_info.setJoinFieldName('fk')
        join_info.setTargetFieldName('fid')
        join_info.setUsingMemoryCache(True)
        main_layer.addJoin(join_info)
        main_layer.updateFields()
        join_buffer = main_layer.joinBuffer()
        self.assertTrue(join_buffer.containsJoins())
        self.assertEqual(main_layer.fields().names(), ['fid', 'attr_layer_id'])
        # one matching feature on each side of the join
        f = QgsFeature(main_layer.fields())
        f.setAttributes([1])
        main_layer.dataProvider().addFeatures([f])
        f = QgsFeature(attr_layer.fields())
        f.setAttributes([1, 1])
        attr_layer.dataProvider().addFeatures([f])
        # valueExists must see both the native field (0) and the joined field (1)
        self.assertTrue(QgsVectorLayerUtils.valueExists(main_layer, 0, 1))
        self.assertTrue(QgsVectorLayerUtils.valueExists(main_layer, 1, 1))
        self.assertFalse(QgsVectorLayerUtils.valueExists(main_layer, 0, 2))
        self.assertFalse(QgsVectorLayerUtils.valueExists(main_layer, 1, 2))
    def test_validate_attribute(self):
        """ test validating attributes against constraints """
        layer = createLayerWithOnePoint()
        # field expression check
        layer.setConstraintExpression(1, 'fldint>5')
        f = QgsFeature(2)
        f.setAttributes(["test123", 6])
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        f.setAttributes(["test123", 2])
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertFalse(res)
        self.assertEqual(len(errors), 1)
        print(errors)
        # checking only for provider constraints: the layer-level expression
        # constraint must then be ignored and validation passes
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1,
                                                            origin=QgsFieldConstraints.ConstraintOriginProvider)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        # bad field expression check (unparseable expression -> failure)
        layer.setConstraintExpression(1, 'fldint>')
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertFalse(res)
        self.assertEqual(len(errors), 1)
        print(errors)
        layer.setConstraintExpression(1, None)
        # not null constraint
        f.setAttributes(["test123", NULL])
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertFalse(res)
        self.assertEqual(len(errors), 1)
        print(errors)
        # checking only for provider constraints
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1,
                                                            origin=QgsFieldConstraints.ConstraintOriginProvider)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        # unique constraint (123 already exists in the layer)
        f.setAttributes(["test123", 123])
        layer.removeFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertFalse(res)
        self.assertEqual(len(errors), 1)
        print(errors)
        # checking only for provider constraints
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1,
                                                            origin=QgsFieldConstraints.ConstraintOriginProvider)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        # checking only for soft constraints (constraint is hard -> skipped)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique, QgsFieldConstraints.ConstraintStrengthHard)
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1,
                                                            strength=QgsFieldConstraints.ConstraintStrengthSoft)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        # checking for hard constraints
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1,
                                                            strength=QgsFieldConstraints.ConstraintStrengthHard)
        self.assertFalse(res)
        self.assertEqual(len(errors), 1)
        # check - same id should be ignored when testing for uniqueness
        f1 = QgsFeature(1)
        f1.setAttributes(["test123", 123])
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f1, 1)
        self.assertTrue(res)
        self.assertEqual(len(errors), 0)
        # test double constraint failure (expression + not-null both violated)
        layer.setConstraintExpression(1, 'fldint>5')
        layer.removeFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintNotNull)
        f.setAttributes(["test123", NULL])
        res, errors = QgsVectorLayerUtils.validateAttribute(layer, f, 1)
        self.assertFalse(res)
        self.assertEqual(len(errors), 2)
        print(errors)
    def testCreateUniqueValue(self):
        """ test creating a unique value """
        layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer&field=flddbl:double",
                               "addfeat", "memory")
        # add a bunch of features
        f = QgsFeature()
        f.setAttributes(["test", 123, 1.0])
        f1 = QgsFeature(2)
        f1.setAttributes(["test_1", 124, 1.1])
        f2 = QgsFeature(3)
        f2.setAttributes(["test_2", 125, 2.4])
        f3 = QgsFeature(4)
        f3.setAttributes(["test_3", 126, 1.7])
        f4 = QgsFeature(5)
        f4.setAttributes(["superpig", 127, 0.8])
        self.assertTrue(layer.dataProvider().addFeatures([f, f1, f2, f3, f4]))
        # bad field indices
        self.assertFalse(QgsVectorLayerUtils.createUniqueValue(layer, -10))
        self.assertFalse(QgsVectorLayerUtils.createUniqueValue(layer, 10))
        # integer field: max existing value + 1
        self.assertEqual(QgsVectorLayerUtils.createUniqueValue(layer, 1), 128)
        # double field: next integer above the max
        self.assertEqual(QgsVectorLayerUtils.createUniqueValue(layer, 2), 3.0)
        # string field: seed (or last used prefix) with an incremented suffix
        self.assertEqual(QgsVectorLayerUtils.createUniqueValue(layer, 0), 'test_4')
        self.assertEqual(QgsVectorLayerUtils.createUniqueValue(layer, 0, 'test_1'), 'test_4')
        self.assertEqual(QgsVectorLayerUtils.createUniqueValue(layer, 0, 'seed'), 'seed')
        self.assertEqual(QgsVectorLayerUtils.createUniqueValue(layer, 0, 'superpig'), 'superpig_1')
    def testCreateFeature(self):
        """ test creating a feature respecting defaults and constraints """
        layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer&field=flddbl:double",
                               "addfeat", "memory")
        # add a bunch of features
        f = QgsFeature()
        f.setAttributes(["test", 123, 1.0])
        f1 = QgsFeature(2)
        f1.setAttributes(["test_1", 124, 1.1])
        f2 = QgsFeature(3)
        f2.setAttributes(["test_2", 125, 2.4])
        f3 = QgsFeature(4)
        f3.setAttributes(["test_3", 126, 1.7])
        f4 = QgsFeature(5)
        f4.setAttributes(["superpig", 127, 0.8])
        self.assertTrue(layer.dataProvider().addFeatures([f, f1, f2, f3, f4]))
        # no layer
        self.assertFalse(QgsVectorLayerUtils.createFeature(None).isValid())
        # basic tests
        f = QgsVectorLayerUtils.createFeature(layer)
        self.assertTrue(f.isValid())
        self.assertEqual(f.fields(), layer.fields())
        self.assertFalse(f.hasGeometry())
        self.assertEqual(f.attributes(), [NULL, NULL, NULL])
        # set geometry
        g = QgsGeometry.fromPointXY(QgsPointXY(100, 200))
        f = QgsVectorLayerUtils.createFeature(layer, g)
        self.assertTrue(f.hasGeometry())
        self.assertEqual(f.geometry().asWkt(), g.asWkt())
        # using attribute map
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'a', 2: 6.0})
        self.assertEqual(f.attributes(), ['a', NULL, 6.0])
        # layer with default value expression
        layer.setDefaultValueDefinition(2, QgsDefaultValue('3*4'))
        f = QgsVectorLayerUtils.createFeature(layer)
        self.assertEqual(f.attributes(), [NULL, NULL, 12])
        # we do not expect the default value expression to take precedence over the attribute map
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'a', 2: 6.0})
        self.assertEqual(f.attributes(), ['a', NULL, 6.0])
        # default value takes precedence if it's apply on update
        layer.setDefaultValueDefinition(2, QgsDefaultValue('3*4', True))
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'a', 2: 6.0})
        self.assertEqual(f.attributes(), ['a', NULL, 12.0])
        # layer with default value expression based on geometry
        layer.setDefaultValueDefinition(2, QgsDefaultValue('3*$x'))
        f = QgsVectorLayerUtils.createFeature(layer, g)
        # adjusted so that input value and output feature are the same
        self.assertEqual(f.attributes(), [NULL, NULL, 300.0])
        layer.setDefaultValueDefinition(2, QgsDefaultValue(None))
        # test with violated unique constraints
        layer.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'test_1', 1: 123})
        # since field 1 has Unique Constraint, it ignores value 123 that already has been set and sets to 128
        self.assertEqual(f.attributes(), ['test_1', 128, NULL])
        layer.setFieldConstraint(0, QgsFieldConstraints.ConstraintUnique)
        # since field 0 and 1 already have values test_1 and 123, the output must be a new unique value
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'test_1', 1: 123})
        self.assertEqual(f.attributes(), ['test_4', 128, NULL])
        # test with violated unique constraints and default value expression providing unique value
        layer.setDefaultValueDefinition(1, QgsDefaultValue('130'))
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'test_1', 1: 123})
        # since field 1 has Unique Constraint, it ignores value 123 that already has been set and adds the default value
        self.assertEqual(f.attributes(), ['test_4', 130, NULL])
        # fallback: test with violated unique constraints and default value expression providing already existing value
        # add the feature with the default value:
        self.assertTrue(layer.dataProvider().addFeatures([f]))
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'test_1', 1: 123})
        # since field 1 has Unique Constraint, it ignores value 123 that already has been set and adds the default value
        # and since the default value providing an already existing value (130) it generates a unique value (next int: 131)
        self.assertEqual(f.attributes(), ['test_5', 131, NULL])
        layer.setDefaultValueDefinition(1, QgsDefaultValue(None))
        # test with manually correct unique constraint
        f = QgsVectorLayerUtils.createFeature(layer, attributes={0: 'test_1', 1: 132})
        self.assertEqual(f.attributes(), ['test_5', 132, NULL])
        """ test creating a feature respecting unique values of postgres provider """
        layer = QgsVectorLayer("Point?field=fldtxt:string&field=fldint:integer&field=flddbl:double",
                               "addfeat", "memory")
        # init connection string (requires the qgis_test PostgreSQL service)
        dbconn = 'service=qgis_test'
        if 'QGIS_PGTEST_DB' in os.environ:
            dbconn = os.environ['QGIS_PGTEST_DB']
        # create a vector layer
        pg_layer = QgsVectorLayer('{} table="qgis_test"."authors" sql='.format(dbconn), "authors", "postgres")
        self.assertTrue(pg_layer.isValid())
        # check the default clause
        default_clause = 'nextval(\'qgis_test.authors_pk_seq\'::regclass)'
        self.assertEqual(pg_layer.dataProvider().defaultValueClause(0), default_clause)
        # though default_clause is after the first create not unique (until save), it should fill up all the features with it
        pg_layer.startEditing()
        f = QgsVectorLayerUtils.createFeature(pg_layer)
        self.assertEqual(f.attributes(), [default_clause, NULL])
        self.assertTrue(pg_layer.addFeatures([f]))
        self.assertTrue(QgsVectorLayerUtils.valueExists(pg_layer, 0, default_clause))
        f = QgsVectorLayerUtils.createFeature(pg_layer)
        self.assertEqual(f.attributes(), [default_clause, NULL])
        self.assertTrue(pg_layer.addFeatures([f]))
        f = QgsVectorLayerUtils.createFeature(pg_layer)
        self.assertEqual(f.attributes(), [default_clause, NULL])
        self.assertTrue(pg_layer.addFeatures([f]))
        # if a unique value is passed, use it
        f = QgsVectorLayerUtils.createFeature(pg_layer, attributes={0: 40, 1: NULL})
        self.assertEqual(f.attributes(), [40, NULL])
        # and if a default value is configured use it as well
        pg_layer.setDefaultValueDefinition(0, QgsDefaultValue('11*4'))
        f = QgsVectorLayerUtils.createFeature(pg_layer)
        self.assertEqual(f.attributes(), [44, NULL])
        pg_layer.rollBack()
def testDuplicateFeature(self):
""" test duplicating a feature """
project = QgsProject().instance()
# LAYERS
# - add first layer (parent)
layer1 = QgsVectorLayer("Point?field=fldtxt:string&field=pkid:integer",
"parentlayer", "memory")
# > check first layer (parent)
self.assertTrue(layer1.isValid())
# - set the value for the copy
layer1.setDefaultValueDefinition(1, QgsDefaultValue("rand(1000,2000)"))
# > check first layer (parent)
self.assertTrue(layer1.isValid())
# - add second layer (child)
layer2 = QgsVectorLayer("Point?field=fldtxt:string&field=id:integer&field=foreign_key:integer",
"childlayer", "memory")
# > check second layer (child)
self.assertTrue(layer2.isValid())
# - add layers
project.addMapLayers([layer1, layer2])
# FEATURES
# - add 2 features on layer1 (parent)
l1f1orig = QgsFeature()
l1f1orig.setFields(layer1.fields())
l1f1orig.setAttributes(["F_l1f1", 100])
l1f2orig = QgsFeature()
l1f2orig.setFields(layer1.fields())
l1f2orig.setAttributes(["F_l1f2", 101])
# > check by adding features
self.assertTrue(layer1.dataProvider().addFeatures([l1f1orig, l1f2orig]))
# add 4 features on layer2 (child)
l2f1orig = QgsFeature()
l2f1orig.setFields(layer2.fields())
l2f1orig.setAttributes(["F_l2f1", 201, 100])
l2f2orig = QgsFeature()
l2f2orig.setFields(layer2.fields())
l2f2orig.setAttributes(["F_l2f2", 202, 100])
l2f3orig = QgsFeature()
l2f3orig.setFields(layer2.fields())
l2f3orig.setAttributes(["F_l2f3", 203, 100])
l2f4orig = QgsFeature()
l2f4orig.setFields(layer2.fields())
l2f4orig.setAttributes(["F_l2f4", 204, 101])
# > check by adding features
self.assertTrue(layer2.dataProvider().addFeatures([l2f1orig, l2f2orig, l2f3orig, l2f4orig]))
# RELATION
# - create the relationmanager
relMgr = project.relationManager()
# - create the relation
rel = QgsRelation()
rel.setId('rel1')
rel.setName('childrel')
rel.setReferencingLayer(layer2.id())
rel.setReferencedLayer(layer1.id())
rel.addFieldPair('foreign_key', 'pkid')
rel.setStrength(QgsRelation.Composition)
# > check relation
self.assertTrue(rel.isValid())
# - add relation
relMgr.addRelation(rel)
# > check if referencedLayer is layer1
self.assertEqual(rel.referencedLayer(), layer1)
# > check if referencingLayer is layer2
self.assertEqual(rel.referencingLayer(), layer2)
# > check if the layers are correct in relation when loading from relationManager
relations = project.relationManager().relations()
relation = relations[list(relations.keys())[0]]
# > check if referencedLayer is layer1
self.assertEqual(relation.referencedLayer(), layer1)
# > check if referencingLayer is layer2
self.assertEqual(relation.referencingLayer(), layer2)
# > check the relatedfeatures
'''
# testoutput 1
print( "\nAll Features and relations")
featit=layer1.getFeatures()
f=QgsFeature()
while featit.nextFeature(f):
print( f.attributes())
childFeature = QgsFeature()
relfeatit=rel.getRelatedFeatures(f)
while relfeatit.nextFeature(childFeature):
print( childFeature.attributes() )
print( "\n--------------------------")
print( "\nFeatures on layer1")
for f in layer1.getFeatures():
print( f.attributes() )
print( "\nFeatures on layer2")
for f in layer2.getFeatures():
print( f.attributes() )
'''
# DUPLICATION
# - duplicate feature l1f1orig with children
layer1.startEditing()
results = QgsVectorLayerUtils.duplicateFeature(layer1, l1f1orig, project, 0)
# > check if name is name of duplicated (pk is different)
result_feature = results[0]
self.assertEqual(result_feature.attribute('fldtxt'), l1f1orig.attribute('fldtxt'))
# > check duplicated child layer
result_layer = results[1].layers()[0]
self.assertEqual(result_layer, layer2)
# > check duplicated child features
self.assertTrue(results[1].duplicatedFeatures(result_layer))
'''
# testoutput 2
print( "\nFeatures on layer1 (after duplication)")
for f in layer1.getFeatures():
print( f.attributes() )
print( "\nFeatures on layer2 (after duplication)")
for f in layer2.getFeatures():
print( f.attributes() )
print( "\nAll Features and relations")
featit=layer1.getFeatures()
f=QgsFeature()
while featit.nextFeature(f):
print( f.attributes())
childFeature = QgsFeature()
relfeatit=rel.getRelatedFeatures(f)
while relfeatit.nextFeature(childFeature):
print( childFeature.attributes() )
'''
# > compare text of parent feature
self.assertEqual(result_feature.attribute('fldtxt'), l1f1orig.attribute('fldtxt'))
# - create copyValueList
childFeature = QgsFeature()
relfeatit = rel.getRelatedFeatures(result_feature)
copyValueList = []
while relfeatit.nextFeature(childFeature):
copyValueList.append(childFeature.attribute('fldtxt'))
# - create origValueList
childFeature = QgsFeature()
relfeatit = rel.getRelatedFeatures(l1f1orig)
origValueList = []
while relfeatit.nextFeature(childFeature):
origValueList.append(childFeature.attribute('fldtxt'))
# - check if the ids are still the same
self.assertEqual(copyValueList, origValueList)
def test_make_features_compatible_attributes(self):
"""Test corner cases for attributes"""
# Test feature with attributes
fields = QgsFields()
fields.append(QgsField('int_f', QVariant.Int))
fields.append(QgsField('str_f', QVariant.String))
f1 = QgsFeature(fields)
f1['int_f'] = 1
f1['str_f'] = 'str'
f1.setGeometry(QgsGeometry.fromWkt('Point(9 45)'))
f2 = f1
QgsVectorLayerUtils.matchAttributesToFields(f2, fields)
self.assertEqual(f1.attributes(), f2.attributes())
self.assertTrue(f1.geometry().asWkt(), f2.geometry().asWkt())
# Test pad with 0 with fields
f1.setAttributes([])
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 2)
self.assertEqual(f1.attributes()[0], QVariant())
self.assertEqual(f1.attributes()[1], QVariant())
# Test pad with 0 without fields
f1 = QgsFeature()
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 2)
self.assertEqual(f1.attributes()[0], QVariant())
self.assertEqual(f1.attributes()[1], QVariant())
# Test drop extra attrs
f1 = QgsFeature(fields)
f1.setAttributes([1, 'foo', 'extra'])
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 2)
self.assertEqual(f1.attributes()[0], 1)
self.assertEqual(f1.attributes()[1], 'foo')
# Rearranged fields
fields2 = QgsFields()
fields2.append(QgsField('str_f', QVariant.String))
fields2.append(QgsField('int_f', QVariant.Int))
f1 = QgsFeature(fields2)
f1.setAttributes([1, 'foo', 'extra'])
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 2)
self.assertEqual(f1.attributes()[0], 'foo')
self.assertEqual(f1.attributes()[1], 1)
# mixed
fields2.append(QgsField('extra', QVariant.String))
fields.append(QgsField('extra2', QVariant.Int))
f1.setFields(fields2)
f1.setAttributes([1, 'foo', 'blah'])
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 3)
self.assertEqual(f1.attributes()[0], 'foo')
self.assertEqual(f1.attributes()[1], 1)
self.assertEqual(f1.attributes()[2], QVariant())
fields.append(QgsField('extra', QVariant.Int))
f1.setAttributes([1, 'foo', 'blah'])
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 4)
self.assertEqual(f1.attributes()[0], 1)
self.assertEqual(f1.attributes()[1], 'foo')
self.assertEqual(f1.attributes()[2], 'blah')
self.assertEqual(f1.attributes()[3], QVariant())
# case insensitive
fields2.append(QgsField('extra3', QVariant.String))
fields.append(QgsField('EXTRA3', QVariant.Int))
f1.setFields(fields2)
f1.setAttributes([1, 'foo', 'blah', 'blergh'])
QgsVectorLayerUtils.matchAttributesToFields(f1, fields)
self.assertEqual(len(f1.attributes()), 5)
self.assertEqual(f1.attributes()[0], 'foo')
self.assertEqual(f1.attributes()[1], 1)
self.assertEqual(f1.attributes()[2], QVariant())
self.assertEqual(f1.attributes()[3], 'blah')
self.assertEqual(f1.attributes()[4], 'blergh')
    def test_create_multiple_unique_constraint(self):
        """Test create multiple features with unique constraint"""
        vl = createLayerWithOnePoint()
        vl.setFieldConstraint(1, QgsFieldConstraints.ConstraintUnique)
        features_data = []
        context = vl.createExpressionContext()
        # both requested features claim 123, which already exists in the layer
        for i in range(2):
            features_data.append(
                QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 44)'), {0: 'test_%s' % i, 1: 123}))
        features = QgsVectorLayerUtils.createFeatures(vl, features_data, context)
        # createFeatures must deduplicate across the whole batch: 124, 125
        self.assertEqual(features[0].attributes()[1], 124)
        self.assertEqual(features[1].attributes()[1], 125)
    def test_create_nulls_and_defaults(self):
        """Test bug #21304 when pasting features from another layer and default values are not honored"""
        vl = createLayerWithOnePoint()
        vl.setDefaultValueDefinition(1, QgsDefaultValue('300'))
        features_data = []
        context = vl.createExpressionContext()
        # every flavour of "missing": None, null QVariant, typed null QVariant,
        # and the key simply absent — all must fall back to the default (300)
        features_data.append(
            QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 44)'), {0: 'test_1', 1: None}))
        features_data.append(
            QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 45)'), {0: 'test_2', 1: QVariant()}))
        features_data.append(QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 46)'),
                                                                {0: 'test_3', 1: QVariant(QVariant.Int)}))
        features_data.append(QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 46)'), {0: 'test_4'}))
        features = QgsVectorLayerUtils.createFeatures(vl, features_data, context)
        for f in features:
            self.assertEqual(f.attributes()[1], 300, f.id())
        # same checks for a string-typed default value
        vl = createLayerWithOnePoint()
        vl.setDefaultValueDefinition(0, QgsDefaultValue("'my_default'"))
        features_data = []
        context = vl.createExpressionContext()
        features_data.append(QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 44)'), {0: None}))
        features_data.append(QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 45)'), {0: QVariant()}))
        features_data.append(
            QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 46)'), {0: QVariant(QVariant.String)}))
        features_data.append(QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Point (7 46)'), {}))
        features = QgsVectorLayerUtils.createFeatures(vl, features_data, context)
        for f in features:
            self.assertEqual(f.attributes()[0], 'my_default', f.id())
def test_unique_pk_when_subset(self):
"""Test unique values on filtered layer GH #30062"""
src = unitTestDataPath('points_gpkg.gpkg')
dest = tempfile.mktemp() + '.gpkg'
shutil.copy(src, dest)
vl = QgsVectorLayer(dest, 'vl', 'ogr')
self.assertTrue(vl.isValid())
features_data = []
it = vl.getFeatures()
for _ in range(3):
f = next(it)
features_data.append(
QgsVectorLayerUtils.QgsFeatureData(f.geometry(), dict(zip(range(f.fields().count()), f.attributes()))))
# Set a filter
vl.setSubsetString('"fid" in (4,5,6)')
self.assertTrue(vl.isValid())
context = vl.createExpressionContext()
features = QgsVectorLayerUtils.createFeatures(vl, features_data, context)
self.assertTrue(vl.startEditing())
vl.addFeatures(features)
self.assertTrue(vl.commitChanges())
# Allow running this test module directly (outside the QGIS test harness).
if __name__ == '__main__':
    unittest.main()
| gpl-2.0 |
bgshin/doc-classify-multi-gpu | src/cnntw/cnn_model.py | 1 | 7389 |
import tensorflow as tf
import re
# Command-line flags shared by the training/eval scripts of this model.
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_boolean('use_fp16', False,
                            """Train the model using fp16.""")
# Prefix used for per-GPU tower name scopes (stripped again in summaries).
TOWER_NAME = 'tower'
# Number of output classes (3-way sentiment).
NUM_CLASSES = 3
def _variable_on_cpu(name, shape, initializer):
    """Create (or fetch, under variable reuse) a variable pinned to host
    (CPU) memory so it can be shared across multiple GPU towers.

    Args:
      name: name of the variable
      shape: list of ints
      initializer: initializer for Variable

    Returns:
      Variable Tensor
    """
    dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
    with tf.device('/cpu:0'):
        return tf.get_variable(name, shape, initializer=initializer, dtype=dtype)
def _variable_with_weight_decay(name, shape, stddev, wd):
    """Create a truncated-normal-initialized variable, optionally adding an
    L2 weight-decay term to the 'losses' collection.

    Args:
      name: name of the variable
      shape: list of ints
      stddev: standard deviation of a truncated Gaussian
      wd: add L2Loss weight decay multiplied by this float. If None, weight
          decay is not added for this Variable.

    Returns:
      Variable Tensor
    """
    dtype = tf.float16 if FLAGS.use_fp16 else tf.float32
    initializer = tf.truncated_normal_initializer(stddev=stddev, dtype=dtype)
    var = _variable_on_cpu(name, shape, initializer)
    if wd is not None:
        # L2 penalty scaled by wd; collected into 'losses' for the total loss.
        weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
        tf.add_to_collection('losses', weight_decay)
    return var
def _variable_with_weight_decay_xavier(name, shape, wd):
    """Helper to create an initialized Variable with weight decay.

    The Variable is initialized with Xavier (Glorot) initialization;
    a weight decay is added only if one is specified.

    Args:
      name: name of the variable
      shape: list of ints
      wd: add L2Loss weight decay multiplied by this float. If None, weight
          decay is not added for this Variable.

    Returns:
      Variable Tensor
    """
    # Fix: the old docstring documented a nonexistent `stddev` parameter and
    # claimed truncated-normal init; it also computed an unused `dtype` local
    # (xavier_initializer() was never given it), which is removed here.
    var = _variable_on_cpu(
        name,
        shape,
        tf.contrib.layers.xavier_initializer())
    if wd is not None:
        weight_decay = tf.multiply(tf.nn.l2_loss(var), wd, name='weight_loss')
        tf.add_to_collection('losses', weight_decay)
    return var
def _activation_summary(x):
    """Helper to create summaries for activations.

    Creates a summary that provides a histogram of activations.
    Creates a summary that measures the sparsity of activations.

    Args:
      x: Tensor

    Returns:
      nothing
    """
    # Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
    # session. This helps the clarity of presentation on tensorboard.
    tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
    tf.summary.histogram(tensor_name + '/activations', x)
    # fraction of zero activations — useful to monitor dying ReLUs
    tf.summary.scalar(tensor_name + '/sparsity',
                      tf.nn.zero_fraction(x))
def inference(txts, dropout_keep_prob=1.0):
"""Build the cnn based sentiment prediction model.
Args:
txts: text returned from get_inputs().
Returns:
Logits.
"""
# We instantiate all variables using tf.get_variable() instead of
# tf.Variable() in order to share variables across multiple GPU training runs.
# If we only ran this model on a single GPU, we could simplify this function
# by replacing all instances of tf.get_variable() with tf.Variable().
#
sequence_length = 60
embedding_size = 400
num_filters = 64
filter_sizes = [2, 3, 4, 5]
pooled_outputs = []
for i, filter_size in enumerate(filter_sizes):
with tf.variable_scope("conv-maxpool-%s" % filter_size) as scope:
cnn_shape = [filter_size, embedding_size, 1, num_filters]
kernel = _variable_with_weight_decay('weights',
shape=cnn_shape,
stddev=0.1,
wd=None)
conv = tf.nn.conv2d(txts, kernel, [1, 1, 1, 1], padding='VALID')
biases = _variable_on_cpu('biases', [num_filters], tf.constant_initializer(0.0))
pre_activation = tf.nn.bias_add(conv, biases)
conv_out = tf.nn.relu(pre_activation, name=scope.name)
_activation_summary(conv_out)
ksize = [1, sequence_length - filter_size + 1, 1, 1]
print 'filter_size', filter_size
print 'ksize', ksize
print 'conv_out', conv_out
pooled = tf.nn.max_pool(conv_out, ksize=ksize, strides=[1, 1, 1, 1],
padding='VALID', name='pool1')
norm_pooled = tf.nn.lrn(pooled, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm1')
# pooled_outputs.append(pooled)
pooled_outputs.append(norm_pooled)
# print 'norm1', norm1
num_filters_total = num_filters * len(filter_sizes)
h_pool = tf.concat(pooled_outputs, 3)
h_pool = tf.concat(pooled_outputs, 3)
h_pool_flat = tf.reshape(h_pool, [-1, num_filters_total])
print 'h_pool', h_pool
print 'h_pool_flat', h_pool_flat
h_drop = tf.nn.dropout(h_pool_flat, dropout_keep_prob)
# num_filters_total = num_filters * 1
# norm_flat = tf.reshape(norm1, [-1, num_filters_total])
with tf.variable_scope('softmax_linear') as scope:
weights = _variable_with_weight_decay_xavier('weights', [num_filters_total, NUM_CLASSES],
wd=0.2)
biases = _variable_on_cpu('biases', [NUM_CLASSES],
tf.constant_initializer(0.1))
softmax_linear = tf.add(tf.matmul(h_pool_flat, weights), biases, name=scope.name)
_activation_summary(softmax_linear)
return softmax_linear
def loss(logits, labels):
  """Add L2Loss to all the trainable variables and compute batch accuracy.

  Add summary for "Loss" and "Loss/avg".

  Args:
    logits: Logits from inference().
    labels: one-hot labels of shape [batch_size, NUM_CLASSES]
        (the code takes argmax over axis 1, so labels are dense one-hot,
        not the 1-D sparse ids the original docstring claimed).

  Returns:
    A tuple (total_loss, accuracy): total_loss is the cross entropy plus all
    collected weight decay terms; accuracy is a scalar float tensor.
  """
  # Calculate the average cross entropy loss across the batch.
  cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits=logits, labels=labels, name='cross_entropy_per_example')
  cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
  tf.add_to_collection('losses', cross_entropy_mean)
  golds = tf.argmax(labels, 1, name="golds")
  predictions = tf.argmax(logits, 1, name="predictions")
  # Reuse the already-computed gold labels instead of recomputing the argmax.
  correct_predictions = tf.equal(predictions, golds)
  accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")
  # The total loss is defined as the cross entropy loss plus all of the weight
  # decay terms (L2 loss).
  return tf.add_n(tf.get_collection('losses'), name='total_loss'), accuracy
| apache-2.0 |
lao605/product-definition-center | pdc/apps/changeset/middleware.py | 4 | 3389 | #
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
import sys
import logging
from datetime import datetime
from django.db import transaction
from . import models
# trap wrong HTTP methods
from django.http import HttpResponse
from rest_framework import status
import json
logger = logging.getLogger(__name__)
class ChangesetMiddleware(object):
    """
    Create a new changeset for each request. It is accessible via
    `request.changeset`. If the view function ends successfully, the changeset
    is committed iff there are any changes associated with it.
    """
    def process_view(self, request, view_func, view_args, view_kwargs):
        # The authenticated user (if any) is recorded as the changeset author.
        user = None
        if request.user.is_authenticated():
            user = request.user
        if request.method == "GET":
            logger.debug("Start query request on the view %s." % view_func.__name__)
            # NOTE: We do not need to create a changeset when we just SELECT some records.
            response = view_func(request, *view_args, **view_kwargs)
        else:
            # trap the request and give a 405 response when the method is not
            # one of the methods defined in HTTP/1.1
            if request.method not in ["HEAD", "POST", "PUT", "DELETE", "TRACE", "CONNECT", "PATCH", "OPTIONS"]:
                logger.error('Wrong method %s specified when calling %s', request.method.decode("utf-8"), request.path,
                             exc_info=sys.exc_info())
                response_data = json.dumps({"detail": 'Method "{method}" not allowed.'.format(method=request.method)},
                                           ensure_ascii=False)
                response = HttpResponse(response_data, content_type='application/json')
                response.status_code = status.HTTP_405_METHOD_NOT_ALLOWED
                return response
            logger.debug("Start write request on the view %s." % view_func.__name__)
            try:
                # Run the whole view inside one DB transaction so the view's
                # changes and the changeset record commit (or roll back) together.
                with transaction.atomic():
                    # Optional free-form comment supplied by the client header
                    # PDC-Change-Comment.
                    comment = request.META.get("HTTP_PDC_CHANGE_COMMENT", None)
                    request.changeset = models.Changeset(author=user, comment=comment)
                    request.changeset.requested_on = datetime.now()
                    response = view_func(request, *view_args, **view_kwargs)
                    # response.exception=True means there is an error occurs.
                    if getattr(response, 'exception', 0) or (
                        hasattr(response, 'status_code') and response.status_code >= 400
                    ):
                        # avoid recording changeset on server error, also
                        # abort the transaction so that no modifications are
                        # done to database
                        request.changeset.reset()
                        transaction.set_rollback(True)
                    else:
                        request.changeset.commit()
            except:
                # NOTE: catch all errors that were raised by view.
                # And log the trace back to the file.
                logger.error('View Function Error: %s', request.path,
                             exc_info=sys.exc_info())
                # we do not want to break the default exception processing chains,
                # so re-raise the exception to the upper level.
                raise
        return response
| mit |
tboyce1/home-assistant | homeassistant/components/sensor/vera.py | 6 | 3739 | """
Support for Vera sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.vera/
"""
import logging
from datetime import timedelta
from homeassistant.const import (
TEMP_CELSIUS, TEMP_FAHRENHEIT)
from homeassistant.helpers.entity import Entity
from homeassistant.components.sensor import ENTITY_ID_FORMAT
from homeassistant.util import convert
from homeassistant.components.vera import (
VERA_CONTROLLER, VERA_DEVICES, VeraDevice)
DEPENDENCIES = ['vera']
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=5)
def setup_platform(hass, config, add_devices, discovery_info=None):
    """Set up the Vera controller devices."""
    controller = hass.data[VERA_CONTROLLER]
    sensors = [VeraSensor(device, controller)
               for device in hass.data[VERA_DEVICES]['sensor']]
    add_devices(sensors)
class VeraSensor(VeraDevice, Entity):
    """Representation of a Vera Sensor."""
    def __init__(self, vera_device, controller):
        """Initialize the sensor."""
        # Last value read from the device; exposed via the `state` property.
        self.current_value = None
        # Unit resolved from the controller ('F'/'C') for temperature sensors.
        self._temperature_units = None
        # Timestamp of the last scene activation; used to de-duplicate
        # scene-controller events in update().
        self.last_changed_time = None
        VeraDevice.__init__(self, vera_device, controller)
        self.entity_id = ENTITY_ID_FORMAT.format(self.vera_id)
    @property
    def state(self):
        """Return the current value of the sensor."""
        return self.current_value
    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        import pyvera as veraApi
        # Returns None implicitly for categories not listed below.
        if self.vera_device.category == veraApi.CATEGORY_TEMPERATURE_SENSOR:
            return self._temperature_units
        elif self.vera_device.category == veraApi.CATEGORY_LIGHT_SENSOR:
            return 'lux'
        elif self.vera_device.category == veraApi.CATEGORY_UV_SENSOR:
            return 'level'
        elif self.vera_device.category == veraApi.CATEGORY_HUMIDITY_SENSOR:
            return '%'
        elif self.vera_device.category == veraApi.CATEGORY_POWER_METER:
            return 'watts'
    def update(self):
        """Update the state."""
        import pyvera as veraApi
        if self.vera_device.category == veraApi.CATEGORY_TEMPERATURE_SENSOR:
            self.current_value = self.vera_device.temperature
            # Pick HA's unit constant based on the controller's configuration.
            vera_temp_units = (
                self.vera_device.vera_controller.temperature_units)
            if vera_temp_units == 'F':
                self._temperature_units = TEMP_FAHRENHEIT
            else:
                self._temperature_units = TEMP_CELSIUS
        elif self.vera_device.category == veraApi.CATEGORY_LIGHT_SENSOR:
            self.current_value = self.vera_device.light
        elif self.vera_device.category == veraApi.CATEGORY_UV_SENSOR:
            self.current_value = self.vera_device.light
        elif self.vera_device.category == veraApi.CATEGORY_HUMIDITY_SENSOR:
            self.current_value = self.vera_device.humidity
        elif self.vera_device.category == veraApi.CATEGORY_SCENE_CONTROLLER:
            # Only report a scene id the first time it is seen; repeated polls
            # with an unchanged timestamp yield None so the event fires once.
            value = self.vera_device.get_last_scene_id(True)
            time = self.vera_device.get_last_scene_time(True)
            if time == self.last_changed_time:
                self.current_value = None
            else:
                self.current_value = value
            self.last_changed_time = time
        elif self.vera_device.category == veraApi.CATEGORY_POWER_METER:
            power = convert(self.vera_device.power, float, 0)
            self.current_value = int(round(power, 0))
        elif self.vera_device.is_trippable:
            tripped = self.vera_device.is_tripped
            self.current_value = 'Tripped' if tripped else 'Not Tripped'
        else:
            self.current_value = 'Unknown'
| apache-2.0 |
bluevoda/BloggyBlog | lib/python3.4/site-packages/django/db/backends/mysql/features.py | 28 | 2938 | from django.db.backends.base.features import BaseDatabaseFeatures
from django.utils.functional import cached_property
from .base import Database
try:
import pytz
except ImportError:
pytz = None
class DatabaseFeatures(BaseDatabaseFeatures):
    """MySQL-specific feature flags and capability probes for the backend."""
    empty_fetchmany_value = ()
    update_can_self_select = False
    allows_group_by_pk = True
    related_fields_match_type = True
    allow_sliced_subqueries = False
    has_bulk_insert = True
    has_select_for_update = True
    has_select_for_update_nowait = False
    supports_forward_references = False
    supports_regex_backreferencing = False
    supports_date_lookup_using_string = False
    can_introspect_autofield = True
    can_introspect_binary_field = False
    can_introspect_small_integer_field = True
    supports_timezones = False
    requires_explicit_null_ordering_when_grouping = True
    allows_auto_pk_0 = False
    uses_savepoints = True
    can_release_savepoints = True
    atomic_transactions = False
    supports_column_check_constraints = False
    can_clone_databases = True
    supports_temporal_subtraction = True
    @cached_property
    def _mysql_storage_engine(self):
        "Internal method used in Django tests. Don't rely on this from your code"
        # Ask the server which storage engine is the default (e.g. InnoDB).
        with self.connection.cursor() as cursor:
            cursor.execute("SELECT ENGINE FROM INFORMATION_SCHEMA.ENGINES WHERE SUPPORT = 'DEFAULT'")
            result = cursor.fetchone()
        return result[0]
    @cached_property
    def can_introspect_foreign_keys(self):
        "Confirm support for introspected foreign keys"
        # MyISAM tables do not enforce (or record) foreign key constraints.
        return self._mysql_storage_engine != 'MyISAM'
    @cached_property
    def supports_microsecond_precision(self):
        # See https://github.com/farcepest/MySQLdb1/issues/24 for the reason
        # about requiring MySQLdb 1.2.5
        return self.connection.mysql_version >= (5, 6, 4) and Database.version_info >= (1, 2, 5)
    @cached_property
    def has_zoneinfo_database(self):
        # MySQL accepts full time zones names (eg. Africa/Nairobi) but rejects
        # abbreviations (eg. EAT). When pytz isn't installed and the current
        # time zone is LocalTimezone (the only sensible value in this
        # context), the current time zone name will be an abbreviation. As a
        # consequence, MySQL cannot perform time zone conversions reliably.
        if pytz is None:
            return False
        # Test if the time zone definitions are installed.
        with self.connection.cursor() as cursor:
            cursor.execute("SELECT 1 FROM mysql.time_zone LIMIT 1")
            return cursor.fetchone() is not None
    def introspected_boolean_field_type(self, *args, **kwargs):
        # MySQL stores booleans as TINYINT(1), which introspects as an integer.
        return 'IntegerField'
    @cached_property
    def is_sql_auto_is_null_enabled(self):
        # SQL_AUTO_IS_NULL changes the meaning of `col IS NULL` after inserts;
        # the backend needs to know whether the server session has it on.
        with self.connection.cursor() as cursor:
            cursor.execute('SELECT @@SQL_AUTO_IS_NULL')
            return cursor.fetchone()[0] == 1
| gpl-3.0 |
mitchcapper/mythbox | resources/lib/decorator/decorator.py | 40 | 6890 | """
Decorator module, see
http://www.phyast.pitt.edu/~micheles/python/documentation.html
for the documentation and below for the licence.
"""
## The basic trick is to generate the source code for the decorated function
## with the right signature and to evaluate it.
## Uncomment the statement 'print >> sys.stderr, func_src' in _decorator
## to understand what is going on.
__all__ = ["decorator", "new_wrapper", "getinfo"]
import inspect, sys
try:
set
except NameError:
from sets import Set as set
def getinfo(func):
    """
    Returns an info dictionary containing:
    - name (the name of the function : str)
    - argnames (the names of the arguments : list)
    - defaults (the values of the default arguments : tuple)
    - signature (the signature : str)
    - doc (the docstring : str)
    - module (the module name : str)
    - dict (the function __dict__ : str)
    >>> def f(self, x=1, y=2, *args, **kw): pass
    >>> info = getinfo(f)
    >>> info["name"]
    'f'
    >>> info["argnames"]
    ['self', 'x', 'y', 'args', 'kw']
    >>> info["defaults"]
    (1, 2)
    >>> info["signature"]
    'self, x, y, *args, **kw'
    """
    # Only plain functions and methods are supported.
    assert inspect.ismethod(func) or inspect.isfunction(func)
    regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
    argnames = list(regargs)
    if varargs:
        argnames.append(varargs)
    if varkwargs:
        argnames.append(varkwargs)
    # formatargspec with a no-op formatvalue omits the default values, giving
    # the bare argument list; [1:-1] strips the surrounding parentheses.
    signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
                                      formatvalue=lambda value: "")[1:-1]
    # NOTE: func_defaults/func_globals/func_closure are the Python 2 names of
    # these function attributes (renamed __defaults__ etc. in Python 3), so
    # this module is Python 2 only.
    return dict(name=func.__name__, argnames=argnames, signature=signature,
                defaults = func.func_defaults, doc=func.__doc__,
                module=func.__module__, dict=func.__dict__,
                globals=func.func_globals, closure=func.func_closure)
# akin to functools.update_wrapper
def update_wrapper(wrapper, model, infodict=None):
    """Copy name, docstring, module, __dict__ and defaults from *model*
    (or an explicit *infodict* from getinfo) onto *wrapper*, record the
    original as ``wrapper.undecorated`` and return *wrapper*."""
    infodict = infodict or getinfo(model)
    try:
        wrapper.__name__ = infodict['name']
    except: # Python version < 2.4
        pass
    wrapper.__doc__ = infodict['doc']
    wrapper.__module__ = infodict['module']
    wrapper.__dict__.update(infodict['dict'])
    wrapper.func_defaults = infodict['defaults']
    wrapper.undecorated = model
    return wrapper
def new_wrapper(wrapper, model):
    """
    An improvement over functools.update_wrapper. The wrapper is a generic
    callable object. It works by generating a copy of the wrapper with the
    right signature and by updating the copy, not the original.
    Moreovoer, 'model' can be a dictionary with keys 'name', 'doc', 'module',
    'dict', 'defaults'.
    """
    if isinstance(model, dict):
        infodict = model
    else: # assume model is a function
        infodict = getinfo(model)
    # '_wrapper_' is used internally in the generated source below, so the
    # model's signature must not contain an argument with that name.
    assert not '_wrapper_' in infodict["argnames"], (
        '"_wrapper_" is a reserved argument name!')
    # Build a lambda with the model's exact signature that forwards every
    # argument to the real wrapper, then copy the metadata onto it.
    src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict
    funcopy = eval(src, dict(_wrapper_=wrapper))
    return update_wrapper(funcopy, model, infodict)
# helper used in decorator_factory
def __call__(self, func):
    # Installed as the __call__ method of decorated classes: returns a
    # wrapper with func's exact signature that delegates to self.call(func, ...).
    infodict = getinfo(func)
    # '_func_' and '_self_' are used internally in the generated source.
    for name in ('_func_', '_self_'):
        assert not name in infodict["argnames"], (
            '%s is a reserved argument name!' % name)
    src = "lambda %(signature)s: _self_.call(_func_, %(signature)s)"
    new = eval(src % infodict, dict(_func_=func, _self_=self))
    return update_wrapper(new, func, infodict)
def decorator_factory(cls):
    """
    Take a class with a ``.caller`` method and return a callable decorator
    object. It works by adding a suitable __call__ method to the class;
    it raises a TypeError if the class already has a nontrivial __call__
    method.
    """
    available = dir(cls)
    # Refuse classes that already define __call__ -- we would clobber it.
    if '__call__' in available:
        raise TypeError('You cannot decorate a class with a nontrivial '
                        '__call__ method')
    # The generated __call__ delegates to the class's .call method.
    if 'call' not in available:
        raise TypeError('You cannot decorate a class without a '
                        '.call method')
    cls.__call__ = __call__
    return cls
def decorator(caller):
    """
    General purpose decorator factory: takes a caller function as
    input and returns a decorator with the same attributes.
    A caller function is any function like this::
     def caller(func, *args, **kw):
         # do something
         return func(*args, **kw)
    Here is an example of usage:
    >>> @decorator
    ... def chatty(f, *args, **kw):
    ...     print "Calling %r" % f.__name__
    ...     return f(*args, **kw)
    >>> chatty.__name__
    'chatty'
    >>> @chatty
    ... def f(): pass
    ...
    >>> f()
    Calling 'f'
    decorator can also take in input a class with a .caller method; in this
    case it converts the class into a factory of callable decorator objects.
    See the documentation for an example.
    """
    if inspect.isclass(caller):
        return decorator_factory(caller)
    def _decorator(func): # the real meat is here
        infodict = getinfo(func)
        argnames = infodict['argnames']
        # '_call_' and '_func_' are used internally in the generated source.
        assert not ('_call_' in argnames or '_func_' in argnames), (
            'You cannot use _call_ or _func_ as argument names!')
        # Generate a wrapper with func's exact signature that forwards to the
        # caller, so introspection on the decorated function keeps working.
        src = "lambda %(signature)s: _call_(_func_, %(signature)s)" % infodict
        # import sys; print >> sys.stderr, src # for debugging purposes
        dec_func = eval(src, dict(_func_=func, _call_=caller))
        return update_wrapper(dec_func, func, infodict)
    return update_wrapper(_decorator, caller)
if __name__ == "__main__":
    # Run the doctests embedded in the docstrings above as a self-test.
    import doctest; doctest.testmod()
########################## LEGALESE ###############################
## Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## Redistributions in bytecode form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
## DAMAGE.
| gpl-2.0 |
OpenHID/code-vr | languages/python/server.py | 1 | 6388 | # Example of simple echo server
# www.solusipse.net
import socket
import os
import subprocess
import fileinput
from export import export_json
from parse import parse
import json
def listen():
    """Serve forever on 127.0.0.1:5555.

    Wire protocol (as implemented below): the client first sends an 8-byte
    ASCII message size terminated by a carriage return; the server answers
    with the literal string "ACK", then reads the message in slabs of at
    most 8 bytes.  If the message is a path to an existing Python file, the
    server parses it, writes its AST to <name>.json, streams that file back
    (size first, then the bytes), deletes the temporary file, and finally
    echoes the upper-cased message (size, then payload).
    """
    connection = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Allow quick restarts without "address already in use" errors.
    connection.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    connection.bind(('127.0.0.1', 5555))
    connection.listen(10)
    print('Server is listening for connections')
    while True:
        #wait to accept a connection - blocking call
        current_connection, address = connection.accept()
        print('Connected with ' + address[0] + ':' + str(address[1]))
        while True:
            # accumulator
            acu = ""
            # receive message size
            size = current_connection.recv(8)
            if size: # if received message size
                print("for message size we received: %s" % size)
                # send ACK
                print("sending back message size ACK")
                ack = "ACK"
                current_connection.send(ack.encode('ascii'))
                # prepare to receive message
                # The size field is ASCII digits terminated by '\r'.
                decoded_message = size.decode('ascii')
                delim = decoded_message.find('\r')
                decoded_size = decoded_message[:delim]
                print("receiving %s bytes" % decoded_size)
                remainingData = int(decoded_size)
                # Read the payload in slabs of at most 8 bytes each.
                while remainingData != 0:
                    if remainingData >= 8 : # slab >= 8 byte buffer
                        # receive slab from client
                        slab = current_connection.recv(8)
                        acu = acu + slab.decode('ascii')
                        sizeofSlabReceived = len(slab)
                        print("wrote %d bytes" % len (slab))
                        remainingData = remainingData - int(sizeofSlabReceived)
                    else:
                        # receive slab from client
                        slab = current_connection.recv(remainingData)
                        acu = acu + slab.decode('ascii')
                        sizeofSlabReceived = len(slab)
                        print("wrote %d bytes" % len (slab))
                        remainingData = remainingData - int(sizeofSlabReceived)
            if acu != "": # if we received a path
                print("received: ", acu)
                raw_path = acu.strip()
                # NOTE(review): file_delim and path below are computed but
                # never used afterwards.
                file_delim = raw_path.rfind('/')
                # Derive "<basename>.json" from the received path.
                file_name = raw_path[raw_path.rfind('/')+1:raw_path.rfind('.')]
                file_name_ext = file_name + ".json"
                print("checking if file exists...")
                path = "<" + raw_path + ">"
                if (os.path.isfile(raw_path)):
                    print("file exists!")
                    json_file = open(file_name_ext, 'w+')
                    print("output file name: %s" % file_name_ext)
                    # run the astexport module:
                    # use stderr to check
                    # subprocess.call(['astexport', '--i', raw_path], stdout=json)
                    # run the astexport module:
                    # Parse the Python source and serialize its AST to JSON.
                    source = "".join(fileinput.input(raw_path))
                    tree = parse(source)
                    ast = export_json(tree, True) # True or False for pretty printing
                    json_file.write(ast) # or json.dump(jason, json_file)
                    json_file.close()
                    # json_file.seek(0, 0)
                    # line = json_file.readline()
                    # print("%s" % line)
                    # json_file.close()
                    # at this point we should have the .json file to send
                    # query .json file size
                    json_path = './'+file_name_ext
                    file_size = os.path.getsize(json_path)
                    print("sending %s bytes" % file_size)
                    # send file size to client
                    current_connection.send(str(file_size).encode('ascii'))
                    # open in byte mode
                    json_bytes = open(file_name_ext, 'rb')
                    buff_read = 0
                    bytes_remaining = int(file_size)
                    # Stream the JSON file back in slabs of at most 8 bytes.
                    while bytes_remaining != 0:
                        if bytes_remaining >= 8: # slab >= 8 bytes
                            buff_read = json_bytes.read(8)
                            sizeof_slab_read = len(buff_read)
                            print('read: %d bytes' % sizeof_slab_read)
                            # send slab to client
                            current_connection.send(buff_read)
                            bytes_remaining = bytes_remaining - int(sizeof_slab_read)
                        else: # slab smaller than 8 bytes
                            buff_read = json_bytes.read(bytes_remaining)
                            sizeof_slab_read = len(buff_read)
                            print('read: %d bytes' % sizeof_slab_read)
                            # send small slab to client
                            current_connection.send(buff_read)
                            bytes_remaining = bytes_remaining - int(sizeof_slab_read)
                    print('read the file completely')
                    # close .json file
                    json_bytes.close()
                    # remove local json file (residual)
                    os.remove(json_path)
                else:
                    print("file not found")
                # make msg upper case
                acu = acu.upper()
                print("sending: ", acu)
                # send message size
                responseSize = len(acu)
                print("sending %d bytes" % responseSize)
                sizeS = str(responseSize)
                current_connection.send(sizeS.encode('ascii'))
                # encode it
                encoded_acu = acu.encode('ascii')
                # send message:
                current_connection.send(encoded_acu)
if __name__ == "__main__":
    try:
        listen()
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C shutdown without printing a traceback.
        pass
jelloslinger/aiodownload | docs/conf.py | 1 | 5741 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# aiodownload documentation build configuration file, created by
# sphinx-quickstart on Thu Jun 15 14:47:21 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosectionlabel',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.githubpages'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'aiodownload'
copyright = '2017, Michael Copeland'
author = 'Michael Copeland'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.2.4'
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This patterns also effect to html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_logo = '_static/tree-roots.svg'
html_theme = 'alabaster'
html_sidebars = {
'**': [
'globaltoc.html',
# 'relations.html',
'sourcelink.html',
'searchbox.html'
],
}
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'fixed_sidebar': 'true',
'github_banner': 'true',
'github_user': 'jelloslinger',
'github_repo': 'aiodownload',
'travis_button': 'jelloslinger/aiodownload',
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# -- Options for HTMLHelp output ------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'aiodownloaddoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
# 'preamble': '',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'aiodownload.tex', 'aiodownload Documentation',
'Michael Copeland', 'manual'),
]
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'aiodownload', 'aiodownload Documentation',
[author], 1)
]
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'aiodownload', 'aiodownload Documentation',
author, 'aiodownload', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
| mit |
eeshangarg/oh-mainline | vendor/packages/gdata/src/gdata/docs/__init__.py | 263 | 8993 | #!/usr/bin/python
#
# Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to Atom objects used with Google Documents."""
__author__ = ('api.jfisher (Jeff Fisher), '
'api.eric@google.com (Eric Bidelman)')
import atom
import gdata
DOCUMENTS_NAMESPACE = 'http://schemas.google.com/docs/2007'
class Scope(atom.AtomBase):
  """The DocList ACL scope element"""
  _tag = 'scope'
  _namespace = gdata.GACL_NAMESPACE
  # Inherit the base element's child/attribute maps, then register the XML
  # attributes this element adds.
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['value'] = 'value'
  _attributes['type'] = 'type'
  def __init__(self, value=None, type=None, extension_elements=None,
               extension_attributes=None, text=None):
    # value/type mirror the 'value' and 'type' XML attributes declared above.
    # NOTE: the 'type' parameter shadows the builtin; kept for API compatibility.
    self.value = value
    self.type = type
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
class Role(atom.AtomBase):
  """The DocList ACL role element"""
  _tag = 'role'
  _namespace = gdata.GACL_NAMESPACE
  # Inherit the base element's child/attribute maps, then register the
  # 'value' XML attribute this element adds.
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['value'] = 'value'
  def __init__(self, value=None, extension_elements=None,
               extension_attributes=None, text=None):
    # value mirrors the 'value' XML attribute declared above.
    self.value = value
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
class FeedLink(atom.AtomBase):
  """The DocList gd:feedLink element"""
  _tag = 'feedLink'
  _namespace = gdata.GDATA_NAMESPACE
  # Unlike Scope/Role, only the attribute map is extended here; child
  # elements are inherited unchanged from AtomBase.
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['rel'] = 'rel'
  _attributes['href'] = 'href'
  def __init__(self, href=None, rel=None, text=None, extension_elements=None,
               extension_attributes=None):
    # href: target URI of the linked feed; rel: link relation type.
    self.href = href
    self.rel = rel
    atom.AtomBase.__init__(self, extension_elements=extension_elements,
                           extension_attributes=extension_attributes, text=text)
class ResourceId(atom.AtomBase):
  """The DocList gd:resourceId element"""
  _tag = 'resourceId'
  _namespace = gdata.GDATA_NAMESPACE
  # Inherit the base element's child/attribute maps, then register the
  # 'value' XML attribute this element adds.
  _children = atom.AtomBase._children.copy()
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['value'] = 'value'
  def __init__(self, value=None, extension_elements=None,
               extension_attributes=None, text=None):
    # value mirrors the 'value' XML attribute declared above.
    self.value = value
    self.text = text
    self.extension_elements = extension_elements or []
    self.extension_attributes = extension_attributes or {}
class LastModifiedBy(atom.Person):
  """The DocList gd:lastModifiedBy element"""
  # A Person element renamed/renamespaced; structure comes from atom.Person.
  _tag = 'lastModifiedBy'
  _namespace = gdata.GDATA_NAMESPACE
class LastViewed(atom.Person):
  """The DocList gd:lastViewed element"""
  # A Person element renamed/renamespaced; structure comes from atom.Person.
  _tag = 'lastViewed'
  _namespace = gdata.GDATA_NAMESPACE
class WritersCanInvite(atom.AtomBase):
  """The DocList docs:writersCanInvite element"""
  _tag = 'writersCanInvite'
  _namespace = DOCUMENTS_NAMESPACE
  # Registers the boolean-valued 'value' XML attribute.
  # NOTE(review): unlike the siblings above, no __init__ or _children copy is
  # defined here -- presumably intentional, but worth confirming.
  _attributes = atom.AtomBase._attributes.copy()
  _attributes['value'] = 'value'
class DocumentListEntry(gdata.GDataEntry):
  """The Google Documents version of an Atom Entry"""
  _tag = gdata.GDataEntry._tag
  _namespace = atom.ATOM_NAMESPACE
  # Extend the inherited parsing maps with the DocList-specific child
  # elements declared earlier in this module.
  _children = gdata.GDataEntry._children.copy()
  _attributes = gdata.GDataEntry._attributes.copy()
  _children['{%s}feedLink' % gdata.GDATA_NAMESPACE] = ('feedLink', FeedLink)
  _children['{%s}resourceId' % gdata.GDATA_NAMESPACE] = ('resourceId',
                                                         ResourceId)
  _children['{%s}lastModifiedBy' % gdata.GDATA_NAMESPACE] = ('lastModifiedBy',
                                                             LastModifiedBy)
  _children['{%s}lastViewed' % gdata.GDATA_NAMESPACE] = ('lastViewed',
                                                         LastViewed)
  _children['{%s}writersCanInvite' % DOCUMENTS_NAMESPACE] = (
      'writersCanInvite', WritersCanInvite)
  def __init__(self, resourceId=None, feedLink=None, lastViewed=None,
               lastModifiedBy=None, writersCanInvite=None, author=None,
               category=None, content=None, atom_id=None, link=None,
               published=None, title=None, updated=None, text=None,
               extension_elements=None, extension_attributes=None):
    # Store the DocList-specific children, then delegate the standard Atom
    # entry fields to the base class.
    self.feedLink = feedLink
    self.lastViewed = lastViewed
    self.lastModifiedBy = lastModifiedBy
    self.resourceId = resourceId
    self.writersCanInvite = writersCanInvite
    gdata.GDataEntry.__init__(
        self, author=author, category=category, content=content,
        atom_id=atom_id, link=link, published=published, title=title,
        updated=updated, extension_elements=extension_elements,
        extension_attributes=extension_attributes, text=text)
  def GetAclLink(self):
    """Extracts the DocListEntry's <gd:feedLink>.
    Returns:
      A FeedLink object.
    """
    return self.feedLink
  def GetDocumentType(self):
    """Extracts the type of document from the DocListEntry.
    This method returns the type of document the DocListEntry
    represents. Possible values are document, presentation,
    spreadsheet, folder, or pdf.
    Returns:
      A string representing the type of document.
    """
    if self.category:
      # The kind is carried by the category whose scheme is gd:...#kind.
      # Falls through (returning None implicitly) when categories exist
      # but none carries the #kind scheme.
      for category in self.category:
        if category.scheme == gdata.GDATA_NAMESPACE + '#kind':
          return category.label
    else:
      return None
def DocumentListEntryFromString(xml_string):
    """Builds a DocumentListEntry from its XML representation.

    Args:
        xml_string: string The XML describing a Document List feed entry.

    Returns:
        A DocumentListEntry object corresponding to the given XML.
    """
    entry = atom.CreateClassFromXMLString(DocumentListEntry, xml_string)
    return entry
class DocumentListAclEntry(gdata.GDataEntry):
    """A DocList ACL Entry flavor of an Atom Entry."""

    _tag = gdata.GDataEntry._tag
    _namespace = gdata.GDataEntry._namespace
    _children = gdata.GDataEntry._children.copy()
    _attributes = gdata.GDataEntry._attributes.copy()
    # gAcl child elements describing who (scope) may do what (role).
    _children['{%s}scope' % gdata.GACL_NAMESPACE] = ('scope', Scope)
    _children['{%s}role' % gdata.GACL_NAMESPACE] = ('role', Role)

    def __init__(self, category=None, atom_id=None, link=None,
            title=None, updated=None, scope=None, role=None,
            extension_elements=None, extension_attributes=None, text=None):
        # NOTE(review): the text, extension_elements and extension_attributes
        # arguments are accepted but not forwarded to GDataEntry.__init__
        # (text=None is passed explicitly) — confirm whether this is intended.
        gdata.GDataEntry.__init__(self, author=None, category=category,
                                  content=None, atom_id=atom_id, link=link,
                                  published=None, title=title,
                                  updated=updated, text=None)
        self.scope = scope
        self.role = role
def DocumentListAclEntryFromString(xml_string):
    """Builds a DocumentListAclEntry from its XML representation.

    Args:
        xml_string: string The XML describing a Document List ACL feed entry.

    Returns:
        A DocumentListAclEntry object corresponding to the given XML.
    """
    acl_entry = atom.CreateClassFromXMLString(DocumentListAclEntry, xml_string)
    return acl_entry
class DocumentListFeed(gdata.GDataFeed):
    """A feed containing a list of Google Documents Items."""
    _tag = gdata.GDataFeed._tag
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    # Each atom:entry in the feed is parsed as a DocumentListEntry.
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [DocumentListEntry])
def DocumentListFeedFromString(xml_string):
    """Builds a DocumentListFeed from its XML representation.

    Args:
        xml_string: string The XML describing a DocumentList feed.

    Returns:
        A DocumentListFeed object corresponding to the given XML.
    """
    feed = atom.CreateClassFromXMLString(DocumentListFeed, xml_string)
    return feed
class DocumentListAclFeed(gdata.GDataFeed):
    """A DocList ACL feed flavor of a Atom feed."""
    _tag = gdata.GDataFeed._tag
    _namespace = atom.ATOM_NAMESPACE
    _children = gdata.GDataFeed._children.copy()
    _attributes = gdata.GDataFeed._attributes.copy()
    # Each atom:entry in the feed is parsed as a DocumentListAclEntry.
    _children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry',
                                                    [DocumentListAclEntry])
def DocumentListAclFeedFromString(xml_string):
    """Builds a DocumentListAclFeed from its XML representation.

    Args:
        xml_string: string The XML describing a DocumentList ACL feed.

    Returns:
        A DocumentListAclFeed object corresponding to the given XML.
    """
    acl_feed = atom.CreateClassFromXMLString(DocumentListAclFeed, xml_string)
    return acl_feed
| agpl-3.0 |
omeripek/arguman.org | web/main/settings.py | 1 | 4666 | """
Django settings for arguman project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))

from datetime import timedelta

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; production deployments
# should override it (e.g. via settings_local.py, imported at the bottom).
SECRET_KEY = 'qlp_henm3k-$7u@9b(@coqgpd1-2xmtox%a8_#*r9=0wh5d0oo'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True

# Empty is acceptable while DEBUG is True; must list served hosts otherwise.
ALLOWED_HOSTS = []
# Application definition

INSTALLED_APPS = (
    # Django contrib apps.
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'django.contrib.humanize',
    # Third-party apps.
    'social_auth',
    'django_gravatar',
    'rest_framework',
    'rest_framework.authtoken',
    # Project apps.
    'profiles',
    'premises',
    'newsfeed',
    'blog',
    'api'
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
)

ROOT_URLCONF = 'main.urls'

WSGI_APPLICATION = 'main.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases

# Development default: a local SQLite file next to the project.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/

LANGUAGE_CODE = 'TR-tr'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/

STATIC_URL = '/static/'

STATICFILES_DIRS = (
    os.path.join(os.path.dirname(__file__), "../static"),
)

TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), "../templates"),
)

# Social Auth Settings

AUTHENTICATION_BACKENDS = (
    'social_auth.backends.twitter.TwitterBackend',
    'django.contrib.auth.backends.ModelBackend',
)

AUTH_USER_MODEL = 'profiles.Profile'

# Rules

# Content-deletion policy knobs consumed by the project apps.
CONTENT_DELETION = {
    'MAX_PREMISE_COUNT': 2,
    'HAS_EMPTY_CONTENT_DELETION': True,
    'LAST_DELETION_DATE': timedelta(hours=1)
}

TWITTER_CONSUMER_KEY = None  # defined in settings_local.py
TWITTER_CONSUMER_SECRET = None  # defined in settings_local.py

LOGIN_URL = '/login/'
LOGIN_REDIRECT_URL = '/'

SOCIAL_AUTH_COMPLETE_URL_NAME = 'socialauth_complete'
SOCIAL_AUTH_ASSOCIATE_URL_NAME = 'socialauth_associate_complete'

SOCIAL_AUTH_PIPELINE = (
    'social_auth.backends.pipeline.social.social_auth_user',
    'social_auth.backends.pipeline.associate.associate_by_email',
    'social_auth.backends.pipeline.user.get_username',
    'social_auth.backends.pipeline.user.create_user',
    'social_auth.backends.pipeline.social.associate_user',
    'social_auth.backends.pipeline.social.load_extra_data',
    'social_auth.backends.pipeline.user.update_user_details',
)
REST_FRAMEWORK = {
# Use Django's standard `django.contrib.auth` permissions,
# or allow read-only access for unauthenticated users.
'DEFAULT_PERMISSION_CLASSES': [
'rest_framework.permissions.AllowAny'
],
'DEFAULT_AUTHENTICATION_CLASSES': (
'rest_framework.authentication.TokenAuthentication',
'rest_framework.authentication.SessionAuthentication',
),
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
#'rest_framework.renderers.BrowsableAPIRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
),
'PAGINATE_BY': 10,
'PAGINATE_BY_PARAM': 'page_size',
'MAX_PAGINATE_BY': 100,
'UNICODE_JSON': False,
'DATETIME_FORMAT': '%d-%m-%Y %H:%m'
}
MONGODB_HOST = "localhost"
MONGODB_DATABASE = "arguman"
SITE_URL = "arguman.org"
# Markitup Settings
MARKITUP_SET = 'markitup/sets/markdown'
MARKITUP_FILTER = ('markdown.markdown', {'safe_mode': False})
BLOG_FEED_TITLE = "Arguman.org Blog'u"
BLOG_FEED_DESCRIPTION = "Arguman analizi platformu"
BLOG_URL = "http://arguman.org/blog"
try:
from settings_local import *
except ImportError:
print "settings_local.py not found!"
| mit |
louyihua/edx-platform | cms/djangoapps/contentstore/management/commands/tests/test_export_all_courses.py | 187 | 2065 | """
Test for export all courses.
"""
import shutil
from tempfile import mkdtemp
from contentstore.management.commands.export_all_courses import export_courses_to_output_path
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class ExportAllCourses(ModuleStoreTestCase):
    """
    Tests exporting all courses.
    """
    def setUp(self):
        """ Common setup: two mongo-backed courses and a temp export dir. """
        super(ExportAllCourses, self).setUp()
        # Work directly against the mongo-backed modulestore so the test
        # below can corrupt a course document in place.
        self.store = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.mongo)
        self.temp_dir = mkdtemp()
        self.addCleanup(shutil.rmtree, self.temp_dir)

        self.first_course = CourseFactory.create(
            org="test", course="course1", display_name="run1", default_store=ModuleStoreEnum.Type.mongo
        )
        self.second_course = CourseFactory.create(
            org="test", course="course2", display_name="run2", default_store=ModuleStoreEnum.Type.mongo
        )

    def test_export_all_courses(self):
        """
        Test exporting good and faulty courses
        """
        # check that both courses exported successfully
        courses, failed_export_courses = export_courses_to_output_path(self.temp_dir)
        self.assertEqual(len(courses), 2)
        self.assertEqual(len(failed_export_courses), 0)

        # manually make second course faulty and check that it fails on export
        second_course_id = self.second_course.id
        self.store.collection.update(
            {'_id.org': second_course_id.org, '_id.course': second_course_id.course, '_id.name': second_course_id.run},
            {'$set': {'metadata.tags': 'crash'}}
        )
        # Both courses are still discovered, but only one exports cleanly.
        courses, failed_export_courses = export_courses_to_output_path(self.temp_dir)
        self.assertEqual(len(courses), 2)
        self.assertEqual(len(failed_export_courses), 1)
        self.assertEqual(failed_export_courses[0], unicode(second_course_id))
| agpl-3.0 |
sarvex/django | django/conf/locale/de/formats.py | 504 | 1100 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals

# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y H:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y H:i'
FIRST_DAY_OF_WEEK = 1  # Monday

# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
    '%d.%m.%Y', '%d.%m.%y',  # '25.10.2006', '25.10.06'
    # '%d. %B %Y', '%d. %b. %Y',  # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
    '%d.%m.%Y %H:%M:%S',  # '25.10.2006 14:30:59'
    '%d.%m.%Y %H:%M:%S.%f',  # '25.10.2006 14:30:59.000200'
    '%d.%m.%Y %H:%M',  # '25.10.2006 14:30'
    '%d.%m.%Y',  # '25.10.2006'
]
# German number formatting: decimal comma, thousands separated by dots.
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| bsd-3-clause |
vertexproject/synapse | synapse/models/language.py | 1 | 2640 | import synapse.lib.module as s_module
class LangModule(s_module.CoreModule):
    """Synapse data-model definitions for the 'lang' (language) module."""

    def getModelDefs(self):
        """Return the ('lang', ...) model definition tuple for this module."""
        name = 'lang'

        # No custom type constructors for this module.
        ctors = ()

        forms = (
            # Deprecated form, kept for backwards compatibility.
            ('lang:idiom', {}, (
                ('url', ('inet:url', {}), {
                    'doc': 'Authoritative URL for the idiom.'
                }),
                ('desc:en', ('str', {}), {
                    'doc': 'English description.',
                    'disp': {'hint': 'text'},
                }),
            )),

            # Deprecated form, kept for backwards compatibility.
            ('lang:trans', {}, (
                ('text:en', ('str', {}), {
                    'doc': 'English translation.',
                    'disp': {'hint': 'text'},
                }),
                ('desc:en', ('str', {}), {
                    'doc': 'English description.',
                    'disp': {'hint': 'text'},
                }),
            )),

            ('lang:translation', {}, (
                ('input', ('str', {}), {
                    'ex': 'hola',
                    'doc': 'The input text.',
                }),
                ('input:lang', ('lang:code', {}), {
                    'doc': 'The input language code.'
                }),
                ('output', ('str', {}), {
                    'ex': 'hi',
                    'doc': 'The output text.',
                }),
                ('output:lang', ('lang:code', {}), {
                    'doc': 'The output language code.'
                }),
                ('desc', ('str', {}), {
                    'doc': 'A description of the meaning of the output.',
                    'ex': 'A standard greeting',
                }),
                ('engine', ('it:prod:softver', {}), {
                    'doc': 'The translation engine version used.',
                }),
            ))
        )

        types = (
            ('lang:idiom', ('str', {}), {
                'deprecated': True,
                'doc': 'Deprecated. Please use lang:translation.'
            }),
            ('lang:trans', ('str', {}), {
                'deprecated': True,
                'doc': 'Deprecated. Please use lang:translation.'
            }),
            ('lang:code', ('str', {'lower': True, 'regex': '^[a-z]{2}(.[a-z]{2})?$'}), {
                'ex': 'pt.br',
                'doc': 'An optionally 2 part language code.',
            }),
            ('lang:translation', ('guid', {}), {
                'doc': 'A translation of text from one language to another.',
            }),
        )

        modldef = (name, {
            'ctors': ctors,
            'forms': forms,
            'types': types,
        })

        return (modldef, )
| apache-2.0 |
georgetown-analytics/machine-learning | examples/FrancoMBM/Wine.py | 1 | 6158 |
# importing libraries
import sys
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import scipy as sp
import IPython
from IPython.display import display
import sklearn
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import Normalizer
from sklearn import cross_validation
from sklearn.model_selection import cross_val_score
from sklearn.metrics import classification_report
# importing datasets
# NOTE(review): hard-coded absolute Windows paths — only runs on this machine;
# consider making the data directory configurable.
raw_df_red = pd.read_csv(r"C:\Users\franc\Desktop\SecondDesk\DataScienceCertificate\Classes\Assingments\MachineLearning\Homework\winequality-red.csv", sep =';')
raw_df_white = pd.read_csv(r"C:\Users\franc\Desktop\SecondDesk\DataScienceCertificate\Classes\Assingments\MachineLearning\Homework\winequality-white.csv", sep =';')

# exploring datasets (summary statistics and schema of the white-wine frame)
raw_df_red.describe()
raw_df_white.describe()
raw_df_white.info()
#-------------------------------white whine selection--------------------------
X = raw_df_white.iloc[:,:-1].values  # independent variables X
y = raw_df_white['quality'].values  # dependent Variables y

# Use sklearn.model_selection.train_test_split (already imported at the top);
# sklearn.cross_validation is deprecated and removed in scikit-learn >= 0.20.
X_train_white, X_test_white, y_train_white, y_test_white = train_test_split(
    X, y, test_size = 0.2, random_state = 0)

# visual data exploration: pairwise scatter matrix colored by quality
X_train = raw_df_white.iloc[:,:-1]
y_train = raw_df_white['quality']
pd.plotting.scatter_matrix(X_train, c = y_train, figsize = (30, 30), marker ='o', hist_kwds = {'bins': 20},
                           s = 60, alpha = 0.7)

#before scaling
plt.boxplot(X_train_white, manage_xticks = False)
plt.yscale("symlog")
plt.xlabel("Features")
plt.ylabel("Target Variable")
plt.show()
scaler = StandardScaler()
#scaler = MinMaxScaler()
#scaler = Normalizer()
# Fit the scaler on the TRAINING data only, then apply the same transform to
# the test data. The original code refit the scaler on the test set, which
# leaks test-set statistics into preprocessing and skews evaluation.
X_train_white = scaler.fit_transform(X_train_white)
X_test_white = scaler.transform(X_test_white)

# after scaling
plt.boxplot(X_train_white, manage_xticks = False)
plt.yscale("symlog")
plt.xlabel("Features")
plt.ylabel("Target Variable")
plt.show()
# performing PCA
from sklearn.decomposition import PCA
# n_components=None keeps every component; set an int for feature extraction.
pca = PCA(n_components = None) # input a number for feature extraction
# Fit on the training split only, then project the test split with it.
X_train_white = pca.fit_transform(X_train_white)
X_test_white = pca.transform(X_test_white)
explained_var = pca.explained_variance_ratio_
print (explained_var)
#-----------------KNN--------------------------------------
# k-nearest-neighbors classifier with distance-weighted manhattan metric.
knn = KNeighborsClassifier(n_neighbors = 10, metric = 'manhattan', weights = 'distance', algorithm = 'auto')
knn.fit(X_train_white, y_train_white)
predicted_knn = knn.predict(X_test_white)
# print("Predictions: {}".format(predicted_knn))
scores = cross_val_score(knn, X = X_train_white, y = y_train_white)
print ("Cross Validation Scores: {}".format(scores))
report = classification_report(y_test_white, predicted_knn)
print (report)
# Finding the best parameters for knn:
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import RandomizedSearchCV  # imported but unused below
# Exhaustive grid over neighbor count, algorithm, weighting and metric.
params2 = [{'n_neighbors': [1,10,50,100], 'algorithm': ['auto','ball_tree','kd_tree' ],
            'weights': ['uniform', 'distance'], 'metric': ['minkowski', 'manhattan']}]
grid_search = GridSearchCV(estimator = knn, param_grid = params2, scoring = 'accuracy', cv = 5, n_jobs = 1)
grid_search = grid_search.fit(X_train_white, y_train_white)
accuracy = grid_search.best_score_
best_params = grid_search.best_params_
print(accuracy)
print(best_params)
# Accuracy-vs-k learning curve for KNN (train and test splits).
train_accuracy = []
test_accuracy = []
neighbors = range(1,100,10)
# NOTE(review): the two lists below are defined but never used in this loop.
algorithms = ['auto', 'ball_tree', 'kd_tree']
weights = ['uniform', 'distance']
for i in neighbors:
    knn = KNeighborsClassifier(n_neighbors = i, metric = 'manhattan', weights = 'distance', algorithm = 'auto')
    knn.fit(X_train_white, y_train_white)
    train_accuracy.append(knn.score(X_train_white, y_train_white))
    test_accuracy.append(knn.score(X_test_white, y_test_white))

plt.plot(neighbors, train_accuracy, label = 'Train set accuracy')
plt.plot(neighbors, test_accuracy, label = 'Test set accuracy')
plt.ylabel("Accuracy")
plt.xlabel("Number of neighbors")
plt.legend()
plt.show()
#------------------------------- Kernel SVC:----------------------------------
from sklearn.svm import SVC
# RBF-kernel support vector classifier.
svm = SVC(C = 1000, kernel = 'rbf', gamma = 1)
svm.fit(X_train_white, y_train_white)
predicted = svm.predict(X_test_white)
#print("Predictions: {}".format(predicted))
# Bug fix: the cross_val_score call had been fused onto the end of the comment
# line above, so `scores` was never computed; split it back out and report it
# (mirrors the KNN section).
scores = cross_val_score(svm, X = X_train_white, y = y_train_white)
report = classification_report(y_test_white, predicted)
print (report)
print ("Cross Validation Scores: {}".format(scores))
# -----------Finding the best parameters for SVC----------
# Grid over regularization strength C and RBF kernel width gamma.
params = [{'C': [1, 10, 100, 1000], 'kernel': ['rbf'], 'gamma': [1, 0.1, 0.01, 0.001]}]
grid_search = GridSearchCV(estimator = svm, param_grid = params, scoring = 'accuracy', cv = 5, n_jobs =1)
grid_search = grid_search.fit(X_train_white, y_train_white)
accuracySVC = grid_search.best_score_
best_paramsSVC = grid_search.best_params_
print (accuracySVC)
print (best_paramsSVC)
# Accuracy-vs-C curve for the RBF SVC (train and test splits).
train_accuracy = []
test_accuracy = []
Ci = [10, 100, 1000]
for i in Ci:
    svm = SVC(C = i, kernel = 'rbf', gamma = 1) # try rbf, linear and poly
    svm.fit(X_train_white, y_train_white)
    train_accuracy.append(svm.score(X_train_white, y_train_white))
    test_accuracy.append(svm.score(X_test_white, y_test_white))

plt.plot(Ci, train_accuracy, label = 'Train set accuracy')
plt.plot(Ci, test_accuracy, label = 'Test set accuracy')
plt.ylabel("Accuracy")
plt.xlabel("C")
plt.legend()
plt.show()
####---------XGBoost-----------------
from xgboost.sklearn import XGBClassifier
from xgboost.sklearn import XGBRegressor  # imported but unused below

xclas = XGBClassifier() # for classifier
xclas.fit(X_train_white, y_train_white)
y_pred = xclas.predict(X_test_white)
# NOTE(review): the cross-validation result below is computed but discarded.
cross_val_score(xclas, X_train_white, y_train_white)

from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test_white, y_pred)
print (cm)
| mit |
cntnboys/302-project | Webapps/venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/__init__.py | 156 | 1864 | """
urllib3 - Thread-safe connection pooling and re-using.
"""
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
__version__ = 'dev'
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
connection_from_url
)
from . import exceptions
from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
from .util.request import make_headers
from .util.url import get_host
from .util.timeout import Timeout
from .util.retry import Retry
# Set default logging handler to avoid "No handler found" warnings.
import logging
try: # Python 2.7+
    from logging import NullHandler
except ImportError:
    # Python < 2.7 fallback: a handler that discards every record.
    class NullHandler(logging.Handler):
        def emit(self, record):
            pass

# Attach a no-op handler so importing applications without logging configured
# do not see "No handler found" warnings.
logging.getLogger(__name__).addHandler(NullHandler())
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This function lives in this __init__.py so that __name__ resolves to the
    # package name, even when urllib3 is vendored inside another distribution.
    pkg_logger = logging.getLogger(__name__)
    stderr_handler = logging.StreamHandler()
    fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s')
    stderr_handler.setFormatter(fmt)
    pkg_logger.addHandler(stderr_handler)
    pkg_logger.setLevel(level)
    pkg_logger.debug('Added a stderr logging handler to logger: %s' % __name__)
    return stderr_handler
# ... Clean up.
# The fallback class is only needed during module setup above.
del NullHandler


# Set security warning to only go off once by default.
import warnings
warnings.simplefilter('always', exceptions.SecurityWarning)
def disable_warnings(category=exceptions.HTTPWarning):
    """
    Helper for quickly disabling all urllib3 warnings.

    By default suppresses every HTTPWarning subclass; pass a narrower
    category to silence only specific warnings.
    """
    warnings.simplefilter('ignore', category)
| apache-2.0 |
aesaae/ardupilot_str | Tools/ardupilotwaf/boards.py | 1 | 6536 | #!/usr/bin/env python
# encoding: utf-8
import sys
import waflib
# Registry of every concrete board class, keyed by the board's name.
_board_classes = {}

class BoardMeta(type):
    """Metaclass that auto-registers each Board subclass by class name."""
    def __init__(cls, name, bases, dct):
        super(BoardMeta, cls).__init__(name, bases, dct)
        # The abstract base class itself is not a selectable board.
        if name != 'Board':
            _board_classes[name] = cls
class Board:
    """Base class describing one build target; subclasses set flags in
    configure_env()."""

    def configure(self, cfg):
        # Build this board's env in isolation, then merge it into cfg.env.
        env = waflib.ConfigSet.ConfigSet()
        self.configure_env(env)

        d = env.get_merged_dict()
        # Always prepend so that arguments passed in the command line get
        # the priority.
        for k, val in d.items():
            # Dictionaries (like 'DEFINES') are converted to lists to
            # conform to waf conventions.
            if isinstance(val, dict):
                for item in val.items():
                    cfg.env.prepend_value(k, '%s=%s' % item)
            else:
                cfg.env.prepend_value(k, val)

    def configure_env(self, env):
        # Use a dictionary instead of the conventional list for definitions to
        # make it easy to override them. Convert back to list before consumption.
        env.DEFINES = {}

        env.CFLAGS += [
            '-ffunction-sections',
            '-fdata-sections',
            '-fsigned-char',

            '-Wall',
            '-Wextra',
            '-Wformat',
            '-Wshadow',
            '-Wpointer-arith',
            '-Wcast-align',
            '-Wno-missing-field-initializers',
            '-Wno-unused-parameter',
            '-Wno-redundant-decls',
        ]

        env.CXXFLAGS += [
            '-std=gnu++11',

            '-fdata-sections',
            '-ffunction-sections',
            '-fno-exceptions',
            '-fsigned-char',

            '-Wall',
            '-Wextra',
            '-Wformat',
            '-Wshadow',
            '-Wpointer-arith',
            '-Wcast-align',
            '-Wno-unused-parameter',
            '-Wno-missing-field-initializers',
            '-Wno-reorder',
            '-Wno-redundant-decls',
            '-Werror=format-security',
            '-Werror=array-bounds',
            '-Werror=unused-but-set-variable',
            '-Werror=uninitialized',
            '-Werror=init-self',
            '-Wfatal-errors',
        ]

        env.LINKFLAGS += [
            '-Wl,--gc-sections',
        ]

# Rebuild Board through BoardMeta so subclasses defined below self-register.
Board = BoardMeta('Board', Board.__bases__, dict(Board.__dict__))
def get_boards_names():
    """Return the names of all registered boards, sorted alphabetically."""
    # sorted() already returns a new list; the extra list() copy of the dict
    # keys was redundant.
    return sorted(_board_classes.keys())
def get_board(name):
    """Instantiate and return the board registered under *name*.

    Raises KeyError if no board with that name has been registered.
    """
    board_class = _board_classes[name]
    return board_class()
# NOTE: Keeping all the board definitions together so we can easily
# identify opportunities to simplify common flags. In the future might
# be worthy to keep board definitions in files of their own.
class sitl(Board):
    """Software-in-the-loop simulation build (native host toolchain)."""
    def configure_env(self, env):
        super(sitl, self).configure_env(env)

        env.DEFINES.update(
            CONFIG_HAL_BOARD = 'HAL_BOARD_SITL',
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_NONE',
        )
        env.CXXFLAGS += [
            '-O3',
        ]
        env.LIB += [
            'm',
        ]
        env.LINKFLAGS += ['-pthread',]
        env.AP_LIBRARIES += [
            'AP_HAL_SITL',
            'SITL',
        ]
class linux(Board):
    """Generic Linux build; concrete Linux boards subclass this and override
    the HAL subtype (and toolchain where needed)."""
    def configure_env(self, env):
        super(linux, self).configure_env(env)

        env.DEFINES.update(
            CONFIG_HAL_BOARD = 'HAL_BOARD_LINUX',
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_NONE',
        )
        env.CXXFLAGS += [
            '-O3',
        ]
        env.LIB += [
            'm',
            'rt',
        ]
        env.LINKFLAGS += ['-pthread',]
        # Note: assignment (not +=) — replaces any libraries set by the base.
        env.AP_LIBRARIES = [
            'AP_HAL_Linux',
        ]
class minlure(linux):
    """Minlure board: Linux build with its own HAL subtype."""
    def configure_env(self, env):
        super(minlure, self).configure_env(env)

        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_MINLURE',
        )
class erleboard(linux):
    """Erle-Board: ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(erleboard, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_ERLEBOARD',
        )
class navio(linux):
    """Navio (Raspberry Pi HAT): ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(navio, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_NAVIO',
        )
class zynq(linux):
    """Xilinx Zynq: uses the Xilinx ARM Linux toolchain."""
    def configure_env(self, env):
        super(zynq, self).configure_env(env)

        env.TOOLCHAIN = 'arm-xilinx-linux-gnueabi'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_ZYNQ',
        )
class bbbmini(linux):
    """BBBmini (BeagleBone Black): ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(bbbmini, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BBBMINI',
        )
class pxf(linux):
    """PXF cape: ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(pxf, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_PXF',
        )
class bebop(linux):
    """Parrot Bebop: ARM hard-float cross build, statically linked."""
    def configure_env(self, env):
        super(bebop, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BEBOP',
        )
        # Bebop firmware images require a statically linked binary.
        env.STATIC_LINKING = [True]
class raspilot(linux):
    """Raspilot: ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(raspilot, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_RASPILOT',
        )
class erlebrain2(linux):
    """Erle-Brain 2: ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(erlebrain2, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_ERLEBRAIN2',
        )
class bhat(linux):
    """BH hat: ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(bhat, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_BH',
        )
class pxfmini(linux):
    """PXFmini shield: ARM hard-float Linux cross build."""
    def configure_env(self, env):
        super(pxfmini, self).configure_env(env)

        env.TOOLCHAIN = 'arm-linux-gnueabihf'
        env.DEFINES.update(
            CONFIG_HAL_BOARD_SUBTYPE = 'HAL_BOARD_SUBTYPE_LINUX_PXFMINI',
        )
| gpl-3.0 |
bc-python-tools/orpyste | test/ast/test_mode.py | 1 | 1729 | #!/usr/bin/env python3
# --------------------- #
# -- SEVERAL IMPORTS -- #
# --------------------- #
import json
from mistool.os_use import PPath
# ------------------- #
# -- MODULE TESTED -- #
# ------------------- #
from orpyste.parse import ast
# ----------------------- #
# -- GENERAL CONSTANTS -- #
# ----------------------- #
# Directory of this test file and the JSON fixtures that drive the tests.
THIS_DIR = PPath(__file__).parent
DATAS_DIR = THIS_DIR / "datas_for_tests"
# Class under test.
MODE_CLASS = ast.Mode
# ----------- #
# -- TOOLS -- #
# ----------- #
def seps_in_sets(dicoview):
    """Normalize the "seps" entry of *dicoview* (if any) into a set.

    Separator order is irrelevant, so comparisons should ignore it.
    Dictionaries without a "seps" key are left untouched.
    """
    try:
        dicoview["seps"] = set(dicoview["seps"])
    except KeyError:
        pass
# ----------------- #
# -- SINGLE MODE -- #
# ----------------- #
def test_mode_alone():
    """Check single-block mode parsing against the expected JSON views."""
    for jsonpath in DATAS_DIR.walk("file::mode/alone/*.json"):
        with jsonpath.open() as f:
            jsonobj = json.load(f)

        for mode, dicoview in jsonobj.items():
            # Single modes expose their view under the ":default:" block.
            dicoview_found = MODE_CLASS(mode = mode)[":default:"]

            # Destroy sorting for separators.
            seps_in_sets(dicoview)
            seps_in_sets(dicoview_found)

            assert dicoview == dicoview_found
# ------------------ #
# -- SEVERAL MODE -- #
# ------------------ #
def test_multimode():
    """Check multi-block mode parsing against the expected JSON views."""
    for jsonpath in DATAS_DIR.walk("file::mode/multi/*.json"):
        with jsonpath.open() as f:
            jsonobj = json.load(f)

        mode_found = MODE_CLASS(mode = jsonobj["mode"])

        for blockname, dicoview in jsonobj["dicoview"].items():
            dicoview_found = mode_found[blockname]

            # Destroy sorting for separators — use the same helper as
            # test_mode_alone instead of duplicating the conversion inline.
            seps_in_sets(dicoview)
            seps_in_sets(dicoview_found)

            assert dicoview == dicoview_found
| gpl-3.0 |
sorz/isi | store/store/utils.py | 1 | 1373 | from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
def make_page(object_list, page, per_page=8, neighbor_count=5):
    """Return a Django Page object with a list of neighbor pages.

    "Neighbor pages" are the pages surrounding the current one; e.g. page 6
    may have neighbors 3, 4, 5 and 7, 8, 9.  The returned page carries a
    ``neighbor_pages`` attribute: a list of ``(page_number, is_current)``
    tuples, so templates can render a pager easily.
    """
    paginator = Paginator(object_list, per_page)

    # Reference:
    # https://docs.djangoproject.com/en/1.7/topics/pagination/#using-paginator-in-a-view
    try:
        current = paginator.page(page)
    except PageNotAnInteger:
        # Non-integer page requests fall back to the first page.
        current = paginator.page(1)
    except EmptyPage:
        # Out-of-range page requests (e.g. 9999) get the last page.
        current = paginator.page(paginator.num_pages)

    # Window of neighbor page numbers, clamped to the valid page range.
    first = max(1, current.number - neighbor_count // 2)
    last = min(first + neighbor_count, current.paginator.num_pages + 1)
    current.neighbor_pages = [
        (number, number == current.number) for number in range(first, last)
    ]
    return current
| mit |
kontais/EFI-MIPS | ToolKit/cmds/python/Lib/encodings/mac_greek.py | 15 | 7567 | """ Python Character Mapping Codec generated from 'GREEK.TXT' with gencodec.py.
Written by Marc-Andre Lemburg (mal@lemburg.com).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Charmap codec: delegates to the module-level encoding/decoding maps."""

    def encode(self,input,errors='strict'):
        return codecs.charmap_encode(input,errors,encoding_map)

    def decode(self,input,errors='strict'):
        return codecs.charmap_decode(input,errors,decoding_map)
class StreamWriter(Codec,codecs.StreamWriter):
    # Inherits the charmap encode from Codec; no extra behavior needed.
    pass
class StreamReader(Codec,codecs.StreamReader):
    # Inherits the charmap decode from Codec; no extra behavior needed.
    pass
### encodings module API
def getregentry():
    """Return the registration tuple expected by the ``encodings`` package:
    (encoder, decoder, stream reader class, stream writer class)."""
    # Codec is stateless, so one shared instance serves both directions.
    codec = Codec()
    return (codec.encode, codec.decode, StreamReader, StreamWriter)
### Decoding Map
# Start from the identity map for 0x00-0xFF, then override the upper half
# with the Unicode code points from Apple's GREEK.TXT mapping table.
# Gaps (0x00a9, 0x00b1) keep their identity mapping.
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({
    0x0080: 0x00c4, # LATIN CAPITAL LETTER A WITH DIAERESIS
    0x0081: 0x00b9, # SUPERSCRIPT ONE
    0x0082: 0x00b2, # SUPERSCRIPT TWO
    0x0083: 0x00c9, # LATIN CAPITAL LETTER E WITH ACUTE
    0x0084: 0x00b3, # SUPERSCRIPT THREE
    0x0085: 0x00d6, # LATIN CAPITAL LETTER O WITH DIAERESIS
    0x0086: 0x00dc, # LATIN CAPITAL LETTER U WITH DIAERESIS
    0x0087: 0x0385, # GREEK DIALYTIKA TONOS
    0x0088: 0x00e0, # LATIN SMALL LETTER A WITH GRAVE
    0x0089: 0x00e2, # LATIN SMALL LETTER A WITH CIRCUMFLEX
    0x008a: 0x00e4, # LATIN SMALL LETTER A WITH DIAERESIS
    0x008b: 0x0384, # GREEK TONOS
    0x008c: 0x00a8, # DIAERESIS
    0x008d: 0x00e7, # LATIN SMALL LETTER C WITH CEDILLA
    0x008e: 0x00e9, # LATIN SMALL LETTER E WITH ACUTE
    0x008f: 0x00e8, # LATIN SMALL LETTER E WITH GRAVE
    0x0090: 0x00ea, # LATIN SMALL LETTER E WITH CIRCUMFLEX
    0x0091: 0x00eb, # LATIN SMALL LETTER E WITH DIAERESIS
    0x0092: 0x00a3, # POUND SIGN
    0x0093: 0x2122, # TRADE MARK SIGN
    0x0094: 0x00ee, # LATIN SMALL LETTER I WITH CIRCUMFLEX
    0x0095: 0x00ef, # LATIN SMALL LETTER I WITH DIAERESIS
    0x0096: 0x2022, # BULLET
    0x0097: 0x00bd, # VULGAR FRACTION ONE HALF
    0x0098: 0x2030, # PER MILLE SIGN
    0x0099: 0x00f4, # LATIN SMALL LETTER O WITH CIRCUMFLEX
    0x009a: 0x00f6, # LATIN SMALL LETTER O WITH DIAERESIS
    0x009b: 0x00a6, # BROKEN BAR
    0x009c: 0x00ad, # SOFT HYPHEN
    0x009d: 0x00f9, # LATIN SMALL LETTER U WITH GRAVE
    0x009e: 0x00fb, # LATIN SMALL LETTER U WITH CIRCUMFLEX
    0x009f: 0x00fc, # LATIN SMALL LETTER U WITH DIAERESIS
    0x00a0: 0x2020, # DAGGER
    0x00a1: 0x0393, # GREEK CAPITAL LETTER GAMMA
    0x00a2: 0x0394, # GREEK CAPITAL LETTER DELTA
    0x00a3: 0x0398, # GREEK CAPITAL LETTER THETA
    0x00a4: 0x039b, # GREEK CAPITAL LETTER LAMBDA
    0x00a5: 0x039e, # GREEK CAPITAL LETTER XI
    0x00a6: 0x03a0, # GREEK CAPITAL LETTER PI
    0x00a7: 0x00df, # LATIN SMALL LETTER SHARP S
    0x00a8: 0x00ae, # REGISTERED SIGN
    0x00aa: 0x03a3, # GREEK CAPITAL LETTER SIGMA
    0x00ab: 0x03aa, # GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
    0x00ac: 0x00a7, # SECTION SIGN
    0x00ad: 0x2260, # NOT EQUAL TO
    0x00ae: 0x00b0, # DEGREE SIGN
    0x00af: 0x0387, # GREEK ANO TELEIA
    0x00b0: 0x0391, # GREEK CAPITAL LETTER ALPHA
    0x00b2: 0x2264, # LESS-THAN OR EQUAL TO
    0x00b3: 0x2265, # GREATER-THAN OR EQUAL TO
    0x00b4: 0x00a5, # YEN SIGN
    0x00b5: 0x0392, # GREEK CAPITAL LETTER BETA
    0x00b6: 0x0395, # GREEK CAPITAL LETTER EPSILON
    0x00b7: 0x0396, # GREEK CAPITAL LETTER ZETA
    0x00b8: 0x0397, # GREEK CAPITAL LETTER ETA
    0x00b9: 0x0399, # GREEK CAPITAL LETTER IOTA
    0x00ba: 0x039a, # GREEK CAPITAL LETTER KAPPA
    0x00bb: 0x039c, # GREEK CAPITAL LETTER MU
    0x00bc: 0x03a6, # GREEK CAPITAL LETTER PHI
    0x00bd: 0x03ab, # GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
    0x00be: 0x03a8, # GREEK CAPITAL LETTER PSI
    0x00bf: 0x03a9, # GREEK CAPITAL LETTER OMEGA
    0x00c0: 0x03ac, # GREEK SMALL LETTER ALPHA WITH TONOS
    0x00c1: 0x039d, # GREEK CAPITAL LETTER NU
    0x00c2: 0x00ac, # NOT SIGN
    0x00c3: 0x039f, # GREEK CAPITAL LETTER OMICRON
    0x00c4: 0x03a1, # GREEK CAPITAL LETTER RHO
    0x00c5: 0x2248, # ALMOST EQUAL TO
    0x00c6: 0x03a4, # GREEK CAPITAL LETTER TAU
    0x00c7: 0x00ab, # LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00c8: 0x00bb, # RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
    0x00c9: 0x2026, # HORIZONTAL ELLIPSIS
    0x00ca: 0x00a0, # NO-BREAK SPACE
    0x00cb: 0x03a5, # GREEK CAPITAL LETTER UPSILON
    0x00cc: 0x03a7, # GREEK CAPITAL LETTER CHI
    0x00cd: 0x0386, # GREEK CAPITAL LETTER ALPHA WITH TONOS
    0x00ce: 0x0388, # GREEK CAPITAL LETTER EPSILON WITH TONOS
    0x00cf: 0x0153, # LATIN SMALL LIGATURE OE
    0x00d0: 0x2013, # EN DASH
    0x00d1: 0x2015, # HORIZONTAL BAR
    0x00d2: 0x201c, # LEFT DOUBLE QUOTATION MARK
    0x00d3: 0x201d, # RIGHT DOUBLE QUOTATION MARK
    0x00d4: 0x2018, # LEFT SINGLE QUOTATION MARK
    0x00d5: 0x2019, # RIGHT SINGLE QUOTATION MARK
    0x00d6: 0x00f7, # DIVISION SIGN
    0x00d7: 0x0389, # GREEK CAPITAL LETTER ETA WITH TONOS
    0x00d8: 0x038a, # GREEK CAPITAL LETTER IOTA WITH TONOS
    0x00d9: 0x038c, # GREEK CAPITAL LETTER OMICRON WITH TONOS
    0x00da: 0x038e, # GREEK CAPITAL LETTER UPSILON WITH TONOS
    0x00db: 0x03ad, # GREEK SMALL LETTER EPSILON WITH TONOS
    0x00dc: 0x03ae, # GREEK SMALL LETTER ETA WITH TONOS
    0x00dd: 0x03af, # GREEK SMALL LETTER IOTA WITH TONOS
    0x00de: 0x03cc, # GREEK SMALL LETTER OMICRON WITH TONOS
    0x00df: 0x038f, # GREEK CAPITAL LETTER OMEGA WITH TONOS
    0x00e0: 0x03cd, # GREEK SMALL LETTER UPSILON WITH TONOS
    0x00e1: 0x03b1, # GREEK SMALL LETTER ALPHA
    0x00e2: 0x03b2, # GREEK SMALL LETTER BETA
    0x00e3: 0x03c8, # GREEK SMALL LETTER PSI
    0x00e4: 0x03b4, # GREEK SMALL LETTER DELTA
    0x00e5: 0x03b5, # GREEK SMALL LETTER EPSILON
    0x00e6: 0x03c6, # GREEK SMALL LETTER PHI
    0x00e7: 0x03b3, # GREEK SMALL LETTER GAMMA
    0x00e8: 0x03b7, # GREEK SMALL LETTER ETA
    0x00e9: 0x03b9, # GREEK SMALL LETTER IOTA
    0x00ea: 0x03be, # GREEK SMALL LETTER XI
    0x00eb: 0x03ba, # GREEK SMALL LETTER KAPPA
    0x00ec: 0x03bb, # GREEK SMALL LETTER LAMBDA
    0x00ed: 0x03bc, # GREEK SMALL LETTER MU
    0x00ee: 0x03bd, # GREEK SMALL LETTER NU
    0x00ef: 0x03bf, # GREEK SMALL LETTER OMICRON
    0x00f0: 0x03c0, # GREEK SMALL LETTER PI
    0x00f1: 0x03ce, # GREEK SMALL LETTER OMEGA WITH TONOS
    0x00f2: 0x03c1, # GREEK SMALL LETTER RHO
    0x00f3: 0x03c3, # GREEK SMALL LETTER SIGMA
    0x00f4: 0x03c4, # GREEK SMALL LETTER TAU
    0x00f5: 0x03b8, # GREEK SMALL LETTER THETA
    0x00f6: 0x03c9, # GREEK SMALL LETTER OMEGA
    0x00f7: 0x03c2, # GREEK SMALL LETTER FINAL SIGMA
    0x00f8: 0x03c7, # GREEK SMALL LETTER CHI
    0x00f9: 0x03c5, # GREEK SMALL LETTER UPSILON
    0x00fa: 0x03b6, # GREEK SMALL LETTER ZETA
    0x00fb: 0x03ca, # GREEK SMALL LETTER IOTA WITH DIALYTIKA
    0x00fc: 0x03cb, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA
    0x00fd: 0x0390, # GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
    0x00fe: 0x03b0, # GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
    0x00ff: None, # UNDEFINED
})

### Encoding Map

# Reverse mapping (Unicode -> Mac Greek byte); positions decoded to None
# above are unencodable.
encoding_map = codecs.make_encoding_map(decoding_map)
| bsd-3-clause |
attilahorvath/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/checkout/scm/scm_mock.py | 122 | 4889 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.common.checkout.scm import CommitMessage
from webkitpy.common.system.filesystem_mock import MockFileSystem
from webkitpy.common.system.executive_mock import MockExecutive
class MockSCM(object):
    """In-memory stand-in for an SCM checkout used by webkitpy unit tests.

    Every query method returns a fixed canned value and mutating operations
    record or ignore their arguments.  Tests assert against these literals,
    so the exact strings returned here are part of the mock's contract.
    """

    def __init__(self, filesystem=None, executive=None):
        self.checkout_root = "/mock-checkout"
        self.added_paths = set()  # accumulates paths passed to add()/add_list()
        self._filesystem = filesystem or MockFileSystem()
        self._executive = executive or MockExecutive()

    def add(self, destination_path):
        self.add_list([destination_path])

    def add_list(self, destination_paths):
        self.added_paths.update(set(destination_paths))

    def has_working_directory_changes(self):
        return False

    def discard_working_directory_changes(self):
        pass

    def supports_local_commits(self):
        return True

    def has_local_commits(self):
        return False

    def discard_local_commits(self):
        pass

    def discard_local_changes(self):
        pass

    def exists(self, path):
        # TestRealMain.test_real_main (and several other rebaseline tests) are sensitive to this return value.
        # We should make those tests more robust, but for now we just return True always (since no test needs otherwise).
        return True

    def absolute_path(self, *comps):
        return self._filesystem.join(self.checkout_root, *comps)

    def changed_files(self, git_commit=None):
        return ["MockFile1"]

    def changed_files_for_revision(self, revision):
        return ["MockFile1"]

    def head_svn_revision(self):
        return '1234'

    def svn_revision(self, path):
        return '5678'

    def timestamp_of_revision(self, path, revision):
        return '2013-02-01 08:48:05 +0000'

    def create_patch(self, git_commit, changed_files=None):
        return "Patch1"

    def commit_ids_from_commitish_arguments(self, args):
        return ["Commitish1", "Commitish2"]

    def committer_email_for_revision(self, revision):
        return "mock@webkit.org"

    def commit_locally_with_message(self, message):
        pass

    def commit_with_message(self, message, username=None, password=None, git_commit=None, force_squash=False, changed_files=None):
        pass

    def merge_base(self, git_commit):
        return None

    def commit_message_for_local_commit(self, commit_id):
        # Only the two canned commit ids are recognised; anything else is a
        # programming error in the calling test.
        if commit_id == "Commitish1":
            return CommitMessage("CommitMessage1\n" \
                "https://bugs.example.org/show_bug.cgi?id=50000\n")
        if commit_id == "Commitish2":
            return CommitMessage("CommitMessage2\n" \
                "https://bugs.example.org/show_bug.cgi?id=50001\n")
        raise Exception("Bogus commit_id in commit_message_for_local_commit.")

    def diff_for_file(self, path, log=None):
        return path + '-diff'

    def diff_for_revision(self, revision):
        return "DiffForRevision%s\nhttp://bugs.webkit.org/show_bug.cgi?id=12345" % revision

    def show_head(self, path):
        return path

    def svn_revision_from_commit_text(self, commit_text):
        return "49824"

    def delete(self, path):
        return self.delete_list([path])

    def delete_list(self, paths):
        # Mirrors a real delete by removing the paths from the mock filesystem.
        if not self._filesystem:
            return
        for path in paths:
            if self._filesystem.exists(path):
                self._filesystem.remove(path)
| bsd-3-clause |
panagiotisl/bigtop | bigtop-packages/src/charm/kafka/layer-kafka/tests/01-deploy.py | 10 | 1603 | #!/usr/bin/env python3
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import amulet
import unittest
class TestDeploy(unittest.TestCase):
    """
    Trivial deployment test for Apache Bigtop Kafka.
    """

    @classmethod
    def setUpClass(cls):
        # Deploy kafka + zookeeper on a xenial series, relate them, and wait
        # (up to 30 minutes each) for setup and for the kafka charm to report
        # 'ready' before any test method runs.
        cls.d = amulet.Deployment(series='xenial')
        cls.d.add('kafka')
        cls.d.add('zookeeper')
        cls.d.relate('kafka:zookeeper', 'zookeeper:zookeeper')
        cls.d.setup(timeout=1800)
        cls.d.sentry.wait_for_messages({'kafka': 'ready'}, timeout=1800)
        cls.kafka = cls.d.sentry['kafka'][0]

    def test_deploy(self):
        """
        Simple test to make sure the Kafka java process is running.
        """
        # 'pgrep -a' lists full command lines; a running broker contains 'Kafka'.
        output, retcode = self.kafka.run("pgrep -a java")
        assert 'Kafka' in output, "Kafka daemon is not started"


if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
rwightman/pytorch-image-models | timm/models/layers/blur_pool.py | 1 | 1591 | """
BlurPool layer inspired by
- Kornia's Max_BlurPool2d
- Making Convolutional Networks Shift-Invariant Again :cite:`zhang2019shiftinvar`
Hacked together by Chris Ha and Ross Wightman
"""
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
from .padding import get_padding
class BlurPool2d(nn.Module):
    r"""Anti-aliased downsampling: blur with a fixed binomial kernel, then stride.

    Corresponds to the ``Downsample`` block of "Making Convolutional Networks
    Shift-Invariant Again" :cite:`zhang2019shiftinvar`.

    Args:
        channels: number of input channels (one depthwise filter per channel).
        filt_size: binomial blur kernel size.
        stride: downsampling filter stride.

    Returns:
        torch.Tensor: the blurred, downsampled feature map.
    """

    def __init__(self, channels, filt_size=3, stride=2) -> None:
        super(BlurPool2d, self).__init__()
        assert filt_size > 1
        self.channels = channels
        self.filt_size = filt_size
        self.stride = stride
        # Identical symmetric padding on all four sides.
        pad = get_padding(filt_size, stride, dilation=1)
        self.padding = [pad] * 4
        # 1-D binomial coefficients: expansion of (0.5 + 0.5x)^(filt_size-1),
        # which sums to 1 so the blur preserves the mean.
        binomial = torch.tensor(
            (np.poly1d((0.5, 0.5)) ** (self.filt_size - 1)).coeffs.astype(np.float32))
        # Outer product -> 2-D kernel; replicate per channel for a grouped conv.
        kernel = (binomial[:, None] * binomial[None, :])[None, None, :, :]
        self.register_buffer('filt', kernel.repeat(self.channels, 1, 1, 1), persistent=False)

    def forward(self, x: torch.Tensor) -> torch.Tensor:
        # Reflect-pad so the output matches the size of a plain strided conv.
        padded = F.pad(x, self.padding, 'reflect')
        return F.conv2d(padded, self.filt, stride=self.stride, groups=padded.shape[1])
| apache-2.0 |
shyamalschandra/scikit-learn | sklearn/datasets/species_distributions.py | 64 | 7917 | """
=============================
Species distribution dataset
=============================
This dataset represents the geographic distribution of species.
The dataset is provided by Phillips et. al. (2006).
The two species are:
- `"Bradypus variegatus"
<http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
the Brown-throated Sloth.
- `"Microryzomys minutus"
<http://www.iucnredlist.org/apps/redlist/details/13408/0>`_ ,
also known as the Forest Small Rice Rat, a rodent that lives in Peru,
Colombia, Ecuador, Peru, and Venezuela.
References:
* `"Maximum entropy modeling of species geographic distributions"
<http://www.cs.princeton.edu/~schapire/papers/ecolmod.pdf>`_
S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
190:231-259, 2006.
Notes:
* See examples/applications/plot_species_distribution_modeling.py
for an example of using this dataset
"""
# Authors: Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Jake Vanderplas <vanderplas@astro.washington.edu>
#
# License: BSD 3 clause
from io import BytesIO
from os import makedirs
from os.path import exists
try:
# Python 2
from urllib2 import urlopen
PY2 = True
except ImportError:
# Python 3
from urllib.request import urlopen
PY2 = False
import numpy as np
from sklearn.datasets.base import get_data_home, Bunch
from sklearn.datasets.base import _pkl_filepath
from sklearn.externals import joblib
# Remote locations of the Phillips et al. (2006) data archives.
DIRECTORY_URL = "http://www.cs.princeton.edu/~schapire/maxent/datasets/"

SAMPLES_URL = DIRECTORY_URL + "samples.zip"
COVERAGES_URL = DIRECTORY_URL + "coverages.zip"

# Filename of the joblib-pickled cache stored in the scikit-learn data home.
DATA_ARCHIVE_NAME = "species_coverage.pkz"
def _load_coverage(F, header_length=6, dtype=np.int16):
"""Load a coverage file from an open file object.
This will return a numpy array of the given dtype
"""
header = [F.readline() for i in range(header_length)]
make_tuple = lambda t: (t.split()[0], float(t.split()[1]))
header = dict([make_tuple(line) for line in header])
M = np.loadtxt(F, dtype=dtype)
nodata = int(header[b'NODATA_value'])
if nodata != -9999:
M[nodata] = -9999
return M
def _load_csv(F):
    """Load csv file.

    Parameters
    ----------
    F : file object
        CSV file open in byte mode.

    Returns
    -------
    rec : np.ndarray
        record array representing the data
    """
    if PY2:
        # Numpy recarray wants Python 2 str but not unicode
        names = F.readline().strip().split(',')
    else:
        # Numpy recarray wants Python 3 str but not bytes...
        names = F.readline().decode('ascii').strip().split(',')
    # 'a22,f4,f4': a 22-byte species name plus two float32 coordinate columns.
    rec = np.loadtxt(F, skiprows=0, delimiter=',', dtype='a22,f4,f4')
    rec.dtype.names = names
    return rec
def construct_grids(batch):
    """Build the x/y coordinate grids for a species-distribution batch.

    Parameters
    ----------
    batch : Batch object
        The object returned by :func:`fetch_species_distributions`.

    Returns
    -------
    (xgrid, ygrid) : 1-D arrays
        Longitude and latitude values matching ``batch.coverages``.
    """
    cell = batch.grid_size
    # Coordinates of the corner cells: offset the lower-left corner by one cell.
    x_min = batch.x_left_lower_corner + cell
    y_min = batch.y_left_lower_corner + cell
    # One coordinate per grid column / row.
    xgrid = np.arange(x_min, x_min + batch.Nx * cell, cell)
    ygrid = np.arange(y_min, y_min + batch.Ny * cell, cell)
    return (xgrid, ygrid)
def fetch_species_distributions(data_home=None,
                                download_if_missing=True):
    """Loader for species distribution dataset from Phillips et. al. (2006)

    Read more in the :ref:`User Guide <datasets>`.

    Parameters
    ----------
    data_home : optional, default: None
        Specify another download and cache folder for the datasets. By default
        all scikit learn data is stored in '~/scikit_learn_data' subfolders.

    download_if_missing: optional, True by default
        If False, raise a IOError if the data is not locally available
        instead of trying to download the data from the source site.

    Returns
    --------
    The data is returned as a Bunch object with the following attributes:

    coverages : array, shape = [14, 1592, 1212]
        These represent the 14 features measured at each point of the map grid.
        The latitude/longitude values for the grid are discussed below.
        Missing data is represented by the value -9999.

    train : record array, shape = (1623,)
        The training points for the data. Each point has three fields:

        - train['species'] is the species name
        - train['dd long'] is the longitude, in degrees
        - train['dd lat'] is the latitude, in degrees

    test : record array, shape = (619,)
        The test points for the data. Same format as the training data.

    Nx, Ny : integers
        The number of longitudes (x) and latitudes (y) in the grid

    x_left_lower_corner, y_left_lower_corner : floats
        The (x,y) position of the lower-left corner, in degrees

    grid_size : float
        The spacing between points of the grid, in degrees

    Notes
    ------

    This dataset represents the geographic distribution of species.
    The dataset is provided by Phillips et. al. (2006).

    The two species are:

    - `"Bradypus variegatus"
      <http://www.iucnredlist.org/apps/redlist/details/3038/0>`_ ,
      the Brown-throated Sloth.

    - `"Microryzomys minutus"
      <http://www.iucnredlist.org/apps/redlist/details/13408/0>`_ ,
      also known as the Forest Small Rice Rat, a rodent that lives in Peru,
      Colombia, Ecuador, Peru, and Venezuela.

    References
    ----------

    * `"Maximum entropy modeling of species geographic distributions"
      <http://www.cs.princeton.edu/~schapire/papers/ecolmod.pdf>`_
      S. J. Phillips, R. P. Anderson, R. E. Schapire - Ecological Modelling,
      190:231-259, 2006.

    Notes
    -----

    * See examples/applications/plot_species_distribution_modeling.py
      for an example of using this dataset with scikit-learn

    """
    data_home = get_data_home(data_home)
    if not exists(data_home):
        makedirs(data_home)

    # Define parameters for the data files. These should not be changed
    # unless the data model changes. They will be saved in the npz file
    # with the downloaded data.
    extra_params = dict(x_left_lower_corner=-94.8,
                        Nx=1212,
                        y_left_lower_corner=-56.05,
                        Ny=1592,
                        grid_size=0.05)
    dtype = np.int16

    archive_path = _pkl_filepath(data_home, DATA_ARCHIVE_NAME)

    # NOTE(review): download_if_missing is accepted but never checked here --
    # a missing cache always triggers a download; verify against upstream.
    if not exists(archive_path):
        # No local cache: download, parse and pickle both archives.
        print('Downloading species data from %s to %s' % (SAMPLES_URL,
                                                          data_home))
        X = np.load(BytesIO(urlopen(SAMPLES_URL).read()))

        for f in X.files:
            fhandle = BytesIO(X[f])
            if 'train' in f:
                train = _load_csv(fhandle)
            if 'test' in f:
                test = _load_csv(fhandle)

        print('Downloading coverage data from %s to %s' % (COVERAGES_URL,
                                                           data_home))

        X = np.load(BytesIO(urlopen(COVERAGES_URL).read()))

        coverages = []
        for f in X.files:
            fhandle = BytesIO(X[f])
            print(' - converting', f)
            coverages.append(_load_coverage(fhandle))
        coverages = np.asarray(coverages, dtype=dtype)

        bunch = Bunch(coverages=coverages,
                      test=test,
                      train=train,
                      **extra_params)
        joblib.dump(bunch, archive_path, compress=9)
    else:
        # Cached: load the pre-built Bunch from disk.
        bunch = joblib.load(archive_path)

    return bunch
| bsd-3-clause |
MolarAmbiguity/OctoPrint | src/octoprint/util/__init__.py | 6 | 5838 | # coding=utf-8
__author__ = "Gina Häußge <osd@foosel.net>"
__license__ = 'GNU Affero General Public License http://www.gnu.org/licenses/agpl.html'
import os
import traceback
import sys
import time
import re
import tempfile
from flask import make_response
from octoprint.settings import settings, default_settings
def getFormattedSize(num):
    """
    Render a byte count as a human readable string, e.g. "3.1MB".

    Taken from http://stackoverflow.com/a/1094933/2028598
    """
    value = num
    for unit in ("bytes", "KB", "MB", "GB"):
        if value < 1024.0:
            return "%3.1f%s" % (value, unit)
        value /= 1024.0
    # Anything >= 1024 GB is reported in terabytes.
    return "%3.1f%s" % (value, "TB")
def isAllowedFile(filename, extensions):
    """Return True if *filename* has an extension and it occurs in *extensions*."""
    if "." not in filename:
        return False
    return filename.rsplit(".", 1)[1] in extensions
def getFormattedTimeDelta(d):
    """Format a timedelta as "HH:MM:SS" (hours can exceed 24); None passes through."""
    if d is None:
        return None
    # Work from the total second count; floor semantics match the original
    # days/seconds decomposition, including for negative deltas.
    total_seconds = d.days * 86400 + d.seconds
    return "%02d:%02d:%02d" % (total_seconds // 3600,
                               (total_seconds % 3600) // 60,
                               total_seconds % 60)
def getFormattedDateTime(d):
    """Format a datetime as "YYYY-MM-DD HH:MM"; None passes through."""
    return None if d is None else d.strftime("%Y-%m-%d %H:%M")
def getClass(name):
    """Resolve a dotted name (e.g. "package.module.Class") to the object it names.

    Taken from http://stackoverflow.com/a/452981/2028598
    """
    parts = name.split(".")
    # __import__ of a dotted path returns the top-level package; walk the
    # remaining components with getattr.
    obj = __import__(".".join(parts[:-1]))
    for attr in parts[1:]:
        obj = getattr(obj, attr)
    return obj
def isDevVersion():
    """Heuristic: we are running from a development (git) checkout if a
    .git directory exists three levels above this module."""
    gitPath = os.path.abspath(
        os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../../.git"))
    return os.path.exists(gitPath)
def getExceptionString():
    """Return a one-line summary "Type: 'message' @ file:function:line" for
    the exception currently being handled."""
    exc_type, exc_value, exc_tb = sys.exc_info()
    # First (outermost) traceback frame: (filename, lineno, function, text).
    frame = traceback.extract_tb(exc_tb)[0]
    return "%s: '%s' @ %s:%s:%d" % (
        str(exc_type.__name__), str(exc_value),
        os.path.basename(frame[0]), frame[2], frame[1])
def getGitInfo():
    """Return (branch, commit hash) of the enclosing git checkout, or (None, None).

    Reads .git/HEAD and resolves the ref file it points to.  Only a symbolic
    HEAD ("ref: refs/heads/<branch>") is handled; a detached HEAD would make
    the subsequent file open fail.
    """
    gitPath = os.path.abspath(os.path.join(os.path.split(os.path.abspath(__file__))[0], "../../../.git"))
    if not os.path.exists(gitPath):
        return (None, None)

    headref = None
    with open(os.path.join(gitPath, "HEAD"), "r") as f:
        headref = f.readline().strip()

    if headref is None:
        return (None, None)

    # Strip the "ref: " prefix, derive the branch name from the last path
    # component and read the commit hash out of the ref file.
    headref = headref[len("ref: "):]
    branch = headref[headref.rfind("/") + 1:]
    with open(os.path.join(gitPath, headref)) as f:
        head = f.readline().strip()

    return (branch, head)
def getNewTimeout(type):
    """Return an absolute deadline (epoch seconds) for the serial timeout *type*.

    Unknown timeout types yield the current time, i.e. an already-expired
    deadline.
    """
    now = time.time()

    if type not in default_settings["serial"]["timeout"].keys():
        # timeout immediately for unknown timeout type
        return now

    return now + settings().getFloat(["serial", "timeout", type])
def getFreeBytes(path):
    """
    Free disk space, in bytes, on the volume containing *path*.

    Taken from http://stackoverflow.com/a/2372171/2028598
    """
    if sys.platform == "win32":
        # Windows has no statvfs; query the Win32 API through ctypes instead.
        import ctypes
        freeBytes = ctypes.c_ulonglong(0)
        ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(path), None, None, ctypes.pointer(freeBytes))
        return freeBytes.value
    else:
        st = os.statvfs(path)
        # blocks available to unprivileged users * fundamental block size
        return st.f_bavail * st.f_frsize
def getRemoteAddress(request):
    """Return the client address, honouring the first X-Forwarded-For hop
    when the request went through a reverse proxy."""
    forwarded = request.headers.get("X-Forwarded-For", None)
    if forwarded is None:
        return request.remote_addr
    # First entry is the originating client; later entries are proxies.
    return forwarded.split(",")[0]
def getDosFilename(input, existingFilenames, extension=None):
    """Derive a collision-free DOS 8.3 style name for *input*.

    @param input the original (long) filename, or None
    @param existingFilenames names already taken, to avoid collisions
    @param extension extension of the generated name, defaults to "gco"
    @return the generated name, or None if input was None
    @raise ValueError if no collision free name could be found
    """
    if input is None:
        return None

    if extension is None:
        extension = "gco"

    # Bug fix: the previous "filename, ext = input.rsplit('.', 1)" raised
    # ValueError for names without an extension; keep the whole name then.
    # (The old `ext` variable was unused anyway.)
    filename = input.rsplit(".", 1)[0] if "." in input else input
    return findCollisionfreeName(filename, extension, existingFilenames)
def findCollisionfreeName(input, extension, existingFilenames):
    """Map *input* to a DOS 8.3 style name ("prefix~N.ext") that does not
    occur in *existingFilenames*.

    NOTE(review): str.translate(None, chars) is Python 2 only; on Python 3
    this raises TypeError -- confirm the intended interpreter before reuse.
    """
    # Lowercase, drop DOS-unsafe characters and collapse whitespace runs to
    # underscores.
    filename = re.sub(r"\s+", "_", input.lower().translate(None, ".\"/\\[]:;=,"))
    counter = 1
    power = 1
    while counter < (10 * power):
        # Keep the total name at 8 characters: shrink the prefix as the
        # counter gains digits ("~1" .. "~99" ...).
        result = filename[:(6 - power + 1)] + "~" + str(counter) + "." + extension
        if result not in existingFilenames:
            return result
        counter += 1
        if counter == 10 * power:
            power += 1
    # NOTE(review): the loop bound grows whenever counter reaches it, so this
    # raise is effectively unreachable -- an all-colliding input keeps looping
    # with an ever-shorter prefix; verify whether a hard cap was intended.
    raise ValueError("Can't create a collision free filename")
def safeRename(old, new):
    """
    Safely renames a file.

    On Windows this is achieved by first creating a backup file of the new file (if it
    already exists), thus moving it, then renaming the old into the new file and finally removing the backup. If
    anything goes wrong during those steps, the backup (if already there) will be renamed to its old name and thus
    the operation hopefully result in a no-op.

    On other operating systems the atomic os.rename function will be used instead.

    @param old the path to the old file to be renamed
    @param new the path to the new file to be created/replaced
    """
    if sys.platform == "win32":
        # Reserve a unique backup filename (the file itself is only a placeholder).
        fh, backup = tempfile.mkstemp()
        os.close(fh)
        try:
            if os.path.exists(new):
                # Move the current target out of the way before overwriting it.
                silentRemove(backup)
                os.rename(new, backup)
            os.rename(old, new)
            os.remove(backup)
        except OSError:
            # if anything went wrong, try to rename the backup file to its original name
            # NOTE(review): if os.rename(old, new) itself failed, `new` may not
            # exist and os.remove(new) here raises again -- confirm intended.
            if os.path.exists(backup):
                os.remove(new)
                os.rename(backup, new)
    else:
        # on anything else than windows it's ooooh so much easier...
        os.rename(old, new)
def silentRemove(file):
    """
    Silently removes a file. Does not raise an error if the file doesn't exist.

    @param file the path of the file to be removed
    """
    try:
        os.remove(file)
    except OSError:
        # Removal is best effort: a missing file (or any other OS-level
        # failure) is deliberately ignored.
        pass
def sanitizeAscii(line):
    # NOTE: Python 2 only -- `unicode` does not exist on Python 3.
    # Round-trips the line through ASCII, replacing anything non-ASCII with
    # '?', and strips trailing whitespace.
    return unicode(line, 'ascii', 'replace').encode('ascii', 'replace').rstrip()

def filterNonAscii(line):
    """
    Returns True if the line contains non-ascii characters, false otherwise

    @param line the line to test
    """
    try:
        unicode(line, 'ascii').encode('ascii')
        return False
    except ValueError:
        # Decoding failures surface as UnicodeDecodeError, a ValueError
        # subclass, so this catch covers them.
        return True
def getJsonCommandFromRequest(request, valid_commands):
    """Extract a validated (command, data) pair from a JSON request body.

    *valid_commands* maps command names to their lists of mandatory
    parameters.  Returns (command, data, None) on success, otherwise
    (None, None, response) where response is a flask 400 error response.
    """
    # NOTE(review): raises KeyError if the Content-Type header is missing
    # entirely -- confirm callers guarantee its presence.
    if not "application/json" in request.headers["Content-Type"]:
        return None, None, make_response("Expected content-type JSON", 400)

    data = request.json
    if not "command" in data.keys() or not data["command"] in valid_commands.keys():
        return None, None, make_response("Expected valid command", 400)

    command = data["command"]
    for parameter in valid_commands[command]:
        if not parameter in data:
            return None, None, make_response("Mandatory parameter %s missing for command %s" % (parameter, command), 400)

    return command, data, None
| agpl-3.0 |
jpautom/scikit-learn | sklearn/datasets/tests/test_samples_generator.py | 181 | 15664 | from __future__ import division
from collections import defaultdict
from functools import partial
import numpy as np
import scipy.sparse as sp
from sklearn.externals.six.moves import zip
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import assert_raises
from sklearn.datasets import make_classification
from sklearn.datasets import make_multilabel_classification
from sklearn.datasets import make_hastie_10_2
from sklearn.datasets import make_regression
from sklearn.datasets import make_blobs
from sklearn.datasets import make_friedman1
from sklearn.datasets import make_friedman2
from sklearn.datasets import make_friedman3
from sklearn.datasets import make_low_rank_matrix
from sklearn.datasets import make_sparse_coded_signal
from sklearn.datasets import make_sparse_uncorrelated
from sklearn.datasets import make_spd_matrix
from sklearn.datasets import make_swiss_roll
from sklearn.datasets import make_s_curve
from sklearn.datasets import make_biclusters
from sklearn.datasets import make_checkerboard
from sklearn.utils.validation import assert_all_finite
def test_make_classification():
    # Smoke test: output shapes plus the per-class sample counts implied by
    # the explicit `weights` (remaining mass goes to the last class).
    X, y = make_classification(n_samples=100, n_features=20, n_informative=5,
                               n_redundant=1, n_repeated=1, n_classes=3,
                               n_clusters_per_class=1, hypercube=False,
                               shift=None, scale=None, weights=[0.1, 0.25],
                               random_state=0)
    assert_equal(X.shape, (100, 20), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (3,), "Unexpected number of classes")
    assert_equal(sum(y == 0), 10, "Unexpected number of samples in class #0")
    assert_equal(sum(y == 1), 25, "Unexpected number of samples in class #1")
    assert_equal(sum(y == 2), 65, "Unexpected number of samples in class #2")


def test_make_classification_informative_features():
    """Test the construction of informative features in make_classification

    Also tests `n_clusters_per_class`, `n_classes`, `hypercube` and
    fully-specified `weights`.
    """
    # Create very separate clusters; check that vertices are unique and
    # correspond to classes
    class_sep = 1e6
    make = partial(make_classification, class_sep=class_sep, n_redundant=0,
                   n_repeated=0, flip_y=0, shift=0, scale=1, shuffle=False)

    for n_informative, weights, n_clusters_per_class in [(2, [1], 1),
                                                         (2, [1/3] * 3, 1),
                                                         (2, [1/4] * 4, 1),
                                                         (2, [1/2] * 2, 2),
                                                         (2, [3/4, 1/4], 2),
                                                         (10, [1/3] * 3, 10)
                                                         ]:
        n_classes = len(weights)
        n_clusters = n_classes * n_clusters_per_class
        n_samples = n_clusters * 50

        for hypercube in (False, True):
            X, y = make(n_samples=n_samples, n_classes=n_classes,
                        weights=weights, n_features=n_informative,
                        n_informative=n_informative,
                        n_clusters_per_class=n_clusters_per_class,
                        hypercube=hypercube, random_state=0)

            assert_equal(X.shape, (n_samples, n_informative))
            assert_equal(y.shape, (n_samples,))

            # Cluster by sign, viewed as strings to allow uniquing
            signs = np.sign(X)
            signs = signs.view(dtype='|S{0}'.format(signs.strides[0]))
            unique_signs, cluster_index = np.unique(signs,
                                                    return_inverse=True)

            assert_equal(len(unique_signs), n_clusters,
                         "Wrong number of clusters, or not in distinct "
                         "quadrants")

            clusters_by_class = defaultdict(set)
            for cluster, cls in zip(cluster_index, y):
                clusters_by_class[cls].add(cluster)
            for clusters in clusters_by_class.values():
                assert_equal(len(clusters), n_clusters_per_class,
                             "Wrong number of clusters per class")
            assert_equal(len(clusters_by_class), n_classes,
                         "Wrong number of classes")

            # NOTE(review): '//' floor-divides the class frequencies by
            # `weights`; a plain '/' (ratio ~1) looks intended -- verify
            # against upstream history before relying on this check.
            assert_array_almost_equal(np.bincount(y) / len(y) // weights,
                                      [1] * n_classes,
                                      err_msg="Wrong number of samples "
                                              "per class")

            # Ensure on vertices of hypercube
            for cluster in range(len(unique_signs)):
                centroid = X[cluster_index == cluster].mean(axis=0)
                if hypercube:
                    assert_array_almost_equal(np.abs(centroid),
                                              [class_sep] * n_informative,
                                              decimal=0,
                                              err_msg="Clusters are not "
                                                      "centered on hypercube "
                                                      "vertices")
                else:
                    assert_raises(AssertionError,
                                  assert_array_almost_equal,
                                  np.abs(centroid),
                                  [class_sep] * n_informative,
                                  decimal=0,
                                  err_msg="Clusters should not be cenetered "
                                          "on hypercube vertices")

    # Invalid parameter combinations must raise.
    assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=5,
                  n_clusters_per_class=1)
    assert_raises(ValueError, make, n_features=2, n_informative=2, n_classes=3,
                  n_clusters_per_class=2)
def test_make_multilabel_classification_return_sequences():
    """Label-list output: X shape, highest class index and per-sample
    label-list lengths respect the allow_unlabeled setting."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=100, n_features=20,
                                              n_classes=3, random_state=0,
                                              return_indicator=False,
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (100, 20), "X shape mismatch")
        if not allow_unlabeled:
            # with every sample labeled, the highest class index (2) occurs
            assert_equal(max([max(y) for y in Y]), 2)
        assert_equal(min([len(y) for y in Y]), min_length)
        assert_true(max([len(y) for y in Y]) <= 3)
def test_make_multilabel_classification_return_indicator():
    """Indicator-matrix output: shapes, column sums, and that
    return_distributions=True also yields normalized p_c / p_w_c."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=25, n_features=20,
                                              n_classes=3, random_state=0,
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (25, 20), "X shape mismatch")
        assert_equal(Y.shape, (25, 3), "Y shape mismatch")
        assert_true(np.all(np.sum(Y, axis=0) > min_length))
        # Also test return_distributions and return_indicator with True
        X2, Y2, p_c, p_w_c = make_multilabel_classification(
            n_samples=25, n_features=20, n_classes=3, random_state=0,
            allow_unlabeled=allow_unlabeled, return_distributions=True)
        assert_array_equal(X, X2)
        assert_array_equal(Y, Y2)
        # p_c and p_w_c are probability distributions, so they sum to 1
        assert_equal(p_c.shape, (3,))
        assert_almost_equal(p_c.sum(), 1)
        assert_equal(p_w_c.shape, (20, 3))
        assert_almost_equal(p_w_c.sum(axis=0), [1] * 3)
def test_make_multilabel_classification_return_indicator_sparse():
    """return_indicator='sparse' must produce a scipy.sparse label matrix."""
    for allow_unlabeled, min_length in zip((True, False), (0, 1)):
        X, Y = make_multilabel_classification(n_samples=25, n_features=20,
                                              n_classes=3, random_state=0,
                                              return_indicator='sparse',
                                              allow_unlabeled=allow_unlabeled)
        assert_equal(X.shape, (25, 20), "X shape mismatch")
        assert_equal(Y.shape, (25, 3), "Y shape mismatch")
        assert_true(sp.issparse(Y))
def test_make_hastie_10_2():
    """Hastie example 10.2 data: 10 features, binary target."""
    X, y = make_hastie_10_2(n_samples=100, random_state=0)
    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (2,), "Unexpected number of classes")
def test_make_regression():
    """Regression data: shapes, informative-coefficient count, and that
    the noise around the linear model has the requested std (1.0)."""
    X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,
                              effective_rank=5, coef=True, bias=0.0,
                              noise=1.0, random_state=0)
    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100,), "y shape mismatch")
    assert_equal(c.shape, (10,), "coef shape mismatch")
    assert_equal(sum(c != 0.0), 3, "Unexpected number of informative features")
    # Test that y ~= np.dot(X, c) + bias + N(0, 1.0).
    assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1)
    # Test with small number of features.
    X, y = make_regression(n_samples=100, n_features=1)  # n_informative=3
    assert_equal(X.shape, (100, 1))
def test_make_regression_multitarget():
    """Same checks as above, but with a 3-column (multi-target) y/coef."""
    X, y, c = make_regression(n_samples=100, n_features=10, n_informative=3,
                              n_targets=3, coef=True, noise=1., random_state=0)
    assert_equal(X.shape, (100, 10), "X shape mismatch")
    assert_equal(y.shape, (100, 3), "y shape mismatch")
    assert_equal(c.shape, (10, 3), "coef shape mismatch")
    assert_array_equal(sum(c != 0.0), 3,
                       "Unexpected number of informative features")
    # Test that y ~= np.dot(X, c) + bias + N(0, 1.0)
    assert_almost_equal(np.std(y - np.dot(X, c)), 1.0, decimal=1)
def test_make_blobs():
    """Gaussian blobs: shapes, blob count, and per-cluster std/center."""
    cluster_stds = np.array([0.05, 0.2, 0.4])
    cluster_centers = np.array([[0.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
    X, y = make_blobs(random_state=0, n_samples=50, n_features=2,
                      centers=cluster_centers, cluster_std=cluster_stds)
    assert_equal(X.shape, (50, 2), "X shape mismatch")
    assert_equal(y.shape, (50,), "y shape mismatch")
    assert_equal(np.unique(y).shape, (3,), "Unexpected number of blobs")
    for i, (ctr, std) in enumerate(zip(cluster_centers, cluster_stds)):
        assert_almost_equal((X[y == i] - ctr).std(), std, 1, "Unexpected std")
def test_make_friedman1():
    """Noise-free Friedman #1: y matches its defining formula exactly."""
    X, y = make_friedman1(n_samples=5, n_features=10, noise=0.0,
                          random_state=0)
    assert_equal(X.shape, (5, 10), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
    assert_array_almost_equal(y,
                              10 * np.sin(np.pi * X[:, 0] * X[:, 1])
                              + 20 * (X[:, 2] - 0.5) ** 2
                              + 10 * X[:, 3] + 5 * X[:, 4])
def test_make_friedman2():
    """Noise-free Friedman #2: y matches its defining formula exactly."""
    X, y = make_friedman2(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 4), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
    assert_array_almost_equal(y,
                              (X[:, 0] ** 2
                               + (X[:, 1] * X[:, 2] - 1
                                  / (X[:, 1] * X[:, 3])) ** 2) ** 0.5)
def test_make_friedman3():
    """Noise-free Friedman #3: y matches its defining formula exactly."""
    X, y = make_friedman3(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 4), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
    assert_array_almost_equal(y, np.arctan((X[:, 1] * X[:, 2]
                                            - 1 / (X[:, 1] * X[:, 3]))
                                           / X[:, 0]))
def test_make_low_rank_matrix():
    """Low-rank matrix: shape and approximate effective rank via SVD."""
    X = make_low_rank_matrix(n_samples=50, n_features=25, effective_rank=5,
                             tail_strength=0.01, random_state=0)
    assert_equal(X.shape, (50, 25), "X shape mismatch")
    from numpy.linalg import svd
    u, s, v = svd(X)
    # with a tiny tail, the singular values should sum to ~effective_rank
    assert_less(sum(s) - 5, 0.1, "X rank is not approximately 5")
def test_make_sparse_coded_signal():
    """Sparse coded signal: shapes, per-column sparsity of the code X,
    exact reconstruction Y = D X, and unit-norm dictionary atoms."""
    Y, D, X = make_sparse_coded_signal(n_samples=5, n_components=8,
                                       n_features=10, n_nonzero_coefs=3,
                                       random_state=0)
    assert_equal(Y.shape, (10, 5), "Y shape mismatch")
    assert_equal(D.shape, (10, 8), "D shape mismatch")
    assert_equal(X.shape, (8, 5), "X shape mismatch")
    for col in X.T:
        assert_equal(len(np.flatnonzero(col)), 3, 'Non-zero coefs mismatch')
    assert_array_almost_equal(np.dot(D, X), Y)
    assert_array_almost_equal(np.sqrt((D ** 2).sum(axis=0)),
                              np.ones(D.shape[1]))
def test_make_sparse_uncorrelated():
    """Sparse uncorrelated design: only shape checks."""
    X, y = make_sparse_uncorrelated(n_samples=5, n_features=10, random_state=0)
    assert_equal(X.shape, (5, 10), "X shape mismatch")
    assert_equal(y.shape, (5,), "y shape mismatch")
def test_make_spd_matrix():
    """SPD matrix: symmetric with strictly positive eigenvalues."""
    X = make_spd_matrix(n_dim=5, random_state=0)
    assert_equal(X.shape, (5, 5), "X shape mismatch")
    assert_array_almost_equal(X, X.T)
    from numpy.linalg import eig
    eigenvalues, _ = eig(X)
    assert_array_equal(eigenvalues > 0, np.array([True] * 5),
                       "X is not positive-definite")
def test_make_swiss_roll():
    """Noise-free swiss roll: coordinates follow the parametric curve."""
    X, t = make_swiss_roll(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 3), "X shape mismatch")
    assert_equal(t.shape, (5,), "t shape mismatch")
    assert_array_almost_equal(X[:, 0], t * np.cos(t))
    assert_array_almost_equal(X[:, 2], t * np.sin(t))
def test_make_s_curve():
    """Noise-free S-curve: coordinates follow the parametric curve."""
    X, t = make_s_curve(n_samples=5, noise=0.0, random_state=0)
    assert_equal(X.shape, (5, 3), "X shape mismatch")
    assert_equal(t.shape, (5,), "t shape mismatch")
    assert_array_almost_equal(X[:, 0], np.sin(t))
    assert_array_almost_equal(X[:, 2], np.sign(t) * (np.cos(t) - 1))
def test_make_biclusters():
    """Bicluster data: shapes, finiteness, and seeded determinism."""
    X, rows, cols = make_biclusters(
        shape=(100, 100), n_clusters=4, shuffle=True, random_state=0)
    assert_equal(X.shape, (100, 100), "X shape mismatch")
    assert_equal(rows.shape, (4, 100), "rows shape mismatch")
    assert_equal(cols.shape, (4, 100,), "columns shape mismatch")
    assert_all_finite(X)
    assert_all_finite(rows)
    assert_all_finite(cols)
    # same seed must reproduce the same data
    X2, _, _ = make_biclusters(shape=(100, 100), n_clusters=4,
                               shuffle=True, random_state=0)
    assert_array_almost_equal(X, X2)
def test_make_checkerboard():
    """Checkerboard data: shapes for tuple and scalar n_clusters,
    finiteness, and seeded determinism."""
    X, rows, cols = make_checkerboard(
        shape=(100, 100), n_clusters=(20, 5),
        shuffle=True, random_state=0)
    assert_equal(X.shape, (100, 100), "X shape mismatch")
    assert_equal(rows.shape, (100, 100), "rows shape mismatch")
    assert_equal(cols.shape, (100, 100,), "columns shape mismatch")
    X, rows, cols = make_checkerboard(
        shape=(100, 100), n_clusters=2, shuffle=True, random_state=0)
    assert_all_finite(X)
    assert_all_finite(rows)
    assert_all_finite(cols)
    # same seed must reproduce the same data
    X1, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,
                                 shuffle=True, random_state=0)
    X2, _, _ = make_checkerboard(shape=(100, 100), n_clusters=2,
                                 shuffle=True, random_state=0)
    assert_array_equal(X1, X2)
| bsd-3-clause |
papados/ordersys | Lib/encodings/shift_jis.py | 816 | 1039 | #
# shift_jis.py: Python Unicode Codec for SHIFT_JIS
#
# Written by Hye-Shik Chang <perky@FreeBSD.org>
#
import codecs
import _codecs_jp
import _multibytecodec as mbc

# Low-level SHIFT_JIS codec object implemented in C by CPython.
codec = _codecs_jp.getcodec('shift_jis')


class Codec(codecs.Codec):
    # Stateless one-shot conversions delegate straight to the C codec.
    encode = codec.encode
    decode = codec.decode


class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
                         codecs.IncrementalEncoder):
    codec = codec


class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
                         codecs.IncrementalDecoder):
    codec = codec


class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
    codec = codec


class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
    codec = codec


def getregentry():
    """Return the CodecInfo record registered for this encoding."""
    stateless = Codec()
    return codecs.CodecInfo(
        name='shift_jis',
        encode=stateless.encode,
        decode=stateless.decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
| unlicense |
crdroid-devices/android_kernel_lge_hammerhead | tools/perf/util/setup.py | 4998 | 1330 | #!/usr/bin/python2
from distutils.core import setup, Extension
from os import getenv
from distutils.command.build_ext import build_ext as _build_ext
from distutils.command.install_lib import install_lib as _install_lib
class build_ext(_build_ext):
    # Redirect build output into the directory chosen by the perf
    # Makefile (PYTHON_EXTBUILD_LIB / PYTHON_EXTBUILD_TMP below).
    def finalize_options(self):
        _build_ext.finalize_options(self)
        self.build_lib = build_lib
        self.build_temp = build_tmp
class install_lib(_install_lib):
    # Install from the Makefile-chosen build directory instead of the
    # distutils default.
    def finalize_options(self):
        _install_lib.finalize_options(self)
        self.build_dir = build_lib
# Extra compiler flags; CFLAGS from the environment is appended so the
# extension builds with the same flags as the rest of perf.
cflags = ['-fno-strict-aliasing', '-Wno-write-strings']
cflags += getenv('CFLAGS', '').split()
# Build locations are dictated by the perf Makefile via the environment.
build_lib = getenv('PYTHON_EXTBUILD_LIB')
build_tmp = getenv('PYTHON_EXTBUILD_TMP')
# util/python-ext-sources lists one C source per line; blank lines and
# '#' comment lines are skipped.
ext_sources = [f.strip() for f in file('util/python-ext-sources')
		if len(f.strip()) > 0 and f[0] != '#']
perf = Extension('perf',
		  sources = ext_sources,
		  include_dirs = ['util/include'],
		  extra_compile_args = cflags,
                 )
setup(name='perf',
      version='0.1',
      description='Interface with the Linux profiling infrastructure',
      author='Arnaldo Carvalho de Melo',
      author_email='acme@redhat.com',
      license='GPLv2',
      url='http://perf.wiki.kernel.org',
      ext_modules=[perf],
      cmdclass={'build_ext': build_ext, 'install_lib': install_lib})
| gpl-2.0 |
bmhatfield/Diamond | src/collectors/endecadgraph/endecadgraph.py | 57 | 3912 | # coding=utf-8
"""
Collects stats from Endeca Dgraph/MDEX server.
Tested with: Endeca Information Access Platform version 6.3.0.655584
=== Authors
Jan van Bemmelen <jvanbemmelen@bol.com>
Renzo Toma <rtoma@bol.com>
"""
import diamond.collector
import urllib2
from StringIO import StringIO
import re
import sys
if sys.version_info >= (2, 5):
import xml.etree.cElementTree as ElementTree
else:
import cElementTree as ElementTree
class EndecaDgraphCollector(diamond.collector.Collector):
# ignore these elements, because they are of no use
IGNORE_ELEMENTS = [
'most_expensive_queries',
'general_information',
'analytics_performance',
'disk_usage',
'configupdates',
'xqueryconfigupdates',
'spelling_updates',
'precomputed_sorts',
'analytics_performance',
'cache_slices',
]
# ignore these metrics, because they can be generated by graphite
IGNORE_STATS = [
'name',
'units',
]
# set of regular expressions for matching & sub'ing.
NUMVAL_MATCH = re.compile('^[\d\.e\-\+]*$')
CHAR_BLACKLIST = re.compile('\-|\ |,|:|/|>|\(|\)')
UNDERSCORE_UNDUPE = re.compile('_+')
# endeca xml namespace
XML_NS = '{http://xmlns.endeca.com/ene/dgraph}'
def get_default_config_help(self):
config_help = super(EndecaDgraphCollector,
self).get_default_config_help()
config_help.update({
'host': "Hostname of Endeca Dgraph instance",
'port': "Port of the Dgraph API listener",
'timeout': "Timeout for http API calls",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(EndecaDgraphCollector, self).get_default_config()
config.update({
'path': 'endeca.dgraph',
'host': 'localhost',
'port': 8080,
'timeout': 1,
})
return config
def collect(self):
def makeSane(stat):
stat = self.CHAR_BLACKLIST.sub('_', stat.lower())
stat = self.UNDERSCORE_UNDUPE.sub('_', stat)
return stat
def createKey(element):
if element.attrib.get("name"):
key = element.attrib.get("name")
key = makeSane(key)
else:
key = element.tag[len(self.XML_NS):]
return key
def processElem(elem, keyList):
for k, v in elem.items():
prefix = '.'.join(keyList)
if k not in self.IGNORE_ELEMENTS and self.NUMVAL_MATCH.match(v):
k = makeSane(k)
self.publish('%s.%s' % (prefix, k), v)
def walkXML(context, elemList):
try:
for event, elem in context:
elemName = createKey(elem)
if event == 'start':
elemList.append(elemName)
if len(elem) == 0:
if set(elemList).intersection(self.IGNORE_ELEMENTS):
continue
processElem(elem, elemList)
elif event == 'end':
elemList.pop()
except Exception, e:
self.log.error('Something went wrong: %s', e)
url = 'http://%s:%d/admin?op=stats' % (self.config['host'],
self.config['port'])
try:
xml = urllib2.urlopen(url, timeout=self.config['timeout']).read()
except Exception, e:
self.log.error('Could not connect to endeca on %s: %s' % (url, e))
return {}
context = ElementTree.iterparse(StringIO(xml), events=('start', 'end'))
elemList = []
walkXML(context, elemList)
| mit |
btrzcinski/netchat | py-client/client.py | 1 | 4786 | #! /usr/local/bin/python2.5
"""
The Py-Client serves as a lightweight altenative to the graphical
NetChat J-Client. Using curses to manage input and output, it is
entirely terminal-based, and can be configured to run to taste via
command line arguments.
"""
import os.path
import signal
import sys
from twisted.internet import reactor
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.python.usage import UsageError
from netclient.connection import XMLComm, Terminal
from netclient.cmanager import cmanager
from netclient.io import CursesStdIO
from netclient.mmanager import mmanager
from netclient.opts import Options
from netclient.settings import COMPONENTS, MODULES, HOST, PORT, LOG
TERMINATED = 'Unknown.'
"""
Determines the string to be displayed when a connection is closed.
Can be replaced by Twisted if a specific reason is given.
"""
class Client(ClientFactory):
    """
    The twisted ClientFactory for Py-Client.
    """
    protocol = XMLComm
    def __init__(self, opts):
        # Install the SIGINT handler first so Ctrl-C always goes through
        # the clean exit() path.
        signal.signal(signal.SIGINT, self._sigint)
        self.opts = opts
        cmanager.add_component('factory', self)
        cmanager.load_component('config')
        self.config = cmanager.get_proxy('config')
        cmanager.load_component('logger')
        # Hand each manager/component its own logging context.
        cmanager.logger = cmanager['logger'].get_context('cmanager')
        mmanager.logger = cmanager['logger'].get_context('mmanager')
        self.config.logger = cmanager['logger'].get_context('config')
        self.logger = cmanager['logger'].get_context('factory')
        # Guards exit() against running twice (e.g. SIGINT + lost conn).
        self.exiting = False
    def _sigint(self, sig, frame):
        # Signal handler: translate SIGINT into a normal shutdown.
        self.exit('Received SIGINT.')
    def startedConnecting(self, connector):
        h, p = connector.host, connector.port
        self.logger.log('Attemping to connect to port %d at host \'%s\'.' % (p, h))
    def buildProtocol(self, addr):
        # Standard twisted factory hook: create the protocol and give it
        # a back-reference to this factory.
        self.logger.msg('Establishing connection...')
        p = self.protocol(addr)
        p.factory = self
        return p
    def clientConnectionLost(self, connector, reason):
        # Schedule the stop slightly later so twisted can finish the
        # current event before the reactor is torn down.
        self.logger.msg('Connection lost. Shutting down.')
        reactor.callLater(0.01, self.exit, 'Lost connection.')
    def clientConnectionFailed(self, connector, reason):
        self.logger.log('Failed connection.')
        self.exit('Connection failed. Check your network connection or try again later.')
    def exit(self, reason=TERMINATED):
        """
        To be called whenever a shutdown is required.
        """
        global TERMINATED
        # Re-entrancy guard: exit() can be triggered by several events.
        if self.exiting:
            return
        self.exiting = True
        if reason:
            TERMINATED = reason
        if 'xmlcomm' in cmanager:
            cmanager['xmlcomm'].drop()
        self.logger.log('Received stop event. %s' % reason)
        reactor.callLater(0.01, reactor.stop)
def main():
    """
    Main driver function for Py-Client. Handles opt-parsing and
    reactor initialization.
    """
    global TERMINATED
    options = Options()
    try:
        options.parseOptions()
    except UsageError, text:
        print '%s: %s' % (sys.argv[0], text)
        print '%s: Try --help for usage details.' % sys.argv[0]
        sys.exit(1)
    # Invert the command-line "no-*" flags into positive option names.
    options['pingpong'] = not options['no-pingpong']
    options['ssl'] = not options['no-ssl']
    options['color'] = not options['no-color']
    # TODO: Append/Multiple Logs
    if isinstance(options['log'], str):
        options['log'] = file(options['log'], 'w')
    try:
        options['port'] = int(options['port'])
    except ValueError:
        print '%s: Non-int value specified for port.' % sys.argv[0]
        sys.exit(1)
    factory = Client(options)
    # Wire the curses terminal into the reactor as a reader.
    sysin = Terminal()
    screen = CursesStdIO(sysin)
    reactor.addReader(screen)
    cmanager['logger'].startLogging()
    # Validate configured module-command sets before loading components.
    s = factory.config.find('sets')
    for set in s['default_sets']:
        if not set in s['valid_sets']:
            raise KeyError, 'All default module-command pairs must be in the valid set list.'
    cmanager.load_components(factory.config.find('components', 'default_components'))
    if options['ssl']:
        # SSL imports are deferred so a plain-TCP run has no OpenSSL dep.
        from twisted.internet.ssl import DefaultOpenSSLContextFactory
        from netclient.settings import PRIVKEY, CERTIFICATE
        ssl = DefaultOpenSSLContextFactory(PRIVKEY, CERTIFICATE)
        reactor.connectSSL(options['host'], options['port'], factory, ssl)
    else:
        reactor.connectTCP(options['host'], options['port'], factory)
    if options['debug']:
        factory.logger.msg('Loading test command suite.')
        cmanager['commands'].loadModule('test')
    # Blocks until the reactor stops (see Client.exit), then clean up.
    reactor.run()
    cmanager.close()
    screen.end()
    print 'Connection lost.'
    print '\tReason: %s' % TERMINATED
    print 'Thank you for using Py-Client. :)'
if __name__ == '__main__':
    main()
hurdlea/SimpleCV | SimpleCV/Features/BOFFeatureExtractor.py | 11 | 12465 | from SimpleCV.base import *
from SimpleCV.ImageClass import Image
from SimpleCV.Features.FeatureExtractorBase import *
class BOFFeatureExtractor(object):
"""
For a discussion of bag of features please see:
http://en.wikipedia.org/wiki/Bag_of_words_model_in_computer_vision
Initialize the bag of features extractor. This assumes you don't have
the feature codebook pre-computed.
patchsz = the dimensions of each codebook patch
numcodes = the number of different patches in the codebook.
imglayout = the shape of the resulting image in terms of patches
padding = the pixel padding of each patch in the resulting image.
"""
mPatchSize = (11,11)
mNumCodes = 128
mPadding = 0
mLayout = (8,16)
mCodebookImg = None
mCodebook = None
def __init__(self,patchsz=(11,11),numcodes=128,imglayout=(8,16),padding=0):
self.mPadding = padding
self.mLayout = imglayout
self.mPatchSize = patchsz
self.mNumCodes = numcodes
def generate(self,imgdirs,numcodes=128,sz=(11,11),imgs_per_dir=50,img_layout=(8,16),padding=0, verbose=True):
"""
This method builds the bag of features codebook from a list of directories
with images in them. Each directory should be broken down by image class.
* imgdirs: This list of directories.
* patchsz: the dimensions of each codebook patch
* numcodes: the number of different patches in the codebook.
* imglayout: the shape of the resulting image in terms of patches - this must
match the size of numcodes. I.e. numcodes == img_layout[0]*img_layout[1]
* padding:the pixel padding of each patch in the resulting image.
* imgs_per_dir: this method can use a specified number of images per directory
* verbose: print output
Once the method has completed it will save the results to a local file
using the file name codebook.png
WARNING:
THIS METHOD WILL TAKE FOREVER
"""
if( numcodes != img_layout[0]*img_layout[1]):
warnings.warn("Numcodes must match the size of image layout.")
return None
self.mPadding = padding
self.mLayout = img_layout
self.mNumCodes = numcodes
self.mPatchSize = sz
rawFeatures = np.zeros(sz[0]*sz[1])#fakeout numpy so we can use vstack
for path in imgdirs:
fcount = 0
files = []
for ext in IMAGE_FORMATS:
files.extend(glob.glob( os.path.join(path, ext)))
nimgs = min(len(files),imgs_per_dir)
for i in range(nimgs):
infile = files[i]
if verbose:
print(path+" "+str(i)+" of "+str(imgs_per_dir))
print "Opening file: " + infile
img = Image(infile)
newFeat = self._getPatches(img,sz)
if verbose:
print " Got " + str(len(newFeat)) + " features."
rawFeatures = np.vstack((rawFeatures,newFeat))
del img
rawFeatures = rawFeatures[1:,:] # pop the fake value we put on the top
if verbose:
print "=================================="
print "Got " + str(len(rawFeatures)) + " features "
print "Doing K-Means .... this will take a long time"
self.mCodebook = self._makeCodebook(rawFeatures,self.mNumCodes)
self.mCodebookImg = self._codebook2Img(self.mCodebook,self.mPatchSize,self.mNumCodes,self.mLayout,self.mPadding)
self.mCodebookImg.save('codebook.png')
def extractPatches(self, img, sz=(11,11) ):
"""
Get patches from a single images. This is an external access method. The
user will need to maintain the list of features. See the generate method
as a guide to doing this by hand. Sz is the image patch size.
"""
return self._getPatches(img,sz)
def makeCodebook(self, featureStack,ncodes=128):
"""
This method will return the centroids of the k-means analysis of a large
number of images. Ncodes is the number of centroids to find.
"""
return self._makeCodebook(featureStack,ncodes)
def _makeCodebook(self,data,ncodes=128):
"""
Do the k-means ... this is slow as as shit
"""
[centroids, membership] = cluster.kmeans2(data,ncodes, minit='points')
return(centroids)
def _img2Codebook(self, img, patchsize, count, patch_arrangement, spacersz):
"""
img = the image
patchsize = the patch size (ususally 11x11)
count = total codes
patch_arrangement = how are the patches grided in the image (eg 128 = (8x16) 256=(16x16) )
spacersz = the number of pixels between patches
"""
img = img.toHLS()
lmat = cv.CreateImage((img.width,img.height), cv.IPL_DEPTH_8U, 1)
patch = cv.CreateImage(patchsize,cv.IPL_DEPTH_8U,1)
cv.Split(img.getBitmap(),None,lmat,None,None)
w = patchsize[0]
h = patchsize[1]
length = w*h
retVal = np.zeros(length)
for widx in range(patch_arrangement[0]):
for hidx in range(patch_arrangement[1]):
x = (widx*patchsize[0])+((widx+1)*spacersz)
y = (hidx*patchsize[1])+((hidx+1)*spacersz)
cv.SetImageROI(lmat,(x,y,w,h))
cv.Copy(lmat,patch)
cv.ResetImageROI(lmat)
retVal = np.vstack((retVal,np.array(patch[:,:]).reshape(length)))
retVal = retVal[1:,:]
return retVal
def _codebook2Img(self, cb, patchsize, count, patch_arrangement, spacersz):
"""
cb = the codebook
patchsize = the patch size (ususally 11x11)
count = total codes
patch_arrangement = how are the patches grided in the image (eg 128 = (8x16) 256=(16x16) )
spacersz = the number of pixels between patches
"""
w = (patchsize[0]*patch_arrangement[0])+((patch_arrangement[0]+1)*spacersz)
h = (patchsize[1]*patch_arrangement[1])+((patch_arrangement[1]+1)*spacersz)
bm = cv.CreateImage((w,h), cv.IPL_DEPTH_8U, 1)
cv.Zero(bm)
img = Image(bm)
count = 0
for widx in range(patch_arrangement[0]):
for hidx in range(patch_arrangement[1]):
x = (widx*patchsize[0])+((widx+1)*spacersz)
y = (hidx*patchsize[1])+((hidx+1)*spacersz)
temp = Image(cb[count,:].reshape(patchsize[0],patchsize[1]))
img.blit(temp,pos=(x,y))
count = count + 1
return img
def _getPatches(self,img,sz=None):
#retVal = [] # may need to go to np.array
if( sz is None ):
sz = self.mPatchSize
img2 = img.toHLS()
lmat = cv.CreateImage((img.width,img.height), cv.IPL_DEPTH_8U, 1)
patch = cv.CreateImage(self.mPatchSize,cv.IPL_DEPTH_8U,1)
cv.Split(img2.getBitmap(),None,lmat,None,None)
wsteps = img2.width/sz[0]
hsteps = img2.height/sz[1]
w=sz[0]
h=sz[1]
length = w*h
retVal = np.zeros(length)
for widx in range(wsteps):
for hidx in range(hsteps):
x = (widx*sz[0])
y = (hidx*sz[1])
cv.SetImageROI(lmat,(x,y,w,h))
cv.EqualizeHist(lmat,patch)
#cv.Copy(lmat,patch)
cv.ResetImageROI(lmat)
retVal = np.vstack((retVal,np.array(patch[:,:]).reshape(length)))
#retVal.append()
retVal = retVal[1:,:] # pop the fake value we put on top of the stack
return retVal
def load(self,datafile):
"""
Load a codebook from file using the datafile. The datafile
should point to a local image for the source patch image.
"""
myFile = open(datafile, 'r')
temp = myFile.readline()
#print(temp)
self.mNumCodes = int(myFile.readline())
#print(self.mNumCodes)
w = int(myFile.readline())
h = int(myFile.readline())
self.mPatchSize = (w,h)
#print(self.mPatchSize)
self.mPadding = int(myFile.readline())
#print(self.mPadding)
w = int(myFile.readline())
h = int(myFile.readline())
self.mLayout = (w,h)
#print(self.mLayout)
imgfname = myFile.readline().strip()
#print(imgfname)
self.mCodebookImg = Image(imgfname)
self.mCodebook = self._img2Codebook(self.mCodebookImg,
self.mPatchSize,
self.mNumCodes,
self.mLayout,
self.mPadding)
#print(self.mCodebook)
return
def save(self,imgfname,datafname):
"""
Save the bag of features codebook and data set to a local file.
"""
myFile = open(datafname,'w')
myFile.write("BOF Codebook Data\n")
myFile.write(str(self.mNumCodes)+"\n")
myFile.write(str(self.mPatchSize[0])+"\n")
myFile.write(str(self.mPatchSize[1])+"\n")
myFile.write(str(self.mPadding)+"\n")
myFile.write(str(self.mLayout[0])+"\n")
myFile.write(str(self.mLayout[1])+"\n")
myFile.write(imgfname+"\n")
myFile.close()
if(self.mCodebookImg is None):
self._codebook2Img(self.mCodebook,self.mPatchSize,self.mNumCodes,self.mLayout,self.mPadding)
self.mCodebookImg.save(imgfname)
return
def __getstate__(self):
if(self.mCodebookImg is None):
self._codebook2Img(self.mCodebook,self.mPatchSize,self.mNumCodes,self.mLayout,self.mPadding)
mydict = self.__dict__.copy()
del mydict['mCodebook']
return mydict
def __setstate__(self, mydict):
self.__dict__ = mydict
self.mCodebook = self._img2Codebook(self.mCodebookImg,
self.mPatchSize,
self.mNumCodes,
self.mLayout,
self.mPadding)
def extract(self, img):
"""
This method extracts a bag of features histogram for the input image using
the provided codebook. The result are the bin counts for each codebook code.
"""
data = self._getPatches(img)
p = spsd.cdist(data,self.mCodebook)
codes = np.argmin(p,axis=1)
[retVal,foo] = np.histogram(codes,self.mNumCodes,normed=True,range=(0,self.mNumCodes-1))
return retVal
def reconstruct(self,img):
"""
This is a "just for fun" method as a sanity check for the BOF codeook.
The method takes in an image, extracts each codebook code, and replaces
the image at the position with the code.
"""
retVal = cv.CreateImage((img.width,img.height), cv.IPL_DEPTH_8U, 1)
data = self._getPatches(img)
p = spsd.cdist(data,self.mCodebook)
foo = p.shape[0]
codes = np.argmin(p,axis=1)
count = 0
wsteps = img.width/self.mPatchSize[0]
hsteps = img.height/self.mPatchSize[1]
w=self.mPatchSize[0]
h=self.mPatchSize[1]
length = w*h
retVal = Image(retVal)
for widx in range(wsteps):
for hidx in range(hsteps):
x = (widx*self.mPatchSize[0])
y = (hidx*self.mPatchSize[1])
p = codes[count]
temp = Image(self.mCodebook[p,:].reshape(self.mPatchSize[0],self.mPatchSize[1]))
retVal = retVal.blit(temp,pos=(x,y))
count = count + 1
return retVal
def getFieldNames(self):
"""
This method gives the names of each field in the feature vector in the
order in which they are returned. For example, 'xpos' or 'width'
"""
retVal = []
for widx in range(self.mLayout[0]):
for hidx in range(self.mLayout[1]):
temp = "CB_R"+str(widx)+"_C"+str(hidx)
retVal.append(temp)
return retVal
def getNumFields(self):
"""
This method returns the total number of fields in the feature vector.
"""
return self.mNumCodes
| bsd-3-clause |
chenxuhua/linux | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | 3596 | # EventClass.py
#
# This is a library defining some events types classes, which could
# be used by other scripts to analyzing the perf samples.
#
# Currently there are just a few classes defined for examples,
# PerfEvent is the base class for all perf event sample, PebsEvent
# is a HW base Intel x86 PEBS event, and user could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have good way to tell the event type, but by
# the size of raw buffer, raw PEBS event with load latency data's
# size is 176 bytes, while the pure PEBS event's size is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
        """Instantiate the event class matching the raw buffer layout.

        The type is inferred from the raw record size: 144 bytes is a
        basic PEBS record, 176 bytes is PEBS plus the load-latency words,
        and anything else falls back to the generic PerfEvent.
        """
        size_to_class = {144: PebsEvent, 176: PebsNHM}
        event_class = size_to_class.get(len(raw_buf), PerfEvent)
        return event_class(name, comm, dso, symbol, raw_buf)
class PerfEvent(object):
        """Base class for all perf event samples; counts instances in
        the class attribute event_num."""
        event_num = 0
        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
                self.name = name
                self.comm = comm
                self.dso = dso
                self.symbol = symbol
                self.raw_buf = raw_buf
                self.ev_type = ev_type
                PerfEvent.event_num += 1
        def show(self):
                # One-line human-readable dump of the sample context.
                print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
        pebs_num = 0
        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
                # The first 80 bytes are ten little-endian u64 fields:
                # EFLAGS, IP, then the eight general-purpose registers.
                tmp_buf=raw_buf[0:80]
                flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
                self.flags = flags
                self.ip = ip
                self.ax = ax
                self.bx = bx
                self.cx = cx
                self.dx = dx
                self.si = si
                self.di = di
                self.bp = bp
                self.sp = sp
                PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsEvent.pebs_num += 1
                del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
#       Status: records the IA32_PERF_GLOBAL_STATUS register value
#       DLA:    Data Linear Address (EIP)
#       DSE:    Data Source Encoding, where the latency happens, hit or miss
#               in L1/L2/L3 or IO operations
#       LAT:    the actual latency in cycles
#
class PebsNHM(PebsEvent):
        pebs_nhm_num = 0
        def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
                # The load-latency words occupy bytes 144..176 of the record,
                # after the basic PEBS data parsed by PebsEvent.
                tmp_buf=raw_buf[144:176]
                status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
                self.status = status
                self.dla = dla
                self.dse = dse
                self.lat = lat
                PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
                PebsNHM.pebs_nhm_num += 1
                del tmp_buf
| gpl-2.0 |
willingc/oh-mainline | vendor/packages/scrapy/scrapy/tests/test_utils_signal.py | 44 | 2995 | from twisted.trial import unittest
from twisted.python import log as txlog
from twisted.python.failure import Failure
from twisted.internet import defer, reactor
from scrapy.xlib.pydispatch import dispatcher
from scrapy.utils.signal import send_catch_log, send_catch_log_deferred
from scrapy import log
class SendCatchLogTest(unittest.TestCase):
    """send_catch_log: both handlers run; the raising one is logged and
    reported as a Failure, the succeeding one returns its value."""
    @defer.inlineCallbacks
    def test_send_catch_log(self):
        test_signal = object()
        handlers_called = set()
        def log_received(event):
            # the error from error_handler must be logged at ERROR level
            handlers_called.add(log_received)
            assert "error_handler" in event['message'][0]
            assert event['logLevel'] == log.ERROR
        txlog.addObserver(log_received)
        dispatcher.connect(self.error_handler, signal=test_signal)
        dispatcher.connect(self.ok_handler, signal=test_signal)
        result = yield defer.maybeDeferred(self._get_result, test_signal, arg='test', \
            handlers_called=handlers_called)
        assert self.error_handler in handlers_called
        assert self.ok_handler in handlers_called
        assert log_received in handlers_called
        # results are (handler, outcome) pairs in connect order
        self.assertEqual(result[0][0], self.error_handler)
        self.assert_(isinstance(result[0][1], Failure))
        self.assertEqual(result[1], (self.ok_handler, "OK"))
        txlog.removeObserver(log_received)
        self.flushLoggedErrors()
        dispatcher.disconnect(self.error_handler, signal=test_signal)
        dispatcher.disconnect(self.ok_handler, signal=test_signal)
    def _get_result(self, signal, *a, **kw):
        # overridden by subclasses to exercise the deferred variant
        return send_catch_log(signal, *a, **kw)
    def error_handler(self, arg, handlers_called):
        # deliberately raises ZeroDivisionError
        handlers_called.add(self.error_handler)
        a = 1/0
    def ok_handler(self, arg, handlers_called):
        handlers_called.add(self.ok_handler)
        assert arg == 'test'
        return "OK"
class SendCatchLogDeferredTest(SendCatchLogTest):
    """Same scenario through send_catch_log_deferred."""
    def _get_result(self, signal, *a, **kw):
        return send_catch_log_deferred(signal, *a, **kw)
class SendCatchLogDeferredTest2(SendCatchLogTest):
    """Deferred variant where the ok handler itself returns a Deferred."""
    def ok_handler(self, arg, handlers_called):
        handlers_called.add(self.ok_handler)
        assert arg == 'test'
        d = defer.Deferred()
        reactor.callLater(0, d.callback, "OK")
        return d
    def _get_result(self, signal, *a, **kw):
        return send_catch_log_deferred(signal, *a, **kw)
class SendCatchLogTest2(unittest.TestCase):
    """Plain send_catch_log must log an error if a handler returns a
    Deferred, which it cannot support."""
    def test_error_logged_if_deferred_not_supported(self):
        test_signal = object()
        test_handler = lambda: defer.Deferred()
        log_events = []
        txlog.addObserver(log_events.append)
        dispatcher.connect(test_handler, test_signal)
        send_catch_log(test_signal)
        self.failUnless(log_events)
        self.failUnless("Cannot return deferreds from signal handler" in str(log_events))
        txlog.removeObserver(log_events.append)
        self.flushLoggedErrors()
        dispatcher.disconnect(test_handler, test_signal)
| agpl-3.0 |
zhouyuan/teuthology | docs/conf.py | 10 | 8236 | # -*- coding: utf-8 -*-
#
# teuthology documentation build configuration file, created by
# sphinx-quickstart on Thu Aug 7 12:30:36 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
# Sphinx build configuration for the teuthology docs (sphinx-quickstart
# boilerplate; only values that differ from the defaults are uncommented).
extensions = [
    'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'contents'
# General information about the project.
project = u'teuthology'
copyright = u'2014, Inktank Storage, Inc.'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.0'
# The full version, including alpha/beta/rc tags.
release = '0.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
# NOTE: 'ceph' is not a builtin theme; it is loaded from the local _themes
# directory configured via html_theme_path below.
html_theme = 'ceph'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'teuthologydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'teuthology.tex', u'teuthology Documentation',
   u'Inktank Storage, Inc.', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'teuthology', u'teuthology Documentation',
     [u'Inktank Storage, Inc.'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'teuthology', u'teuthology Documentation',
   u'Inktank Storage, Inc.', 'teuthology', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit |
wesley1001/dokomoforms | dokomoforms/handlers/api/batch.py | 1 | 1110 | """API endpoints dealing with batch operations."""
import tornado.web
import dokomoforms.api.batch as batch_api
from dokomoforms.db.survey import IncorrectQuestionIdError
from dokomoforms.handlers.util.base import APIHandler, \
catch_bare_integrity_error, \
get_json_request_body, validation_message
class BatchSubmissionAPIHandler(APIHandler):
    """Handle POST requests containing a batch of submissions."""

    @catch_bare_integrity_error
    def post(self, survey_id: str):
        """Create the submissions in the posted batch.

        Writes the batch result and responds 201 on success; raises an
        HTTP 422 with a validation message on any invalid input.
        """
        body = get_json_request_body(self)
        # The survey_id embedded in the payload must match the URL's.
        if body.get('survey_id', None) != survey_id:
            msg = validation_message('submission', 'survey_id', 'invalid')
            raise tornado.web.HTTPError(422, reason=msg)
        try:
            self.write(batch_api.submit(self.db, body))
            self.set_status(201)
        except KeyError as e:
            msg = validation_message('submission', str(e), 'missing_field')
            raise tornado.web.HTTPError(422, reason=msg)
        except IncorrectQuestionIdError:
            msg = validation_message('submission', 'question_id', 'invalid')
            raise tornado.web.HTTPError(422, reason=msg)
| gpl-3.0 |
UnknownStudio/Codeic | ScratchPlus/kurt/kurt/__init__.py | 2 | 74227 | # Copyright (C) 2012 Tim Radvan
#
# This file is part of Kurt.
#
# Kurt is free software: you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# Kurt is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Kurt. If not, see <http://www.gnu.org/licenses/>.
"""
A Python module for reading and writing Scratch project files.
Scratch is created by the Lifelong Kindergarten Group at the MIT Media Lab.
See their website: http://scratch.mit.edu/
Classes
-------
The main interface:
* :class:`Project`
The following :class:`Actors <Actor>` may be found on the project stage:
* :class:`Stage`
* :class:`Sprite`
* :class:`Watcher`
The two :class:`Scriptables <Scriptable>` (:class:`Stage` and :class:`Sprite`)
have instances of the following contained in their attributes:
* :class:`Variable`
* :class:`List`
Scripts use the following classes:
* :class:`Block`
* :class:`Script`
* :class:`Comment`
* :class:`BlockType`
Media files use the following classes:
* :class:`Costume`
* :class:`Image`
* :class:`Sound`
* :class:`Waveform`
File Formats
------------
Supported file formats:
=============== =========== =========
Format Name Description Extension
=============== =========== =========
``"scratch14"`` Scratch 1.4 ``.sb``
``"scratch20"`` Scratch 2.0 ``.sb2``
=============== =========== =========
Pass "Format name" as the argument to :attr:`Project.convert`.
Kurt provides a superset of the information in each individual format, but will
only convert features between a subset of formats.
----
"""
__version__ = '2.0.7'
from collections import OrderedDict
import re
import os
import random
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import PIL.Image
import wave
#-- Utils --#
def _clean_filename(name):
"""Strip non-alphanumeric characters to makes name safe to be used as
filename."""
return re.sub("[^\w .]", "", name)
#-- Project: main class --#
class Project(object):
"""The main kurt class. Stores the contents of a project file.
Contents include global variables and lists, the :attr:`stage` and
:attr:`sprites`, each with their own :attr:`scripts`, :attr:`costumes`,
:attr:`sounds`, :attr:`variables` and :attr:`lists`.
A Project can be loaded from or saved to disk in a format which can be read
by a Scratch program or one of its derivatives.
Loading a project::
p = kurt.Project.load("tests/game.sb")
Getting all the scripts::
for scriptable in p.sprites + [p.stage]:
for script in scriptable.scripts:
print script
Creating a new project::
p = kurt.Project()
Converting between formats::
p = kurt.Project.load("tests/game.sb")
p.convert("scratch20")
# []
p.save()
# 'tests/game.sb2'
"""
def __init__(self):
self.name = u""
"""The name of the project.
May be displayed to the user. Doesn't have to match the filename in
:attr:`path`. May not be saved for some formats.
"""
self.path = None
"""The path to the project file."""
self._plugin = None
"""The file format plugin used to load this project.
Get the current format using the :attr:`format` property. Use
:attr:`convert()` to change between formats.
"""
self.stage = Stage(self)
"""The :class:`Stage`."""
self.sprites = []
"""List of :class:`Sprites <Sprite>`.
Use :attr:`get_sprite` to get a sprite by name.
"""
self.actors = []
"""List of each :class:`Actor` on the stage.
Includes :class:`Watchers <Watcher>` as well as :class:`Sprites
<Sprite>`.
Sprites in :attr:`sprites` but not in actors will be added to actors on
save.
"""
self.variables = {}
""":class:`dict` of global :class:`Variables <Variable>` by name."""
self.lists = {}
""":class:`dict` of global :class:`Lists <List>` by name."""
self.thumbnail = None
"""An :class:`Image` with a screenshot of the project."""
self.tempo = 60
"""The tempo in BPM used for note blocks."""
self.notes = u"Made with Kurt\nhttp://github.com/blob8108/kurt"
"""Notes about the project, aka project comments.
Displayed on the website next to the project.
Line endings will be converted to ``\\n``.
"""
self.author = u""
"""The username of the project's author, eg. ``'blob8108'``."""
def __repr__(self):
return "<%s.%s()>" % (self.__class__.__module__,
self.__class__.__name__)
def get_sprite(self, name):
"""Get a sprite from :attr:`sprites` by name.
Returns None if the sprite isn't found.
"""
for sprite in self.sprites:
if sprite.name == name:
return sprite
@property
def format(self):
"""The file format of the project.
:class:`Project` is mainly a universal representation, and so a project
has no specfic format. This is the format the project was loaded with.
To convert to a different format, use :attr:`save()`.
"""
if self._plugin:
return self._plugin.name
@classmethod
def load(cls, path, format=None):
"""Load project from file.
Use ``format`` to specify the file format to use.
Path can be a file-like object, in which case format is required.
Otherwise, can guess the appropriate format from the extension.
If you pass a file-like object, you're responsible for closing the
file.
:param path: Path or file pointer.
:param format: :attr:`KurtFileFormat.name` eg. ``"scratch14"``.
Overrides the extension.
:raises: :class:`UnknownFormat` if the extension is unrecognised.
:raises: :py:class:`ValueError` if the format doesn't exist.
"""
path_was_string = isinstance(path, basestring)
if path_was_string:
(folder, filename) = os.path.split(path)
(name, extension) = os.path.splitext(filename)
if format is None:
plugin = kurt.plugin.Kurt.get_plugin(extension=extension)
if not plugin:
raise UnknownFormat(extension)
fp = open(path, "rb")
else:
fp = path
assert format, "Format is required"
plugin = kurt.plugin.Kurt.get_plugin(format)
if not plugin:
raise ValueError, "Unknown format %r" % format
project = plugin.load(fp)
if path_was_string:
fp.close()
project.convert(plugin)
if isinstance(path, basestring):
project.path = path
if not project.name:
project.name = name
return project
def copy(self):
"""Return a new Project instance, deep-copying all the attributes."""
p = Project()
p.name = self.name
p.path = self.path
p._plugin = self._plugin
p.stage = self.stage.copy()
p.stage.project = p
for sprite in self.sprites:
s = sprite.copy()
s.project = p
p.sprites.append(s)
for actor in self.actors:
if isinstance(actor, Sprite):
p.actors.append(p.get_sprite(actor.name))
else:
a = actor.copy()
if isinstance(a, Watcher):
if isinstance(a.target, Project):
a.target = p
elif isinstance(a.target, Stage):
a.target = p.stage
else:
a.target = p.get_sprite(a.target.name)
p.actors.append(a)
p.variables = dict((n, v.copy()) for (n, v) in self.variables.items())
p.lists = dict((n, l.copy()) for (n, l) in self.lists.items())
p.thumbnail = self.thumbnail
p.tempo = self.tempo
p.notes = self.notes
p.author = self.author
return p
def convert(self, format):
"""Convert the project in-place to a different file format.
Returns a list of :class:`UnsupportedFeature` objects, which may give
warnings about the conversion.
:param format: :attr:`KurtFileFormat.name` eg. ``"scratch14"``.
:raises: :class:`ValueError` if the format doesn't exist.
"""
self._plugin = kurt.plugin.Kurt.get_plugin(format)
return list(self._normalize())
def save(self, path=None, debug=False):
"""Save project to file.
:param path: Path or file pointer.
If you pass a file pointer, you're responsible for closing
it.
If path is not given, the :attr:`path` attribute is used,
usually the original path given to :attr:`load()`.
If `path` has the extension of an existing plugin, the
project will be converted using :attr:`convert`.
Otherwise, the extension will be replaced with the
extension of the current plugin.
(Note that log output for the conversion will be printed
to stdout. If you want to deal with the output, call
:attr:`convert` directly.)
If the path ends in a folder instead of a file, the
filename is based on the project's :attr:`name`.
:param debug: If true, return debugging information from the format
plugin instead of the path.
:raises: :py:class:`ValueError` if there's no path or name.
:returns: path to the saved file.
"""
p = self.copy()
plugin = p._plugin
# require path
p.path = path or self.path
if not p.path:
raise ValueError, "path is required"
if isinstance(p.path, basestring):
# split path
(folder, filename) = os.path.split(p.path)
(name, extension) = os.path.splitext(filename)
# get plugin from extension
if path: # only if not using self.path
try:
plugin = kurt.plugin.Kurt.get_plugin(extension=extension)
except ValueError:
pass
# build output path
if not name:
name = _clean_filename(self.name)
if not name:
raise ValueError, "name is required"
filename = name + plugin.extension
p.path = os.path.join(folder, filename)
# open
fp = open(p.path, "wb")
else:
fp = p.path
path = None
if not plugin:
raise ValueError, "must convert project to a format before saving"
for m in p.convert(plugin):
print m
result = p._save(fp)
if path:
fp.close()
return result if debug else p.path
def _save(self, fp):
return self._plugin.save(fp, self)
def _normalize(self):
"""Convert the project to a standardised form for the current plugin.
Called after loading, before saving, and when converting to a new
format.
Yields UnsupportedFeature instances.
"""
unique_sprite_names = set(sprite.name for sprite in self.sprites)
if len(unique_sprite_names) < len(self.sprites):
raise ValueError, "Sprite names must be unique"
# sync self.sprites and self.actors
for sprite in self.sprites:
if sprite not in self.actors:
self.actors.append(sprite)
for actor in self.actors:
if isinstance(actor, Sprite):
if actor not in self.sprites:
raise ValueError, \
"Can't have sprite on stage that isn't in sprites"
# normalize Scriptables
self.stage._normalize()
for sprite in self.sprites:
sprite._normalize()
# normalize actors
for actor in self.actors:
if not isinstance(actor, Scriptable):
actor._normalize()
# make Watchers if needed
for thing in [self, self.stage] + self.sprites:
for (name, var) in thing.variables.items():
if not var.watcher:
var.watcher = kurt.Watcher(thing,
kurt.Block("var", name), is_visible=False)
self.actors.append(var.watcher)
for (name, list_) in thing.lists.items():
if not list_.watcher:
list_.watcher = kurt.Watcher(thing,
kurt.Block("list", name), is_visible=False)
self.actors.append(list_.watcher)
# notes - line endings
self.notes = self.notes.replace("\r\n", "\n").replace("\r", "\n")
# convert scripts
def convert_block(block):
# convert block
try:
if isinstance(block.type, CustomBlockType):
if "Custom Blocks" not in self._plugin.features:
raise BlockNotSupported(
"%s doesn't support custom blocks"
% self._plugin.display_name)
else: # BlockType
pbt = block.type.convert(self._plugin)
except BlockNotSupported, err:
err.message += ". Caused by: %r" % block
err.block = block
err.scriptable = scriptable
err.args = (err.message,)
if getattr(block.type, '_workaround', None):
block = block.type._workaround(block)
if not block:
raise
else:
raise
# convert args
args = []
for arg in block.args:
if isinstance(arg, Block):
arg = convert_block(arg)
elif isinstance(arg, list):
arg = map(convert_block, arg)
args.append(arg)
block.args = args
return block
for scriptable in [self.stage] + self.sprites:
for script in scriptable.scripts:
if isinstance(script, Script):
script.blocks = map(convert_block, script.blocks)
# workaround unsupported features
for feature in kurt.plugin.Feature.FEATURES.values():
if feature not in self._plugin.features:
for x in feature.workaround(self):
yield UnsupportedFeature(feature, x)
# normalize supported features
for feature in self._plugin.features:
feature.normalize(self)
def get_broadcasts(self):
def get_broadcasts(block):
for (arg, insert) in zip(block.args, block.type.inserts):
if isinstance(arg, Block):
for b in get_broadcasts(arg):
yield b
elif isinstance(arg, list):
for arg_block in arg:
for b in get_broadcasts(arg_block):
yield b
elif insert.kind == "broadcast":
yield arg
for scriptable in [self.stage] + self.sprites:
for script in scriptable.scripts:
for block in script.blocks:
for b in get_broadcasts(block):
yield b
class UnsupportedFeature(object):
    """The plugin doesn't support this Feature.
    Output once by Project.convert for each occurrence of the feature.
    """
    def __init__(self, feature, obj):
        # Canonicalise via the plugin registry (presumably accepts either a
        # Feature instance or its name -- confirm against kurt.plugin).
        self.feature = kurt.plugin.Feature.get(feature)
        self.obj = obj
    def __repr__(self):
        return "<%s.%s(%s)>" % (self.__class__.__module__,
                self.__class__.__name__, unicode(self))
    def __str__(self):
        return "UnsupportedFeature: %s" % unicode(self)
    def __unicode__(self):
        return u"%r: %r" % (self.feature.name, self.obj)
#-- Errors --#
class UnknownFormat(Exception):
    """The file extension is not recognised.
    Raised by :attr:`Project.load` when it can't find a valid format plugin to
    handle the file extension.
    """
    pass
class UnknownBlock(Exception):
    """A :class:`Block` with the given command or type cannot be found.
    Raised by :attr:`BlockType.get`.
    """
    # No additional behaviour; exists so callers can catch this specific
    # failure mode.
class BlockNotSupported(Exception):
    """The plugin doesn't support this Block.
    Raised by :attr:`Block.convert` when it can't find a
    :class:`PluginBlockType` for the given plugin. During
    :attr:`Project._normalize` the error is annotated with ``.block`` and
    ``.scriptable`` attributes identifying the offending block.
    """
    pass
class VectorImageError(Exception):
    """Tried to construct a raster image from a vector format image file.
    You shouldn't usually get this error, because Feature("Vector Images")
    will give a warning instead when the Project is converted.
    """
    pass
#-- Actors & Scriptables --#
class Actor(object):
    """An object that goes on the project stage.
    Actors are stored in :attr:`Project.actors`.
    Subclasses include :class:`Watcher` or :class:`Sprite`.
    """
class Scriptable(object):
    """Superclass for all scriptable objects.
    Subclasses are :class:`Stage` and :class:`Sprite`.
    """
    def __init__(self, project):
        self.project = project
        """The :class:`Project` this belongs to."""
        self.scripts = []
        """The contents of the scripting area.
        List containing :class:`Scripts <Script>` and :class:`Comments
        <Comment>`.
        Will be sorted by y position on load/save.
        """
        self.custom_blocks = {}
        """Scripts for custom blocks, indexed by :class:`CustomBlockType`."""
        self.variables = {}
        """:class:`dict` of :class:`Variables <Variable>` by name."""
        self.lists = {}
        """:class:`dict` of :class:`Lists <List>` by name."""
        self.costumes = []
        """List of :class:`Costumes <Costume>`."""
        self.sounds = []
        """List of :class:`Sounds <Sound>`."""
        self.costume = None
        """The currently selected :class:`Costume`.
        Defaults to the first costume in :attr:`self.costumes` on save.
        If a sprite doesn't have a costume, a black 1x1 pixel square will be
        used.
        """
        self.volume = 100
        """The volume in percent used for note and sound blocks."""
    def _normalize(self):
        # costumes
        if self.costume:
            # Make sure it's in costumes
            if self.costume not in self.costumes:
                self.costumes.append(self.costume)
        else:
            # No costume!
            if self.costumes:
                self.costume = self.costumes[0]
            else:
                # Fall back to a 1x1 black pixel costume.
                BLACK = (0, 0, 0)
                self.costume = Costume("blank", Image.new((1, 1), BLACK))
                self.costumes = [self.costume]
        # scripts
        for script in self.scripts:
            script._normalize()
        # sort scripts by y position (scripts without a position sink to the
        # end, keeping their relative order)
        have_position = [s for s in self.scripts if s.pos]
        no_position = [s for s in self.scripts if not s.pos]
        have_position.sort(key=lambda s: (s.pos[1], s.pos[0]))
        self.scripts = have_position + no_position
    def copy(self, o=None):
        """Return a new instance, deep-copying all the attributes."""
        if o is None: o = self.__class__(self.project)
        o.scripts = [s.copy() for s in self.scripts]
        o.variables = dict((n, v.copy()) for (n, v) in self.variables.items())
        o.lists = dict((n, l.copy()) for (n, l) in self.lists.items())
        o.costumes = [c.copy() for c in self.costumes]
        o.sounds = [s.copy() for s in self.sounds]
        # Setting costume_index points o.costume at o's copied costume.
        o.costume_index = self.costume_index
        o.volume = self.volume
        return o
    @property
    def costume_index(self):
        """The index of :attr:`costume` in :attr:`costumes`.
        None if no costume is selected.
        """
        if self.costume:
            return self.costumes.index(self.costume)
    @costume_index.setter
    def costume_index(self, index):
        if index is None:
            self.costume = None
        else:
            self.costume = self.costumes[index]
    def parse(self, text):
        """Parse the given code and add it to :attr:`scripts`.
        The syntax matches :attr:`Script.stringify()`. See :mod:`kurt.text` for
        reference.
        """
        self.scripts.append(kurt.text.parse(text, self))
class Stage(Scriptable):
    """Represents the background of the project. The stage is similar to a
    :class:`Sprite`, but has a fixed position. The stage has a fixed size of
    ``480x360`` pixels.
    The stage does not require a costume. If none is given, it is assumed to be
    white (#FFF).
    Not all formats have stage-specific variables and lists. Global variables
    and lists are stored on the :class:`Project`.
    :param project: The :class:`Project` this Stage belongs to.
                    Note that you still need to set :attr:`Project.stage` to
                    this Stage instance.
    """
    # Class-level constants: the stage never moves, is always visible, and
    # has a fixed size and default (white) colour.
    name = "Stage"
    is_draggable = False
    is_visible = True
    SIZE = (480, 360)
    COLOR = (255, 255, 255)
    def __init__(self, project):
        Scriptable.__init__(self, project)
    @property
    def backgrounds(self):
        """Alias for :attr:`costumes`."""
        return self.costumes
    @backgrounds.setter
    def backgrounds(self, value):
        self.costumes = value
    def __repr__(self):
        return "<%s.%s()>" % (self.__class__.__module__,
                self.__class__.__name__)
    def _normalize(self):
        # Unlike Scriptable._normalize's 1x1 black fallback, a bare stage
        # gets a full-size white costume before delegating.
        if not self.costume and not self.costumes:
            self.costume = Costume("blank", Image.new(self.SIZE, self.COLOR))
        Scriptable._normalize(self)
class Sprite(Scriptable, Actor):
    """A scriptable object displayed on the project stage. Can be moved and
    rotated, unlike the :class:`Stage`.
    Sprites require a :attr:`costume`, and will raise an error when saving
    without one.
    :param project: The :class:`Project` this Sprite belongs to.
                    Note that you still need to add this sprite to
                    :attr:`Project.sprites`.
    """
    def __init__(self, project, name):
        Scriptable.__init__(self, project)
        # Coerce to unicode so names compare consistently (Python 2 codebase).
        self.name = unicode(name)
        """The name of the sprite, as referred to from scripts and displayed in
        the Scratch interface.
        """
        self.position = (0, 0)
        """The ``(x, y)`` position of the centre of the sprite in Scratch
        co-ordinates.
        """
        self.direction = 90.0
        """The angle in degrees the sprite is rotated to."""
        self.rotation_style = "normal"
        """How the sprite's costume rotates with the sprite. Valid values are:
        ``'normal'``
            Continuous rotation with :attr:`direction`. The default.
        ``'leftRight'``
            Don't rotate. Instead, flip the costume for directions with x
            component < 0. Useful for side-views.
        ``'none'``
            Don't rotate with direction.
        """
        self.size = 100.0
        """The scale factor of the sprite in percent. Defaults to 100."""
        self.is_draggable = False
        """True if the sprite can be dragged using the mouse in the
        player/presentation mode.
        """
        self.is_visible = True
        """Whether the sprite is shown on the stage. False if the sprite is
        hidden.
        """
    def _normalize(self):
        Scriptable._normalize(self)
        # Catch invalid rotation styles before a plugin tries to save them.
        assert self.rotation_style in ("normal", "leftRight", "none")
    def copy(self):
        """Return a new instance, deep-copying all the attributes."""
        o = self.__class__(self.project, self.name)
        Scriptable.copy(self, o)
        o.position = tuple(self.position)
        o.direction = self.direction
        o.rotation_style = self.rotation_style
        o.size = self.size
        o.is_draggable = self.is_draggable
        o.is_visible = self.is_visible
        return o
    def __repr__(self):
        return "<%s.%s(%r)>" % (self.__class__.__module__,
                self.__class__.__name__, self.name)
class Watcher(Actor):
    """A monitor for displaying a data value on the stage.
    Some formats won't save hidden watchers, and so their position won't be
    remembered.
    """
    def __init__(self, target, block, style="normal", is_visible=True,
                 pos=None):
        Actor.__init__(self)
        assert target is not None
        self.target = target
        """The :attr:`Scriptable` or :attr:`Project` the watcher belongs to.
        """
        self.block = block
        """The :attr:`Block` to evaluate on :attr:`target`.
        For variables::
            kurt.Block('readVariable', 'variable name')
        For lists::
            kurt.Block('contentsOfList:', 'list name')
        """
        self.style = str(style)
        """How the watcher should appear.
        Valid values:
        ``'normal'``
            The name of the data is displayed next to its value. The only
            valid value for list watchers.
        ``'large'``
            The data is displayed in a larger font with no describing text.
        ``'slider'``
            Like the normal style, but displayed with a slider that can change
            the variable's value. Not valid for reporter block watchers.
        """
        self.pos = pos
        """``(x, y)`` position of the top-left of the watcher from the top-left
        of the stage in pixels. None if not specified.
        """
        self.is_visible = bool(is_visible)
        """Whether the watcher is displayed on the screen.
        Some formats won't save hidden watchers, and so their position won't be
        remembered.
        """
        self.slider_min = 0
        """Minimum value for slider. Only applies to ``"slider"`` style."""
        self.slider_max = 100
        """Maximum value for slider. Only applies to ``"slider"`` style."""
        self._normalize()
    def _normalize(self):
        assert self.style in ("normal", "large", "slider")
        # Attach this watcher to the variable/list it displays; block
        # watchers have no value (self.value is None) and are skipped.
        if self.value:
            self.value.watcher = self
    def copy(self):
        """Return a new instance with the same attributes."""
        o = self.__class__(self.target,
                self.block.copy(),
                self.style,
                self.is_visible,
                self.pos)
        o.slider_min = self.slider_min
        o.slider_max = self.slider_max
        return o
    @property
    def kind(self):
        """The type of value to watch, based on :attr:`block`.
        One of ``variable``, ``list``, or ``block``.
        ``block`` watchers watch the value of a reporter block.
        """
        # NOTE(review): Project._normalize builds watchers with
        # Block("var", ...) / Block("list", ...); has_command presumably
        # matches those against 'readVariable'/'contentsOfList:' -- confirm
        # against BlockType.
        if self.block.type.has_command('readVariable'):
            return 'variable'
        elif self.block.type.has_command('contentsOfList:'):
            return 'list'
        else:
            return 'block'
    @property
    def value(self):
        """Return the :class:`Variable` or :class:`List` to watch.
        Returns ``None`` if it's a block watcher.
        """
        if self.kind == 'variable':
            return self.target.variables[self.block.args[0]]
        elif self.kind == 'list':
            return self.target.lists[self.block.args[0]]
    def __repr__(self):
        r = "%s.%s(%r, %r" % (self.__class__.__module__,
                self.__class__.__name__, self.target, self.block)
        if self.style != "normal":
            r += ", style=%r" % self.style
        if not self.is_visible:
            r += ", is_visible=False"
        if self.pos:
            r += ", pos=%s" % repr(self.pos)
        r += ")"
        return r
#-- Variables --#
class Variable(object):
    """A memory value used in scripts.
    There are both :attr:`global variables <Project.variables>` and
    :attr:`sprite-specific variables <Sprite.variables>`.
    Some formats also have :attr:`stage-specific variables <Stage.variables>`.
    """
    def __init__(self, value=0, is_cloud=False):
        # The value of the variable, usually a number or a string. For some
        # formats, variables can take list values, and List is not used.
        self.value = value
        # Whether the value is shared with other users (Scratch 2.0).
        self.is_cloud = bool(is_cloud)
        # The Watcher instance displaying this Variable's value, if any.
        self.watcher = None
    def copy(self):
        """Return a new instance with the same attributes."""
        return self.__class__(self.value, self.is_cloud)
    def __repr__(self):
        pieces = ["%s.%s(%r" % (self.__class__.__module__,
                self.__class__.__name__, self.value)]
        if self.is_cloud:
            pieces.append(", is_cloud=%r" % self.is_cloud)
        pieces.append(")")
        return "".join(pieces)
class List(object):
    """A sequence of items used in scripts.
    Each item takes a :class:`Variable`-like value.
    Lists cannot be nested. However, for some formats, variables can take
    list values, and this class is not used.
    """
    def __init__(self, items=None, is_cloud=False):
        self.items = list(items) if items else []
        """The items contained in the list. A Python list of unicode
        strings.
        """
        self.is_cloud = bool(is_cloud)
        """Whether the value of the list is shared with other users.
        For Scratch 2.0.
        """
        self.watcher = None
        """The :class:`Watcher` instance displaying this List's value."""
        self._normalize()
    def _normalize(self):
        # Coerce every item to unicode text.
        self.items = map(unicode, self.items)
    def copy(self):
        """Return a new instance with the same attributes."""
        return self.__class__(self.items, self.is_cloud)
    def __repr__(self):
        # BUG FIX: the closing ">" was previously emitted before the
        # is_cloud suffix, and a stray ")" was always appended, producing
        # malformed reprs like ``<kurt.List(3 items)>)``. Build the repr in
        # order and close it once at the end (mirroring Variable.__repr__).
        r = "<%s.%s(%i items" % (self.__class__.__module__,
                self.__class__.__name__, len(self.items))
        if self.is_cloud:
            r += ", is_cloud=%r" % self.is_cloud
        r += ")>"
        return r
#-- Color --#
class Color(object):
    """A 24-bit RGB color value.

    Accepts tuple or hexcode arguments::

        >>> kurt.Color('#f08')
        kurt.Color(255, 0, 136)

        >>> kurt.Color((255, 0, 136))
        kurt.Color(255, 0, 136)

        >>> kurt.Color('#f0ffee')
        kurt.Color(240, 255, 238)

    """

    def __init__(self, r, g=None, b=None):
        """Accept another Color, a "#rgb"/"#rrggbb" hexcode string, an
        (r, g, b) sequence, or three separate component values.

        :raises ValueError: if a hexcode string doesn't start with "#".
        """
        if g is None and b is None:
            # Single-argument forms.
            if isinstance(r, Color):
                r = r.value
            elif isinstance(r, basestring):  # NOTE: Python 2-only builtin
                if not r.startswith("#"):
                    # Fixed: was the Python 2-only statement form
                    # ``raise ValueError, "..."`` (SyntaxError on Python 3).
                    raise ValueError("invalid color hexcode: %r" % r)
                r = r[1:]
                if len(r) == 3:
                    # Expand shorthand "f08" -> "ff0088"
                    r = r[0] + r[0] + r[1] + r[1] + r[2] + r[2]
                split = (r[0:2], r[2:4], r[4:6])
                r = [int(x, 16) for x in split]
            (r, g, b) = r
        self.r = int(r)
        """Red component, 0-255"""
        self.g = int(g)
        """Green component, 0-255"""
        self.b = int(b)
        """Blue component, 0-255"""

    @property
    def value(self):
        """Return ``(r, g, b)`` tuple."""
        return (self.r, self.g, self.b)

    @value.setter
    def value(self, value):
        # Components are stored as-is here (no int() coercion on the setter).
        (self.r, self.g, self.b) = value

    def __eq__(self, other):
        return isinstance(other, Color) and self.value == other.value

    def __ne__(self, other):
        return not self == other

    def __iter__(self):
        return iter(self.value)

    def __repr__(self):
        return "%s.%s(%s)" % (self.__class__.__module__,
                self.__class__.__name__, repr(self.value).strip("()"))

    def stringify(self):
        """Returns the color value in hexcode format.

        eg. ``'#ff1056'``
        """
        hexcode = "#"
        for x in self.value:
            part = hex(x)[2:]
            if len(part) < 2: part = "0" + part
            hexcode += part
        return hexcode

    @classmethod
    def random(cls):
        """Return a Color with each component uniformly random in 0-255."""
        f = lambda: random.randint(0, 255)
        return cls(f(), f(), f())
#-- BlockTypes --#
class Insert(object):
    """The specification for an argument to a :class:`BlockType`."""

    # Default argument value for each insert shape. Shapes not listed
    # default to None.
    SHAPE_DEFAULTS = {
        'number': 0,
        'number-menu': 0,
        'stack': [],
        'color': Color('#f00'),
        'inline': 'nil', # Can't be empty
    }

    # Template used by stringify() to wrap an argument, keyed by shape.
    SHAPE_FMTS = {
        'number': '(%s)',
        'string': '[%s]',
        'readonly-menu': '[%s v]',
        'number-menu': '(%s v)',
        'color': '[%s]',
        'boolean': '<%s>',
        'stack': '\n    %s\n',
        'inline': '%s',
        'block': '{%s}',
    }

    # Fixed menu options for each menu kind. Empty lists mean the options
    # are context-dependent -- see options().
    KIND_OPTIONS = {
        'attribute': ['x position', 'y position', 'direction', 'costume #',
            'size', 'volume'],
        'backdrop': [],
        'booleanSensor': ['button pressed', 'A connected', 'B connected',
            'C connected', 'D connected'],
        'broadcast': [],
        'costume': [],
        'direction': [],
        'drum': range(1, 18),
        'effect': ['color', 'fisheye', 'whirl', 'pixelate', 'mosaic',
            'brightness', 'ghost'],
        'instrument': range(1, 21),
        'key': ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b',
            'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o',
            'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', 'space',
            'left arrow', 'right arrow', 'up arrow', 'down arrow'],
        'list': [],
        'listDeleteItem': ['last', 'all'],
        'listItem': ['last', 'random'],
        'mathOp': ['abs', 'floor', 'ceiling', 'sqrt', 'sin', 'cos', 'tan',
            'asin', 'acos', 'atan', 'ln', 'log', 'e ^', '10 ^'],
        'motorDirection': ['this way', 'that way', 'reverse'],
        'note': [],
        'rotationStyle': ['left-right', "don't rotate", 'all around'],
        'sensor': ['slider', 'light', 'sound', 'resistance-A', 'resistance-B',
            'resistance-C', 'resistance-D'],
        'sound': [],
        'spriteOnly': ['myself'],
        'spriteOrMouse': ['mouse-pointer'],
        'spriteOrStage': ['Stage'],
        'stageOrThis': ['Stage'], # ? TODO
        'stop': ['all', 'this script', 'other scripts in sprite'],
        'timeAndDate': ['year', 'month', 'date', 'day of week', 'hour',
            'minute', 'second'],
        'touching': ['mouse-pointer', 'edge'],
        'triggerSensor': ['loudness', 'timer', 'video motion'],
        'var': [],
        'videoMotionType': ['motion', 'direction'],
        'videoState': ['off', 'on', 'on-flipped'],
    }

    def __init__(self, shape, kind=None, default=None, name=None,
                 unevaluated=None):
        self.shape = shape
        """What kind of values this argument accepts.

        Shapes that accept a simple data value or a reporter block:

        ``'number'``
            An integer or float number. Defaults to ``0``.

        ``'string'``
            A unicode text value.

        ``'readonly-menu'``
            A choice of string value from a menu.
            Some readonly inserts do not accept reporter blocks.

        ``'number-menu'``
            Either a number value, or a choice of special value from a menu.
            Defaults to ``0``.

        ``'color'``
            A :class:`Color` value. Defaults to a random color.

        Shapes that only accept blocks with the corresponding :attr:`shape`:

        ``'boolean'``
            Accepts a boolean block.

        ``'stack'``
            Accepts a list of stack blocks. Defaults to ``[]``.
            The block is rendered with a "mouth" into which blocks can be
            inserted.

        Special shapes:

        ``'inline'``
            Not actually an insert -- used for variable and list reporters.

        ``'block'``
            Used for the argument to the "define ..." hat block.
        """
        self.kind = kind
        """Valid arguments for a "menu"-shaped insert. Default is ``None``.

        Valid values include: ``'attribute'``, ``'booleanSensor'``,
        ``'broadcast'``, ``'costume'``, ``'direction'``, ``'drum'``,
        ``'effect'``, ``'instrument'``, ``'key'``, ``'list'``,
        ``'listDeleteItem'``, ``'listItem'``, ``'mathOp'``,
        ``'motorDirection'``, ``'note'``, ``'sensor'``, ``'sound'``,
        ``'spriteOrMouse'``, ``'spriteOrStage'``, ``'touching'``, ``'var'``.

        Scratch 2.0-specific: ``'backdrop'``, ``'rotationStyle'``,
        ``'spriteOnly'``, ``'stageOrThis'``, ``'stop'``, ``'timeAndDate'``,
        ``'triggerSensor'``, ``'videoMotionType'``, ``'videoState'``.
        """
        # NOTE(review): ``or`` means a *falsy* explicit default (0, "", [])
        # is silently replaced by the shape default -- confirm that's
        # intended rather than ``if default is None``.
        self.default = default or Insert.SHAPE_DEFAULTS.get(shape, None)
        """The default value for the insert."""
        if unevaluated is None:
            unevaluated = True if shape == 'stack' else False
        self.unevaluated = unevaluated
        """True if the interpreter should evaluate the argument to the block.

        Defaults to True for 'stack' inserts, False for all others.
        """
        self.name = name
        """The name of the parameter to a :class:`CustomBlockType`.

        Not used for :class:`BlockTypes <BlockType>`.
        """

    def __repr__(self):
        r = "%s.%s(%r" % (self.__class__.__module__,
                self.__class__.__name__, self.shape)
        if self.kind != None:
            r += ", %r" % self.kind
        if self.default != Insert.SHAPE_DEFAULTS.get(self.shape, None):
            r += ", default=%r" % self.default
        if self.unevaluated:
            r += ", unevaluated=%r" % self.unevaluated
        if self.name:
            r += ", name=%r" % self.name
        r += ")"
        return r

    def __eq__(self, other):
        # NOTE(review): returns None (falsy) rather than False when `other`
        # is not an Insert. The `else` belongs to the `for` loop: True only
        # when no attribute differed.
        if isinstance(other, Insert):
            for name in ("shape", "kind", "default", "unevaluated"):
                if getattr(self, name) != getattr(other, name):
                    return False
            else:
                return True

    def __ne__(self, other):
        return not self == other

    def copy(self):
        # Return a new Insert with the same attributes.
        return Insert(self.shape, self.kind, self.default, self.name,
                self.unevaluated)

    def stringify(self, value=None, block_plugin=False):
        # Render an argument value inside this insert's brackets.
        if value is None or (value is False and self.shape == "boolean"):
            value = self.default
        if value is None:
            value = ""
        if isinstance(value, Block): # use block's shape
            return value.stringify(block_plugin, in_insert=True)
        else:
            if hasattr(value, "stringify"):
                value = value.stringify()
            elif isinstance(value, list):
                # 'stack' inserts hold a list of blocks: render one per
                # line, indented to sit inside the mouth.
                value = "\n".join(block.stringify(block_plugin)
                        for block in value)
                if self.shape == 'stack':
                    value = value.replace("\n", "\n    ")
            # NOTE(review): ``self.shape in 'stack'`` is a substring test,
            # not equality; it behaves like ``== 'stack'`` for every shape
            # actually defined, but ``==`` was probably meant.
            if block_plugin or self.shape in 'stack':
                value = Insert.SHAPE_FMTS.get(self.shape, '%s') % (value,)
            elif self.shape == 'string' or self.kind == 'broadcast':
                # Quote strings, preferring single quotes; escape whichever
                # quote character is used.
                value = unicode(value)
                if "'" in value:
                    value = '"%s"' % value.replace('"', '\\"')
                else:
                    value = "'%s'" % value.replace("'", "\\'")
            return value

    def options(self, scriptable=None):
        """Return a list of valid options to a menu insert, given a
        Scriptable for context.

        Mostly complete, excepting 'attribute'.
        """
        # Start from the fixed options, then add context-dependent names.
        options = list(Insert.KIND_OPTIONS.get(self.kind, []))
        if scriptable:
            if self.kind == 'var':
                options += scriptable.variables.keys()
                options += scriptable.project.variables.keys()
            elif self.kind == 'list':
                options += scriptable.lists.keys()
                options += scriptable.project.lists.keys()
            elif self.kind == 'costume':
                options += [c.name for c in scriptable.costumes]
            elif self.kind == 'backdrop':
                options += [c.name for c in scriptable.project.stage.costumes]
            elif self.kind == 'sound':
                options += [c.name for c in scriptable.sounds]
                options += [c.name for c in scriptable.project.stage.sounds]
            elif self.kind in ('spriteOnly', 'spriteOrMouse', 'spriteOrStage',
                    'touching'):
                options += [s.name for s in scriptable.project.sprites]
            elif self.kind == 'attribute':
                pass # TODO
            elif self.kind == 'broadcast':
                options += list(set(scriptable.project.get_broadcasts()))
        return options
class BaseBlockType(object):
    """Base for :class:`BlockType` and :class:`PluginBlockType`.

    Defines common attributes.
    """

    # How stringify() wraps the rendered text, keyed by block shape.
    # Shapes not listed (stack, cap, hat) get no wrapping.
    SHAPE_FMTS = {
        'reporter': '(%s)',
        'boolean': '<%s>',
    }

    def __init__(self, shape, parts):
        self.shape = shape
        """The shape of the block. Valid values:

        ``'stack'``
            The default. Can connect to blocks above and below. Appear
            jigsaw-shaped.

        ``'cap'``
            Stops the script executing after this block. No blocks can be
            connected below them.

        ``'hat'``
            A block that starts a script, such as by responding to an event.
            Can connect to blocks below.

        ``'reporter'``
            Return a value. Can be placed into insert slots of other blocks
            as an argument to that block. Appear rounded.

        ``'boolean'``
            Like reporter blocks, but return a true/false value. Appear
            hexagonal.

        "C"-shaped blocks with "mouths" for stack blocks, such as ``"doIf"``,
        are specified by adding ``Insert('stack')`` to the end of
        :attr:`parts`.
        """
        self.parts = parts
        """A list describing the text and arguments of the block.

        Contains strings, which are part of the text displayed on the block,
        and :class:`Insert` instances, which are arguments to the block.
        """

    @property
    def text(self):
        """The text displayed on the block.

        String containing ``"%s"`` in place of inserts.

        eg. ``'say %s for %s secs'``
        """
        parts = [("%s" if isinstance(p, Insert) else p) for p in self.parts]
        parts = [("%%" if p == "%" else p) for p in parts] # escape percent
        return "".join(parts)

    @property
    def inserts(self):
        """The type of each argument to the block.

        List of :class:`Insert` instances.
        """
        return [p for p in self.parts if isinstance(p, Insert)]

    @property
    def defaults(self):
        """Default values for block inserts. (See :attr:`Block.args`.)"""
        return [i.default for i in self.inserts]

    @property
    def stripped_text(self):
        """The :attr:`text`, with spaces and inserts removed.

        Used by :class:`BlockType.get` to look up blocks.
        """
        # 'inline' inserts (variable/list reporters) keep their default
        # text; every other insert becomes a "%s" placeholder and is
        # stripped below.
        return BaseBlockType._strip_text(
                self.text % tuple((i.default if i.shape == 'inline' else '%s')
                    for i in self.inserts))

    @staticmethod
    def _strip_text(text):
        """Returns text with spaces and inserts removed."""
        # Drop spaces, punctuation, and "%s" insert placeholders.
        text = re.sub(r'[ ,?:]|%s', "", text.lower())
        # Also drop "-" and "%", but only while some text remains, so a
        # purely symbolic text doesn't strip down to the empty string.
        for chr in "-%":
            new_text = text.replace(chr, "")
            if new_text:
                text = new_text
        return text.lower()

    def __repr__(self):
        return "<%s.%s(%r shape=%r)>" % (self.__class__.__module__,
                self.__class__.__name__,
                self.text % tuple(i.stringify(None) for i in self.inserts),
                self.shape)

    def stringify(self, args=None, block_plugin=False, in_insert=False):
        # Render each insert with its argument and substitute into text.
        if args is None: args = self.defaults
        args = list(args)
        r = self.text % tuple(i.stringify(args.pop(0), block_plugin)
                for i in self.inserts)
        # C-shaped blocks (any 'stack' mouth) close with "end".
        for insert in self.inserts:
            if insert.shape == 'stack':
                return r + "end"
        fmt = BaseBlockType.SHAPE_FMTS.get(self.shape, "%s")
        if not block_plugin:
            # Outside block-plugin syntax, reporters and booleans are both
            # rendered with plain parentheses.
            fmt = "%s" if fmt == "%s" else "(%s)"
        if in_insert and fmt == "%s":
            # An unwrapped block inside an insert gets braces so the
            # boundary stays visible.
            fmt = "{%s}"
        return fmt % r

    def has_insert(self, shape):
        """Returns True if any of the inserts have the given shape."""
        for insert in self.inserts:
            if insert.shape == shape:
                return True
        return False
class BlockType(BaseBlockType):
    """The specification for a type of :class:`Block`.

    These are initialiased by :class:`Kurt` by combining
    :class:`PluginBlockType` objects from individual format plugins to
    create a single :class:`BlockType` for each command.
    """

    def __getstate__(self):
        """lambda functions are not pickleable so drop them."""
        copy = self.__dict__.copy()
        copy['_workaround'] = None
        return copy

    def __init__(self, pbt):
        if isinstance(pbt, basestring):  # NOTE: Python 2-only builtin
            raise ValueError("Invalid argument. Did you mean `BlockType.get`?")
        self._plugins = OrderedDict([(pbt.format, pbt)])
        """Stores :class:`PluginBlockType` objects for each plugin name."""
        self._workaround = None

    def _add_conversion(self, plugin, pbt):
        """Add a new PluginBlockType conversion.

        If the plugin already exists, do nothing.
        """
        # The new conversion must agree with this block's shape & inserts.
        assert self.shape == pbt.shape
        assert len(self.inserts) == len(pbt.inserts)
        for (i, o) in zip(self.inserts, pbt.inserts):
            assert i.shape == o.shape
            assert i.kind == o.kind
            assert i.unevaluated == o.unevaluated
        if plugin not in self._plugins:
            self._plugins[plugin] = pbt

    def convert(self, plugin=None):
        """Return a :class:`PluginBlockType` for the given plugin name.

        If plugin is ``None``, return the first registered plugin.

        :raises BlockNotSupported: if the plugin doesn't support this block.
        """
        if plugin:
            plugin = kurt.plugin.Kurt.get_plugin(plugin)
            if plugin.name in self._plugins:
                return self._plugins[plugin.name]
            else:
                err = BlockNotSupported("%s doesn't have %r" %
                        (plugin.display_name, self))
                err.block_type = self
                raise err
        else:
            return self.conversions[0]

    @property
    def conversions(self):
        """Return the list of :class:`PluginBlockType` instances."""
        return self._plugins.values()

    def has_conversion(self, plugin):
        """Return True if the plugin supports this block."""
        plugin = kurt.plugin.Kurt.get_plugin(plugin)
        return plugin.name in self._plugins

    def has_command(self, command):
        """Returns True if any of the plugins have the given command."""
        for pbt in self._plugins.values():
            if pbt.command == command:
                return True
        return False

    @property
    def shape(self):
        # Delegated to the first registered plugin's block.
        return self.convert().shape

    @property
    def parts(self):
        # Delegated to the first registered plugin's block.
        return self.convert().parts

    @classmethod
    def get(cls, block_type):
        """Return a :class:`BlockType` instance from the given parameter.

        * If it's already a BlockType instance, return that.

        * If it exactly matches the command on a :class:`PluginBlockType`,
          return the corresponding BlockType.

        * If it loosely matches the text on a PluginBlockType, return the
          corresponding BlockType.

        * If it's a PluginBlockType instance, look for and return the
          corresponding BlockType.

        :raises UnknownBlock: if nothing matches.
        """
        if isinstance(block_type, (BlockType, CustomBlockType)):
            return block_type
        if isinstance(block_type, PluginBlockType):
            block_type = block_type.command
        block = kurt.plugin.Kurt.block_by_command(block_type)
        if block:
            return block
        blocks = kurt.plugin.Kurt.blocks_by_text(block_type)
        for block in blocks: # check the blocks' commands map to unique blocks
            if kurt.plugin.Kurt.block_by_command(
                    block.convert().command) != blocks[0]:
                raise ValueError(
                        "ambigious block text %r, use one of %r instead" %
                        (block_type, [b.convert().command for b in blocks]))
        if blocks:
            return blocks[0]
        # Fixed: was the Python 2-only statement form
        # ``raise UnknownBlock, repr(block_type)`` (SyntaxError on Python 3).
        raise UnknownBlock(repr(block_type))

    def __eq__(self, other):
        # NOTE(review): returns None (falsy) rather than False when `other`
        # is not a BlockType -- confirm no caller relies on a strict bool.
        if isinstance(other, BlockType):
            if self.shape == other.shape and self.inserts == other.inserts:
                # Equal iff some plugin both types share agrees.
                for plugin in self._plugins:
                    if plugin in other._plugins:
                        return self._plugins[plugin] == other._plugins[plugin]
            return False

    def __ne__(self, other):
        return not self == other

    def _add_workaround(self, workaround):
        # Stash a plugin-specific workaround callable (dropped on pickle).
        self._workaround = workaround
class PluginBlockType(BaseBlockType):
    """Holds plugin-specific :class:`BlockType` attributes.

    For each block concept, :class:`Kurt` builds a single BlockType that
    references a corresponding PluginBlockType for each plugin that
    supports that block.

    Note that whichever plugin is loaded first takes precedence.
    """

    def __init__(self, category, shape, command, parts, match=None):
        BaseBlockType.__init__(self, shape, parts)
        self.format = None
        """The format plugin the block belongs to."""
        self.command = command
        """The method name from the source code, used to identify the block.

        eg. ``'say:duration:elapsed:from:'``
        """
        self.category = category
        """Where the block is found in the interface.

        The same blocks may have different categories in different formats.

        Possible values include::

            'motion', 'looks', 'sound', 'pen', 'control', 'events', 'sensing',
            'operators', 'data', 'variables', 'list', 'more blocks', 'motor',
            'sensor', 'wedo', 'midi', 'obsolete'
        """
        self._match = match
        """String -- equivalent command from other plugin.

        The plugin containing the command to match against must have been
        registered first.
        """

    def copy(self):
        """Return a new PluginBlockType with the same attributes."""
        return self.__class__(self.category, self.shape, self.command,
                self.parts, self._match)

    def __eq__(self, other):
        if isinstance(other, BlockType):
            if self.shape == other.shape and self.inserts == other.inserts:
                # NOTE(review): ``self._plugins`` is defined on BlockType,
                # not on PluginBlockType -- if this branch is ever reached
                # it would raise AttributeError. Confirm whether
                # PluginBlockType == BlockType comparisons actually occur.
                for t in self._plugins:
                    if t in other._plugins:
                        return True
        elif isinstance(other, PluginBlockType):
            # The for-else returns True only when no attribute differed.
            for name in ("shape", "inserts", "command", "format", "category"):
                if getattr(self, name) != getattr(other, name):
                    return False
            else:
                return True
        return False
class CustomBlockType(BaseBlockType):
    """A user-specified :class:`BlockType` ("define ..." blocks).

    A script whose first block is ``kurt.Block("procDef",
    <CustomBlockType>)`` provides the definition, followed by the blocks
    making up the definition's body. The same CustomBlockType instance may
    then be used as the type of call blocks in other scripts::

        kurt.Block(<CustomBlockType>, [args ...,])

    """

    def __init__(self, shape, parts):
        BaseBlockType.__init__(self, shape, parts)
        # True if the block should run without screen refresh.
        self.is_atomic = False
#-- Scripts --#
class Block(object):
    """A statement in a graphical programming language. Blocks can connect
    together to form sequences of commands, which are stored in a
    :class:`Script`. Blocks perform different commands depending on their
    type.

    :param type:      A :class:`BlockType` instance, used to identify the
                      command the block performs.
                      Will also exact match a :attr:`command` or loosely
                      match :attr:`text`.

    :param ``*args``: List of the block's arguments. Arguments can be
                      numbers, strings, Blocks, or lists of Blocks (for
                      'stack' shaped Inserts).

    The following constructors are all equivalent::

        >>> block = kurt.Block('say:duration:elapsed:from:', 'Hello!', 2)
        >>> block = kurt.Block("say %s for %s secs", "Hello!", 2)
        >>> block = kurt.Block("sayforsecs", "Hello!", 2)

    Using BlockType::

        >>> block.type
        <kurt.BlockType('say [Hello!] for (2) secs', 'stack')>
        >>> block.args
        ['Hello!', 2]
        >>> block2 = kurt.Block(block.type, "Goodbye!", 5)
        >>> block.stringify()
        'say [Hello!] for (2) secs'
        >>> block2.stringify()
        'say [Goodbye!] for (5) secs'

    """

    def __init__(self, block_type, *args):
        self.type = BlockType.get(block_type)
        """:class:`BlockType` instance. The command this block performs."""
        self.args = []
        """List of arguments to the block.

        The block's parameters are found in :attr:`type.inserts
        <BlockType.inserts>`. Default values come from :attr:`type.defaults
        <BlockType.defaults`.
        """
        self.comment = ""
        """The text of the comment attached to the block. Empty if no comment
        is attached.

        Comments can only be attached to stack blocks.
        """
        # Start from the type's defaults, then overwrite with the given
        # arguments; extra arguments beyond the defaults are appended.
        if self.type:
            self.args = self.type.defaults[:]
        for i in xrange(len(args)):  # NOTE: Python 2 builtin
            if i < len(self.args):
                self.args[i] = args[i]
            else:
                self.args.append(args[i])
        self._normalize()

    def _normalize(self):
        # Re-resolve the type and coerce numeric-shaped string args to
        # int/float (int preferred when the value is integral).
        self.type = BlockType.get(self.type)
        inserts = list(self.type.inserts)
        args = []
        for arg in self.args:
            insert = inserts.pop(0) if inserts else None
            if insert and insert.shape in ('number', 'number-menu'):
                if isinstance(arg, basestring):  # NOTE: Python 2 builtin
                    try:
                        arg = float(arg)
                        arg = int(arg) if int(arg) == arg else arg
                    except ValueError:
                        pass
            args.append(arg)
        self.args = args
        self.comment = unicode(self.comment)

    def copy(self):
        """Return a new Block instance with the same attributes."""
        # Deep-copy nested Blocks and lists of Blocks; other values are
        # shared.
        args = []
        for arg in self.args:
            if isinstance(arg, Block):
                arg = arg.copy()
            elif isinstance(arg, list):
                arg = [b.copy() for b in arg]
            args.append(arg)
        return Block(self.type, *args)

    def __eq__(self, other):
        return (
            isinstance(other, Block) and
            self.type == other.type and
            self.args == other.args
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        # Multi-line constructor-style repr; nested Blocks and block lists
        # are indented under the parent.
        string = "%s.%s(%s, " % (self.__class__.__module__,
                self.__class__.__name__,
                repr(self.type.convert().command if isinstance(self.type,
                    BlockType) else self.type))
        for arg in self.args:
            if isinstance(arg, Block):
                string = string.rstrip("\n")
                string += "\n    %s,\n" % repr(arg).replace("\n", "\n    ")
            elif isinstance(arg, list):
                if string.endswith("\n"):
                    string += "    "
                else:
                    string += " "
                string += "[\n"
                for block in arg:
                    string += "        "
                    string += repr(block).replace("\n", "\n        ")
                    string += ",\n"
                string += "    ], "
            else:
                string += repr(arg) + ", "
        string = string.rstrip(" ").rstrip(",")
        return string + ")"

    def stringify(self, block_plugin=False, in_insert=False):
        # Render the block; an attached comment is appended with "//" after
        # the first line, and continuation lines are aligned under it.
        s = self.type.stringify(self.args, block_plugin, in_insert)
        if self.comment:
            i = s.index("\n") if "\n" in s else len(s)
            indent = "\n" + " " * i + " // "
            comment = " // " + self.comment.replace("\n", indent)
            s = s[:i] + comment + s[i:]
        return s
class Script(object):
    """A single sequence of blocks. Each :class:`Scriptable` can have many
    Scripts.

    The first block, ``self.blocks[0]`` is usually a "when" block, eg. an
    EventHatMorph.

    Scripts implement the ``list`` interface, so can be indexed directly, eg.
    ``script[0]``. All other methods like ``append`` also work.
    """

    def __init__(self, blocks=None, pos=None):
        self.blocks = blocks or []
        self.blocks = list(self.blocks)
        """The list of :class:`Blocks <Block>`."""
        self.pos = tuple(pos) if pos else None
        """``(x, y)`` position from the top-left of the script area in
        pixels.
        """

    def _normalize(self):
        self.pos = self.pos
        self.blocks = list(self.blocks)
        # Normalize each block recursively.
        for block in self.blocks:
            block._normalize()

    def copy(self):
        """Return a new instance with the same attributes."""
        return self.__class__([b.copy() for b in self.blocks],
                tuple(self.pos) if self.pos else None)

    def __eq__(self, other):
        # Position is deliberately ignored: scripts compare by content.
        return (
            isinstance(other, Script) and
            self.blocks == other.blocks
        )

    def __ne__(self, other):
        return not self == other

    def __repr__(self):
        r = "%s.%s([\n" % (self.__class__.__module__,
                self.__class__.__name__)
        for block in self.blocks:
            r += "    " + repr(block).replace("\n", "\n    ") + ",\n"
        r = r.rstrip().rstrip(",") + "]"
        if self.pos:
            r += ", pos=%r" % (self.pos,)
        return r + ")"

    def stringify(self, block_plugin=False):
        # One rendered block per line.
        return "\n".join(block.stringify(block_plugin)
                for block in self.blocks)

    # Pretend to be a list

    def __getattr__(self, name):
        # Delegate unknown attributes (append, insert, remove, ...) to the
        # underlying block list, but leave dunder lookups to the normal
        # mechanism so copying/pickling aren't hijacked.
        # NOTE(review): object defines no __getattr__, so the super() call
        # raises AttributeError (naming '__getattr__') -- apparently the
        # intended fall-through; confirm.
        if name.startswith('__') and name.endswith('__'):
            return super(Script, self).__getattr__(name)
        return getattr(self.blocks, name)

    def __iter__(self):
        return iter(self.blocks)

    def __len__(self):
        return len(self.blocks)

    def __getitem__(self, index):
        return self.blocks[index]

    def __setitem__(self, index, value):
        self.blocks[index] = value

    def __delitem__(self, index):
        del self.blocks[index]
class Comment(object):
    """A comment floating freely in :attr:`Scriptable.scripts`, not
    attached to any block."""

    def __init__(self, text, pos=None):
        # The comment text, coerced to a text value.
        self.text = unicode(text)
        # Optional (x, y) position in pixels from the top-left of the
        # script area.
        self.pos = tuple(pos) if pos else None

    def copy(self):
        """Return a new Comment with the same text and position."""
        pos = tuple(self.pos) if self.pos else None
        return self.__class__(self.text, pos)

    def __repr__(self):
        args = [repr(self.text)]
        if self.pos:
            args.append("pos=%r" % (self.pos,))
        return "%s.%s(%s)" % (self.__class__.__module__,
                self.__class__.__name__, ", ".join(args))

    def stringify(self):
        """Render as "//"-prefixed comment lines."""
        return "// " + self.text.replace("\n", "\n// ")

    def _normalize(self):
        self.pos = self.pos
        self.text = unicode(self.text)
#-- Costumes --#
class Costume(object):
    """Describes the look of a sprite.

    The raw image data is stored in :attr:`image`.
    """

    def __init__(self, name, image, rotation_center=None):
        self.name = unicode(name)  # NOTE: Python 2 builtin
        """Name used by scripts to refer to this Costume."""
        if not rotation_center:
            # Default to the center of the image.
            rotation_center = (int(image.width / 2), int(image.height / 2))
        self.rotation_center = tuple(rotation_center)
        """``(x, y)`` position from the top-left corner of the point about
        which the image rotates.

        Defaults to the center of the image.
        """
        self.image = image
        """An :class:`Image` instance containing the raw image data."""

    def copy(self):
        """Return a new instance with the same attributes."""
        return Costume(self.name, self.image, self.rotation_center)

    # NOTE(review): first parameter is named `self` but, being a
    # classmethod, it receives the class; conventionally `cls`.
    @classmethod
    def load(self, path):
        """Load costume from image file.

        Uses :attr:`Image.load`, but will set the Costume's name based on the
        image filename.
        """
        (folder, filename) = os.path.split(path)
        (name, extension) = os.path.splitext(filename)
        return Costume(name, Image.load(path))

    def save(self, path):
        """Save the costume to an image file at the given path.

        Uses :attr:`Image.save`, but if the path ends in a folder instead of a
        file, the filename is based on the costume's :attr:`name`.

        The image format is guessed from the extension. If path has no
        extension, the image's :attr:`format` is used.

        :returns: Path to the saved file.
        """
        (folder, filename) = os.path.split(path)
        if not filename:
            filename = _clean_filename(self.name)
            path = os.path.join(folder, filename)
        return self.image.save(path)

    def resize(self, size):
        """Resize :attr:`image` in-place."""
        self.image = self.image.resize(size)

    def __repr__(self):
        return "<%s.%s name=%r rotation_center=%d,%d at 0x%X>" % (
            self.__class__.__module__, self.__class__.__name__, self.name,
            self.rotation_center[0], self.rotation_center[1], id(self)
        )

    def __getattr__(self, name):
        # Proxy size-related attributes to the underlying image.
        if name in ('width', 'height', 'size'):
            return getattr(self.image, name)
        # NOTE(review): object defines no __getattr__, so this raises
        # AttributeError (naming '__getattr__') for unknown attributes --
        # presumably the intended fall-through; confirm.
        return super(Costume, self).__getattr__(name)
class Image(object):
    """The contents of an image file.

    Constructing from raw file contents::

        Image(file_contents, "JPEG")

    Constructing from a :class:`PIL.Image.Image` instance::

        pil_image = PIL.Image.new("RGBA", (480, 360))
        Image(pil_image)

    Loading from file path::

        Image.load("path/to/image.jpg")

    Images are immutable. If you want to modify an image, get a
    :class:`PIL.Image.Image` instance from :attr:`pil_image`, modify that,
    and use it to construct a new Image. Modifying images in-place may break
    things.

    The reason for having multiple constructors is so that kurt can implement
    lazy loading of image data -- in many cases, a PIL image will never need
    to be created.
    """

    def __init__(self, contents, format=None):
        # Lazily-populated alternate representations: at least one of
        # _path, _pil_image or _contents is set; the others are derived
        # on demand.
        self._path = None
        self._pil_image = None
        self._contents = None
        self._format = None
        self._size = None
        if isinstance(contents, PIL.Image.Image):
            self._pil_image = contents
        else:
            self._contents = contents
            self._format = Image.image_format(format)

    def __getstate__(self):
        # PIL images aren't directly pickleable: store raw pixel data.
        if isinstance(self._pil_image, PIL.Image.Image):
            copy = self.__dict__.copy()
            copy['_pil_image'] = {
                'data': self._pil_image.tobytes(),
                'size': self._pil_image.size,
                'mode': self._pil_image.mode}
            return copy
        return self.__dict__

    def __setstate__(self, data):
        self.__dict__.update(data)
        if self._pil_image:
            self._pil_image = PIL.Image.frombytes(**self._pil_image)

    # Properties

    @property
    def pil_image(self):
        """A :class:`PIL.Image.Image` instance containing the image data.

        :raises VectorImageError: for SVG images, which can't be rasterised.
        """
        if not self._pil_image:
            if self._format == "SVG":
                raise VectorImageError("can't rasterise vector images")
            self._pil_image = PIL.Image.open(StringIO(self.contents))
        return self._pil_image

    @property
    def contents(self):
        """The raw file contents as a string."""
        if not self._contents:
            if self._path:
                # Read file into memory so we don't run out of file
                # descriptors
                f = open(self._path, "rb")
                self._contents = f.read()
                f.close()
            elif self._pil_image:
                # Write PIL image to string
                f = StringIO()
                self._pil_image.save(f, self.format)
                self._contents = f.getvalue()
        return self._contents

    @property
    def format(self):
        """The format of the image file.

        An uppercase string corresponding to the
        :attr:`PIL.ImageFile.ImageFile.format` attribute. Valid values
        include ``"JPEG"`` and ``"PNG"``.
        """
        if self._format:
            return self._format
        elif self.pil_image:
            return self.pil_image.format

    @property
    def extension(self):
        """The extension of the image's :attr:`format` when written to file.

        eg ``".png"``
        """
        return Image.image_extension(self.format)

    @property
    def size(self):
        """``(width, height)`` in pixels."""
        if self._size and not self._pil_image:
            return self._size
        else:
            return self.pil_image.size

    @property
    def width(self):
        return self.size[0]

    @property
    def height(self):
        return self.size[1]

    # Methods

    @classmethod
    def load(cls, path):
        """Load image from file."""
        assert os.path.exists(path), "No such file: %r" % path
        (folder, filename) = os.path.split(path)
        (name, extension) = os.path.splitext(filename)
        image = Image(None)
        image._path = path
        image._format = Image.image_format(extension)
        return image

    def convert(self, *formats):
        """Return an Image instance with the first matching format.

        For each format in ``*args``: If the image's :attr:`format` attribute
        is the same as the format, return self, otherwise try the next
        format.

        If none of the formats match, return a new Image instance with the
        last format.
        """
        for format in formats:
            format = Image.image_format(format)
            if self.format == format:
                return self
        else:
            # for-else: no format matched -- convert to the last one tried.
            return self._convert(format)

    def _convert(self, format):
        """Return a new Image instance with the given format.

        Returns self if the format is already the same.
        """
        if self.format == format:
            return self
        else:
            image = Image(self.pil_image)
            image._format = format
            return image

    def save(self, path):
        """Save image to file path.

        The image format is guessed from the extension. If path has no
        extension, the image's :attr:`format` is used.

        :returns: Path to the saved file.

        :raises ValueError: if the path has no filename part.
        """
        (folder, filename) = os.path.split(path)
        (name, extension) = os.path.splitext(filename)
        if not name:
            # Fixed: was the Python 2-only statement form
            # ``raise ValueError, "..."`` (SyntaxError on Python 3).
            raise ValueError("name is required")
        if extension:
            format = Image.image_format(extension)
        else:
            format = self.format
            filename = name + self.extension
            path = os.path.join(folder, filename)
        image = self.convert(format)
        if image._contents:
            f = open(path, "wb")
            f.write(image._contents)
            f.close()
        else:
            image.pil_image.save(path, format)
        return path

    # NOTE(review): first parameter is named `self` but, being a
    # classmethod, it receives the class; conventionally `cls`.
    @classmethod
    def new(self, size, fill):
        """Return a new Image instance filled with a color."""
        return Image(PIL.Image.new("RGB", size, fill))

    def resize(self, size):
        """Return a new Image instance with the given size."""
        return Image(self.pil_image.resize(size, PIL.Image.ANTIALIAS))

    def paste(self, other):
        """Return a new Image with the given image pasted on top.

        This image will show through transparent areas of the given image.
        """
        # Use the overlay's alpha channel as the paste mask.
        r, g, b, alpha = other.pil_image.split()
        pil_image = self.pil_image.copy()
        pil_image.paste(other.pil_image, mask=alpha)
        return kurt.Image(pil_image)

    # Static methods

    @staticmethod
    def image_format(format_or_extension):
        # Normalize ".jpg"/"jpg"/"JPG" etc. to a PIL format name.
        if format_or_extension:
            format = format_or_extension.lstrip(".").upper()
            if format == "JPG":
                format = "JPEG"
            return format

    @staticmethod
    def image_extension(format_or_extension):
        # Normalize "JPEG"/".PNG" etc. to a lowercase dotted extension.
        if format_or_extension:
            extension = format_or_extension.lstrip(".").lower()
            if extension == "jpeg":
                extension = "jpg"
            return "." + extension
#-- Sounds --#
class Sound(object):
    """A sound effect a :class:`Scriptable` can play back.

    The raw audio data lives in :attr:`waveform`.
    """

    def __init__(self, name, waveform):
        # Name used by scripts to refer to this Sound.
        self.name = name
        # A :class:`Waveform` instance containing the raw sound data.
        self.waveform = waveform

    def copy(self):
        """Return a new Sound sharing this one's name and waveform."""
        return Sound(self.name, self.waveform)

    @classmethod
    def load(cls, path):
        """Build a Sound from a wave file, naming it after the file.

        Uses :attr:`Waveform.load` for the audio data.
        """
        (_folder, filename) = os.path.split(path)
        (name, _extension) = os.path.splitext(filename)
        return Sound(name, Waveform.load(path))

    def save(self, path):
        """Write the sound to a wave file and return the saved path.

        Uses :attr:`Waveform.save`; if the path ends in a folder instead
        of a file, the filename is derived from :attr:`name`.
        """
        (folder, filename) = os.path.split(path)
        if not filename:
            filename = _clean_filename(self.name)
            path = os.path.join(folder, filename)
        return self.waveform.save(path)

    def __repr__(self):
        return "<%s.%s name=%r at 0x%X>" % (self.__class__.__module__,
                self.__class__.__name__, self.name, id(self))
class Waveform(object):
    """The contents of a wave file. Only WAV format files are supported.

    Constructing from raw file contents::

        Waveform(file_contents)

    Loading from file path::

        Waveform.load("path/to/sound.wav")

    Waveforms are immutable.
    """
    # NOTE: this class uses Python 2-only syntax (``except wave.Error, err``
    # and ``raise ValueError, ...``) and must run under Python 2.
    extension = ".wav"
    def __init__(self, contents, rate=None, sample_count=None):
        # _path is set by load(); when set, contents are read lazily.
        self._path = None
        self._contents = contents
        self._rate = rate
        self._sample_count = sample_count
    # Properties
    @property
    def contents(self):
        """The raw file contents as a string."""
        if not self._contents:
            if self._path:
                # Read file into memory so we don't run out of file descriptors
                f = open(self._path, "rb")
                self._contents = f.read()
                f.close()
        return self._contents
    @property
    def _wave(self):
        """Return a wave.Wave_read instance from the ``wave`` module."""
        try:
            return wave.open(StringIO(self.contents))
        except wave.Error, err:
            # Augment the error with which waveform failed before re-raising.
            err.message += "\nInvalid wave file: %s" % self
            err.args = (err.message,)
            raise
    @property
    def rate(self):
        """The sampling rate of the sound."""
        if self._rate:
            return self._rate
        else:
            # Fall back to parsing the WAV header.
            return self._wave.getframerate()
    @property
    def sample_count(self):
        """The number of samples in the sound."""
        if self._sample_count:
            return self._sample_count
        else:
            # Fall back to parsing the WAV header.
            return self._wave.getnframes()
    # Methods
    @classmethod
    def load(cls, path):
        """Load Waveform from file."""
        assert os.path.exists(path), "No such file: %r" % path
        (folder, filename) = os.path.split(path)
        (name, extension) = os.path.splitext(filename)
        # NOTE(review): the local name ``wave`` shadows the ``wave`` module
        # used by the ``_wave`` property above; contents are loaded lazily.
        wave = Waveform(None)
        wave._path = path
        return wave
    def save(self, path):
        """Save waveform to file path as a WAV file.

        :returns: Path to the saved file.
        """
        (folder, filename) = os.path.split(path)
        (name, extension) = os.path.splitext(filename)
        if not name:
            raise ValueError, "name is required"
        # Always force the ".wav" extension, whatever the caller supplied.
        path = os.path.join(folder, name + self.extension)
        f = open(path, "wb")
        f.write(self.contents)
        f.close()
        return path
| mpl-2.0 |
mustafat/odoo-1 | addons/mrp/report/__init__.py | 378 | 1122 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import price
import workcenter_load
import bom_structure
import mrp_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
AlphaSmartDog/DeepLearningNotes | Note-1 RNN-DNC择时/Note-1-2 PonderingDNCore L2正则化示例/sonnet/python/ops/nest_test.py | 6 | 6219 | # Copyright 2017 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for sonnet.python.ops.nest.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
# Dependency imports
import numpy as np
import six
from sonnet.python.ops import nest
import tensorflow as tf
typekw = "class" if six.PY3 else "type"
class NestTest(tf.test.TestCase):
  """Unit tests for `sonnet.python.ops.nest` map/flatten/pack helpers."""
  def testStringRepeat(self):
    ab_tuple = collections.namedtuple("ab_tuple", "a, b")
    inp_a = ab_tuple(a="foo", b=("bar", "baz"))
    inp_b = ab_tuple(a=2, b=(1, 3))
    out = nest.map(lambda string, repeats: string * repeats, inp_a, inp_b)
    self.assertEqual(out.a, "foofoo")
    self.assertEqual(out.b[0], "bar")
    self.assertEqual(out.b[1], "bazbazbaz")
  def testMapSingleCollection(self):
    ab_tuple = collections.namedtuple("ab_tuple", "a, b")
    nt = ab_tuple(a=("something", "something_else"),
                  b="yet another thing")
    rev_nt = nest.map(lambda x: x[::-1], nt)
    # Check the output is the correct structure, and all strings are reversed.
    nest.assert_same_structure(nt, rev_nt)
    self.assertEqual(nt.a[0][::-1], rev_nt.a[0])
    self.assertEqual(nt.a[1][::-1], rev_nt.a[1])
    self.assertEqual(nt.b[::-1], rev_nt.b)
  def testMapOverTwoTuples(self):
    inp_a = (tf.placeholder(tf.float32, shape=[3, 4]),
             tf.placeholder(tf.float32, shape=[3, 7]))
    inp_b = (tf.placeholder(tf.float32, shape=[3, 4]),
             tf.placeholder(tf.float32, shape=[3, 7]))
    output = nest.map(lambda x1, x2: x1 + x2, inp_a, inp_b)
    nest.assert_same_structure(output, inp_a)
    self.assertShapeEqual(np.zeros((3, 4)), output[0])
    self.assertShapeEqual(np.zeros((3, 7)), output[1])
    feed_dict = {
        inp_a: (np.random.randn(3, 4), np.random.randn(3, 7)),
        inp_b: (np.random.randn(3, 4), np.random.randn(3, 7))
    }
    with self.test_session() as sess:
      output_np = sess.run(output, feed_dict=feed_dict)
    self.assertAllClose(output_np[0],
                        feed_dict[inp_a][0] + feed_dict[inp_b][0])
    self.assertAllClose(output_np[1],
                        feed_dict[inp_a][1] + feed_dict[inp_b][1])
  def testStructureMustBeSame(self):
    inp_a = (3, 4)
    inp_b = (42, 42, 44)
    err = "The two structures don't have the same number of elements."
    with self.assertRaisesRegexp(ValueError, err):
      nest.map(lambda a, b: a + b, inp_a, inp_b)
  def testMultiNest(self):
    inp_a = (3, (4, 5))
    inp_b = (42, (42, 44))
    output = nest.map(lambda a, b: a + b, inp_a, inp_b)
    self.assertEqual((45, (46, 49)), output)
  def testNoSequences(self):
    with self.assertRaisesRegexp(ValueError,
                                 "Must provide at least one structure"):
      nest.map(lambda x: x)
  def testEmptySequences(self):
    f = lambda x: x + 1
    empty_nt = collections.namedtuple("empty_nt", "")
    self.assertEqual((), nest.map(f, ()))
    self.assertEqual([], nest.map(f, []))
    self.assertEqual(empty_nt(), nest.map(f, empty_nt()))
    # This is checking actual equality of types, empty list != empty tuple
    self.assertNotEqual((), nest.map(f, []))
  def testFlattenAndPackIterable(self):
    # A nice messy mix of tuples, lists, dicts, and `OrderedDict`s.
    named_tuple = collections.namedtuple("A", ("b", "c"))
    mess = [
        "z",
        named_tuple(3, 4),
        {
            "c": [
                1,
                collections.OrderedDict([
                    ("b", 3),
                    ("a", 2),
                ]),
            ],
            "b": 5
        },
        17
    ]
    flattened = nest.flatten_iterable(mess)
    self.assertEqual(flattened, ["z", 3, 4, 5, 1, 2, 3, 17])
    structure_of_mess = [
        14,
        named_tuple("a", True),
        {
            "c": [
                0,
                collections.OrderedDict([
                    ("b", 9),
                    ("a", 8),
                ]),
            ],
            "b": 3
        },
        "hi everybody",
    ]
    # Packing into a differently-valued but identically-shaped structure
    # must reproduce the original nesting exactly.
    unflattened = nest.pack_iterable_as(structure_of_mess, flattened)
    self.assertEqual(unflattened, mess)
  def testFlattenIterable_numpyIsNotFlattened(self):
    structure = np.array([1, 2, 3])
    flattened = nest.flatten_iterable(structure)
    self.assertEqual(len(flattened), 1)
  def testFlattenIterable_stringIsNotFlattened(self):
    structure = "lots of letters"
    flattened = nest.flatten_iterable(structure)
    self.assertEqual(len(flattened), 1)
  def testFlatternIterable_scalarStructure(self):
    # Tests can call flatten_iterable with single "scalar" object.
    structure = "hello"
    flattened = nest.flatten_iterable(structure)
    unflattened = nest.pack_iterable_as("goodbye", flattened)
    self.assertEqual(structure, unflattened)
  def testPackIterableAs_notIterableError(self):
    with self.assertRaisesRegexp(TypeError,
                                 "flat_iterable must be an iterable"):
      nest.pack_iterable_as("hi", "bye")
  def testPackIterableAs_scalarStructureError(self):
    with self.assertRaisesRegexp(
        ValueError, r"Structure is a scalar but len\(flat_iterable\) == 2 > 1"):
      nest.pack_iterable_as("hi", ["bye", "twice"])
  def testPackIterableAs_wrongLengthsError(self):
    with self.assertRaisesRegexp(
        ValueError,
        "Structure had 2 elements, but flat_iterable had 3 elements."):
      nest.pack_iterable_as(["hello", "world"],
                            ["and", "goodbye", "again"])
| mit |
annarev/tensorflow | tensorflow/python/keras/layers/serialization.py | 2 | 8373 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Layer serialization/deserialization functions.
"""
# pylint: disable=wildcard-import
# pylint: disable=unused-import
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from tensorflow.python import tf2
from tensorflow.python.keras.engine import base_layer
from tensorflow.python.keras.engine import input_layer
from tensorflow.python.keras.engine import input_spec
from tensorflow.python.keras.layers import advanced_activations
from tensorflow.python.keras.layers import convolutional
from tensorflow.python.keras.layers import convolutional_recurrent
from tensorflow.python.keras.layers import core
from tensorflow.python.keras.layers import cudnn_recurrent
from tensorflow.python.keras.layers import dense_attention
from tensorflow.python.keras.layers import einsum_dense
from tensorflow.python.keras.layers import embeddings
from tensorflow.python.keras.layers import local
from tensorflow.python.keras.layers import merge
from tensorflow.python.keras.layers import multi_head_attention
from tensorflow.python.keras.layers import noise
from tensorflow.python.keras.layers import normalization
from tensorflow.python.keras.layers import normalization_v2
from tensorflow.python.keras.layers import pooling
from tensorflow.python.keras.layers import recurrent
from tensorflow.python.keras.layers import recurrent_v2
from tensorflow.python.keras.layers import rnn_cell_wrapper_v2
from tensorflow.python.keras.layers import wrappers
from tensorflow.python.keras.layers.preprocessing import category_crossing
from tensorflow.python.keras.layers.preprocessing import category_encoding
from tensorflow.python.keras.layers.preprocessing import discretization
from tensorflow.python.keras.layers.preprocessing import hashing
from tensorflow.python.keras.layers.preprocessing import image_preprocessing
from tensorflow.python.keras.layers.preprocessing import integer_lookup as preprocessing_integer_lookup
from tensorflow.python.keras.layers.preprocessing import integer_lookup_v1 as preprocessing_integer_lookup_v1
from tensorflow.python.keras.layers.preprocessing import normalization as preprocessing_normalization
from tensorflow.python.keras.layers.preprocessing import normalization_v1 as preprocessing_normalization_v1
from tensorflow.python.keras.layers.preprocessing import string_lookup as preprocessing_string_lookup
from tensorflow.python.keras.layers.preprocessing import string_lookup_v1 as preprocessing_string_lookup_v1
from tensorflow.python.keras.layers.preprocessing import text_vectorization as preprocessing_text_vectorization
from tensorflow.python.keras.layers.preprocessing import text_vectorization_v1 as preprocessing_text_vectorization_v1
from tensorflow.python.keras.utils import generic_utils
from tensorflow.python.keras.utils import tf_inspect as inspect
from tensorflow.python.util.tf_export import keras_export
# Modules scanned for Layer subclasses under TF1 (and as the base set for
# TF2, before the V2 overrides below are applied).
ALL_MODULES = (base_layer, input_layer, advanced_activations, convolutional,
               convolutional_recurrent, core, cudnn_recurrent, dense_attention,
               embeddings, einsum_dense, local, merge, noise, normalization,
               pooling, image_preprocessing, preprocessing_integer_lookup_v1,
               preprocessing_normalization_v1, preprocessing_string_lookup_v1,
               preprocessing_text_vectorization_v1, recurrent, wrappers,
               hashing, category_crossing, category_encoding, discretization,
               multi_head_attention)
# Modules whose classes replace same-named V1 entries when TF2 is enabled.
ALL_V2_MODULES = (rnn_cell_wrapper_v2, normalization_v2, recurrent_v2,
                  preprocessing_integer_lookup, preprocessing_normalization,
                  preprocessing_string_lookup, preprocessing_text_vectorization)
# ALL_OBJECTS is meant to be a global mutable. Hence we need to make it
# thread-local to avoid concurrent mutations.
LOCAL = threading.local()
def populate_deserializable_objects():
  """Populates dict ALL_OBJECTS with every built-in layer.

  The registry is rebuilt whenever the TF1/TF2 mode changes; it is stored in
  thread-local state so concurrent threads do not mutate each other's view.
  """
  global LOCAL
  if not hasattr(LOCAL, 'ALL_OBJECTS'):
    # First call on this thread: initialise the thread-local cache.
    LOCAL.ALL_OBJECTS = {}
    LOCAL.GENERATED_WITH_V2 = None
  if LOCAL.ALL_OBJECTS and LOCAL.GENERATED_WITH_V2 == tf2.enabled():
    # Objects dict is already generated for the proper TF version:
    # do nothing.
    return
  LOCAL.ALL_OBJECTS = {}
  LOCAL.GENERATED_WITH_V2 = tf2.enabled()
  base_cls = base_layer.Layer
  generic_utils.populate_dict_with_module_objects(
      LOCAL.ALL_OBJECTS,
      ALL_MODULES,
      obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls))
  # Overwrite certain V1 objects with V2 versions
  if tf2.enabled():
    generic_utils.populate_dict_with_module_objects(
        LOCAL.ALL_OBJECTS,
        ALL_V2_MODULES,
        obj_filter=lambda x: inspect.isclass(x) and issubclass(x, base_cls))
  # These deserialization aliases are added for backward compatibility,
  # as in TF 1.13, "BatchNormalizationV1" and "BatchNormalizationV2"
  # were used as class name for v1 and v2 version of BatchNormalization,
  # respectively. Here we explicitly convert them to their canonical names.
  LOCAL.ALL_OBJECTS['BatchNormalizationV1'] = normalization.BatchNormalization
  LOCAL.ALL_OBJECTS[
      'BatchNormalizationV2'] = normalization_v2.BatchNormalization
  # Prevent circular dependencies.
  from tensorflow.python.keras import models  # pylint: disable=g-import-not-at-top
  from tensorflow.python.keras.premade.linear import LinearModel  # pylint: disable=g-import-not-at-top
  from tensorflow.python.keras.premade.wide_deep import WideDeepModel  # pylint: disable=g-import-not-at-top
  from tensorflow.python.keras.feature_column.sequence_feature_column import SequenceFeatures  # pylint: disable=g-import-not-at-top
  LOCAL.ALL_OBJECTS['Input'] = input_layer.Input
  LOCAL.ALL_OBJECTS['InputSpec'] = input_spec.InputSpec
  LOCAL.ALL_OBJECTS['Functional'] = models.Functional
  LOCAL.ALL_OBJECTS['Model'] = models.Model
  LOCAL.ALL_OBJECTS['SequenceFeatures'] = SequenceFeatures
  LOCAL.ALL_OBJECTS['Sequential'] = models.Sequential
  LOCAL.ALL_OBJECTS['LinearModel'] = LinearModel
  LOCAL.ALL_OBJECTS['WideDeepModel'] = WideDeepModel
  # DenseFeatures has distinct V1/V2 implementations living in different
  # modules; pick the one matching the active TF mode.
  if tf2.enabled():
    from tensorflow.python.keras.feature_column.dense_features_v2 import DenseFeatures  # pylint: disable=g-import-not-at-top
    LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
  else:
    from tensorflow.python.keras.feature_column.dense_features import DenseFeatures  # pylint: disable=g-import-not-at-top
    LOCAL.ALL_OBJECTS['DenseFeatures'] = DenseFeatures
  # Merge layers, function versions.
  LOCAL.ALL_OBJECTS['add'] = merge.add
  LOCAL.ALL_OBJECTS['subtract'] = merge.subtract
  LOCAL.ALL_OBJECTS['multiply'] = merge.multiply
  LOCAL.ALL_OBJECTS['average'] = merge.average
  LOCAL.ALL_OBJECTS['maximum'] = merge.maximum
  LOCAL.ALL_OBJECTS['minimum'] = merge.minimum
  LOCAL.ALL_OBJECTS['concatenate'] = merge.concatenate
  LOCAL.ALL_OBJECTS['dot'] = merge.dot
@keras_export('keras.layers.serialize')
def serialize(layer):
  """Return the serializable config representation of a Keras layer."""
  serializer = generic_utils.serialize_keras_object
  return serializer(layer)
@keras_export('keras.layers.deserialize')
def deserialize(config, custom_objects=None):
  """Instantiates a layer from a config dictionary.

  Args:
    config: dict of the form {'class_name': str, 'config': dict}
    custom_objects: dict mapping class names (or function names)
      of custom (non-Keras) objects to class/functions

  Returns:
    Layer instance (may be Model, Sequential, Network, Layer...)
  """
  # Refresh the built-in registry first so `class_name` lookup matches the
  # active TF version.
  populate_deserializable_objects()
  deserializer = generic_utils.deserialize_keras_object
  return deserializer(config,
                      module_objects=LOCAL.ALL_OBJECTS,
                      custom_objects=custom_objects,
                      printable_module_name='layer')
| apache-2.0 |
flyballlabs/threatdetectionservice | api/venv/lib/python2.7/site-packages/wheel/wininst2wheel.py | 93 | 7863 | #!/usr/bin/env python
import os.path
import re
import sys
import tempfile
import zipfile
import wheel.bdist_wheel
import distutils.dist
from distutils.archive_util import make_archive
from shutil import rmtree
from wheel.archive import archive_wheelfile
from argparse import ArgumentParser
from glob import iglob
egg_info_re = re.compile(r'''(^|/)(?P<name>[^/]+?)-(?P<ver>.+?)
(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg-info(/|$)''', re.VERBOSE)
def parse_info(wininfo_name, egginfo_name):
    """Derive (name, version, pyversion, arch) metadata from filenames.

    ``wininfo_name`` is the bdist_wininst installer filename and
    ``egginfo_name`` the name of the .egg-info directory found inside it
    (or a false value when the archive contains none).

    The egg-info data, when present, is authoritative for the distribution
    name and version because it comes straight from the distutils metadata,
    while an installer can be freely renamed.  The python version and the
    architecture always come from the installer filename: the egg-info pyver
    merely reflects the interpreter used to build the installer, and the
    architecture is not recorded in the egg-info name at all.  Installers
    without an explicit ``-pyX.Y`` suffix are assumed to be pure python and
    tagged ``py2.py3``.

    Returns a dict with keys ``name``, ``ver``, ``arch`` and ``pyver``.
    """
    egg_match = None
    if egginfo_name:
        egg_match = egg_info_re.search(egginfo_name)
        if not egg_match:
            raise ValueError("Egg info filename %s is not valid" %
                             (egginfo_name,))

    # 1. Distribution name is everything up to the first '-'.
    dist_name, sep, remainder = wininfo_name.partition('-')
    if not sep:
        raise ValueError("Installer filename %s is not valid" %
                         (wininfo_name,))
    # Strip the trailing '.exe'.
    remainder = remainder[:-4]

    # 2. Optional trailing '-pyX.Y' component selects the python tag.
    before_py, sep, py_tag = remainder.rpartition('-')
    if sep and py_tag.startswith('py'):
        remainder = before_py
        py_tag = py_tag.replace('.', '')
    else:
        # Not version specific: assume compatibility with both python 2
        # and 3 (the user can rename the wheel to be more restrictive).
        py_tag = 'py2.py3'

    # 3. The remaining part is '<version>.<arch>'.
    version, sep, arch = remainder.rpartition('.')
    if not sep:
        raise ValueError("Installer filename %s is not valid" %
                         (wininfo_name,))

    if egg_match:
        dist_name = egg_match.group('name')
        version = egg_match.group('ver')
    return dict(name=dist_name, ver=version, arch=arch, pyver=py_tag)
def bdist_wininst2wheel(path, dest_dir=os.path.curdir):
    """Convert the bdist_wininst installer at ``path`` into a wheel.

    The wheel is written into ``dest_dir``; the intermediate extraction
    directory is removed afterwards.
    """
    bdw = zipfile.ZipFile(path)
    # Search for egg-info in the archive
    egginfo_name = None
    for filename in bdw.namelist():
        if '.egg-info' in filename:
            egginfo_name = filename
            break
    info = parse_info(os.path.basename(path), egginfo_name)
    # A PLATLIB top-level directory marks the installer as platform-specific.
    root_is_purelib = True
    for zipinfo in bdw.infolist():
        if zipinfo.filename.startswith('PLATLIB'):
            root_is_purelib = False
            break
    if root_is_purelib:
        paths = {'purelib': ''}
    else:
        paths = {'platlib': ''}
    dist_info = "%(name)s-%(ver)s" % info
    datadir = "%s.data/" % dist_info
    # rewrite paths to trick ZipFile into extracting an egg
    # XXX grab wininst .ini - between .exe, padding, and first zip file.
    members = []
    egginfo_name = ''
    for zipinfo in bdw.infolist():
        key, basename = zipinfo.filename.split('/', 1)
        key = key.lower()
        basepath = paths.get(key, None)
        if basepath is None:
            # Unknown top-level dirs (SCRIPTS, DATA, ...) go under .data/.
            basepath = datadir + key.lower() + '/'
        oldname = zipinfo.filename
        newname = basepath + basename
        zipinfo.filename = newname
        # Keep the ZipFile's name index consistent with the rename.
        del bdw.NameToInfo[oldname]
        bdw.NameToInfo[newname] = zipinfo
        # Collect member names, but omit '' (from an entry like "PLATLIB/"
        if newname:
            members.append(newname)
        # Remember egg-info name for the egg2dist call below
        if not egginfo_name:
            if newname.endswith('.egg-info'):
                egginfo_name = newname
            elif '.egg-info/' in newname:
                egginfo_name, sep, _ = newname.rpartition('/')
    dir = tempfile.mkdtemp(suffix="_b2w")
    bdw.extractall(dir, members)
    # egg2wheel
    abi = 'none'
    pyver = info['pyver']
    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
    # Wininst installers always have arch even if they are not
    # architecture-specific (because the format itself is).
    # So, assume the content is architecture-neutral if root is purelib.
    if root_is_purelib:
        arch = 'any'
    # If the installer is architecture-specific, it's almost certainly also
    # CPython-specific.
    if arch != 'any':
        pyver = pyver.replace('py', 'cp')
    wheel_name = '-'.join((
        dist_info,
        pyver,
        abi,
        arch
        ))
    # Platform-specific wheels need the tag override hook from
    # _bdist_wheel_tag; pure wheels can use stock bdist_wheel.
    if root_is_purelib:
        bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
    else:
        bw = _bdist_wheel_tag(distutils.dist.Distribution())
    bw.root_is_pure = root_is_purelib
    bw.python_tag = pyver
    bw.plat_name_supplied = True
    bw.plat_name = info['arch'] or 'any'
    if not root_is_purelib:
        bw.full_tag_supplied = True
        bw.full_tag = (pyver, abi, arch)
    dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
    bw.egg2dist(os.path.join(dir, egginfo_name), dist_info_dir)
    bw.write_wheelfile(dist_info_dir, generator='wininst2wheel')
    bw.write_record(dir, dist_info_dir)
    archive_wheelfile(os.path.join(dest_dir, wheel_name), dir)
    rmtree(dir)
class _bdist_wheel_tag(wheel.bdist_wheel.bdist_wheel):
    """bdist_wheel subclass whose generated wheel tag can be overridden.

    Stock bdist_wheel derives the python/abi tags from the interpreter that
    is currently running, which is not suitable for repackaging prebuilt
    binaries; setting ``full_tag_supplied``/``full_tag`` overrides that.
    """
    full_tag_supplied = False
    # Either None or a (pytag, soabitag, plattag) triple.
    full_tag = None
    def get_tag(self):
        """Return the caller-supplied tag when set, the default otherwise."""
        use_override = self.full_tag_supplied and self.full_tag is not None
        if use_override:
            return self.full_tag
        return super(_bdist_wheel_tag, self).get_tag()
def main():
    """CLI entry point: convert each named wininst installer to a wheel."""
    parser = ArgumentParser()
    parser.add_argument('installers', nargs='*', help="Installers to convert")
    parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                        help="Directory to store wheels (default %(default)s)")
    parser.add_argument('--verbose', '-v', action='store_true')
    args = parser.parse_args()
    # Each positional argument may itself be a glob pattern.
    for pattern in args.installers:
        for installer in iglob(pattern):
            if args.verbose:
                sys.stdout.write("{0}... ".format(installer))
            bdist_wininst2wheel(installer, args.dest_dir)
            if args.verbose:
                sys.stdout.write("OK\n")
if __name__ == "__main__":
main()
| apache-2.0 |
cheeseywhiz/cheeseywhiz | morse/stream/decode_tree.py | 2 | 2619 | import dataclasses
@dataclasses.dataclass
class Node:
    """One node of the Morse decode tree.

    ``char`` is the decoded character (None for intermediate nodes with no
    assigned character); ``dot`` and ``dash`` are the children reached by
    receiving a dot or a dash.
    """
    char: str
    dot: 'Node' = None
    dash: 'Node' = None
@dataclasses.dataclass
class DecodeTree:
    """Root of the Morse decode tree.

    Each encoded byte carries its symbol count in the top three bits and its
    dot/dash pattern in the low bits (LSB first, 1 = dash); the byte 0
    decodes to a space.
    """
    dot: Node
    dash: Node
    def decode_byte(self, byte):
        """Decode one encoded byte into a single character."""
        if not byte:
            return ' '
        symbol_count = (byte & 0b111_00000) >> 5
        node = self
        for shift in range(symbol_count):
            node = node.dash if (byte >> shift) & 1 else node.dot
        return node.char
    def decode(self, byte_seq):
        """Decode an iterable of encoded bytes into a string."""
        return ''.join(self.decode_byte(b) for b in byte_seq)
# The complete International Morse decode tree: starting at the root, follow
# `dot` for each received dot and `dash` for each dash; the node reached
# after the final symbol holds the decoded character (None marks prefixes
# that are not themselves assigned a character).
decode_tree = DecodeTree(
    # dot subtree: characters whose code begins with '.'
    Node(
        'E',
        Node(
            'I',
            Node(
                'S',
                Node(
                    'H',
                    Node('5'),
                    Node('4'),
                ),
                Node(
                    'V',
                    None,
                    Node('3'),
                ),
            ),
            Node(
                'U',
                Node('F'),
                Node(
                    None,
                    None,
                    Node('2'),
                ),
            ),
        ),
        Node(
            'A',
            Node(
                'R',
                Node('L'),
                Node(
                    None,
                    Node('+'),
                    None,
                ),
            ),
            Node(
                'W',
                Node('P'),
                Node(
                    'J',
                    None,
                    Node('1'),
                ),
            ),
        ),
    ),
    # dash subtree: characters whose code begins with '-'
    Node(
        'T',
        Node(
            'N',
            Node(
                'D',
                Node(
                    'B',
                    Node('6'),
                    Node('='),
                ),
                Node(
                    'X',
                    Node('/'),
                ),
            ),
            Node(
                'K',
                Node('C'),
                Node('Y'),
            ),
        ),
        Node(
            'M',
            Node(
                'G',
                Node(
                    'Z',
                    Node('7'),
                    None,
                ),
                Node('Q'),
            ),
            Node(
                'O',
                Node(
                    None,
                    Node('8'),
                    None,
                ),
                Node(
                    None,
                    Node('9'),
                    Node('0'),
                ),
            ),
        ),
    ),
)
| mit |
spcui/autotest | frontend/tko/views.py | 6 | 1101 | import django.http
from autotest.frontend.tko import rpc_interface, graphing_utils
from autotest.frontend.tko import csv_encoder
from autotest.frontend.afe import rpc_handler, rpc_utils
# Single dispatcher shared by every view below; it serves the functions in
# `rpc_interface` and uses the same module as its documentation source.
rpc_handler_obj = rpc_handler.RpcHandler((rpc_interface,),
                                         document_module=rpc_interface)
def handle_rpc(request):
    """Serve a standard RPC request through the shared handler."""
    dispatch = rpc_handler_obj.handle_rpc_request
    return dispatch(request)
def handle_jsonp_rpc(request):
    """Serve a JSONP-wrapped RPC request through the shared handler."""
    dispatch = rpc_handler_obj.handle_jsonp_rpc_request
    return dispatch(request)
def handle_csv(request):
    """Serve an RPC request, encoding its result as CSV."""
    raw = rpc_handler_obj.raw_request_data(request)
    decoded = rpc_handler_obj.decode_request(raw)
    rpc_result = rpc_handler_obj.dispatch_request(decoded)['result']
    # The encoder picks a CSV layout appropriate to the decoded request.
    return csv_encoder.encoder(decoded, rpc_result).encode()
def rpc_documentation(request):
    """Return the generated documentation page for the RPC interface."""
    docs = rpc_handler_obj.get_rpc_documentation()
    return docs
def handle_plot(request):
    """Render the cached plot named by ``id``/``max_age`` as a PNG response."""
    plot_id = request.GET['id']
    max_age = request.GET['max_age']
    png_data = graphing_utils.handle_plot_request(plot_id, max_age)
    return django.http.HttpResponse(png_data, mimetype='image/png')
| gpl-2.0 |
40223136/w17test1 | static/Brython3.1.1-20150328-091302/Lib/_struct.py | 726 | 13787 | #
# This module is a pure Python version of pypy.module.struct.
# It is only imported if the vastly faster pypy.module.struct is not
# compiled in. For now we keep this version for reference and
# because pypy.module.struct is not ootype-backend-friendly yet.
#
# this module 'borrowed' from
# https://bitbucket.org/pypy/pypy/src/18626459a9b2/lib_pypy/_struct.py?at=py3k-listview_str
"""Functions to convert between Python values and C structs.
Python strings are used to hold the data representing the C struct
and also as format strings to describe the layout of data in the C struct.
The optional first format char indicates byte order, size and alignment:
@: native order, size & alignment (default)
=: native order, std. size & alignment
<: little-endian, std. size & alignment
>: big-endian, std. size & alignment
!: same as >
The remaining chars indicate types of args and must match exactly;
these can be preceded by a decimal repeat count:
x: pad byte (no data);
c:char;
b:signed byte;
B:unsigned byte;
h:short;
H:unsigned short;
i:int;
I:unsigned int;
l:long;
L:unsigned long;
f:float;
d:double.
Special cases (preceding decimal count indicates length):
s:string (array of char); p: pascal string (with count byte).
Special case (only available in native format):
P:an integer type that is wide enough to hold a pointer.
Special case (not in native mode unless 'long long' in platform C):
q:long long;
Q:unsigned long long
Whitespace between formats is ignored.
The variable struct.error is an exception raised on errors."""
import math, sys
# TODO: XXX Find a way to get information on native sizes and alignments
class StructError(Exception):
    """Raised for invalid format strings and bad pack/unpack arguments."""

# Public alias mirroring the C struct module's `struct.error`.
error = StructError
def unpack_int(data, index, size, le):
    """Read `size` bytes of `data` starting at `index` as an unsigned int.

    `le` names the byte order of the data: 'little' or 'big'.
    """
    raw = data[index:index + size]
    if le == 'little':
        raw = raw[::-1]
    value = 0
    for octet in raw:
        value = (value << 8) | octet
    return int(value)
def unpack_signed_int(data, index, size, le):
    """Read `size` bytes as a two's-complement signed integer."""
    value = unpack_int(data, index, size, le)
    bits = 8 * size
    if value > 2 ** (bits - 1) - 1:
        # Fold values with the sign bit set into the negative range.
        value = int(value - 2 ** bits)
    return value
# Float overflow yields inf without raising; inf/inf yields nan.
INFINITY = 1e200 * 1e200
NAN = INFINITY / INFINITY
def unpack_char(data, index, size, le):
    """Return the raw `size`-byte slice of `data` at `index` (byte order is
    irrelevant for character data, so `le` is unused)."""
    end = index + size
    return data[index:end]
def pack_int(number, size, le):
    """Encode the low `size` bytes of `number` with byte order `le`."""
    out = bytearray()
    remaining = number
    for _ in range(size):
        out.append(remaining & 0xff)
        remaining >>= 8
    # Bytes were emitted least-significant first; flip for big-endian.
    if le == 'big':
        out.reverse()
    return bytes(out)
def pack_signed_int(number, size, le):
    """Encode a signed integer after range-checking it for `size` bytes."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    limit = 2 ** (8 * size - 1)
    if not (-limit <= number <= limit - 1):
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_unsigned_int(number, size, le):
    """Encode an unsigned integer after range-checking it for `size` bytes."""
    if not isinstance(number, int):
        raise StructError("argument for i,I,l,L,q,Q,h,H must be integer")
    if number < 0:
        raise TypeError("can't convert negative long to unsigned")
    limit = 2 ** (8 * size) - 1
    if number > limit:
        raise OverflowError("Number:%i too large to convert" % number)
    return pack_int(number, size, le)
def pack_char(char,size,le):
    # `size` and `le` are ignored: `char` is already bytes-like, so it is
    # simply copied into a fresh bytes object.
    return bytes(char)
def isinf(x):
    """Return True if `x` is (positive or negative) infinity.

    Relies on infinity being the only nonzero value equal to its own half.
    """
    if x == 0.0:
        return False
    return x / 2 == x
def isnan(v):
    """Return True if `v` is a NaN (NaN compares unequal to itself)."""
    scaled = v * 1.0
    return v != scaled or (v == 1.0 and v == 2.0)
def pack_float(x, size, le):
    """Encode float `x` as `size` (4 or 8) bytes in byte order `le`.

    BUG FIX: the byte loop previously always ran over range(8), so packing
    the 4-byte 'f' format produced an 8-byte buffer — twice the length that
    calcsize() reports for 'f'.  It now emits exactly `size` bytes.
    """
    unsigned = float_pack(x, size)
    result = []
    for i in range(size):
        result.append((unsigned >> (i * 8)) & 0xFF)
    if le == "big":
        result.reverse()
    return bytes(result)
def unpack_float(data, index, size, le):
    """Decode a `size`-byte (4 or 8) IEEE float from `data` at `index`.

    BUG FIX: previously this always read and combined 8 bytes regardless of
    `size`, which mis-decoded the 4-byte 'f' format and could read past the
    end of the buffer.  It now consumes exactly `size` bytes.
    """
    binary = [data[i] for i in range(index, index + size)]
    if le == "big":
        binary.reverse()
    unsigned = 0
    for i in range(size):
        unsigned |= binary[i] << (i * 8)
    return float_unpack(unsigned, size, le)
def round_to_nearest(x):
    """Round a finite nonnegative float to the nearest int, ties-to-even.

    Unlike Python 2's builtin round(), this returns an int and rounds
    half-to-even rather than half-away-from-zero.  Results for negative x
    are unspecified.
    """
    whole = int(x)
    fraction = x - whole
    round_up = fraction > 0.5 or (fraction == 0.5 and whole % 2 == 1)
    return whole + 1 if round_up else whole
def float_unpack(Q, size, le):
    """Convert a 32- or 64-bit unsigned integer created by float_pack
    back into a Python float.  (`le` is accepted but not used here.)"""
    if size == 8:
        MIN_EXP = -1021   # = sys.float_info.min_exp
        MAX_EXP = 1024    # = sys.float_info.max_exp
        MANT_DIG = 53     # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125    # C's FLT_MIN_EXP
        MAX_EXP = 128     # FLT_MAX_EXP
        MANT_DIG = 24     # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")
    if Q >> BITS:
        raise ValueError("input out of range")
    # Split the bit pattern into sign, biased-exponent and mantissa fields.
    sign_bit = Q >> (BITS - 1)
    exponent = (Q & ((1 << BITS - 1) - (1 << MANT_DIG - 1))) >> (MANT_DIG - 1)
    mantissa = Q & ((1 << MANT_DIG - 1) - 1)
    if exponent == MAX_EXP - MIN_EXP + 2:
        # All-ones exponent encodes nan (nonzero mantissa) or infinity.
        value = float('nan') if mantissa else float('inf')
    elif exponent == 0:
        # Zero exponent encodes zero or a subnormal value.
        value = math.ldexp(float(mantissa), MIN_EXP - MANT_DIG)
    else:
        # Normal value: restore the implicit leading mantissa bit.
        mantissa += 1 << (MANT_DIG - 1)
        value = math.ldexp(float(mantissa), exponent + MIN_EXP - MANT_DIG - 1)
    return -value if sign_bit else value
def float_pack(x, size):
    """Convert Python float `x` into a `size`-byte (4 or 8) IEEE bit
    pattern, returned as an unsigned integer with the same representation."""
    if size == 8:
        MIN_EXP = -1021   # = sys.float_info.min_exp
        MAX_EXP = 1024    # = sys.float_info.max_exp
        MANT_DIG = 53     # = sys.float_info.mant_dig
        BITS = 64
    elif size == 4:
        MIN_EXP = -125    # C's FLT_MIN_EXP
        MAX_EXP = 128     # FLT_MAX_EXP
        MANT_DIG = 24     # FLT_MANT_DIG
        BITS = 32
    else:
        raise ValueError("invalid size value")
    negative = math.copysign(1.0, x) < 0.0
    if math.isinf(x):
        mantissa = 0
        exponent = MAX_EXP - MIN_EXP + 2
    elif math.isnan(x):
        mantissa = 1 << (MANT_DIG - 2)  # one of several possible nan payloads
        exponent = MAX_EXP - MIN_EXP + 2
    elif x == 0.0:
        mantissa = 0
        exponent = 0
    else:
        m, e = math.frexp(abs(x))  # abs(x) == m * 2**e, 0.5 <= m < 1
        exponent = e - (MIN_EXP - 1)
        if exponent > 0:
            # Normal case: the leading mantissa bit is implicit, drop it.
            mantissa = round_to_nearest(m * (1 << MANT_DIG))
            mantissa -= 1 << (MANT_DIG - 1)
        else:
            # Subnormal case.
            if exponent + MANT_DIG - 1 >= 0:
                mantissa = round_to_nearest(m * (1 << (exponent + MANT_DIG - 1)))
            else:
                mantissa = 0
            exponent = 0
        # Rounding may have produced a full MANT_DIG-bit mantissa: carry
        # the overflow into the exponent.
        assert 0 <= mantissa <= 1 << (MANT_DIG - 1)
        if mantissa == 1 << (MANT_DIG - 1):
            mantissa = 0
            exponent += 1
        # Raise on overflow (in some circumstances, may want to return
        # infinity instead).
        if exponent >= MAX_EXP - MIN_EXP + 2:
            raise OverflowError("float too large to pack in this format")
    # Sanity-check the fields before assembling them.
    assert 0 <= mantissa < 1 << (MANT_DIG - 1)
    assert 0 <= exponent <= MAX_EXP - MIN_EXP + 2
    assert 0 <= negative <= 1
    return ((negative << (BITS - 1)) | (exponent << (MANT_DIG - 1))) | mantissa
# Standard-size format table: maps a format code to its byte size, alignment
# and pack/unpack helpers.  Despite the name it is shared by every byte
# order — formatmode below supplies the endianness separately, and this
# pure-Python implementation only supports the standard (non-native) sizes.
big_endian_format = {
    'x':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'b':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'B':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'c':{ 'size' : 1, 'alignment' : 0, 'pack' : pack_char, 'unpack' : unpack_char},
    's':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'p':{ 'size' : 1, 'alignment' : 0, 'pack' : None, 'unpack' : None},
    'h':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'H':{ 'size' : 2, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'i':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'I':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'l':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'L':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_signed_int, 'unpack' : unpack_signed_int},
    'Q':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_unsigned_int, 'unpack' : unpack_int},
    'f':{ 'size' : 4, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
    'd':{ 'size' : 8, 'alignment' : 0, 'pack' : pack_float, 'unpack' : unpack_float},
    }
default = big_endian_format
# Maps the optional leading byte-order character of a format string to
# (format table, byte-order name); '@' is also used when no prefix is given.
formatmode={ '<' : (default, 'little'),
             '>' : (default, 'big'),
             '!' : (default, 'big'),
             '=' : (default, sys.byteorder),
             '@' : (default, sys.byteorder)
           }
def getmode(fmt):
    """Split the optional byte-order prefix off *fmt*.

    Returns a (format table, endianness name, start index) triple, where
    the start index points at the first real format character.  An empty
    string, or a string with no recognised prefix, falls back to native
    ('@') mode with the scan starting at index 0.
    """
    prefix = fmt[0:1]
    if prefix in formatmode:
        table, order = formatmode[prefix]
        return table, order, 1
    table, order = formatmode['@']
    return table, order, 0
def getNum(fmt, i):
    """Parse an optional decimal repeat count out of *fmt* starting at *i*.

    Returns a (count, new_index) pair; count is None when fmt[i] is not a
    digit.  A format string that ends while digits are still being read
    raises IndexError, matching the original behaviour for malformed input.

    Fixes: compares against None with ``is`` (identity), and uses a chained
    comparison for the digit test instead of the parenthesised and-pair.
    """
    num = None
    cur = fmt[i]
    while '0' <= cur <= '9':
        # Accumulate the decimal value one digit at a time.
        if num is None:
            num = int(cur)
        else:
            num = 10 * num + int(cur)
        i += 1
        cur = fmt[i]
    return num, i
def calcsize(fmt):
    """calcsize(fmt) -> int
    Return size of C struct described by format string fmt.
    See struct.__doc__ for more on format strings."""
    formatdef, endianness, i = getmode(fmt)
    num = 0
    result = 0
    while i < len(fmt):
        # An optional decimal prefix gives the repeat count for the code.
        num, i = getNum(fmt, i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        # A repeat count multiplies the element size; no count means one
        # element.  (Fixed: identity comparison with None, not ``!=``.)
        if num is not None:
            result += num * format['size']
        else:
            result += format['size']
        num = 0
        i += 1
    return result
def pack(fmt,*args):
    """pack(fmt, v1, v2, ...) -> string
    Return string containing values v1, v2, ... packed according to fmt.
    See struct.__doc__ for more on format strings."""
    formatdef,endianness,i = getmode(fmt)
    args = list(args)
    n_args = len(args)
    result = []
    while i<len(fmt):
        # Optional decimal repeat count before each format character.
        num,i = getNum(fmt,i)
        cur = fmt[i]
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        # num_s keeps the raw count (0 when absent) while num is normalised
        # to 1 so the generic branch below always loops at least once.
        if num == None :
            num_s = 0
            num = 1
        else:
            num_s = num
        if cur == 'x':
            # Pad code: emit num zero bytes, consume no arguments.
            result += [b'\0'*num]
        elif cur == 's':
            if isinstance(args[0], bytes):
                # Fixed-width string: truncate or NUL-pad to exactly num bytes.
                padding = num - len(args[0])
                result += [args[0][:num] + b'\0'*padding]
                args.pop(0)
            else:
                raise StructError("arg for string format not a string")
        elif cur == 'p':
            if isinstance(args[0], bytes):
                # Pascal string: one length byte followed by at most num-1
                # data bytes, NUL-padded to a total field width of num.
                padding = num - len(args[0]) - 1

                if padding > 0:
                    result += [bytes([len(args[0])]) + args[0][:num-1] + b'\0'*padding]
                else:
                    # String fills or overflows the field: the stored length
                    # byte is clamped to num-1, capped at 255 for wide fields.
                    if num<255:
                        result += [bytes([num-1]) + args[0][:num-1]]
                    else:
                        result += [bytes([255]) + args[0][:num-1]]
                args.pop(0)
            else:
                raise StructError("arg for string format not a string")
        else:
            if len(args) < num:
                raise StructError("insufficient arguments to pack")
            # Generic numeric codes: delegate to the per-format pack codec.
            for var in args[:num]:
                result += [format['pack'](var,format['size'],endianness)]
            args=args[num:]
        num = None
        i += 1
    if len(args) != 0:
        raise StructError("too many arguments for pack format")
    return b''.join(result)
def unpack(fmt,data):
    """unpack(fmt, string) -> (v1, v2, ...)
    Unpack the string, containing packed C structure data, according
    to fmt. Requires len(string)==calcsize(fmt).
    See struct.__doc__ for more on format strings."""
    formatdef,endianness,i = getmode(fmt)
    j = 0
    num = 0
    result = []
    length= calcsize(fmt)
    # The whole buffer must match the format's size exactly.
    if length != len (data):
        raise StructError("unpack str size does not match format")
    while i<len(fmt):
        num,i=getNum(fmt,i)
        cur = fmt[i]
        i += 1
        try:
            format = formatdef[cur]
        except KeyError:
            raise StructError("%s is not a valid format" % cur)
        # Treat a missing (None) or zero repeat count as one element.
        if not num :
            num = 1

        if cur == 'x':
            # Pad bytes: skip without producing a value.
            j += num
        elif cur == 's':
            # Fixed-width string: one value spanning num bytes.
            result.append(data[j:j+num])
            j += num
        elif cur == 'p':
            # Pascal string: first byte is the stored length, clamped so
            # the extracted slice never exceeds the num-byte field.
            n=data[j]
            if n >= num:
                n = num-1
            result.append(data[j+1:j+n+1])
            j += num
        else:
            # Generic numeric codes: one decoded value per repeat.
            for n in range(num):
                result += [format['unpack'](data,j,format['size'],endianness)]
                j += format['size']

    return tuple(result)
def pack_into(fmt, buf, offset, *args):
    # Pack the values and splice them into a writable buffer at `offset`.
    # NOTE(review): relies on the `buffer` builtin (Python 2 / PyPy); on
    # Python 3 this would need memoryview — confirm the target runtime.
    data = pack(fmt, *args)
    buffer(buf)[offset:offset+len(data)] = data
def unpack_from(fmt, buf, offset=0):
    # Unpack calcsize(fmt) bytes starting at `offset` within `buf`.
    # NOTE(review): uses the Python 2 / PyPy `buffer` builtin, and `error`
    # is presumably the module's struct-error alias defined earlier in the
    # file — confirm both exist in the hosting runtime.
    size = calcsize(fmt)
    data = buffer(buf)[offset:offset+size]
    if len(data) != size:
        raise error("unpack_from requires a buffer of at least %d bytes"
                    % (size,))
    return unpack(fmt, data)
def _clearcache():
    "Clear the internal cache."
    # No cache in this implementation
    # Provided only for API compatibility with CPython's struct module.
| gpl-3.0 |
bpgc-cte/python2017 | Week 7/django/lib/python3.6/site-packages/django/core/management/commands/runserver.py | 39 | 6518 | from __future__ import unicode_literals
import errno
import os
import re
import socket
import sys
from datetime import datetime
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from django.core.servers.basehttp import (
WSGIServer, get_internal_wsgi_application, run,
)
from django.utils import autoreload, six
from django.utils.encoding import force_text, get_system_encoding
naiveip_re = re.compile(r"""^(?:
(?P<addr>
(?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) | # IPv4 address
(?P<ipv6>\[[a-fA-F0-9:]+\]) | # IPv6 address
(?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*) # FQDN
):)?(?P<port>\d+)$""", re.X)
class Command(BaseCommand):
    help = "Starts a lightweight Web server for development."

    # Validation is called explicitly each time the server is reloaded.
    requires_system_checks = False
    leave_locale_alone = True
    # Subclasses (e.g. runserver_plus-style commands) override these three
    # to change the port, URL scheme, or server implementation.
    default_port = '8000'
    protocol = 'http'
    server_cls = WSGIServer

    def add_arguments(self, parser):
        # Register CLI options; `addrport` is optional and defaults to
        # 127.0.0.1:8000 (resolved in handle()).
        parser.add_argument(
            'addrport', nargs='?',
            help='Optional port number, or ipaddr:port'
        )
        parser.add_argument(
            '--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
            help='Tells Django to use an IPv6 address.',
        )
        parser.add_argument(
            '--nothreading', action='store_false', dest='use_threading', default=True,
            help='Tells Django to NOT use threading.',
        )
        parser.add_argument(
            '--noreload', action='store_false', dest='use_reloader', default=True,
            help='Tells Django to NOT use the auto-reloader.',
        )

    def execute(self, *args, **options):
        if options['no_color']:
            # We rely on the environment because it's currently the only
            # way to reach WSGIRequestHandler. This seems an acceptable
            # compromise considering `runserver` runs indefinitely.
            os.environ[str("DJANGO_COLORS")] = str("nocolor")
        super(Command, self).execute(*args, **options)

    def get_handler(self, *args, **options):
        """
        Returns the default WSGI handler for the runner.
        """
        return get_internal_wsgi_application()

    def handle(self, *args, **options):
        # Validate settings and the addr:port argument, then start serving.
        from django.conf import settings
        if not settings.DEBUG and not settings.ALLOWED_HOSTS:
            raise CommandError('You must set settings.ALLOWED_HOSTS if DEBUG is False.')

        self.use_ipv6 = options['use_ipv6']
        if self.use_ipv6 and not socket.has_ipv6:
            raise CommandError('Your Python does not support IPv6.')
        # _raw_ipv6 records whether the address must be displayed/bound in
        # bracketed IPv6 form.
        self._raw_ipv6 = False
        if not options['addrport']:
            self.addr = ''
            self.port = self.default_port
        else:
            m = re.match(naiveip_re, options['addrport'])
            if m is None:
                raise CommandError('"%s" is not a valid port number '
                                   'or address:port pair.' % options['addrport'])
            self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
            if not self.port.isdigit():
                raise CommandError("%r is not a valid port number." % self.port)
            if self.addr:
                if _ipv6:
                    # Strip the surrounding brackets from an IPv6 literal.
                    self.addr = self.addr[1:-1]
                    self.use_ipv6 = True
                    self._raw_ipv6 = True
                elif self.use_ipv6 and not _fqdn:
                    raise CommandError('"%s" is not a valid IPv6 address.' % self.addr)
        if not self.addr:
            # No address supplied: bind the loopback for the chosen family.
            self.addr = '::1' if self.use_ipv6 else '127.0.0.1'
            self._raw_ipv6 = self.use_ipv6
        self.run(**options)

    def run(self, **options):
        """
        Runs the server, using the autoreloader if needed
        """
        use_reloader = options['use_reloader']

        if use_reloader:
            # autoreload restarts inner_run in a child process on changes.
            autoreload.main(self.inner_run, None, options)
        else:
            self.inner_run(None, **options)

    def inner_run(self, *args, **options):
        # If an exception was silenced in ManagementUtility.execute in order
        # to be raised in the child process, raise it now.
        autoreload.raise_last_exception()

        threading = options['use_threading']
        # 'shutdown_message' is a stealth option.
        shutdown_message = options.get('shutdown_message', '')
        quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'

        self.stdout.write("Performing system checks...\n\n")
        self.check(display_num_errors=True)
        # Need to check migrations here, so can't use the
        # requires_migrations_check attribute.
        self.check_migrations()
        now = datetime.now().strftime('%B %d, %Y - %X')
        if six.PY2:
            now = now.decode(get_system_encoding())
        self.stdout.write(now)
        self.stdout.write((
            "Django version %(version)s, using settings %(settings)r\n"
            "Starting development server at %(protocol)s://%(addr)s:%(port)s/\n"
            "Quit the server with %(quit_command)s.\n"
        ) % {
            "version": self.get_version(),
            "settings": settings.SETTINGS_MODULE,
            "protocol": self.protocol,
            "addr": '[%s]' % self.addr if self._raw_ipv6 else self.addr,
            "port": self.port,
            "quit_command": quit_command,
        })

        try:
            handler = self.get_handler(*args, **options)
            run(self.addr, int(self.port), handler,
                ipv6=self.use_ipv6, threading=threading, server_cls=self.server_cls)
        except socket.error as e:
            # Use helpful error messages instead of ugly tracebacks.
            ERRORS = {
                errno.EACCES: "You don't have permission to access that port.",
                errno.EADDRINUSE: "That port is already in use.",
                errno.EADDRNOTAVAIL: "That IP address can't be assigned to.",
            }
            try:
                error_text = ERRORS[e.errno]
            except KeyError:
                error_text = force_text(e)
            self.stderr.write("Error: %s" % error_text)
            # Need to use an OS exit because sys.exit doesn't work in a thread
            os._exit(1)
        except KeyboardInterrupt:
            if shutdown_message:
                self.stdout.write(shutdown_message)
            sys.exit(0)
# Kept for backward compatibility: older code imported the class under
# this name before it was renamed to plain `Command`.
BaseRunserverCommand = Command
| mit |
chuangWu/linux | tools/perf/scripts/python/syscall-counts.py | 1996 | 1700 | # system call counts
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide system call totals, broken down by syscall.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import syscall_name
usage = "perf script -s syscall-counts.py [comm]\n";

# Optional command-name filter taken from the first CLI argument.
for_comm = None

if len(sys.argv) > 2:
    sys.exit(usage)

if len(sys.argv) > 1:
    for_comm = sys.argv[1]

# Maps syscall id -> invocation count; autodict creates entries on demand.
syscalls = autodict()
def trace_begin():
    # Called once by perf before any events are processed.
    print "Press control+C to stop and show the summary"
def trace_end():
    # Called once by perf after the last event; emit the summary table.
    print_syscall_totals()
def raw_syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        common_callchain, id, args):
    # Per-event hook: count one hit for syscall `id`, honouring the
    # optional [comm] process-name filter.
    if for_comm is not None:
        if common_comm != for_comm:
            return
    try:
        syscalls[id] += 1
    except TypeError:
        # First hit for this id: autodict handed back a fresh non-int
        # node, so start the counter at one.
        syscalls[id] = 1
def syscalls__sys_enter(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, args):
    # Legacy tracepoint entry point (no callchain arg); delegate to the
    # raw handler, which tolerates the missing keyword via **locals().
    raw_syscalls__sys_enter(**locals())
def print_syscall_totals():
    # Emit a per-syscall count table, most frequent syscall first.
    if for_comm is not None:
        print "\nsyscall events for %s:\n\n" % (for_comm),
    else:
        print "\nsyscall events:\n\n",

    print "%-40s %10s\n" % ("event", "count"),
    print "%-40s %10s\n" % ("----------------------------------------", \
                            "-----------"),

    # Sort by (count, id) descending; Python 2 tuple-parameter lambda.
    for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \
                          reverse = True):
        print "%-40s %10d\n" % (syscall_name(id), val),
| gpl-2.0 |
guorendong/iridium-browser-ubuntu | net/tools/net_docs/net_docs.py | 15 | 3541 | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Reads, parses, and (optionally) writes as HTML the contents of Markdown
files passed as arguments. Intended for rendering network stack documentation
stored as Markdown in the source tree to a human-readable format."""
import argparse
import os.path
import sys
def nth_parent_directory(path, n):
    """Return *path* with its last *n* components stripped off."""
    result = path
    for _ in range(n):
        result = os.path.dirname(result)
    return result
# Go up the directory tree from this script and add src/third_party to sys.path
# so "import markdown" can find it in src/third_party/markdown.
SCRIPT_PATH = os.path.abspath(__file__)
# Four levels up from src/net/tools/net_docs/net_docs.py is src/.
SRC_PATH = nth_parent_directory(SCRIPT_PATH, 4)
THIRD_PARTY_PATH = os.path.join(SRC_PATH, 'third_party')
# Prepend so the bundled copy wins over any system-installed markdown.
sys.path.insert(0, THIRD_PARTY_PATH)
import markdown
def ReadFile(filename):
    """Return the entire contents of *filename* as a single string."""
    with open(filename, 'r') as stream:
        return stream.read()
def WriteFile(filename, contents):
    """Write *contents* to *filename*, creating parent directories as needed.

    Fixes two robustness problems in the original: os.mkdir failed when more
    than one level of the destination hierarchy was missing (os.makedirs
    creates the whole chain), and a bare filename (empty dirname) made the
    original attempt os.mkdir('') and crash.
    """
    directory = os.path.dirname(filename)
    if directory and not os.path.isdir(directory):
        os.makedirs(directory)
    with open(filename, 'w') as stream:
        stream.write(contents)
# Minimal HTML page shell that the rendered Markdown body is dropped into;
# filled in by FormatPage() via str.format with `title` and `body`.
TEMPLATE = """
<html>
<head>
<title>{title}</title>
</head>
<body>
{body}
</body>
</html>"""
def FormatPage(markdown_html, title):
    """Wrap rendered Markdown HTML in a complete page built from TEMPLATE."""
    # TODO(ttuttle): Add a navigation list / table of contents of available
    # Markdown files, perhaps?
    page = TEMPLATE.format(title=title, body=markdown_html)
    return page
def ProcessDocs(input_filenames, input_pathname, output_pathname):
    """Parse each Markdown file and optionally render it to HTML.

    When both input_pathname and output_pathname are given, every input file
    is rendered into the matching relative location under output_pathname
    (with '.html' appended).  When either is missing, the files are merely
    read and parsed, which verifies that they exist and hold valid Markdown.

    Args:
      input_filenames: Filenames (absolute, or relative to $PWD) of the
          Markdown files to parse and possibly render.
      input_pathname: Base directory of the input files, used to compute
          each file's relative output location.
      output_pathname: Directory that receives the rendered HTML files.

    Returns:
      nothing

    Raises:
      IOError: if any file operation fails (e.g. a missing input file).
    """
    render = (input_pathname is not None) and (output_pathname is not None)

    parser = markdown.Markdown()

    for source in input_filenames:
        # reset() clears per-document parser state between files.
        html_body = parser.reset().convert(ReadFile(source))
        if not render:
            continue
        page = FormatPage(html_body, title=source)
        relative = os.path.relpath(source, start=input_pathname)
        destination = os.path.join(output_pathname, relative) + '.html'
        WriteFile(destination, page)
def main():
    """Entry point: parse command-line flags, then process the named files."""
    arg_parser = argparse.ArgumentParser(
        description='Parse and render Markdown documentation')
    arg_parser.add_argument(
        '--input_path', default=None,
        help="Input path for Markdown; required only if output_path set")
    arg_parser.add_argument(
        '--output_path', default=None,
        help="Output path for rendered HTML; if unspecified, won't output")
    arg_parser.add_argument('filenames', nargs=argparse.REMAINDER)
    options = arg_parser.parse_args()

    ProcessDocs(options.filenames, options.input_path, options.output_path)
    return 0
# Allow the module to run as a script; exit status comes from main().
if __name__ == '__main__':
    sys.exit(main())
| bsd-3-clause |
ChronoMonochrome/android_external_chromium_org | tools/telemetry/telemetry/page/actions/scroll_unittest.py | 23 | 4546 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
from telemetry.core import util
from telemetry.page import page as page_module
from telemetry.page.actions import scroll
from telemetry.unittest import tab_test_case
class ScrollActionTest(tab_test_case.TabTestCase):
    """Browser-level tests for the synthetic scroll page action."""

    def setUp(self):
        # The scroll action drives the compositor through the GPU
        # benchmarking extension, which must be enabled at browser startup.
        self._extra_browser_args.append('--enable-gpu-benchmarking')
        super(ScrollActionTest, self).setUp()

    def CreateAndNavigateToPageFromUnittestDataDir(
            self, filename, page_attributes):
        # Serve the unittest data dir over the embedded HTTP server, load
        # `filename`, and block until the document is fully ready.
        self._browser.SetHTTPServerDirectories(util.GetUnittestDataDir())
        page = page_module.Page(
            self._browser.http_server.UrlOf(filename),
            None,  # In this test, we don't need a page set.
            attributes=page_attributes)

        self._tab.Navigate(page.url)
        self._tab.WaitForDocumentReadyStateToBeComplete()

        return page

    def testScrollAction(self):
        # Verifies a full-page scroll fires the measuring hooks and ends
        # with the viewport at (or within 1px of) the bottom of the page.
        page = self.CreateAndNavigateToPageFromUnittestDataDir(
            "blank.html",
            page_attributes={"smoothness": {
                "action": "scroll"
            }})
        # Make page bigger than window so it's scrollable.
        self._tab.ExecuteJavaScript("""document.body.style.height =
            (2 * window.innerHeight + 1) + 'px';""")

        self.assertEquals(
            self._tab.EvaluateJavaScript("""document.documentElement.scrollTop
                || document.body.scrollTop"""), 0)

        i = scroll.ScrollAction()
        i.WillRunAction(page, self._tab)

        # Instrument the action's begin/end hooks so the test can assert
        # that both fired.
        self._tab.ExecuteJavaScript("""
            window.__scrollAction.beginMeasuringHook = function() {
                window.__didBeginMeasuring = true;
            };
            window.__scrollAction.endMeasuringHook = function() {
                window.__didEndMeasuring = true;
            };""")
        i.RunAction(page, self._tab, None)

        self.assertTrue(self._tab.EvaluateJavaScript('window.__didBeginMeasuring'))
        self.assertTrue(self._tab.EvaluateJavaScript('window.__didEndMeasuring'))

        # Allow for roundoff error in scaled viewport.
        scroll_position = self._tab.EvaluateJavaScript(
            """(document.documentElement.scrollTop || document.body.scrollTop)
            + window.innerHeight""")
        scroll_height = self._tab.EvaluateJavaScript('document.body.scrollHeight')
        difference = scroll_position - scroll_height
        self.assertTrue(abs(difference) <= 1,
                        msg='scroll_position=%d; scroll_height=%d' %
                        (scroll_position, scroll_height))

    def testBoundingClientRect(self):
        self.CreateAndNavigateToPageFromUnittestDataDir('blank.html', {})
        with open(os.path.join(os.path.dirname(__file__),
                               'gesture_common.js')) as f:
            js = f.read()
            self._tab.ExecuteJavaScript(js)

        # Verify that the rect returned by getBoundingVisibleRect() in scroll.js is
        # completely contained within the viewport. Scroll events dispatched by the
        # scrolling API use the center of this rect as their location, and this
        # location needs to be within the viewport bounds to correctly decide
        # between main-thread and impl-thread scroll. If the scrollable area were
        # not clipped to the viewport bounds, then the instance used here (the
        # scrollable area being more than twice as tall as the viewport) would
        # result in a scroll location outside of the viewport bounds.
        self._tab.ExecuteJavaScript("""document.body.style.height =
            (2 * window.innerHeight + 1) + 'px';""")

        rect_top = int(self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).top'))
        rect_height = int(self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).height'))
        rect_bottom = rect_top + rect_height

        rect_left = int(self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).left'))
        rect_width = int(self._tab.EvaluateJavaScript(
            '__GestureCommon_GetBoundingVisibleRect(document.body).width'))
        rect_right = rect_left + rect_width

        viewport_height = int(self._tab.EvaluateJavaScript('window.innerHeight'))
        viewport_width = int(self._tab.EvaluateJavaScript('window.innerWidth'))

        self.assertTrue(rect_bottom <= viewport_height,
                        msg='%s + %s <= %s' % (rect_top, rect_height, viewport_height))
        self.assertTrue(rect_right <= viewport_width,
                        msg='%s + %s <= %s' % (rect_left, rect_width, viewport_width))
| bsd-3-clause |
takeshineshiro/python-novaclient | novaclient/tests/unit/v2/fakes.py | 3 | 85577 | # Copyright (c) 2011 X.commerce, a business unit of eBay Inc.
# Copyright 2011 OpenStack Foundation
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import mock
from oslo_utils import strutils
import six
from six.moves.urllib import parse
from novaclient import client as base_client
from novaclient import exceptions
from novaclient.tests.unit import fakes
from novaclient.tests.unit import utils
from novaclient.v2 import client
class FakeClient(fakes.FakeClient, client.Client):
    """Nova v2 client whose HTTP transport is replaced by FakeHTTPClient."""

    def __init__(self, api_version=None, *args, **kwargs):
        # Hard-coded credentials: nothing is ever sent over the wire.
        client.Client.__init__(self, 'username', 'password',
                               'project_id', 'auth_url',
                               extensions=kwargs.get('extensions'))
        self.api_version = api_version
        # Swap the real HTTP transport for the canned-response fake.
        self.client = FakeHTTPClient(**kwargs)
class FakeHTTPClient(base_client.HTTPClient):
def __init__(self, **kwargs):
    # Deliberately does NOT call base_client.HTTPClient.__init__: the fake
    # only needs canned attribute values plus `callstack`, a list recording
    # every (method, url, body) triple routed through _cs_request.
    self.username = 'username'
    self.password = 'password'
    self.auth_url = 'auth_url'
    self.tenant_id = 'tenant_id'
    self.callstack = []
    self.projectid = 'projectid'
    self.user = 'user'
    self.region_name = 'region_name'
    self.endpoint_type = 'endpoint_type'
    self.service_type = 'service_type'
    self.service_name = 'service_name'
    self.volume_service_name = 'volume_service_name'
    self.timings = 'timings'
    self.bypass_url = 'bypass_url'
    self.os_cache = 'os_cache'
    self.http_log_debug = 'http_log_debug'
    self.last_request_id = None
    self.management_url = self.get_endpoint()
def _cs_request(self, url, method, **kwargs):
    """Dispatch a fake HTTP call to a same-named handler on this class.

    The URL path is "munged" into a method name, '<verb>_<path>' with
    every '/', '.', '-', ' ', '!' and '@' replaced by '_' (for example
    GET /os-agents -> get_os_agents).  Raises AssertionError when no
    handler with that name exists.
    """
    # Check that certain things are called correctly
    if method in ['GET', 'DELETE']:
        assert 'body' not in kwargs
    elif method == 'PUT':
        assert 'body' in kwargs

    if url is not None:
        # Call the method
        # Query-string parameters are folded into kwargs for the handler.
        args = parse.parse_qsl(parse.urlparse(url)[4])
        kwargs.update(args)
        munged_url = url.rsplit('?', 1)[0]
        munged_url = munged_url.strip('/').replace('/', '_')
        munged_url = munged_url.replace('.', '_')
        munged_url = munged_url.replace('-', '_')
        munged_url = munged_url.replace(' ', '_')
        munged_url = munged_url.replace('!', '_')
        munged_url = munged_url.replace('@', '_')

        callback = "%s_%s" % (method.lower(), munged_url)

    if url is None or callback == "get_http:__nova_api:8774":
        # To get API version information, it is necessary to GET
        # a nova endpoint directly without "v2/<tenant-id>".
        callback = "get_versions"
    elif callback == "get_http:__nova_api:8774_v2_1":
        callback = "get_current_version"

    if not hasattr(self, callback):
        raise AssertionError('Called unknown API method: %s %s, '
                             'expected fakes method name: %s' %
                             (method, url, callback))

    # Note the call
    self.callstack.append((method, url, kwargs.get('body')))

    # Handlers return (status, headers, body); wrap it like requests would.
    status, headers, body = getattr(self, callback)(**kwargs)
    r = utils.TestResponse({
        "status_code": status,
        "text": body,
        "headers": headers,
    })
    return r, body
def get_endpoint(self):
    """Return the fixed fake management URL used by every test request."""
    endpoint = "http://nova-api:8774/v2.1/190a755eef2e4aac9f06aa6be9786385"
    return endpoint
def get_versions(self):
    # GET on the bare API root: advertises both v2.0 and v2.1 endpoints,
    # with v2.1 marked CURRENT and carrying a microversion range.
    return (200, {}, {
        "versions": [
            {"status": "SUPPORTED", "updated": "2011-01-21T11:33:21Z",
             "links": [{"href": "http://nova-api:8774/v2/",
                        "rel": "self"}],
             "min_version": "",
             "version": "",
             "id": "v2.0"},
            {"status": "CURRENT", "updated": "2013-07-23T11:33:21Z",
             "links": [{"href": "http://nova-api:8774/v2.1/",
                        "rel": "self"}],
             "min_version": "2.1",
             "version": "2.3",
             "id": "v2.1"}
        ]})
def get_current_version(self):
    # GET on the v2.1 endpoint itself: a single version document.
    return (200, {}, {
        "version": {"status": "CURRENT",
                    "updated": "2013-07-23T11:33:21Z",
                    "links": [{"href": "http://nova-api:8774/v2.1/",
                               "rel": "self"}],
                    "min_version": "2.1",
                    "version": "2.3",
                    "id": "v2.1"}})
#
# agents
#
def get_os_agents(self, **kw):
    # Two canned guest agents; the hypervisor echoes the query filter
    # (defaulting to 'kvm') so filtered listings look consistent.
    hypervisor = kw.get('hypervisor', 'kvm')
    return (200, {}, {
        'agents':
        [{'hypervisor': hypervisor,
          'os': 'win',
          'architecture': 'x86',
          'version': '7.0',
          'url': 'xxx://xxxx/xxx/xxx',
          'md5hash': 'add6bb58e139be103324d04d82d8f545',
          'id': 1},
         {'hypervisor': hypervisor,
          'os': 'linux',
          'architecture': 'x86',
          'version': '16.0',
          'url': 'xxx://xxxx/xxx/xxx1',
          'md5hash': 'add6bb58e139be103324d04d82d8f546',
          'id': 2}]})
def post_os_agents(self, body):
    # Create-agent: echo the requested hypervisor, canned everything else.
    return (200, {}, {'agent': {
        'url': '/xxx/xxx/xxx',
        'hypervisor': body['agent']['hypervisor'],
        'md5hash': 'add6bb58e139be103324d04d82d8f546',
        'version': '7.0',
        'architecture': 'x86',
        'os': 'win',
        'id': 1}})
def delete_os_agents_1(self, **kw):
    """Acknowledge deletion of agent 1 with an empty 202 response."""
    status, headers, body = 202, {}, None
    return (status, headers, body)
def put_os_agents_1(self, body, **kw):
    # Update-agent: canned new url/version/hash for agent 1.
    return (200, {}, {
        "agent": {"url": "/yyy/yyyy/yyyy",
                  "version": "8.0",
                  "md5hash": "add6bb58e139be103324d04d82d8f546",
                  'id': 1}})
#
# List all extensions
#
def get_extensions(self, **kw):
    # Four representative API extensions, enough to exercise alias and
    # namespace handling in the extension-listing code paths.
    exts = [
        {
            "alias": "NMN",
            "description": "Multiple network support",
            "links": [],
            "name": "Multinic",
            "namespace": ("http://docs.openstack.org/"
                          "compute/ext/multinic/api/v1.1"),
            "updated": "2011-06-09T00:00:00+00:00"
        },
        {
            "alias": "OS-DCF",
            "description": "Disk Management Extension",
            "links": [],
            "name": "DiskConfig",
            "namespace": ("http://docs.openstack.org/"
                          "compute/ext/disk_config/api/v1.1"),
            "updated": "2011-09-27T00:00:00+00:00"
        },
        {
            "alias": "OS-EXT-SRV-ATTR",
            "description": "Extended Server Attributes support.",
            "links": [],
            "name": "ExtendedServerAttributes",
            "namespace": ("http://docs.openstack.org/"
                          "compute/ext/extended_status/api/v1.1"),
            "updated": "2011-11-03T00:00:00+00:00"
        },
        {
            "alias": "OS-EXT-STS",
            "description": "Extended Status support",
            "links": [],
            "name": "ExtendedStatus",
            "namespace": ("http://docs.openstack.org/"
                          "compute/ext/extended_status/api/v1.1"),
            "updated": "2011-11-03T00:00:00+00:00"
        },
    ]
    return (200, {}, {
        "extensions": exts,
    })
#
# Limits
#
def get_limits(self, **kw):
    # Canned rate limits (per-verb, per-URI) and absolute quota limits.
    return (200, {}, {"limits": {
        "rate": [
            {
                "uri": "*",
                "regex": ".*",
                "limit": [
                    {
                        "value": 10,
                        "verb": "POST",
                        "remaining": 2,
                        "unit": "MINUTE",
                        "next-available": "2011-12-15T22:42:45Z"
                    },
                    {
                        "value": 10,
                        "verb": "PUT",
                        "remaining": 2,
                        "unit": "MINUTE",
                        "next-available": "2011-12-15T22:42:45Z"
                    },
                    {
                        "value": 100,
                        "verb": "DELETE",
                        "remaining": 100,
                        "unit": "MINUTE",
                        "next-available": "2011-12-15T22:42:45Z"
                    }
                ]
            },
            {
                "uri": "*/servers",
                "regex": "^/servers",
                "limit": [
                    {
                        "verb": "POST",
                        "value": 25,
                        "remaining": 24,
                        "unit": "DAY",
                        "next-available": "2011-12-15T22:42:45Z"
                    }
                ]
            }
        ],
        "absolute": {
            "maxTotalRAMSize": 51200,
            "maxServerMeta": 5,
            "maxImageMeta": 5,
            "maxPersonality": 5,
            "maxPersonalitySize": 10240
        },
    }})
#
# Servers
#
def get_servers(self, **kw):
    # Bare listing (GET /servers): id and name only.
    return (200, {}, {"servers": [
        {'id': 1234, 'name': 'sample-server'},
        {'id': 5678, 'name': 'sample-server2'}
    ]})
def get_servers_detail(self, **kw):
    # Detailed listing (GET /servers/detail): four canned servers covering
    # different shapes — named image, empty image, integer vs. UUID
    # flavor/image ids, zero/one/two security groups.  A fresh structure is
    # built on every call, so callers may safely mutate the result.
    return (200, {}, {"servers": [
        {
            "id": 1234,
            "name": "sample-server",
            "image": {
                "id": 2,
                "name": "sample image",
            },
            "flavor": {
                "id": 1,
                "name": "256 MB Server",
            },
            "hostId": "e4d909c290d0fb1ca068ffaddf22cbd0",
            "status": "BUILD",
            "progress": 60,
            "addresses": {
                "public": [
                    {
                        "version": 4,
                        "addr": "1.2.3.4",
                    },
                    {
                        "version": 4,
                        "addr": "5.6.7.8",
                    }],
                "private": [{
                    "version": 4,
                    "addr": "10.11.12.13",
                }],
            },
            "metadata": {
                "Server Label": "Web Head 1",
                "Image Version": "2.1"
            },
            "OS-EXT-SRV-ATTR:host": "computenode1",
            "security_groups": [{
                'id': 1, 'name': 'securitygroup1',
                'description': 'FAKE_SECURITY_GROUP',
                'tenant_id': '4ffc664c198e435e9853f2538fbcd7a7'
            }],
            "OS-EXT-MOD:some_thing": "mod_some_thing_value",
        },
        {
            "id": 5678,
            "name": "sample-server2",
            "image": {
                "id": 2,
                "name": "sample image",
            },
            "flavor": {
                "id": 1,
                "name": "256 MB Server",
            },
            "hostId": "9e107d9d372bb6826bd81d3542a419d6",
            "status": "ACTIVE",
            "addresses": {
                "public": [
                    {
                        "version": 4,
                        "addr": "4.5.6.7",
                    },
                    {
                        "version": 4,
                        "addr": "5.6.9.8",
                    }],
                "private": [{
                    "version": 4,
                    "addr": "10.13.12.13",
                }],
            },
            "metadata": {
                "Server Label": "DB 1"
            },
            "OS-EXT-SRV-ATTR:host": "computenode2",
            "security_groups": [
                {
                    'id': 1, 'name': 'securitygroup1',
                    'description': 'FAKE_SECURITY_GROUP',
                    'tenant_id': '4ffc664c198e435e9853f2538fbcd7a7'
                },
                {
                    'id': 2, 'name': 'securitygroup2',
                    'description': 'ANOTHER_FAKE_SECURITY_GROUP',
                    'tenant_id': '4ffc664c198e435e9853f2538fbcd7a7'
                }],
        },
        {
            "id": 9012,
            "name": "sample-server3",
            "image": "",
            "flavor": {
                "id": 1,
                "name": "256 MB Server",
            },
            "hostId": "9e107d9d372bb6826bd81d3542a419d6",
            "status": "ACTIVE",
            "addresses": {
                "public": [
                    {
                        "version": 4,
                        "addr": "4.5.6.7",
                    },
                    {
                        "version": 4,
                        "addr": "5.6.9.8",
                    }],
                "private": [{
                    "version": 4,
                    "addr": "10.13.12.13",
                }],
            },
            "metadata": {
                "Server Label": "DB 1"
            }
        },
        {
            "id": 9013,
            "name": "sample-server4",
            "flavor": {
                "id": '80645cf4-6ad3-410a-bbc8-6f3e1e291f51',
            },
            "image": {
                "id": '3e861307-73a6-4d1f-8d68-f68b03223032',
            },
            "hostId": "9e107d9d372bb6826bd81d3542a419d6",
            "status": "ACTIVE",
        },
    ]})
def post_servers(self, body, **kw):
    # Create-server: validate the payload shape, then return a canned
    # server; the special name 'some-bad-server' yields the errored
    # server 1235 so error paths can be exercised.
    assert set(body.keys()) <= set(['server', 'os:scheduler_hints'])
    fakes.assert_has_keys(
        body['server'],
        required=['name', 'imageRef', 'flavorRef'],
        optional=['metadata', 'personality'])
    if 'personality' in body['server']:
        for pfile in body['server']['personality']:
            fakes.assert_has_keys(pfile, required=['path', 'contents'])
    if body['server']['name'] == 'some-bad-server':
        return (202, {}, self.get_servers_1235()[2])
    else:
        return (202, {}, self.get_servers_1234()[2])
def post_os_volumes_boot(self, body, **kw):
    # Boot-from-volume: exactly one of block_device_mapping /
    # block_device_mapping_v2 must be present (an exclusive-or check).
    assert set(body.keys()) <= set(['server', 'os:scheduler_hints'])
    fakes.assert_has_keys(
        body['server'],
        required=['name', 'flavorRef'],
        optional=['imageRef'])

    # Require one, and only one, of the keys for bdm
    if 'block_device_mapping' not in body['server']:
        if 'block_device_mapping_v2' not in body['server']:
            raise AssertionError(
                "missing required keys: 'block_device_mapping'"
            )
    elif 'block_device_mapping_v2' in body['server']:
        raise AssertionError("found extra keys: 'block_device_mapping'")

    return (202, {}, self.get_servers_9012()[2])
def get_servers_1234(self, **kw):
    """Server 1234: the first entry of the detailed listing."""
    servers = self.get_servers_detail()[2]['servers']
    return (200, {}, {'server': servers[0]})
def get_servers_1235(self, **kw):
    """Server 1235: server 1234's data re-labelled as a failed build."""
    response = {'server': self.get_servers_detail()[2]['servers'][0]}
    response['server'].update({
        'id': 1235,
        'status': 'error',
        'fault': {'message': 'something went wrong!'},
    })
    return (200, {}, response)
def get_servers_5678(self, **kw):
    """Server 5678: the second entry of the detailed listing."""
    servers = self.get_servers_detail()[2]['servers']
    return (200, {}, {'server': servers[1]})
def get_servers_9012(self, **kw):
    """Server 9012: the third entry of the detailed listing."""
    servers = self.get_servers_detail()[2]['servers']
    return (200, {}, {'server': servers[2]})
def get_servers_9013(self, **kw):
    """Server 9013: the fourth entry of the detailed listing."""
    servers = self.get_servers_detail()[2]['servers']
    return (200, {}, {'server': servers[3]})
def put_servers_1234(self, body, **kw):
    # Update-server: the body may only carry a 'server' dict with
    # optional name/adminPass; the body is echoed back with 204.
    assert list(body) == ['server']
    fakes.assert_has_keys(body['server'], optional=['name', 'adminPass'])
    return (204, {}, body)
def delete_os_server_groups_12345(self, **kw):
    """Acknowledge deletion of server group 12345 (empty 202 response)."""
    return tuple([202, {}, None])
def delete_os_server_groups_56789(self, **kw):
    # Fake DELETE of server group 56789: empty 202 (accepted) response.
    return (202, {}, None)
def delete_servers_1234(self, **kw):
    # Fake DELETE /servers/1234: empty 202 (accepted) response.
    return (202, {}, None)
def delete_servers_5678(self, **kw):
    # Fake DELETE /servers/5678: empty 202 (accepted) response.
    return (202, {}, None)
def delete_servers_1234_metadata_test_key(self, **kw):
    # Delete metadata item 'test_key' from server 1234: empty 204.
    return (204, {}, None)
def delete_servers_1234_metadata_key1(self, **kw):
    # Delete metadata item 'key1' from server 1234: empty 204.
    return (204, {}, None)
def delete_servers_1234_metadata_key2(self, **kw):
    # Delete metadata item 'key2' from server 1234: empty 204.
    return (204, {}, None)
def post_servers_1234_metadata(self, **kw):
    # Set metadata on server 1234: echo a canned metadata dict.
    return (204, {}, {'metadata': {'test_key': 'test_value'}})
def put_servers_1234_metadata_test_key(self, **kw):
    # Update the single metadata item 'test_key' on server 1234.
    return (200, {}, {'meta': {'test_key': 'test_value'}})
def get_servers_1234_diagnostics(self, **kw):
    # Canned diagnostics payload for GET /servers/1234/diagnostics.
    return (200, {}, {'data': 'Fake diagnostics'})
def post_servers_uuid1_metadata(self, **kw):
    # Set metadata on server uuid1: echo a canned metadata dict.
    return (204, {}, {'metadata': {'key1': 'val1'}})
def post_servers_uuid2_metadata(self, **kw):
    # Set metadata on server uuid2: echo a canned metadata dict.
    return (204, {}, {'metadata': {'key1': 'val1'}})
def post_servers_uuid3_metadata(self, **kw):
    # Set metadata on server uuid3: echo a canned metadata dict.
    return (204, {}, {'metadata': {'key1': 'val1'}})
def post_servers_uuid4_metadata(self, **kw):
    # Set metadata on server uuid4: echo a canned metadata dict.
    return (204, {}, {'metadata': {'key1': 'val1'}})
def delete_servers_uuid1_metadata_key1(self, **kw):
    # NOTE(review): response body reuses the diagnostics payload — looks
    # copy-pasted, but kept as-is since tests may assert on it.
    return (200, {}, {'data': 'Fake diagnostics'})
def delete_servers_uuid2_metadata_key1(self, **kw):
    # Same copy-pasted diagnostics payload as the uuid1 variant.
    return (200, {}, {'data': 'Fake diagnostics'})
def delete_servers_uuid3_metadata_key1(self, **kw):
    # Same copy-pasted diagnostics payload as the uuid1 variant.
    return (200, {}, {'data': 'Fake diagnostics'})
def delete_servers_uuid4_metadata_key1(self, **kw):
    # Same copy-pasted diagnostics payload as the uuid1 variant.
    return (200, {}, {'data': 'Fake diagnostics'})
def get_servers_1234_os_security_groups(self, **kw):
    # Security groups attached to server 1234: one canned group, no rules.
    return (200, {}, {
        "security_groups": [{
            'id': 1,
            'name': 'securitygroup1',
            'description': 'FAKE_SECURITY_GROUP',
            'tenant_id': '4ffc664c198e435e9853f2538fbcd7a7',
            'rules': []}]
    })
#
# Server Addresses
#
def get_servers_1234_ips(self, **kw):
    """Expose server 1234's address map under the /ips resource.

    Fixed to index [2] (the response body) of get_servers_1234's
    (status, headers, body) tuple; the old index [1] picked the empty
    headers dict and raised KeyError whenever this fake was hit.  Sibling
    fakes (e.g. get_servers_1234 itself) consistently use [2].
    """
    return (200, {}, {
        'addresses':
        self.get_servers_1234()[2]['server']['addresses']})
def get_servers_1234_ips_public(self, **kw):
    """Return only the 'public' network's addresses for server 1234.

    Fixed to index [2] (the response body) rather than [1] (the empty
    headers dict), which could only raise KeyError.
    """
    return (200, {}, {
        'public':
        self.get_servers_1234_ips()[2]['addresses']['public']})
def get_servers_1234_ips_private(self, **kw):
    """Return only the 'private' network's addresses for server 1234.

    Fixed to index [2] (the response body) rather than [1] (the empty
    headers dict), which could only raise KeyError.
    """
    return (
        200, {},
        {'private':
         self.get_servers_1234_ips()[2]['addresses']['private']})
def delete_servers_1234_ips_public_1_2_3_4(self, **kw):
    # Remove public address 1.2.3.4 from server 1234: empty 202.
    return (202, {}, None)
#
# Server password
#
# Testing with the following password and key
#
# Clear password: FooBar123
#
# RSA Private Key: novaclient/tests/unit/idfake.pem
#
# Encrypted password
# OIuEuQttO8Rk93BcKlwHQsziDAnkAm/V6V8VPToA8ZeUaUBWwS0gwo2K6Y61Z96r
# qG447iRz0uTEEYq3RAYJk1mh3mMIRVl27t8MtIecR5ggVVbz1S9AwXJQypDKl0ho
# QFvhCBcMWPohyGewDJOhDbtuN1IoFI9G55ZvFwCm5y7m7B2aVcoLeIsJZE4PLsIw
# /y5a6Z3/AoJZYGG7IH5WN88UROU3B9JZGFB2qtPLQTOvDMZLUhoPRIJeHiVSlo1N
# tI2/++UsXVg3ow6ItqCJGgdNuGG5JB+bslDHWPxROpesEIHdczk46HCpHQN8f1sk
# Hi/fmZZNQQqj1Ijq0caOIw==
def get_servers_1234_os_server_password(self, **kw):
    # Returns the RSA-encrypted admin password described in the comment
    # block above (clear text 'FooBar123', key in tests/unit/idfake.pem).
    return (200, {}, {
        'password':
        'OIuEuQttO8Rk93BcKlwHQsziDAnkAm/V6V8VPToA8ZeUaUBWwS0gwo2K6Y61Z96r'
        'qG447iRz0uTEEYq3RAYJk1mh3mMIRVl27t8MtIecR5ggVVbz1S9AwXJQypDKl0ho'
        'QFvhCBcMWPohyGewDJOhDbtuN1IoFI9G55ZvFwCm5y7m7B2aVcoLeIsJZE4PLsIw'
        '/y5a6Z3/AoJZYGG7IH5WN88UROU3B9JZGFB2qtPLQTOvDMZLUhoPRIJeHiVSlo1N'
        'tI2/++UsXVg3ow6ItqCJGgdNuGG5JB+bslDHWPxROpesEIHdczk46HCpHQN8f1sk'
        'Hi/fmZZNQQqj1Ijq0caOIw=='})
    def delete_servers_1234_os_server_password(self, **kw):
        # Clearing the server password returns 202 with no body.
        return (202, {}, None)
    #
    # Server actions
    #
    # Actions whose request payload must be exactly None.
    none_actions = ['revertResize', 'migrate', 'os-stop', 'os-start',
                    'forceDelete', 'restore', 'pause', 'unpause', 'unlock',
                    'unrescue', 'resume', 'suspend', 'lock', 'shelve',
                    'shelveOffload', 'unshelve', 'resetNetwork']
    # Console actions whose payload is a single 'type' key.
    type_actions = ['os-getVNCConsole', 'os-getSPICEConsole',
                    'os-getRDPConsole']
@classmethod
def check_server_actions(cls, body):
action = list(body)[0]
if action == 'reboot':
assert list(body[action]) == ['type']
assert body[action]['type'] in ['HARD', 'SOFT']
elif action == 'resize':
assert 'flavorRef' in body[action]
elif action in cls.none_actions:
assert body[action] is None
elif action == 'addFixedIp':
assert list(body[action]) == ['networkId']
elif action in ['removeFixedIp', 'removeFloatingIp']:
assert list(body[action]) == ['address']
elif action == 'addFloatingIp':
assert (list(body[action]) == ['address'] or
sorted(list(body[action])) == ['address', 'fixed_address'])
elif action == 'changePassword':
assert list(body[action]) == ['adminPass']
elif action in cls.type_actions:
assert list(body[action]) == ['type']
elif action == 'os-migrateLive':
assert set(body[action].keys()) == set(['host', 'block_migration',
'disk_over_commit'])
elif action == 'os-resetState':
assert list(body[action]) == ['state']
elif action == 'resetNetwork':
assert body[action] is None
elif action in ['addSecurityGroup', 'removeSecurityGroup']:
assert list(body[action]) == ['name']
elif action == 'createBackup':
assert set(body[action]) == set(['name', 'backup_type',
'rotation'])
else:
return False
return True
    def post_servers_1234_action(self, body, **kw):
        # Dispatch POST /servers/1234/action.  Generic payload validation is
        # delegated to check_server_actions(); actions with bespoke
        # responses are handled inline below.  Returns (status, headers,
        # body); unknown actions raise AssertionError.
        _headers = None
        _body = None
        resp = 202
        # Exactly one action key per request.
        assert len(body.keys()) == 1
        action = list(body)[0]
        if self.check_server_actions(body):
            # NOTE(snikitin): No need to do any operations here. This 'pass'
            # is needed to avoid AssertionError in the last 'else' statement
            # if we found 'action' in method check_server_actions and
            # raise AssertionError if we didn't find 'action' at all.
            pass
        elif action == 'rebuild':
            body = body[action]
            adminPass = body.get('adminPass', 'randompassword')
            assert 'imageRef' in body
            _body = self.get_servers_1234()[2]
            _body['server']['adminPass'] = adminPass
        elif action == 'confirmResize':
            assert body[action] is None
            # This one method returns a different response code
            return (204, {}, None)
        elif action == 'rescue':
            if body[action]:
                keys = set(body[action].keys())
                assert not (keys - set(['adminPass', 'rescue_image_ref']))
            else:
                assert body[action] is None
            _body = {'adminPass': 'RescuePassword'}
        elif action == 'createImage':
            assert set(body[action].keys()) == set(['name', 'metadata'])
            _headers = dict(location="http://blah/images/456")
            # The deleted-snapshot fixture lives at image id 457.
            if body[action]['name'] == 'mysnapshot_deleted':
                _headers = dict(location="http://blah/images/457")
        elif action == 'os-getConsoleOutput':
            assert list(body[action]) == ['length']
            return (202, {}, {'output': 'foo'})
        elif action == 'evacuate':
            # Optional adminPass/host are stripped; onSharedStorage must
            # remain as the sole required key.
            keys = list(body[action])
            if 'adminPass' in keys:
                keys.remove('adminPass')
            if 'host' in keys:
                keys.remove('host')
            assert set(keys) == set(['onSharedStorage'])
        else:
            raise AssertionError("Unexpected server action: %s" % action)
        return (resp, _headers, _body)
    def post_servers_5678_action(self, body, **kw):
        # Server 5678 handles actions exactly like server 1234.
        return self.post_servers_1234_action(body, **kw)
    #
    # Cloudpipe
    #
    def get_os_cloudpipe(self, **kw):
        # Single fake cloudpipe entry.
        return (
            200,
            {},
            {'cloudpipes': [{'project_id': 1}]}
        )
    def post_os_cloudpipe(self, **ks):
        # Creation returns a fixed instance uuid.
        return (
            202,
            {},
            {'instance_id': '9d5824aa-20e6-4b9f-b967-76a699fc51fd'}
        )
    def put_os_cloudpipe_configure_project(self, **kw):
        return (202, {}, None)
#
# Flavors
#
def get_flavors(self, **kw):
status, header, flavors = self.get_flavors_detail(**kw)
for flavor in flavors['flavors']:
for k in list(flavor):
if k not in ['id', 'name']:
del flavor[k]
return (200, {}, flavors)
def get_flavors_detail(self, **kw):
flavors = {'flavors': [
{'id': 1, 'name': '256 MB Server', 'ram': 256, 'disk': 10,
'OS-FLV-EXT-DATA:ephemeral': 10,
'os-flavor-access:is_public': True,
'links': {}},
{'id': 2, 'name': '512 MB Server', 'ram': 512, 'disk': 20,
'OS-FLV-EXT-DATA:ephemeral': 20,
'os-flavor-access:is_public': False,
'links': {}},
{'id': 4, 'name': '1024 MB Server', 'ram': 1024, 'disk': 10,
'OS-FLV-EXT-DATA:ephemeral': 10,
'os-flavor-access:is_public': True,
'links': {}},
{'id': 'aa1', 'name': '128 MB Server', 'ram': 128, 'disk': 0,
'OS-FLV-EXT-DATA:ephemeral': 0,
'os-flavor-access:is_public': True,
'links': {}}
]}
if 'is_public' not in kw:
filter_is_public = True
else:
if kw['is_public'].lower() == 'none':
filter_is_public = None
else:
filter_is_public = strutils.bool_from_string(kw['is_public'],
True)
if filter_is_public is not None:
if filter_is_public:
flavors['flavors'] = [
v for v in flavors['flavors']
if v['os-flavor-access:is_public']
]
else:
flavors['flavors'] = [
v for v in flavors['flavors']
if not v['os-flavor-access:is_public']
]
return (200, {}, flavors)
    def get_flavors_1(self, **kw):
        # Detail views index into the unfiltered (is_public='None') listing.
        return (
            200,
            {},
            {'flavor':
             self.get_flavors_detail(is_public='None')[2]['flavors'][0]}
        )
    def get_flavors_2(self, **kw):
        return (
            200,
            {},
            {'flavor':
             self.get_flavors_detail(is_public='None')[2]['flavors'][1]}
        )
    def get_flavors_3(self, **kw):
        # Diablo has no ephemeral
        return (
            200,
            {},
            {'flavor': {
                'id': 3,
                'name': '256 MB Server',
                'ram': 256,
                'disk': 10,
            }},
        )
    def get_flavors_512_MB_Server(self, **kw):
        # Lookups by flavor *name* are expected to miss.
        raise exceptions.NotFound('404')
    def get_flavors_128_MB_Server(self, **kw):
        raise exceptions.NotFound('404')
    def get_flavors_80645cf4_6ad3_410a_bbc8_6f3e1e291f51(self, **kw):
        # Unknown uuid-style flavor id.
        raise exceptions.NotFound('404')
    def get_flavors_aa1(self, **kw):
        # Alphanumeric flavor id are allowed.
        return (
            200,
            {},
            {'flavor':
             self.get_flavors_detail(is_public='None')[2]['flavors'][3]}
        )
    def get_flavors_4(self, **kw):
        return (
            200,
            {},
            {'flavor':
             self.get_flavors_detail(is_public='None')[2]['flavors'][2]}
        )
    def delete_flavors_flavordelete(self, **kw):
        return (202, {}, None)
    def delete_flavors_2(self, **kw):
        return (202, {}, None)
    def post_flavors(self, body, **kw):
        # Creation just echoes the first canned flavor.
        return (
            202,
            {},
            {'flavor':
             self.get_flavors_detail(is_public='None')[2]['flavors'][0]}
        )
    def get_flavors_1_os_extra_specs(self, **kw):
        # Canned extra_specs, one key per flavor id.
        return (
            200,
            {},
            {'extra_specs': {"k1": "v1"}})
    def get_flavors_2_os_extra_specs(self, **kw):
        return (
            200,
            {},
            {'extra_specs': {"k2": "v2"}})
    def get_flavors_aa1_os_extra_specs(self, **kw):
        return (
            200, {},
            {'extra_specs': {"k3": "v3"}})
    def get_flavors_4_os_extra_specs(self, **kw):
        return (
            200,
            {},
            {'extra_specs': {"k4": "v4"}})
    def post_flavors_1_os_extra_specs(self, body, **kw):
        # Posting specs to flavor 1 requires key 'k1' in the payload.
        assert list(body) == ['extra_specs']
        fakes.assert_has_keys(body['extra_specs'],
                              required=['k1'])
        return (
            200,
            {},
            {'extra_specs': {"k1": "v1"}})
    def post_flavors_4_os_extra_specs(self, body, **kw):
        # Flavor 4 echoes whatever payload was posted.
        assert list(body) == ['extra_specs']
        return (
            200,
            {},
            body)
    def delete_flavors_1_os_extra_specs_k1(self, **kw):
        return (204, {}, None)
#
# Flavor access
#
    def get_flavors_1_os_flavor_access(self, **kw):
        # Access lists only exist for the private flavor (id 2); the public
        # flavor 1 answers 404.
        return (404, {}, None)
    def get_flavors_2_os_flavor_access(self, **kw):
        return (
            200, {},
            {'flavor_access': [{'flavor_id': '2', 'tenant_id': 'proj1'},
                               {'flavor_id': '2', 'tenant_id': 'proj2'}]})
    def post_flavors_2_action(self, body, **kw):
        # add/removeTenantAccess both answer with the current access list.
        return (202, {}, self.get_flavors_2_os_flavor_access()[2])
#
# Floating IPs
#
    def get_os_floating_ip_pools(self):
        # Two fake floating-IP pools.
        return (
            200,
            {},
            {'floating_ip_pools': [{'name': 'foo'}, {'name': 'bar'}]}
        )
    def get_os_floating_ips(self, **kw):
        return (
            200,
            {},
            {'floating_ips': [
                {'id': 1, 'fixed_ip': '10.0.0.1', 'ip': '11.0.0.1'},
                {'id': 2, 'fixed_ip': '10.0.0.2', 'ip': '11.0.0.2'},
            ]},
        )
    def get_os_floating_ips_1(self, **kw):
        return (
            200, {}, {'floating_ip': {'id': 1, 'fixed_ip': '10.0.0.1',
                                      'ip': '11.0.0.1'}})
def post_os_floating_ips(self, body):
if body.get('pool'):
return (
200, {}, {'floating_ip': {'id': 1, 'fixed_ip': '10.0.0.1',
'ip': '11.0.0.1',
'pool': 'nova'}})
else:
return (
200, {}, {'floating_ip': {'id': 1, 'fixed_ip': '10.0.0.1',
'ip': '11.0.0.1',
'pool': None}})
    def delete_os_floating_ips_1(self, **kw):
        return (204, {}, None)
    def get_os_floating_ip_dns(self, **kw):
        # DNS domain listing (these DNS fakes answer with status 205).
        return (205, {}, {'domain_entries':
                          [{'domain': 'example.org'},
                           {'domain': 'example.com'}]})
    def get_os_floating_ip_dns_testdomain_entries(self, **kw):
        # Entries are only listable when filtered by ip; otherwise 404.
        if kw.get('ip'):
            return (205, {}, {
                'dns_entries': [
                    {'dns_entry': {'ip': kw.get('ip'),
                                   'name': "host1",
                                   'type': "A",
                                   'domain': 'testdomain'}},
                    {'dns_entry': {'ip': kw.get('ip'),
                                   'name': "host2",
                                   'type': "A",
                                   'domain': 'testdomain'}}]})
        else:
            return (404, {}, None)
    def get_os_floating_ip_dns_testdomain_entries_testname(self, **kw):
        return (205, {}, {
            'dns_entry': {'ip': "10.10.10.10",
                          'name': 'testname',
                          'type': "A",
                          'domain': 'testdomain'}})
    def put_os_floating_ip_dns_testdomain(self, body, **kw):
        # Private domains require availability_zone; public (and anything
        # else) require project.
        if body['domain_entry']['scope'] == 'private':
            fakes.assert_has_keys(body['domain_entry'],
                                  required=['availability_zone', 'scope'])
        elif body['domain_entry']['scope'] == 'public':
            fakes.assert_has_keys(body['domain_entry'],
                                  required=['project', 'scope'])
        else:
            fakes.assert_has_keys(body['domain_entry'],
                                  required=['project', 'scope'])
        return (205, {}, body)
    def put_os_floating_ip_dns_testdomain_entries_testname(self, body, **kw):
        fakes.assert_has_keys(body['dns_entry'],
                              required=['ip', 'dns_type'])
        return (205, {}, body)
    def delete_os_floating_ip_dns_testdomain(self, **kw):
        return (200, {}, None)
    def delete_os_floating_ip_dns_testdomain_entries_testname(self, **kw):
        return (200, {}, None)
    def get_os_floating_ips_bulk(self, **kw):
        return (200, {}, {'floating_ip_info': [
            {'id': 1, 'fixed_ip': '10.0.0.1', 'ip': '11.0.0.1'},
            {'id': 2, 'fixed_ip': '10.0.0.2', 'ip': '11.0.0.2'},
        ]})
    def get_os_floating_ips_bulk_testHost(self, **kw):
        # Per-host listing mirrors the global one.
        return (200, {}, {'floating_ip_info': [
            {'id': 1, 'fixed_ip': '10.0.0.1', 'ip': '11.0.0.1'},
            {'id': 2, 'fixed_ip': '10.0.0.2', 'ip': '11.0.0.2'},
        ]})
    def post_os_floating_ips_bulk(self, **kw):
        # Echo back the requested pool/interface, filling defaults.
        params = kw.get('body').get('floating_ips_bulk_create')
        pool = params.get('pool', 'defaultPool')
        interface = params.get('interface', 'defaultInterface')
        return (200, {}, {'floating_ips_bulk_create':
                          {'ip_range': '192.168.1.0/30',
                           'pool': pool,
                           'interface': interface}})
    def put_os_floating_ips_bulk_delete(self, **kw):
        ip_range = kw.get('body').get('ip_range')
        return (200, {}, {'floating_ips_bulk_delete': ip_range})
#
# Images
#
    def get_images(self, **kw):
        # Index view: id + name only.
        return (200, {}, {'images': [
            {'id': 1, 'name': 'CentOS 5.2'},
            {'id': 2, 'name': 'My Server Backup'}
        ]})
    def get_images_detail(self, **kw):
        # Three canned images: an active base image, an in-progress server
        # snapshot, and a deleted snapshot.
        return (200, {}, {'images': [
            {
                'id': 1,
                'name': 'CentOS 5.2',
                "updated": "2010-10-10T12:00:00Z",
                "created": "2010-08-10T12:00:00Z",
                "status": "ACTIVE",
                "metadata": {
                    "test_key": "test_value",
                },
                "links": {},
            },
            {
                "id": 2,
                "name": "My Server Backup",
                "serverId": 1234,
                "updated": "2010-10-10T12:00:00Z",
                "created": "2010-08-10T12:00:00Z",
                "status": "SAVING",
                "progress": 80,
                "links": {},
            },
            {
                "id": 3,
                "name": "My Server Backup Deleted",
                "serverId": 1234,
                "updated": "2010-10-10T12:00:00Z",
                "created": "2010-08-10T12:00:00Z",
                "status": "DELETED",
                "fault": {'message': 'Image has been deleted.'},
                "links": {},
            }
        ]})
    def get_images_1(self, **kw):
        return (200, {}, {'image': self.get_images_detail()[2]['images'][0]})
    def get_images_2(self, **kw):
        return (200, {}, {'image': self.get_images_detail()[2]['images'][1]})
    def get_images_456(self, **kw):
        # 456 is the snapshot created by the createImage server action.
        return (200, {}, {'image': self.get_images_detail()[2]['images'][1]})
    def get_images_457(self, **kw):
        # 457 is the deleted-snapshot variant (see post_servers_1234_action).
        return (200, {}, {'image': self.get_images_detail()[2]['images'][2]})
    def get_images_3e861307_73a6_4d1f_8d68_f68b03223032(self):
        # NOTE(review): unlike its siblings this signature takes no **kw;
        # confirm no caller passes query parameters before relying on it.
        raise exceptions.NotFound('404')
    def post_images(self, body, **kw):
        assert list(body) == ['image']
        fakes.assert_has_keys(body['image'], required=['serverId', 'name'])
        return (202, {}, self.get_images_1()[2])
    def post_images_1_metadata(self, body, **kw):
        assert list(body) == ['metadata']
        fakes.assert_has_keys(body['metadata'],
                              required=['test_key'])
        return (
            200,
            {},
            {'metadata': self.get_images_1()[2]['image']['metadata']})
    def delete_images_1(self, **kw):
        return (204, {}, None)
    def delete_images_2(self, **kw):
        return (204, {}, None)
    def delete_images_1_metadata_test_key(self, **kw):
        return (204, {}, None)
#
# Keypairs
#
    def get_os_keypairs_test(self, *kw):
        # Detail view of the single canned keypair from get_os_keypairs().
        return (200, {}, {'keypair':
                          self.get_os_keypairs()[2]['keypairs'][0]['keypair']})
def get_os_keypairs(self, *kw):
return (200, {}, {
"keypairs": [{"keypair": {
"public_key": "FAKE_SSH_RSA",
"private_key": "FAKE_PRIVATE_KEY",
"user_id": "81e373b596d6466e99c4896826abaa46",
"name": "test",
"deleted": False,
"created_at": "2014-04-19T02:16:44.000000",
"updated_at": "2014-04-19T10:12:3.000000",
"figerprint": "FAKE_KEYPAIR",
"deleted_at": None,
"id": 4}}
]})
    def delete_os_keypairs_test(self, **kw):
        return (202, {}, None)
    def post_os_keypairs(self, body, **kw):
        # Creation requires at least a name; the response echoes the canned
        # keypair regardless of what was posted.
        assert list(body) == ['keypair']
        fakes.assert_has_keys(body['keypair'],
                              required=['name'])
        r = {'keypair': self.get_os_keypairs()[2]['keypairs'][0]['keypair']}
        return (202, {}, r)
#
# Virtual Interfaces
#
def get_servers_1234_os_virtual_interfaces(self, **kw):
return (200, {}, {"virtual_interfaces": [
{'id': 'fakeid', 'mac_address': 'fakemac'}
]})
#
# Quotas
#
    def get_os_quota_sets_test(self, **kw):
        # All quota fakes return the same flat quota_set with every limit
        # set to 1 (metadata_items is an empty list in these fixtures);
        # only the tenant_id varies per endpoint.
        return (200, {}, {
            'quota_set': {
                'tenant_id': 'test',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def get_os_quota_sets_tenant_id(self, **kw):
        # Literal tenant id 'tenant_id'.
        return (200, {}, {
            'quota_set': {
                'tenant_id': 'test',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def get_os_quota_sets_97f4c221bff44578b0300df4ef119353(self, **kw):
        return (200, {}, {
            'quota_set': {
                'tenant_id': '97f4c221bff44578b0300df4ef119353',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def put_os_quota_sets_97f4c221_bff4_4578_b030_0df4ef119353(self, **kw):
        # Hyphenated-uuid variant of the tenant above.
        return (200, {}, {
            'quota_set': {
                'tenant_id': '97f4c221-bff4-4578-b030-0df4ef119353',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def get_os_quota_sets_97f4c221_bff4_4578_b030_0df4ef119353(self, **kw):
        return (200, {}, {
            'quota_set': {
                'tenant_id': '97f4c221-bff4-4578-b030-0df4ef119353',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def get_os_quota_sets_97f4c221bff44578b0300df4ef119353_defaults(self):
        return (200, {}, {
            'quota_set': {
                'tenant_id': 'test',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def get_os_quota_sets_tenant_id_defaults(self):
        return (200, {}, {
            'quota_set': {
                'tenant_id': 'test',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def put_os_quota_sets_97f4c221bff44578b0300df4ef119353(self, body, **kw):
        # Updates are validated (payload must be a quota_set) but the
        # response is the same canned quota_set.
        assert list(body) == ['quota_set']
        fakes.assert_has_keys(body['quota_set'])
        return (200, {}, {
            'quota_set': {
                'tenant_id': '97f4c221bff44578b0300df4ef119353',
                'metadata_items': [],
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'keypairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def delete_os_quota_sets_test(self, **kw):
        return (202, {}, {})
    def delete_os_quota_sets_97f4c221bff44578b0300df4ef119353(self, **kw):
        return (202, {}, {})
#
# Quota Classes
#
    def get_os_quota_class_sets_test(self, **kw):
        # Quota classes mirror quota sets but use 'key_pairs' (with
        # underscore) and carry an 'id' instead of a tenant_id.
        return (200, {}, {
            'quota_class_set': {
                'id': 'test',
                'metadata_items': 1,
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'key_pairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def put_os_quota_class_sets_test(self, body, **kw):
        assert list(body) == ['quota_class_set']
        return (200, {}, {
            'quota_class_set': {
                'metadata_items': 1,
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'key_pairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
    def put_os_quota_class_sets_97f4c221bff44578b0300df4ef119353(self,
                                                                 body, **kw):
        assert list(body) == ['quota_class_set']
        return (200, {}, {
            'quota_class_set': {
                'metadata_items': 1,
                'injected_file_content_bytes': 1,
                'injected_file_path_bytes': 1,
                'ram': 1,
                'floating_ips': 1,
                'instances': 1,
                'injected_files': 1,
                'cores': 1,
                'key_pairs': 1,
                'security_groups': 1,
                'security_group_rules': 1}})
#
# Security Groups
#
    def get_os_security_groups(self, **kw):
        # Three canned groups; only group 1 ('test') has rules, including a
        # from/to_port of -1 (i.e. "all ports" for some protocols).
        return (200, {}, {"security_groups": [
            {"name": "test",
             "description": "FAKE_SECURITY_GROUP",
             "tenant_id": "4ffc664c198e435e9853f2538fbcd7a7",
             "id": 1,
             "rules": [
                 {"id": 11,
                  "group": {},
                  "ip_protocol": "TCP",
                  "from_port": 22,
                  "to_port": 22,
                  "parent_group_id": 1,
                  "ip_range":
                      {"cidr": "10.0.0.0/8"}},
                 {"id": 12,
                  "group": {
                      "tenant_id":
                          "272bee4c1e624cd4a72a6b0ea55b4582",
                      "name": "test2"},
                  "ip_protocol": "TCP",
                  "from_port": 222,
                  "to_port": 222,
                  "parent_group_id": 1,
                  "ip_range": {}},
                 {"id": 14,
                  "group": {
                      "tenant_id":
                          "272bee4c1e624cd4a72a6b0ea55b4582",
                      "name": "test4"},
                  "ip_protocol": "TCP",
                  "from_port": -1,
                  "to_port": -1,
                  "parent_group_id": 1,
                  "ip_range": {}}]},
            {"name": "test2",
             "description": "FAKE_SECURITY_GROUP2",
             "tenant_id": "272bee4c1e624cd4a72a6b0ea55b4582",
             "id": 2,
             "rules": []},
            {"name": "test4",
             "description": "FAKE_SECURITY_GROUP4",
             "tenant_id": "272bee4c1e624cd4a72a6b0ea55b4582",
             "id": 4,
             "rules": []}
        ]})
def get_os_security_groups_1(self, **kw):
return (200, {}, {"security_group":
{'id': 1, 'name': 'test', 'description': 'FAKE_SECURITY_GROUP'}
})
    def delete_os_security_groups_1(self, **kw):
        return (202, {}, None)
    def post_os_security_groups(self, body, **kw):
        # Creation requires name + description; echoes the first canned
        # group from get_os_security_groups().
        assert list(body) == ['security_group']
        fakes.assert_has_keys(body['security_group'],
                              required=['name', 'description'])
        r = {'security_group':
             self.get_os_security_groups()[2]['security_groups'][0]}
        return (202, {}, r)
    def put_os_security_groups_1(self, body, **kw):
        # Update validates the payload and echoes it back with 205.
        assert list(body) == ['security_group']
        fakes.assert_has_keys(body['security_group'],
                              required=['name', 'description'])
        return (205, {}, body)
#
# Security Group Rules
#
    def get_os_security_group_rules(self, **kw):
        # Single canned rule (SSH from 10.0.0.0/8).
        return (200, {}, {"security_group_rules": [
            {'id': 1, 'parent_group_id': 1, 'group_id': 2,
             'ip_protocol': 'TCP', 'from_port': 22, 'to_port': 22,
             'cidr': '10.0.0.0/8'}
        ]})
    def delete_os_security_group_rules_1(self, **kw):
        return (202, {}, None)
    def delete_os_security_group_rules_11(self, **kw):
        return (202, {}, None)
    def delete_os_security_group_rules_12(self, **kw):
        return (202, {}, None)
    def delete_os_security_group_rules_14(self, **kw):
        return (202, {}, None)
    def post_os_security_group_rules(self, body, **kw):
        # parent_group_id is mandatory; the rest are optional.  The response
        # is always the canned rule above.
        assert list(body) == ['security_group_rule']
        fakes.assert_has_keys(
            body['security_group_rule'],
            required=['parent_group_id'],
            optional=['group_id', 'ip_protocol', 'from_port',
                      'to_port', 'cidr'])
        r = {'security_group_rule':
             self.get_os_security_group_rules()[2]['security_group_rules'][0]}
        return (202, {}, r)
#
# Security Group Default Rules
#
    def get_os_security_group_default_rules(self, **kw):
        # Single canned default rule (SSH from 10.0.0.0/8).
        return (200, {}, {"security_group_default_rules": [
            {'id': 1, 'ip_protocol': 'TCP', 'from_port': 22,
             'to_port': 22, 'cidr': '10.0.0.0/8'}
        ]})
    def delete_os_security_group_default_rules_1(self, **kw):
        return (202, {}, None)
    def delete_os_security_group_default_rules_11(self, **kw):
        return (202, {}, None)
    def delete_os_security_group_default_rules_12(self, **kw):
        return (202, {}, None)
    def post_os_security_group_default_rules(self, body, **kw):
        # All payload keys are optional; response is the canned rule above.
        assert list(body) == ['security_group_default_rule']
        fakes.assert_has_keys(body['security_group_default_rule'],
                              optional=['ip_protocol', 'from_port',
                                        'to_port', 'cidr'])
        rules = self.get_os_security_group_default_rules()
        r = {'security_group_default_rule':
             rules[2]['security_group_default_rules'][0]}
        return (202, {}, r)
#
# Tenant Usage
#
    def get_os_simple_tenant_usage(self, **kw):
        # Usage listing: one tenant with one server usage record.  six.u()
        # keeps the keys/values unicode on both py2 and py3.
        return (200, {},
                {six.u('tenant_usages'): [{
                    six.u('total_memory_mb_usage'): 25451.762807466665,
                    six.u('total_vcpus_usage'): 49.71047423333333,
                    six.u('total_hours'): 49.71047423333333,
                    six.u('tenant_id'):
                        six.u('7b0a1d73f8fb41718f3343c207597869'),
                    six.u('stop'): six.u('2012-01-22 19:48:41.750722'),
                    six.u('server_usages'): [{
                        six.u('hours'): 49.71047423333333,
                        six.u('uptime'): 27035,
                        six.u('local_gb'): 0,
                        six.u('ended_at'): None,
                        six.u('name'): six.u('f15image1'),
                        six.u('tenant_id'):
                            six.u('7b0a1d73f8fb41718f3343c207597869'),
                        six.u('vcpus'): 1,
                        six.u('memory_mb'): 512,
                        six.u('state'): six.u('active'),
                        six.u('flavor'): six.u('m1.tiny'),
                        six.u('started_at'):
                            six.u('2012-01-20 18:06:06.479998')}],
                    six.u('start'): six.u('2011-12-25 19:48:41.750687'),
                    six.u('total_local_gb_usage'): 0.0}]})
    def get_os_simple_tenant_usage_tenantfoo(self, **kw):
        # Per-tenant view: same data under the singular 'tenant_usage' key.
        return (200, {},
                {six.u('tenant_usage'): {
                    six.u('total_memory_mb_usage'): 25451.762807466665,
                    six.u('total_vcpus_usage'): 49.71047423333333,
                    six.u('total_hours'): 49.71047423333333,
                    six.u('tenant_id'):
                        six.u('7b0a1d73f8fb41718f3343c207597869'),
                    six.u('stop'): six.u('2012-01-22 19:48:41.750722'),
                    six.u('server_usages'): [{
                        six.u('hours'): 49.71047423333333,
                        six.u('uptime'): 27035, six.u('local_gb'): 0,
                        six.u('ended_at'): None,
                        six.u('name'): six.u('f15image1'),
                        six.u('tenant_id'):
                            six.u('7b0a1d73f8fb41718f3343c207597869'),
                        six.u('vcpus'): 1, six.u('memory_mb'): 512,
                        six.u('state'): six.u('active'),
                        six.u('flavor'): six.u('m1.tiny'),
                        six.u('started_at'):
                            six.u('2012-01-20 18:06:06.479998')}],
                    six.u('start'): six.u('2011-12-25 19:48:41.750687'),
                    six.u('total_local_gb_usage'): 0.0}})
    def get_os_simple_tenant_usage_test(self, **kw):
        return (200, {}, {six.u('tenant_usage'): {
            six.u('total_memory_mb_usage'): 25451.762807466665,
            six.u('total_vcpus_usage'): 49.71047423333333,
            six.u('total_hours'): 49.71047423333333,
            six.u('tenant_id'): six.u('7b0a1d73f8fb41718f3343c207597869'),
            six.u('stop'): six.u('2012-01-22 19:48:41.750722'),
            six.u('server_usages'): [{
                six.u('hours'): 49.71047423333333,
                six.u('uptime'): 27035, six.u('local_gb'): 0,
                six.u('ended_at'): None,
                six.u('name'): six.u('f15image1'),
                six.u('tenant_id'): six.u('7b0a1d73f8fb41718f3343c207597869'),
                six.u('vcpus'): 1, six.u('memory_mb'): 512,
                six.u('state'): six.u('active'),
                six.u('flavor'): six.u('m1.tiny'),
                six.u('started_at'): six.u('2012-01-20 18:06:06.479998')}],
            six.u('start'): six.u('2011-12-25 19:48:41.750687'),
            six.u('total_local_gb_usage'): 0.0}})
    def get_os_simple_tenant_usage_tenant_id(self, **kw):
        return (200, {}, {six.u('tenant_usage'): {
            six.u('total_memory_mb_usage'): 25451.762807466665,
            six.u('total_vcpus_usage'): 49.71047423333333,
            six.u('total_hours'): 49.71047423333333,
            six.u('tenant_id'): six.u('7b0a1d73f8fb41718f3343c207597869'),
            six.u('stop'): six.u('2012-01-22 19:48:41.750722'),
            six.u('server_usages'): [{
                six.u('hours'): 49.71047423333333,
                six.u('uptime'): 27035, six.u('local_gb'): 0,
                six.u('ended_at'): None,
                six.u('name'): six.u('f15image1'),
                six.u('tenant_id'): six.u('7b0a1d73f8fb41718f3343c207597869'),
                six.u('vcpus'): 1, six.u('memory_mb'): 512,
                six.u('state'): six.u('active'),
                six.u('flavor'): six.u('m1.tiny'),
                six.u('started_at'): six.u('2012-01-20 18:06:06.479998')}],
            six.u('start'): six.u('2011-12-25 19:48:41.750687'),
            six.u('total_local_gb_usage'): 0.0}})
#
# Certificates
#
    def get_os_certificates_root(self, **kw):
        # The root certificate has no private key exposed.
        return (
            200,
            {},
            {'certificate': {'private_key': None, 'data': 'foo'}}
        )
    def post_os_certificates(self, **kw):
        # Newly created certificates include their private key.
        return (
            200,
            {},
            {'certificate': {'private_key': 'foo', 'data': 'bar'}}
        )
#
# Aggregates
#
    def get_os_aggregates(self, *kw):
        # Three canned aggregates; aggregate 3 has metadata, not an AZ.
        return (200, {}, {"aggregates": [
            {'id': '1',
             'name': 'test',
             'availability_zone': 'nova1'},
            {'id': '2',
             'name': 'test2',
             'availability_zone': 'nova1'},
            {'id': '3',
             'name': 'test3',
             'metadata': {'test': "dup", "none_key": "Nine"}},
        ]})
    def _return_aggregate(self):
        # Helper: wrap aggregate 1 as a single-aggregate response.
        r = {'aggregate': self.get_os_aggregates()[2]['aggregates'][0]}
        return (200, {}, r)
    def _return_aggregate_3(self):
        # Helper: wrap aggregate 3 as a single-aggregate response.
        r = {'aggregate': self.get_os_aggregates()[2]['aggregates'][2]}
        return (200, {}, r)
    def get_os_aggregates_1(self, **kw):
        return self._return_aggregate()
    def get_os_aggregates_3(self, **kw):
        return self._return_aggregate_3()
    def post_os_aggregates(self, body, **kw):
        return self._return_aggregate()
    def put_os_aggregates_1(self, body, **kw):
        return self._return_aggregate()
    def put_os_aggregates_2(self, body, **kw):
        return self._return_aggregate()
    def put_os_aggregates_3(self, body, **kw):
        return self._return_aggregate_3()
    def post_os_aggregates_1_action(self, body, **kw):
        return self._return_aggregate()
    def post_os_aggregates_2_action(self, body, **kw):
        return self._return_aggregate()
    def post_os_aggregates_3_action(self, body, **kw):
        return self._return_aggregate_3()
    def delete_os_aggregates_1(self, **kw):
        return (202, {}, None)
#
# Services
#
def get_os_services(self, **kw):
host = kw.get('host', 'host1')
binary = kw.get('binary', 'nova-compute')
return (200, {}, {'services': [{'binary': binary,
'host': host,
'zone': 'nova',
'status': 'enabled',
'state': 'up',
'updated_at': datetime.datetime(
2012, 10, 29, 13, 42, 2)},
{'binary': binary,
'host': host,
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(
2012, 9, 18, 8, 3, 38)},
]})
def put_os_services_enable(self, body, **kw):
return (200, {}, {'service': {'host': body['host'],
'binary': body['binary'],
'status': 'enabled'}})
    def put_os_services_disable(self, body, **kw):
        # Echo host/binary with status 'disabled'.
        return (200, {}, {'service': {'host': body['host'],
                                      'binary': body['binary'],
                                      'status': 'disabled'}})
    def put_os_services_disable_log_reason(self, body, **kw):
        # Disable with an audit reason; the reason is echoed back.
        return (200, {}, {'service': {
            'host': body['host'],
            'binary': body['binary'],
            'status': 'disabled',
            'disabled_reason': body['disabled_reason']}})
    def delete_os_services_1(self, **kw):
        return (204, {}, None)
    def put_os_services_force_down(self, body, **kw):
        # NOTE(review): the response reports forced_down=False regardless of
        # the requested value -- presumably mirroring an un-force request;
        # confirm against the tests that exercise it.
        return (200, {}, {'service': {
            'host': body['host'],
            'binary': body['binary'],
            'forced_down': False}})
#
# Fixed IPs
#
    def get_os_fixed_ips_192_168_1_1(self, *kw):
        # Detail view of one fixed IP.
        return (200, {}, {"fixed_ip": {'cidr': '192.168.1.0/24',
                                       'address': '192.168.1.1',
                                       'hostname': 'foo',
                                       'host': 'bar'}})
    def post_os_fixed_ips_192_168_1_1_action(self, body, **kw):
        # reserve/unreserve actions just get acknowledged.
        return (202, {}, None)
#
# Hosts
#
    def get_os_hosts_host(self, *kw):
        # Host describe: total / used_now / used_max plus per-project rows.
        return (200, {}, {'host':
                          [{'resource': {'project': '(total)', 'host': 'dummy',
                                         'cpu': 16, 'memory_mb': 32234,
                                         'disk_gb': 128}},
                           {'resource': {'project': '(used_now)',
                                         'host': 'dummy', 'cpu': 1,
                                         'memory_mb': 2075, 'disk_gb': 45}},
                           {'resource': {'project': '(used_max)',
                                         'host': 'dummy', 'cpu': 1,
                                         'memory_mb': 2048, 'disk_gb': 30}},
                           {'resource': {'project': 'admin', 'host': 'dummy',
                                         'cpu': 1, 'memory_mb': 2048,
                                         'disk_gb': 30}}]})
    def get_os_hosts(self, **kw):
        # Host listing filtered by zone (defaults to 'nova1').
        zone = kw.get('zone', 'nova1')
        return (200, {}, {'hosts': [{'host': 'host1',
                                     'service': 'nova-compute',
                                     'zone': zone},
                                    {'host': 'host1',
                                     'service': 'nova-cert',
                                     'zone': zone}]})
    def get_os_hosts_sample_host(self, *kw):
        return (200, {}, {'host': [{'resource': {'host': 'sample_host'}}], })
    def put_os_hosts_sample_host_1(self, body, **kw):
        # Variants 1-3 cover status-only, maintenance-only and combined
        # updates.
        return (200, {}, {'host': 'sample-host_1',
                          'status': 'enabled'})
    def put_os_hosts_sample_host_2(self, body, **kw):
        return (200, {}, {'host': 'sample-host_2',
                          'maintenance_mode': 'on_maintenance'})
    def put_os_hosts_sample_host_3(self, body, **kw):
        return (200, {}, {'host': 'sample-host_3',
                          'status': 'enabled',
                          'maintenance_mode': 'on_maintenance'})
    def get_os_hosts_sample_host_reboot(self, **kw):
        return (200, {}, {'host': 'sample_host',
                          'power_action': 'reboot'})
    def get_os_hosts_sample_host_startup(self, **kw):
        return (200, {}, {'host': 'sample_host',
                          'power_action': 'startup'})
    def get_os_hosts_sample_host_shutdown(self, **kw):
        return (200, {}, {'host': 'sample_host',
                          'power_action': 'shutdown'})
def put_os_hosts_sample_host(self, body, **kw):
result = {'host': 'dummy'}
result.update(body)
return (200, {}, result)
def get_os_hypervisors(self, **kw):
return (200, {}, {
"hypervisors": [
{'id': 1234, 'hypervisor_hostname': 'hyper1'},
{'id': 5678, 'hypervisor_hostname': 'hyper2'}]})
    def get_os_hypervisors_detail(self, **kw):
        # NOTE(review): the second hypervisor here has id 2 while the index
        # view (get_os_hypervisors) lists id 5678 -- confirm whether the
        # mismatch is intentional before relying on it.
        return (200, {}, {
            "hypervisors": [
                {'id': 1234,
                 'service': {'id': 1, 'host': 'compute1'},
                 'vcpus': 4,
                 'memory_mb': 10 * 1024,
                 'local_gb': 250,
                 'vcpus_used': 2,
                 'memory_mb_used': 5 * 1024,
                 'local_gb_used': 125,
                 'hypervisor_type': "xen",
                 'hypervisor_version': 3,
                 'hypervisor_hostname': "hyper1",
                 'free_ram_mb': 5 * 1024,
                 'free_disk_gb': 125,
                 'current_workload': 2,
                 'running_vms': 2,
                 'cpu_info': 'cpu_info',
                 'disk_available_least': 100},
                {'id': 2,
                 'service': {'id': 2, 'host': "compute2"},
                 'vcpus': 4,
                 'memory_mb': 10 * 1024,
                 'local_gb': 250,
                 'vcpus_used': 2,
                 'memory_mb_used': 5 * 1024,
                 'local_gb_used': 125,
                 'hypervisor_type': "xen",
                 'hypervisor_version': 3,
                 'hypervisor_hostname': "hyper2",
                 'free_ram_mb': 5 * 1024,
                 'free_disk_gb': 125,
                 'current_workload': 2,
                 'running_vms': 2,
                 'cpu_info': 'cpu_info',
                 'disk_available_least': 100}]
        })
    def get_os_hypervisors_statistics(self, **kw):
        # Aggregate statistics over the two hypervisors above.
        return (200, {}, {
            "hypervisor_statistics": {
                'count': 2,
                'vcpus': 8,
                'memory_mb': 20 * 1024,
                'local_gb': 500,
                'vcpus_used': 4,
                'memory_mb_used': 10 * 1024,
                'local_gb_used': 250,
                'free_ram_mb': 10 * 1024,
                'free_disk_gb': 250,
                'current_workload': 4,
                'running_vms': 4,
                'disk_available_least': 200}
        })
def get_os_hypervisors_hyper1(self, **kw):
return (200, {}, {
'hypervisor':
{'id': 1234,
'service': {'id': 1, 'host': 'compute1'},
'vcpus': 4,
'memory_mb': 10 * 1024,
'local_gb': 250,
'vcpus_used': 2,
'memory_mb_used': 5 * 1024,
'local_gb_used': 125,
'hypervisor_type': "xen",
'hypervisor_version': 3,
'hypervisor_hostname': "hyper1",
'free_ram_mb': 5 * 1024,
'free_disk_gb': 125,
'current_workload': 2,
'running_vms': 2,
'cpu_info': 'cpu_info',
'disk_available_least': 100}})
def get_os_hypervisors_region_child_1(self, **kw):
return (200, {}, {
'hypervisor':
{'id': 'region!child@1',
'service': {'id': 1, 'host': 'compute1'},
'vcpus': 4,
'memory_mb': 10 * 1024,
'local_gb': 250,
'vcpus_used': 2,
'memory_mb_used': 5 * 1024,
'local_gb_used': 125,
'hypervisor_type': "xen",
'hypervisor_version': 3,
'hypervisor_hostname': "hyper1",
'free_ram_mb': 5 * 1024,
'free_disk_gb': 125,
'current_workload': 2,
'running_vms': 2,
'cpu_info': 'cpu_info',
'disk_available_least': 100}})
def get_os_hypervisors_hyper_search(self, **kw):
return (200, {}, {
'hypervisors': [
{'id': 1234, 'hypervisor_hostname': 'hyper1'},
{'id': 5678, 'hypervisor_hostname': 'hyper2'}]})
def get_os_hypervisors_hyper_servers(self, **kw):
return (200, {}, {
'hypervisors': [
{'id': 1234,
'hypervisor_hostname': 'hyper1',
'servers': [
{'name': 'inst1', 'uuid': 'uuid1'},
{'name': 'inst2', 'uuid': 'uuid2'}]},
{'id': 5678,
'hypervisor_hostname': 'hyper2',
'servers': [
{'name': 'inst3', 'uuid': 'uuid3'},
{'name': 'inst4', 'uuid': 'uuid4'}]}]
})
def get_os_hypervisors_hyper_no_servers_servers(self, **kw):
return (200, {}, {'hypervisors':
[{'id': 1234, 'hypervisor_hostname': 'hyper1'}]})
    def get_os_hypervisors_1234(self, **kw):
        # Detail view addressed by numeric id; same payload as hyper1.
        return (200, {}, {
            'hypervisor':
                {'id': 1234,
                 'service': {'id': 1, 'host': 'compute1'},
                 'vcpus': 4,
                 'memory_mb': 10 * 1024,
                 'local_gb': 250,
                 'vcpus_used': 2,
                 'memory_mb_used': 5 * 1024,
                 'local_gb_used': 125,
                 'hypervisor_type': "xen",
                 'hypervisor_version': 3,
                 'hypervisor_hostname': "hyper1",
                 'free_ram_mb': 5 * 1024,
                 'free_disk_gb': 125,
                 'current_workload': 2,
                 'running_vms': 2,
                 'cpu_info': 'cpu_info',
                 'disk_available_least': 100}})
    def get_os_hypervisors_1234_uptime(self, **kw):
        # Uptime sub-resource for the numeric-id hypervisor.
        return (200, {}, {
            'hypervisor': {'id': 1234,
                           'hypervisor_hostname': "hyper1",
                           'uptime': "fake uptime"}})
    def get_os_hypervisors_region_child_1_uptime(self, **kw):
        # Uptime sub-resource for the cells-style id hypervisor.
        return (200, {}, {
            'hypervisor': {'id': 'region!child@1',
                           'hypervisor_hostname': "hyper1",
                           'uptime': "fake uptime"}})
    # Fakes for the (deprecated) os-networks admin API.
    def get_os_networks(self, **kw):
        return (200, {}, {'networks': [{"label": "1", "cidr": "10.0.0.0/24",
                                        'project_id':
                                            '4ffc664c198e435e9853f2538fbcd7a7',
                                        'id': '1', 'vlan': '1234'}]})
    def delete_os_networks_1(self, **kw):
        return (202, {}, None)
    def post_os_networks(self, **kw):
        # Echo the request body back as the created network.
        return (202, {}, {'network': kw})
    def get_os_networks_1(self, **kw):
        return (200, {}, {'network': {"label": "1", "cidr": "10.0.0.0/24",
                                      "id": "1"}})
    def delete_os_networks_networkdelete(self, **kw):
        return (202, {}, None)
    def post_os_networks_add(self, **kw):
        return (202, {}, None)
    def post_os_networks_networkdisassociate_action(self, **kw):
        return (202, {}, None)
    def get_os_fping(self, **kw):
        # Fake os-fping list: two alive servers.
        return (
            200, {}, {
                'servers': [
                    {
                        "id": "1",
                        "project_id": "fake-project",
                        "alive": True,
                    },
                    {
                        "id": "2",
                        "project_id": "fake-project",
                        "alive": True,
                    },
                ]
            }
        )
    def get_os_fping_1(self, **kw):
        # Fake os-fping detail for server "1".
        return (
            200, {}, {
                'server': {
                    "id": "1",
                    "project_id": "fake-project",
                    "alive": True,
                }
            }
        )
    # Network action endpoints: accepted with an empty body.
    def post_os_networks_1_action(self, **kw):
        return (202, {}, None)
    def post_os_networks_networktest_action(self, **kw):
        return (202, {}, None)
    def post_os_networks_2_action(self, **kw):
        return (202, {}, None)
    # Fakes for the os-tenant-networks API (tenant-visible variant of
    # os-networks above; list/detail payloads mirror those fakes).
    def get_os_tenant_networks(self, **kw):
        return (200, {}, {'networks': [{"label": "1", "cidr": "10.0.0.0/24",
                                        'project_id':
                                            '4ffc664c198e435e9853f2538fbcd7a7',
                                        'id': '1', 'vlan': '1234'}]})
    def get_os_tenant_networks_1(self, **kw):
        return (200, {}, {'network': {"label": "1", "cidr": "10.0.0.0/24",
                                      "id": "1"}})
    def post_os_tenant_networks(self, **kw):
        # NOTE(review): key "cidr1" (not "cidr") looks like an upstream typo
        # in the fixture; kept as-is since tests may depend on it.
        return (202, {}, {'network': {"label": "new_network1",
                                      "cidr1": "10.0.1.0/24"}})
    def delete_os_tenant_networks_1(self, **kw):
        return (202, {}, None)
    def get_os_availability_zone(self, **kw):
        # Summary listing: zone names and availability only, no host map.
        return (200, {}, {
            "availabilityZoneInfo": [
                {"zoneName": "zone-1",
                 "zoneState": {"available": True},
                 "hosts": None},
                {"zoneName": "zone-2",
                 "zoneState": {"available": False},
                 "hosts": None}]
        })
    def get_os_availability_zone_detail(self, **kw):
        # Detail listing: per-zone host/service maps with datetime objects
        # (these fakes bypass JSON serialization, so datetimes are fine).
        return (200, {}, {
            "availabilityZoneInfo": [
                {"zoneName": "zone-1",
                 "zoneState": {"available": True},
                 "hosts": {
                     "fake_host-1": {
                         "nova-compute": {
                             "active": True,
                             "available": True,
                             "updated_at": datetime.datetime(
                                 2012, 12, 26, 14, 45, 25, 0)}}}},
                {"zoneName": "internal",
                 "zoneState": {"available": True},
                 "hosts": {
                     "fake_host-1": {
                         "nova-sched": {
                             "active": True,
                             "available": True,
                             "updated_at": datetime.datetime(
                                 2012, 12, 26, 14, 45, 25, 0)}},
                     "fake_host-2": {
                         "nova-network": {
                             "active": True,
                             "available": False,
                             "updated_at": datetime.datetime(
                                 2012, 12, 26, 14, 45, 24, 0)}}}},
                {"zoneName": "zone-2",
                 "zoneState": {"available": False},
                 "hosts": None}]})
    def get_servers_1234_os_interface(self, **kw):
        # Fake list of port interface attachments for server 1234 (the two
        # entries are deliberately identical).
        return (200, {}, {
            "interfaceAttachments": [
                {"port_state": "ACTIVE",
                 "net_id": "net-id-1",
                 "port_id": "port-id-1",
                 "mac_address": "aa:bb:cc:dd:ee:ff",
                 "fixed_ips": [{"ip_address": "1.2.3.4"}],
                 },
                {"port_state": "ACTIVE",
                 "net_id": "net-id-1",
                 "port_id": "port-id-1",
                 "mac_address": "aa:bb:cc:dd:ee:ff",
                 "fixed_ips": [{"ip_address": "1.2.3.4"}],
                 }]
        })
    def post_servers_1234_os_interface(self, **kw):
        # Attach: returns an empty attachment record.
        return (200, {}, {'interfaceAttachment': {}})
    def delete_servers_1234_os_interface_port_id(self, **kw):
        return (200, {}, None)
    # NOTE (vkhomenko):
    # Volume responses was taken from:
    # https://wiki.openstack.org/wiki/CreateVolumeFromImage
    # http://jorgew.github.com/block-storage-api/content/
    # GET_listDetailVolumes_v1__tenantId__volumes_detail_.html
    # I suppose they are outdated and should be updated after Cinder released
    def get_volumes_detail(self, **kw):
        # Two fake attached volumes. get_volumes() below returns the exact
        # same payload; the duplication mirrors the list vs. list-detail
        # endpoints.
        return (200, {}, {"volumes": [
            {
                "display_name": "Work",
                "display_description": "volume for work",
                "status": "ATTACHED",
                "id": "15e59938-07d5-11e1-90e3-e3dffe0c5983",
                "created_at": "2011-09-09T00:00:00Z",
                "attached": "2011-11-11T00:00:00Z",
                "size": 1024,
                "attachments": [
                    {"id": "3333",
                     "links": ''}],
                "metadata": {}},
            {
                "display_name": "Work2",
                "display_description": "volume for work2",
                "status": "ATTACHED",
                "id": "15e59938-07d5-11e1-90e3-ee32ba30feaa",
                "created_at": "2011-09-09T00:00:00Z",
                "attached": "2011-11-11T00:00:00Z",
                "size": 1024,
                "attachments": [
                    {"id": "2222",
                     "links": ''}],
                "metadata": {}}]})
    def get_volumes(self, **kw):
        return (200, {}, {"volumes": [
            {
                "display_name": "Work",
                "display_description": "volume for work",
                "status": "ATTACHED",
                "id": "15e59938-07d5-11e1-90e3-e3dffe0c5983",
                "created_at": "2011-09-09T00:00:00Z",
                "attached": "2011-11-11T00:00:00Z",
                "size": 1024,
                "attachments": [
                    {"id": "3333",
                     "links": ''}],
                "metadata": {}},
            {
                "display_name": "Work2",
                "display_description": "volume for work2",
                "status": "ATTACHED",
                "id": "15e59938-07d5-11e1-90e3-ee32ba30feaa",
                "created_at": "2011-09-09T00:00:00Z",
                "attached": "2011-11-11T00:00:00Z",
                "size": 1024,
                "attachments": [
                    {"id": "2222",
                     "links": ''}],
                "metadata": {}}]})
    # Detail fakes reuse get_volumes_detail(); index [2] is the body of the
    # (status, headers, body) triple.
    def get_volumes_15e59938_07d5_11e1_90e3_e3dffe0c5983(self, **kw):
        return (200, {}, {
            "volume": self.get_volumes_detail()[2]['volumes'][0]})
    def get_volumes_15e59938_07d5_11e1_90e3_ee32ba30feaa(self, **kw):
        return (200, {}, {
            "volume": self.get_volumes_detail()[2]['volumes'][1]})
    def post_volumes(self, **kw):
        # NOTE(review): "attachments": [(0)] is just [0] -- the parentheses
        # do not make a tuple. Kept as-is: it is the upstream fixture.
        return (200, {}, {"volume":
                          {"status": "creating",
                           "display_name": "vol-007",
                           "attachments": [(0)],
                           "availability_zone": "cinder",
                           "created_at": "2012-08-13T10:57:17.000000",
                           "display_description": "create volume from image",
                           "image_id": "f4cf905f-7c58-4d7b-8314-8dd8a2d1d483",
                           "volume_type": "None",
                           "metadata": {},
                           "id": "5cb239f6-1baf-4fe1-bd78-c852cf00fa39",
                           "size": 1}})
    def delete_volumes_15e59938_07d5_11e1_90e3_e3dffe0c5983(self, **kw):
        return (200, {}, {})
    def delete_volumes_15e59938_07d5_11e1_90e3_ee32ba30feaa(self, **kw):
        return (200, {}, {})
    # Fakes for server volume attachments ("Work" doubles as attachment id).
    def post_servers_1234_os_volume_attachments(self, **kw):
        return (200, {}, {
            "volumeAttachment":
                {"device": "/dev/vdb",
                 "volumeId": 2}})
    def put_servers_1234_os_volume_attachments_Work(self, **kw):
        return (200, {}, {"volumeAttachment": {"volumeId": 2}})
    def get_servers_1234_os_volume_attachments(self, **kw):
        return (200, {}, {
            "volumeAttachments": [
                {"display_name": "Work",
                 "display_description": "volume for work",
                 "status": "ATTACHED",
                 "id": "15e59938-07d5-11e1-90e3-e3dffe0c5983",
                 "created_at": "2011-09-09T00:00:00Z",
                 "attached": "2011-11-11T00:00:00Z",
                 "size": 1024,
                 "attachments": [{"id": "3333", "links": ''}],
                 "metadata": {}}]})
    def get_servers_1234_os_volume_attachments_Work(self, **kw):
        return (200, {}, {
            "volumeAttachment":
                {"display_name": "Work",
                 "display_description": "volume for work",
                 "status": "ATTACHED",
                 "id": "15e59938-07d5-11e1-90e3-e3dffe0c5983",
                 "created_at": "2011-09-09T00:00:00Z",
                 "attached": "2011-11-11T00:00:00Z",
                 "size": 1024,
                 "attachments": [{"id": "3333", "links": ''}],
                 "metadata": {}}})
    def delete_servers_1234_os_volume_attachments_Work(self, **kw):
        return (200, {}, {})
    def get_servers_1234_os_instance_actions(self, **kw):
        # Fake instance-action history for server 1234: a single "create".
        return (200, {}, {
            "instanceActions":
                [{"instance_uuid": "1234",
                  "user_id": "b968c25e04ab405f9fe4e6ca54cce9a5",
                  "start_time": "2013-03-25T13:45:09.000000",
                  "request_id": "req-abcde12345",
                  "action": "create",
                  "message": None,
                  "project_id": "04019601fe3648c0abd4f4abfb9e6106"}]})
    def get_servers_1234_os_instance_actions_req_abcde12345(self, **kw):
        # Detail view of the single action above, keyed by request id.
        return (200, {}, {
            "instanceAction":
                {"instance_uuid": "1234",
                 "user_id": "b968c25e04ab405f9fe4e6ca54cce9a5",
                 "start_time": "2013-03-25T13:45:09.000000",
                 "request_id": "req-abcde12345",
                 "action": "create",
                 "message": None,
                 "project_id": "04019601fe3648c0abd4f4abfb9e6106"}})
    # Action endpoints for the uuid1..uuid4 servers listed by the hypervisor
    # fakes: all accepted with empty bodies.
    def post_servers_uuid1_action(self, **kw):
        return 202, {}, {}
    def post_servers_uuid2_action(self, **kw):
        return 202, {}, {}
    def post_servers_uuid3_action(self, **kw):
        return 202, {}, {}
    def post_servers_uuid4_action(self, **kw):
        return 202, {}, {}
    # Fakes for the os-cells API.
    def get_os_cells_child_cell(self, **kw):
        cell = {'cell': {
            'username': 'cell1_user',
            'name': 'cell1',
            'rpc_host': '10.0.1.10',
            'info': {
                'username': 'cell1_user',
                'rpc_host': '10.0.1.10',
                'type': 'child',
                'name': 'cell1',
                'rpc_port': 5673},
            'type': 'child',
            'rpc_port': 5673,
            'loaded': True
        }}
        return (200, {}, cell)
    def get_os_cells_capacities(self, **kw):
        # Global capacity report; the per-cell variant below reuses it.
        cell_capacities_response = {"cell": {"capacities": {"ram_free": {
            "units_by_mb": {"8192": 0, "512": 13, "4096": 1, "2048": 3,
                            "16384": 0}, "total_mb": 7680}, "disk_free": {
            "units_by_mb": {"81920": 11, "20480": 46, "40960": 23, "163840": 5,
                            "0": 0}, "total_mb": 1052672}}}}
        return (200, {}, cell_capacities_response)
    def get_os_cells_child_cell_capacities(self, **kw):
        return self.get_os_cells_capacities()
    def get_os_migrations(self, **kw):
        # Fake os-migrations listing: one completed resize/migration record.
        migrations = {'migrations': [
            {
                "created_at": "2012-10-29T13:42:02.000000",
                "dest_compute": "compute2",
                "dest_host": "1.2.3.4",
                "dest_node": "node2",
                "id": 1234,
                "instance_uuid": "instance_id_123",
                "new_instance_type_id": 2,
                "old_instance_type_id": 1,
                "source_compute": "compute1",
                "source_node": "node1",
                "status": "Done",
                "updated_at": "2012-10-29T13:42:02.000000"
            }]}
        return (200, {}, migrations)
    def post_os_server_external_events(self, **kw):
        # Fake os-server-external-events: echoes one accepted event.
        return (200, {}, {'events': [
            {'name': 'network-changed',
             'server_uuid': '1234'}]})
    #
    # Server Groups
    #
def get_os_server_groups(self, *kw):
return (200, {},
{"server_groups": [
{"members": [], "metadata": {},
"id": "2cbd51f4-fafe-4cdb-801b-cf913a6f288b",
"policies": [], "name": "ig1"},
{"members": [], "metadata": {},
"id": "4473bb03-4370-4bfb-80d3-dc8cffc47d94",
"policies": ["anti-affinity"], "name": "ig2"},
{"members": [], "metadata": {"key": "value"},
"id": "31ab9bdb-55e1-4ac3-b094-97eeb1b65cc4",
"policies": [], "name": "ig3"},
{"members": ["2dccb4a1-02b9-482a-aa23-5799490d6f5d"],
"metadata": {},
"id": "4890bb03-7070-45fb-8453-d34556c87d94",
"policies": ["anti-affinity"], "name": "ig2"}]})
    def _return_server_group(self):
        # Shared response builder: the first group from the list fake,
        # wrapped as a detail payload ([2] is the body of the triple).
        r = {'server_group':
             self.get_os_server_groups()[2]['server_groups'][0]}
        return (200, {}, r)
    def post_os_server_groups(self, body, **kw):
        return self._return_server_group()
    def get_os_server_groups_2cbd51f4_fafe_4cdb_801b_cf913a6f288b(self, **kw):
        return self._return_server_group()
    def put_os_server_groups_2cbd51f4_fafe_4cdb_801b_cf913a6f288b(self, **kw):
        return self._return_server_group()
    def post_os_server_groups_2cbd51f4_fafe_4cdb_801b_cf913a6f288b_action(
            self, body, **kw):
        return self._return_server_group()
    def delete_os_server_groups_2cbd51f4_fafe_4cdb_801b_cf913a6f288b(
            self, **kw):
        return (202, {}, None)
class FakeSessionClient(fakes.FakeClient, client.Client):
    # Variant of the fake novaclient that swaps in a session-based HTTP
    # mock (FakeSessionMockClient) instead of the plain FakeHTTPClient.
    def __init__(self, api_version, *args, **kwargs):
        client.Client.__init__(self, 'username', 'password',
                               'project_id', 'auth_url',
                               extensions=kwargs.get('extensions'),
                               api_version=api_version)
        # Replace the real transport with the session-aware fake below.
        self.client = FakeSessionMockClient(**kwargs)
class FakeSessionMockClient(base_client.SessionClient, FakeHTTPClient):
    # SessionClient whose auth/session are mock.Mock objects and whose
    # request() routes through FakeHTTPClient's canned-response dispatch.
    def __init__(self, *args, **kwargs):
        # Record of dispatched calls, consumed by assert_called helpers.
        self.callstack = []
        self.auth = mock.Mock()
        self.session = mock.Mock()
        self.session.get_endpoint.return_value = FakeHTTPClient.get_endpoint(
            self)
        self.service_type = 'service_type'
        self.service_name = None
        self.endpoint_override = None
        self.interface = None
        self.region_name = None
        self.version = None
        self.auth.get_auth_ref.return_value.project_id = 'tenant_id'
    def request(self, url, method, **kwargs):
        # Delegate to FakeHTTPClient._cs_request, which dispatches to the
        # get_/post_/put_/delete_ fake methods defined above.
        return self._cs_request(url, method, **kwargs)
| apache-2.0 |
leiferikb/bitpop | depot_tools/third_party/gsutil/gslib/commands/version.py | 51 | 5060 | # Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import boto
import os
import re
import sys
from boto.pyami.config import BotoConfigLocations
from gslib.command import Command
from gslib.command import COMMAND_NAME
from gslib.command import COMMAND_NAME_ALIASES
from gslib.command import CONFIG_REQUIRED
from gslib.command import FILE_URIS_OK
from gslib.command import MAX_ARGS
from gslib.command import MIN_ARGS
from gslib.command import PROVIDER_URIS_OK
from gslib.command import SUPPORTED_SUB_ARGS
from gslib.command import URIS_START_ARG
from gslib.help_provider import HELP_NAME
from gslib.help_provider import HELP_NAME_ALIASES
from gslib.help_provider import HELP_ONE_LINE_SUMMARY
from gslib.help_provider import HELP_TEXT
from gslib.help_provider import HelpType
from gslib.help_provider import HELP_TYPE
from hashlib import md5
_detailed_help_text = ("""
<B>SYNOPSIS</B>
gsutil version
<B>DESCRIPTION</B>
Prints information about the version of gsutil, boto, and Python being
run on your system.
""")
class VersionCommand(Command):
  """Implementation of gsutil version command."""
  # Command specification (processed by parent class).
  command_spec = {
    # Name of command.
    COMMAND_NAME : 'version',
    # List of command name aliases.
    COMMAND_NAME_ALIASES : ['ver'],
    # Min number of args required by this command.
    MIN_ARGS : 0,
    # Max number of args required by this command, or NO_MAX.
    MAX_ARGS : 0,
    # Getopt-style string specifying acceptable sub args.
    SUPPORTED_SUB_ARGS : '',
    # True if file URIs acceptable for this command.
    FILE_URIS_OK : False,
    # True if provider-only URIs acceptable for this command.
    PROVIDER_URIS_OK : False,
    # Index in args of first URI arg.
    URIS_START_ARG : 0,
    # True if must configure gsutil before running command.
    CONFIG_REQUIRED : False,
  }
  help_spec = {
    # Name of command or auxiliary help info for which this help applies.
    HELP_NAME : 'version',
    # List of help name aliases.
    HELP_NAME_ALIASES : ['ver'],
    # Type of help:
    HELP_TYPE : HelpType.COMMAND_HELP,
    # One line summary of this help.
    HELP_ONE_LINE_SUMMARY : 'Print version info about gsutil',
    # The full help text.
    HELP_TEXT : _detailed_help_text,
  }
  # Command entry point.
  def RunCommand(self):
    """Prints gsutil/boto/python versions and checksum info to stderr.

    Returns 0 (success) always; missing files degrade to 'MISSING' markers.
    """
    # Find the first readable boto config file, just to report its path.
    # Note the for/else: the `else` belongs to the `for` and only runs when
    # no config opened successfully (i.e. the loop never hit `break`).
    for path in BotoConfigLocations:
      f = None
      try:
        f = open(path, 'r')
        break
      except IOError:
        pass
      finally:
        # The file is only opened to probe readability; close immediately.
        if f:
          f.close()
    else:
      path = "no config found"
    try:
      f = open(os.path.join(self.gsutil_bin_dir, 'CHECKSUM'))
      shipped_checksum = f.read().strip()
      f.close()
    except IOError:
      shipped_checksum = 'MISSING'
    try:
      cur_checksum = self._ComputeCodeChecksum()
    except IOError:
      cur_checksum = 'MISSING FILES'
    # Flag locally modified installs so support can ask for a stock copy.
    if shipped_checksum == cur_checksum:
      checksum_ok_str = 'OK'
    else:
      checksum_ok_str = '!= %s' % shipped_checksum
    sys.stderr.write(
        'gsutil version %s\nchecksum %s (%s)\n'
        'boto version %s\npython version %s\n'
        'config path: %s\ngsutil path: %s\n' % (
            self.gsutil_ver, cur_checksum, checksum_ok_str,
            boto.__version__, sys.version, path, os.path.realpath(sys.argv[0])))
    return 0
  def _ComputeCodeChecksum(self):
    """
    Computes a checksum of gsutil code so we can see if users locally modified
    gsutil when requesting support. (It's fine for users to make local mods,
    but when users ask for support we ask them to run a stock version of
    gsutil so we can reduce possible variables.)
    """
    m = md5()
    # Checksum gsutil and all .py files under gsutil bin (including bundled
    # libs). Although we will eventually make gsutil allow use of a centrally
    # installed boto (once boto shifts to more frequent releases), in that case
    # the local copies still should not have any user modifications.
    files_to_checksum = [os.path.join(self.gsutil_bin_dir, 'gsutil')]
    for root, sub_folders, files in os.walk(self.gsutil_bin_dir):
      for file in files:
        if file[-3:] == '.py':
          files_to_checksum.append(os.path.join(root, file))
    # Sort to ensure consistent checksum build, no matter how os.walk
    # orders the list.
    for file in sorted(files_to_checksum):
      f = open(file, 'r')
      content = f.read()
      # Normalize line endings so the hash is platform-independent.
      content = re.sub(r'(\r\n|\r|\n)', '\n', content)
      m.update(content)
      f.close()
    return m.hexdigest()
| gpl-3.0 |
MarshedOut/android_external_skia | platform_tools/android/bin/download_utils.py | 149 | 8464 | #!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A library to assist automatically downloading files.
This library is used by scripts that download tarballs, zipfiles, etc. as part
of the build process.
"""
import hashlib
import http_download
import os.path
import re
import shutil
import sys
import time
import urllib2
# Names of the stamp files written next to an unpacked download to record
# where it came from (URL) and its content hash (SHA1).
SOURCE_STAMP = 'SOURCE_URL'
HASH_STAMP = 'SOURCE_SHA1'
# Designed to handle more general inputs than sys.platform because the platform
# name may come from the command line.
PLATFORM_COLLAPSE = {
    'windows': 'windows',
    'win32': 'windows',
    'cygwin': 'windows',
    'linux': 'linux',
    'linux2': 'linux',
    'linux3': 'linux',
    'darwin': 'mac',
    'mac': 'mac',
    }

ARCH_COLLAPSE = {
    'i386': 'x86',
    'i686': 'x86',
    'x86_64': 'x86',
    'armv7l': 'arm',
    }


class HashError(Exception):
  """Raised when a downloaded file's SHA1 does not match the expected one."""

  def __init__(self, download_url, expected_hash, actual_hash):
    self.download_url = download_url
    self.expected_hash = expected_hash
    self.actual_hash = actual_hash

  def __str__(self):
    return ('Got hash "{0}" but expected hash "{1}" for "{2}"'
            .format(self.actual_hash, self.expected_hash, self.download_url))


def PlatformName(name=None):
  """Collapse a platform string (default: sys.platform) to windows/linux/mac.

  Raises KeyError for unrecognized platform names.
  """
  key = sys.platform if name is None else name
  return PLATFORM_COLLAPSE[key]


def ArchName(name=None):
  """Collapse a machine string to 'x86' or 'arm', auto-detecting when None."""
  if name is None:
    if PlatformName() == 'windows':
      # TODO(pdox): Figure out how to auto-detect 32-bit vs 64-bit Windows.
      name = 'i386'
    else:
      import platform
      name = platform.machine()
  return ARCH_COLLAPSE[name]
def EnsureFileCanBeWritten(filename):
  """Create filename's parent directory if it does not already exist."""
  parent = os.path.dirname(filename)
  if not os.path.exists(parent):
    os.makedirs(parent)


def WriteData(filename, data):
  """Write data to filename in binary mode, creating parent dirs as needed."""
  EnsureFileCanBeWritten(filename)
  out = open(filename, 'wb')
  out.write(data)
  out.close()


def WriteDataFromStream(filename, stream, chunk_size, verbose=True):
  """Copy stream to filename, chunk_size bytes at a time.

  When verbose, writes one progress dot per chunk and a trailing newline.
  """
  EnsureFileCanBeWritten(filename)
  dst = open(filename, 'wb')
  try:
    chunk = stream.read(chunk_size)
    while chunk:
      dst.write(chunk)
      if verbose:
        # Indicate that we're still writing.
        sys.stdout.write('.')
        sys.stdout.flush()
      chunk = stream.read(chunk_size)
  finally:
    if verbose:
      sys.stdout.write('\n')
    dst.close()
def DoesStampMatch(stampfile, expected, index):
  """Check line `index` of stampfile against `expected`.

  Returns a truthy reason string ("already up-to-date." on a match,
  "manual override." when the stamp starts with 'manual'), or False when
  there is no match or the stamp file cannot be read.
  """
  try:
    fh = open(stampfile, 'r')
    contents = fh.read()
    fh.close()
  except IOError:
    return False
  if contents.split('\n')[index] == expected:
    return "already up-to-date."
  if contents.startswith('manual'):
    return "manual override."
  return False
def WriteStamp(stampfile, data):
  """Write `data` to stampfile, creating parent directories as needed."""
  EnsureFileCanBeWritten(stampfile)
  f = open(stampfile, 'w')
  f.write(data)
  f.close()
def StampIsCurrent(path, stamp_name, stamp_contents, min_time=None, index=0):
  """Return a truthy reason string if the stamp under `path` matches
  `stamp_contents` at line `index` and is newer than `min_time`; else False.
  """
  stampfile = os.path.join(path, stamp_name)
  # Check if the stampfile is older than the minimum last mod time
  if min_time:
    try:
      stamp_time = os.stat(stampfile).st_mtime
      if stamp_time <= min_time:
        return False
    except OSError:
      # Missing stamp file => not current.
      return False
  return DoesStampMatch(stampfile, stamp_contents, index)
def WriteSourceStamp(path, url):
  """Record `url` as the source of the download unpacked under `path`."""
  stampfile = os.path.join(path, SOURCE_STAMP)
  WriteStamp(stampfile, url)
def WriteHashStamp(path, hash_val):
  """Record `hash_val` (SHA1) for the download unpacked under `path`."""
  hash_stampfile = os.path.join(path, HASH_STAMP)
  WriteStamp(hash_stampfile, hash_val)
def Retry(op, *args):
  """Invoke op(*args); on Windows/Cygwin retry up to 5 times with
  exponential backoff (2**count seconds), re-raising the final failure.
  On other platforms the call is made exactly once.
  """
  # Windows seems to be prone to having commands that delete files or
  # directories fail. We currently do not have a complete understanding why,
  # and as a workaround we simply retry the command a few times.
  # It appears that file locks are hanging around longer than they should. This
  # may be a secondary effect of processes hanging around longer than they
  # should. This may be because when we kill a browser sel_ldr does not exit
  # immediately, etc.
  # Virus checkers can also accidently prevent files from being deleted, but
  # that shouldn't be a problem on the bots.
  if sys.platform in ('win32', 'cygwin'):
    count = 0
    while True:
      try:
        op(*args)
        break
      except Exception:
        sys.stdout.write("FAILED: %s %s\n" % (op.__name__, repr(args)))
        count += 1
        if count < 5:
          sys.stdout.write("RETRY: %s %s\n" % (op.__name__, repr(args)))
          time.sleep(pow(2, count))
        else:
          # Don't mask the exception.
          raise
  else:
    op(*args)
def MoveDirCleanly(src, dst):
  """Delete dst (if present), then move src to dst."""
  RemoveDir(dst)
  MoveDir(src, dst)
def MoveDir(src, dst):
  """shutil.move wrapped with Windows retry semantics (see Retry)."""
  Retry(shutil.move, src, dst)
def RemoveDir(path):
  """Recursively delete `path` if it exists (retried on Windows)."""
  if os.path.exists(path):
    Retry(shutil.rmtree, path)
def RemoveFile(path):
  """Delete the file at `path` if it exists (retried on Windows)."""
  if os.path.exists(path):
    Retry(os.unlink, path)
def _HashFileHandle(fh):
  """sha1 of a file like object.

  Arguments:
    fh: file handle like object to hash (closed before returning).
  Returns:
    sha1 as a string.
  """
  digest = hashlib.sha1()
  try:
    chunk = fh.read(4096)
    while chunk:
      digest.update(chunk)
      chunk = fh.read(4096)
  finally:
    fh.close()
  return digest.hexdigest()


def HashFile(filename):
  """sha1 a file on disk.

  Arguments:
    filename: filename to hash.
  Returns:
    sha1 as a string.
  """
  return _HashFileHandle(open(filename, 'rb'))
def HashUrlByDownloading(url):
  """sha1 the data at an url.

  Arguments:
    url: url to download from.
  Returns:
    sha1 of the data at the url.
  """
  try:
    fh = urllib2.urlopen(url)
  except:
    # Bare except is deliberate here: log the failing URL, then re-raise
    # the original exception unchanged.
    sys.stderr.write("Failed fetching URL: %s\n" % url)
    raise
  return _HashFileHandle(fh)
# Attempts to get the SHA1 hash of a file given a URL by looking for
# an adjacent file with a ".sha1hash" suffix. This saves having to
# download a large tarball just to get its hash. Otherwise, we fall
# back to downloading the main file.
def HashUrl(url):
  """Return the SHA1 (hex string) for the resource at `url`.

  Tries `url + '.sha1hash'` first; a 404 falls back to downloading and
  hashing the resource itself. Other HTTP errors propagate.
  """
  hash_url = '%s.sha1hash' % url
  try:
    fh = urllib2.urlopen(hash_url)
    # 100 bytes is plenty: a SHA1 hex digest is 40 chars (+ newline).
    data = fh.read(100)
    fh.close()
  except urllib2.HTTPError, exn:
    if exn.code == 404:
      return HashUrlByDownloading(url)
    raise
  else:
    if not re.match('[0-9a-f]{40}\n?$', data):
      raise AssertionError('Bad SHA1 hash file: %r' % data)
    return data.strip()
def SyncURL(url, filename=None, stamp_dir=None, min_time=None,
            hash_val=None, keep=False, verbose=False, stamp_index=0):
  """Synchronize a destination file with a URL

  if the URL does not match the URL stamp, then we must re-download it.

  Arguments:
    url: the url which will to compare against and download
    filename: the file to create on download
    path: the download path
    stamp_dir: the filename containing the URL stamp to check against
    hash_val: if set, the expected hash which must be matched
    verbose: prints out status as it runs
    stamp_index: index within the stamp file to check.
  Returns:
    True if the file is replaced
    False if the file is not replaced
  Exception:
    HashError: if the hash does not match
  """
  assert url and filename
  # If we are not keeping the tarball, or we already have it, we can
  # skip downloading it for this reason. If we are keeping it,
  # it must exist.
  if keep:
    tarball_ok = os.path.isfile(filename)
  else:
    tarball_ok = True
  # If we don't need the tarball and the stamp_file matches the url, then
  # we must be up to date.  If the URL differs but the recorded hash matches
  # the one we'll insist the tarball has, then that's good enough too.
  # TODO(mcgrathr): Download the .sha1sum file first to compare with
  # the cached hash, in case --file-hash options weren't used.
  if tarball_ok and stamp_dir is not None:
    if StampIsCurrent(stamp_dir, SOURCE_STAMP, url, min_time):
      if verbose:
        print '%s is already up to date.' % filename
      return False
    if (hash_val is not None and
        StampIsCurrent(stamp_dir, HASH_STAMP, hash_val, min_time, stamp_index)):
      if verbose:
        print '%s is identical to the up to date file.' % filename
      return False
  if verbose:
    print 'Updating %s\n\tfrom %s.' % (filename, url)
  EnsureFileCanBeWritten(filename)
  http_download.HttpDownload(url, filename)
  # Verify the downloaded bytes against the expected hash, if given.
  if hash_val:
    tar_hash = HashFile(filename)
    if hash_val != tar_hash:
      raise HashError(actual_hash=tar_hash, expected_hash=hash_val,
                      download_url=url)
  return True
| bsd-3-clause |
markduan/qingcloud-sdk-python | qingcloud/misc/json_tool.py | 3 | 1673 | # =========================================================================
# Copyright 2012-present Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
import json as jsmod
def json_dump(obj, indent=None):
    """ Dump an object to json string, only basic types are supported.
        @return json string or `None` if failed

        >>> json_dump({'int': 1, 'none': None, 'str': 'string'})
        '{"int":1,"none":null,"str":"string"}'
    """
    try:
        # Compact separators + sort_keys produce a canonical, stable string.
        jstr = jsmod.dumps(obj, separators=(',', ':'),
                           indent=indent, sort_keys=True)
    except (TypeError, ValueError):
        # Unserializable objects raise TypeError; circular references raise
        # ValueError. Was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit.
        jstr = None
    return jstr
def json_load(json):
    """ Load from json string and create a new python object
        @return object or `None` if failed

        Example: json_load('{"int":1,"none":null,"str":"string"}')
        returns {'int': 1, 'none': None, 'str': 'string'}.
    """
    try:
        obj = jsmod.loads(json)
    except (TypeError, ValueError):
        # Invalid JSON text raises ValueError (JSONDecodeError on py3);
        # non-string input raises TypeError. Was a bare `except:`.
        obj = None
    return obj
__all__ = [json_dump, json_load]
| apache-2.0 |
AOKP/external_chromium_org | third_party/protobuf/python/google/protobuf/internal/enum_type_wrapper.py | 292 | 3541 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A simple wrapper around enum types to expose utility functions.
Instances are created as properties with the same name as the enum they wrap
on proto classes. For usage, see:
reflection_test.py
"""
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values."""

  DESCRIPTOR = None

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type

  def Name(self, number):
    """Returns a string containing the name of an enum value."""
    by_number = self._enum_type.values_by_number
    if number not in by_number:
      raise ValueError('Enum %s has no name defined for value %d' % (
          self._enum_type.name, number))
    return by_number[number].name

  def Value(self, name):
    """Returns the value corresponding to the given enum name."""
    by_name = self._enum_type.values_by_name
    if name not in by_name:
      raise ValueError('Enum %s has no value defined for name %s' % (
          self._enum_type.name, name))
    return by_name[name].number

  def keys(self):
    """Return a list of the string names in the enum.

    These are returned in the order they were defined in the .proto file.
    """
    return [descriptor.name for descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    These are returned in the order they were defined in the .proto file.
    """
    return [descriptor.number for descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    These are returned in the order they were defined in the .proto file.
    """
    return [(descriptor.name, descriptor.number)
            for descriptor in self._enum_type.values]
| bsd-3-clause |
trolldbois/python-haystack-reverse | haystack/reverse/heuristics/dsa.py | 1 | 18223 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Loic Jaquemet loic.jaquemet+python@gmail.com
#
import array
import collections
import logging
import numbers
from haystack.reverse import fieldtypes
from haystack.reverse import re_string
from haystack.reverse.heuristics import model
log = logging.getLogger('dsa')
# fieldtypes.Field analysis related functions and classes
def _py3_byte_compat(c):
    """Coerce an int (0 <= c < 256) to bytes via chr(c).encode(); any
    non-numeric input is returned unchanged.

    NOTE(review): for c >= 128 the UTF-8 encode yields *two* bytes, not
    one -- confirm callers only pass ASCII-range ints or byte slices.
    """
    if isinstance(c, numbers.Number):
        assert 0 <= c < 256
        c = chr(c).encode()
    return c


# Short alias used throughout this module.
_w = _py3_byte_compat
class ZeroFields(model.FieldAnalyser):
    """ checks for possible fields, aligned, with WORDSIZE zeros."""
    def make_fields(self, _record, offset, size):
        assert(offset % self._word_size == 0) # vaddr and offset should be aligned
        # log.debug('checking Zeroes')
        self._typename = fieldtypes.ZEROES
        # One word of NUL bytes, the pattern we scan for.
        self._zeroes = b'\x00' * self._word_size
        ret = self._find_zeroes(_record, offset, size)
        # TODO if its just a word, we should say its a small int.
        return ret
    def _find_zeroes(self, _record, offset, size):
        """ iterate over the bytes until a byte is not \x00 """
        _bytes = _record.bytes
        # print 'offset:%x blen:%d'%(offset, len(bytes))
        # print repr(bytes)
        assert(offset % self._word_size == 0)
        # aligned_off = (offset)%self._target_platform.get_word_size()
        start = offset
        # if aligned_off != 0: # align to next
        #    start += (self._target_platform.get_word_size() - aligned_off)
        #    size -= (self._target_platform.get_word_size() - aligned_off)
        # Pass 1: record every word-aligned position holding a full word of
        # zero bytes.
        # NOTE(review): `i` already ranges over [start, start+size), yet the
        # slice below is taken at `start + i` and `start + i` is recorded --
        # the offset appears to be applied twice whenever offset != 0.
        # TODO confirm against callers (correct only when offset == 0).
        # iterate
        matches = array.array('i')
        for i in range(start, start + size, self._word_size):
            # PERF TODO: bytes or struct test ?
            # print repr(bytes[start+i:start+i+self._target_platform.get_word_size()])
            if _w(_bytes[start + i:start + i + self._word_size]) == self._zeroes:
                matches.append(start + i)
        # print matches
        # collate
        if len(matches) == 0:
            return []
        # lets try to get fields
        fields = []
        # Pass 2: collate adjacent word positions into runs. `prev` is seeded
        # one word before the first match so the first iteration extends `x`.
        collates = list()
        prev = matches[0] - self._word_size
        x = []
        # PERF TODO: what is the algo here
        for i in matches:
            if i - self._word_size == prev:
                x.append(i)
            else:
                collates.append(x)
                x = [i]
            prev = i
        collates.append(x)
        # log.debug(collates)
        # Pass 3: one ZeroField per collated run, sized in whole words.
        for field in collates:
            flen = len(field)
            if flen > 1:
                size = self._word_size * flen
            elif flen == 1:
                size = self._word_size
            else:
                continue
            # make a field
            # NOTE(review): `start + field[0]` adds start on top of positions
            # that already include it (see pass 1 note).
            _offset = start + field[0]
            fields.append(fieldtypes.ZeroField('zerroes_%d' % _offset, _offset, size))
        # we have all fields
        return fields
class UTF16Fields(model.FieldAnalyser):
    """
    rfinds utf-16-ascii and ascii 7bit
    """
    def make_fields(self, _record, offset, size):
        assert(offset % self._word_size == 0) # vaddr and offset should be aligned
        # log.debug('checking String')
        fields = []
        _bytes = _record.bytes
        # Scan from the tail: rfind locates the last UTF-16 run, the
        # remaining prefix is re-scanned on the next iteration.
        while size > self._word_size:
            # print 're_string.rfind_utf16(bytes, %d, %d)'%(offset,size)
            # we force aligned results only. otherwise we have overlaps
            index = re_string.rfind_utf16(_bytes, offset, size, False, self._word_size)
            if index > -1:
                _offset = offset + index
                f = fieldtypes.Field('utf16_%d' % _offset, _offset, fieldtypes.STRING16, size - index, False)
                # print repr(structure.bytes[f.offset:f.offset+f.size])
                fields.append(f)
                size = index # reduce unknown field in prefix
            else:
                size -= self._word_size # reduce unknown field
        # look in head
        return fields
class PrintableAsciiFields(model.FieldAnalyser):
    """ finds printable ascii fields """

    def make_fields(self, _record, offset, size):
        """Return STRING/STRINGNULL fields for printable-ascii runs.

        Only runs that start exactly at the current scan offset (index == 0)
        are accepted; otherwise the window advances by one word and retries.

        :param _record: record whose raw bytes are analysed
        :param offset: word-aligned start offset
        :param size: number of bytes to scan
        :return: list of fieldtypes.Field
        """
        # vaddr and offset should be aligned
        assert(offset % self._word_size == 0)
        # log.debug('checking String')
        fields = []
        _bytes = _record.bytes
        while size >= self._word_size:
            # print 're_string.find_ascii(bytes, %d, %d)'%(offset,size)
            index, ssize = re_string.find_ascii(_bytes, offset, size)
            if index == 0:
                _offset = offset + index
                # PY3 wrapper _w
                if (ssize < size) and _w(_bytes[offset + index + ssize]) == b'\x00':  # space for a \x00
                    ssize += 1  # include the terminating NUL in the field
                    f = fieldtypes.Field('strnull_%d' % _offset, _offset, fieldtypes.STRINGNULL, ssize, False)
                else:
                    f = fieldtypes.Field('str_%d' % _offset, _offset, fieldtypes.STRING, ssize, False)
                # print repr(structure.bytes[f.offset:f.offset+f.size])
                fields.append(f)
                size -= ssize  # reduce unknown field
                offset += ssize
                if ssize % self._word_size:
                    # string ended mid-word: skip ahead to the next word boundary
                    rest = self._word_size - ssize % self._word_size
                    size -= rest  # goto next aligned
                    offset += rest
            else:
                size -= self._word_size  # reduce unknown field
                offset += self._word_size
        # look in head
        return fields
class PointerFields(model.FieldAnalyser):
    """ looks at a word for a pointer value"""

    def make_fields(self, _record, offset, size):
        """Return a PointerField for every aligned word whose value is a valid
        address in the process memory mappings.

        Each pointer field's comment is set to the pathname of the mapping the
        pointed-to address falls in.

        :param _record: record whose raw bytes are analysed
        :param offset: word-aligned start offset
        :param size: number of bytes to scan
        :return: list of fieldtypes.PointerField
        """
        # iterate on all offsets . NOT assert( size ==
        # self._target_platform.get_word_size())
        assert(offset % self._word_size == 0)  # vaddr and offset should be aligned
        log.debug('checking Pointer')
        _bytes = _record.bytes
        fields = []
        ctypes_utils = self._target.get_target_ctypes_utils()
        while size >= self._word_size:
            value = ctypes_utils.unpackWord(_bytes[offset:offset + self._word_size])
            # check if pointer value is in range of _memory_handler and set self.comment to pathname value of pointer
            # TODO : if bytes 1 & 3 == \x00, maybe utf16 string
            if not self._memory_handler.is_valid_address(value):
                size -= self._word_size
                offset += self._word_size
                continue
            # FIXME 20151103 dont ignore it
            # what will it break ?
            if False:  # dead branch kept deliberately, see FIXME above
                # 20151026 - if aligned, ignore it
                if value % self._target.get_word_size():
                    size -= self._word_size
                    offset += self._word_size
                    continue
            # we have a pointer
            log.debug('checkPointer offset:%s value:%s' % (offset, hex(value)))
            field = fieldtypes.PointerField('ptr_%d' % offset, offset, self._word_size)
            # TODO: leverage the context._function_names
            # if value in structure._context._function_names:
            #     field.comment = ' %s::%s' % (os.path.basename(self._memory_handler.get_mapping_for_address(value).pathname),
            #                                  structure._context._function_names[value])
            # else:
            #     field.comment = self._memory_handler.get_mapping_for_address(value).pathname
            field.comment = self._memory_handler.get_mapping_for_address(value).pathname
            fields.append(field)
            size -= self._word_size
            offset += self._word_size
        return fields
class IntegerFields(model.FieldAnalyser):
    """ looks at a word for a small int value"""

    def make_fields(self, _record, offset, size):
        """Return SMALLINT/SIGNED_SMALLINT fields for words holding small values.

        Each aligned word is tested little-endian first, then big-endian.

        :param _record: record whose raw bytes are analysed
        :param offset: word-aligned start offset
        :param size: number of bytes to scan
        :return: list of fieldtypes.Field
        """
        # iterate on all offsets . NOT assert( size ==
        # self._target_platform.get_word_size())
        assert(offset % self._word_size == 0)  # vaddr and offset should be aligned
        # log.debug('checking Integer')
        my_bytes = _record.bytes
        fields = []
        while size >= self._word_size:
            # print 'checking >'
            field = self.check_small_integers(my_bytes, offset)
            if field is None:
                # print 'checking <'
                field = self.check_small_integers(my_bytes, offset, '>')
            # we have a field smallint
            if field is not None:
                fields.append(field)
            size -= self._word_size
            offset += self._word_size
        return fields

    def check_small_integers(self, my_bytes, offset, endianess='<'):
        """ check for small value in signed and unsigned forms """
        data = my_bytes[offset:offset + self._word_size]
        val = self._target.get_target_ctypes_utils().unpackWord(data, endianess)
        # print endianess, val
        if val < 0xffff:
            # small unsigned value
            field = fieldtypes.Field('small_int_%d' % offset, offset, fieldtypes.SMALLINT, self._word_size, False)
            # FIXME
            field.value = val
            field.endianess = endianess
            return field
        # check signed int
        elif (2 ** (self._word_size * 8) - 0xffff) < val:
            # value near the unsigned maximum: a small negative number in two's complement
            _name = 'small_signed_int_%d' % offset
            field = fieldtypes.Field(_name, offset, fieldtypes.SIGNED_SMALLINT, self._word_size, False)
            # FIXME
            field.value = val
            field.endianess = endianess
            return field
        return None
class FieldReverser(model.AbstractReverser):
    """
    Decode each record by asserting simple basic types from the byte content.
    Simple structure analyzer that leverage simple type recognition heuristics.
    For all aligned offset, try to apply the following heuristics :
    ZeroFields: if the word is null
    UTF16Fields: if the offset contains utf-16 data
    PrintableAsciiFields: if the offset starts a printable ascii string
    IntegerFields: if the word value is small ( |x| < 65535 )
    PointerFields: if the word if a possible pointer value
    If the word content does not match theses heuristics, tag the field has unknown.
    TODO: UTF16 array corrector, if utf16 field is preceded by smallint, aggregate both in utf16,
    event if not aligned.
    """
    REVERSE_LEVEL = 10

    def __init__(self, memory_handler):
        super(FieldReverser, self).__init__(memory_handler)
        # one analyser instance per heuristic; applied in priority order in _analyze()
        self.zero_a = ZeroFields(self._memory_handler)
        self.ascii_a = PrintableAsciiFields(self._memory_handler)
        self.utf16_a = UTF16Fields(self._memory_handler)
        self.int_a = IntegerFields(self._memory_handler)
        self.ptr_a = PointerFields(self._memory_handler)

    def reverse_record(self, _context, _record):
        """Analyse one record and attach a RecordType built from all found fields and gaps."""
        _record.reset()
        fields, gaps = self._analyze(_record)
        # _record.add_fields(fields)
        # _record.add_fields(gaps)  # , fieldtypes.UNKNOWN
        # FIXME why not use fieldtypes.STRUCT for type and a field definition ?
        # is it really worth having a definition separate ?
        # yes so we can copy the recordType to other anonymous struct
        _record_type = fieldtypes.RecordType('struct_%x' % _record.address, len(_record), fields + gaps)
        _record.set_record_type(_record_type)
        _record.set_reverse_level(self._reverse_level)
        return _record

    def _analyze(self, _record):
        """Run every analyser over the remaining unknown gaps, recomputing gaps
        after each analyser that produced new fields, until the record is covered."""
        slen = len(_record)
        offset = 0
        # call on analyzers
        fields = []
        nb = -1
        # start with the whole record as one unknown gap
        gaps = [fieldtypes.Field('unknown_0', 0, fieldtypes.UNKNOWN, len(_record), False)]
        _record.set_reverse_level(10)
        # find zeroes
        # find strings
        # find smallints
        # find pointers
        for analyser in [self.zero_a, self.utf16_a, self.ascii_a, self.int_a, self.ptr_a]:
            log.debug("analyzing with %s", analyser)
            for field in gaps:
                if field.padding:
                    # padding gaps are final; carry them over unchanged
                    fields.append(field)
                    continue
                log.debug('Using %s on %d:%d', analyser.__class__.__name__, field.offset, field.offset + len(field))
                new_fields = analyser.make_fields(_record, field.offset, len(field))
                fields.extend(new_fields)
                for f1 in new_fields:
                    log.debug('new_field %s', f1)
                # print fields
            if len(fields) != nb:  # this analyser produced fields: recompute the gaps
                nb = len(fields)
                gaps = self._make_gaps(_record, fields)
            if len(gaps) == 0:
                return fields, gaps
        return fields, gaps

    def _make_gaps(self, _record, fields):
        """Sort fields and create UNKNOWN gap fields for every uncovered byte range."""
        fields.sort()
        gaps = []
        nextoffset = 0
        for i, f in enumerate(fields):
            if f.offset > nextoffset:  # add temp padding field
                self._aligned_gaps(_record, f.offset, nextoffset, gaps)
            elif f.offset < nextoffset:
                log.debug(_record)
                log.debug(f)
                log.debug('%s < %s ' % (f.offset, nextoffset))
                log.debug(fields[i + 1])
                log.error("need to TU the fields gap with utf8 text")
                assert False  # f.offset < nextoffset # No overlaps authorised
            # fields.remove(f)
            # do next field
            nextoffset = f.offset + f.size
        # conclude on QUEUE insertion
        lastfield_size = len(_record) - nextoffset
        if lastfield_size > 0:
            if lastfield_size < self._word_size:
                # sub-word tail: mark it as padding
                gap = fieldtypes.Field('gap_%d' % nextoffset, nextoffset, fieldtypes.UNKNOWN, lastfield_size, True)
                log.debug('_make_gaps: adding last field at offset %d:%d', gap.offset, gap.offset + len(gap))
                gaps.append(gap)
            else:
                self._aligned_gaps(_record, len(_record), nextoffset, gaps)
        return gaps

    def _aligned_gaps(self, _record, endoffset, nextoffset, gaps):
        """ if nextoffset is aligned
        add a gap to gaps, or
        if nextoffset is not aligned
        add (padding + gap) to gaps
        """
        if nextoffset % self._word_size == 0:
            gap = fieldtypes.Field('gap_%d' % nextoffset, nextoffset, fieldtypes.UNKNOWN, endoffset - nextoffset, False)
            log.debug('_make_gaps: adding field at offset %d:%d', gap.offset, gap.offset + len(gap))
            gaps.append(gap)
        else:
            # we need a field of endoffset - nextoffset bytes.
            # unaligned field should be splitted
            size = endoffset - nextoffset
            if size < self._word_size:
                s1 = size
            else:
                s1 = size - size % self._word_size
            gap1 = fieldtypes.Field('gap_%d' % nextoffset, nextoffset, fieldtypes.UNKNOWN, s1, True)
            log.debug('_make_gaps: Unaligned field at offset %d:%d', gap1.offset, gap1.offset + len(gap1))
            gaps.append(gap1)
            if nextoffset + s1 < endoffset:
                # remainder after the unaligned chunk
                _offset = nextoffset + s1
                _size = endoffset - nextoffset - s1
                gap2 = fieldtypes.Field('gap_%d' % _offset, _offset, fieldtypes.UNKNOWN, _size, True)
                log.debug('_make_gaps: adding field at offset %d:%d', gap2.offset, gap2.offset + len(gap2))
                gaps.append(gap2)
        return
#@FieldTypeReverser meaningm that it does not work in FieldInstance, no value query.
class TextFieldCorrection(model.AbstractReverser):
    """
    Second pass on records to fix text fields.
    a) utf16 could be non aligned. We look for small_int+utf16. and aggregate.
    b) terminating null bytes. Due to padding there could be more than 1 byte worth. aggregate.
    c) if record has one null terminated str, Rename record type as cstring. rename/retype parent pointers + comment.
    """
    REVERSE_LEVEL = 11

    def reverse_record(self, _context, _record):
        """Post-process a reversed record's fields.

        Cases (a) and (b) are currently inert (dead branch / pass); only case
        (c) — renaming a (string, zeroes) record to 'string' — changes anything.
        """
        fields = _record.record_type.get_fields()
        if False:  # dead branch kept for reference
            # corrected in non-aligned FieldReverser
            # a) utf16 could be non aligned. We look for small_int+utf16. and aggregate.
            for i, f1 in enumerate(fields[:-1]):
                if f1.field_type is not fieldtypes.SMALLINT:
                    continue
                f2 = fields[i+1]
                if f2.field_type is not fieldtypes.STRING16:
                    continue
        # b) terminating null bytes. Due to padding there could be more than 1 byte worth. aggregate.
        if len(fields) > 1:
            f1, f2 = fields[-2:]
            if f2.is_zeroes() and len(f2) == 4:
                if f1.is_string() and f1.field_type == fieldtypes.STRING16:
                    # FIXME: DO WHAT ? aggregate ? set zerroes as padding ?
                    # set f2 as padding. ???
                    pass
        # c) if record has one null terminated str, Rename record type as cstring.
        # rename/retype parent pointers + comment.
        if len(fields) == 2 and fields[0].is_string() and fields[1].is_zeroes():
            _record.record_type.type_name = 'string'
        return _record
class IntegerArrayFields(model.FieldAnalyser):
    """ TODO

    Unfinished heuristic. NOTE(review): unlike the sibling analysers, this
    make_fields() returns a bool instead of a list of fields, so it cannot be
    plugged into FieldReverser._analyze() as-is.
    """

    def make_fields(self, _record, offset, size):
        """Return True when the range looks like an array of few repeated word values."""
        # this should be last resort
        my_bytes = _record.bytes[offset:offset + size]
        size = len(my_bytes)
        if size < 4:
            return False
        # NOTE(review): the window steps by 1 byte, not by word size, so
        # overlapping slices are counted — confirm whether a step of
        # self._word_size was intended here.
        ctr = collections.Counter([my_bytes[i:i + self._word_size] for i in range(len(my_bytes))])
        floor = max(1, int(size * .1))  # 10 % variation in values
        # commons = [ c for c,nb in ctr.most_common() if nb > 2 ]
        commons = ctr.most_common()
        if len(commons) > floor:
            return False  # too many different values
        # few values. it might be an array
        # FIXME
        # _record.values = my_bytes
        # _record.comment = '10%% var in values: %s' % (','.join([repr(v) for v, nb in commons]))
        return True
| gpl-3.0 |
eduNEXT/edunext-ecommerce | ecommerce/core/tests/test_views.py | 1 | 5846 | """Tests of the service health endpoint."""
from __future__ import absolute_import
import mock
from django.conf import settings
from django.contrib.auth import get_user, get_user_model
from django.db import DatabaseError
from django.test.utils import override_settings
from django.urls import reverse
from rest_framework import status
from social_django.models import UserSocialAuth
from ecommerce.core.constants import Status
from ecommerce.core.views import AutoAuth
from ecommerce.tests.factories import UserFactory
from ecommerce.tests.testcases import TestCase
LOGGER_NAME = 'ecommerce.core.views'
User = get_user_model()
class HealthTests(TestCase):
    """Tests of the health endpoint."""

    def test_all_services_available(self):
        """Test that the endpoint reports when all services are healthy."""
        self._assert_health(status.HTTP_200_OK, Status.OK, Status.OK)

    @mock.patch('newrelic.agent')
    def test_health_check_is_ignored_by_new_relic(self, mock_newrelic_agent):
        """Test that the health endpoint is ignored by NewRelic."""
        self._assert_health(status.HTTP_200_OK, Status.OK, Status.OK)
        self.assertTrue(mock_newrelic_agent.ignore_transaction.called)

    # Simulate a database outage: every cursor() call raises DatabaseError,
    # and site lookup is stubbed out so middleware does not hit the DB first.
    @mock.patch('django.contrib.sites.middleware.get_current_site', mock.Mock(return_value=None))
    @mock.patch('django.db.backends.base.base.BaseDatabaseWrapper.cursor', mock.Mock(side_effect=DatabaseError))
    def test_database_outage(self):
        """Test that the endpoint reports when the database is unavailable."""
        self._assert_health(
            status.HTTP_503_SERVICE_UNAVAILABLE,
            Status.UNAVAILABLE,
            Status.UNAVAILABLE,
        )

    def _assert_health(self, status_code, overall_status, database_status):
        """Hit the health endpoint and verify status code, content type and JSON body."""
        response = self.client.get(reverse('health'))
        self.assertEqual(response.status_code, status_code)
        self.assertEqual(response['content-type'], 'application/json')
        expected_data = {
            'overall_status': overall_status,
            'detailed_status': {
                'database_status': database_status,
            }
        }
        self.assertDictEqual(response.json(), expected_data)
class AutoAuthTests(TestCase):
    """Tests of the auto_auth endpoint, gated on the ENABLE_AUTO_AUTH setting."""

    AUTO_AUTH_PATH = reverse('auto_auth')

    @override_settings(ENABLE_AUTO_AUTH=False)
    def test_setting_disabled(self):
        """When the ENABLE_AUTO_AUTH setting is False, the view should raise a 404."""
        response = self.client.get(self.AUTO_AUTH_PATH)
        self.assertEqual(response.status_code, 404)

    @override_settings(ENABLE_AUTO_AUTH=True)
    def test_setting_enabled(self):
        """
        When ENABLE_AUTO_AUTH is set to True, the view should create and authenticate
        a new User with superuser permissions.
        """
        original_user_count = User.objects.count()
        response = self.client.get(self.AUTO_AUTH_PATH)

        # Verify that a redirect has occurred and that a new user has been created
        self.assertEqual(response.status_code, 302)
        self.assertEqual(User.objects.count(), original_user_count + 1)

        # Get the latest user
        user = User.objects.latest()

        # Verify that the user is logged in and that their username has the expected prefix
        self.assertEqual(int(self.client.session['_auth_user_id']), user.pk)
        self.assertTrue(user.username.startswith(settings.AUTO_AUTH_USERNAME_PREFIX))

        # Verify that the user has superuser permissions
        self.assertTrue(user.is_superuser)

        # Verify that the user has an LMS user id
        self.assertIsNotNone(user.lms_user_id)
        self.assertEqual(AutoAuth.lms_user_id, user.lms_user_id)
class LogoutViewTests(TestCase):
    """Exercises the logout view: session teardown, redirects and headers.

    Taken from https://github.com/edx/auth-backends/blob/master/auth_backends/tests/mixins.py
    """

    PASSWORD = 'test'

    def _create_user(self):
        """Create a user with an associated edX OAuth2 social-auth record."""
        new_user = UserFactory(username='test', password=self.PASSWORD)
        UserSocialAuth.objects.create(user=new_user, provider='edx-oauth2', uid=new_user.username)
        return new_user

    def get_logout_url(self):
        """Return the URL of the logout view."""
        return reverse('logout')

    def get_redirect_url(self):
        """Return the OAuth2 logout URL configured for the current site."""
        return self.site.siteconfiguration.oauth_settings['SOCIAL_AUTH_EDX_OAUTH2_LOGOUT_URL']

    def assert_authentication_status(self, is_authenticated):
        """Assert whether the test client's current user is authenticated."""
        client_user = get_user(self.client)
        self.assertEqual(client_user.is_authenticated, is_authenticated)

    def test_x_frame_options_header(self):
        """Verify no X-Frame-Options header is set in the response."""
        self.assertNotIn('X-Frame-Options', self.client.get(self.get_logout_url()))

    def test_logout(self):
        """Verify the user is logged out of the current session and redirected to the appropriate URL."""
        # Begin from a clean, unauthenticated session.
        self.client.logout()
        self.assert_authentication_status(False)

        account = self._create_user()
        self.client.login(username=account.username, password=self.PASSWORD)
        self.assert_authentication_status(True)

        response = self.client.get('{url}?{qs}'.format(url=self.get_logout_url(), qs='next=/test/'))
        self.assert_authentication_status(False)
        # NOTE: The querystring parameters SHOULD be ignored
        self.assertRedirects(response, self.get_redirect_url(), fetch_redirect_response=False)

    def test_no_redirect(self):
        """Verify the view does not redirect if the no_redirect querystring parameter is set."""
        response = self.client.get(self.get_logout_url(), {'no_redirect': 1})
        self.assertEqual(response.status_code, 200)
| agpl-3.0 |
Huskerboy/startbootstrap-freelancer | freelancer_env/Lib/encodings/koi8_t.py | 101 | 13193 | """ Python Character Mapping Codec koi8_t
"""
# http://ru.wikipedia.org/wiki/КОИ-8
# http://www.opensource.apple.com/source/libiconv/libiconv-4/libiconv/tests/KOI8-T.TXT
import codecs
### Codec APIs
class Codec(codecs.Codec):
    """Stateless KOI8-T codec delegating to the C-level charmap machinery."""

    def encode(self, input, errors='strict'):
        # Map each character through the module-level encoding table.
        return codecs.charmap_encode(input, errors, encoding_table)

    def decode(self, input, errors='strict'):
        # Map each byte through the module-level decoding table.
        return codecs.charmap_decode(input, errors, decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
    # Charmap encoding is stateless, so each chunk is encoded independently
    # and `final` is irrelevant.
    def encode(self, input, final=False):
        return codecs.charmap_encode(input, self.errors, encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
    # Charmap decoding is stateless: one byte always maps to one character,
    # so chunks can be decoded independently and `final` is irrelevant.
    def decode(self, input, final=False):
        return codecs.charmap_decode(input, self.errors, decoding_table)[0]
class StreamWriter(Codec, codecs.StreamWriter):
    # Inherits encode() from Codec; no stream-specific state is needed.
    pass
class StreamReader(Codec, codecs.StreamReader):
    # Inherits decode() from Codec; no stream-specific state is needed.
    pass
### encodings module API
def getregentry():
    """Return the CodecInfo entry the codecs registry uses for 'koi8-t'."""
    return codecs.CodecInfo(
        name='koi8-t',
        encode=Codec().encode,
        decode=Codec().decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u049b' # 0x80 -> CYRILLIC SMALL LETTER KA WITH DESCENDER
'\u0493' # 0x81 -> CYRILLIC SMALL LETTER GHE WITH STROKE
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0492' # 0x83 -> CYRILLIC CAPITAL LETTER GHE WITH STROKE
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\ufffe' # 0x88 -> UNDEFINED
'\u2030' # 0x89 -> PER MILLE SIGN
'\u04b3' # 0x8A -> CYRILLIC SMALL LETTER HA WITH DESCENDER
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u04b2' # 0x8C -> CYRILLIC CAPITAL LETTER HA WITH DESCENDER
'\u04b7' # 0x8D -> CYRILLIC SMALL LETTER CHE WITH DESCENDER
'\u04b6' # 0x8E -> CYRILLIC CAPITAL LETTER CHE WITH DESCENDER
'\ufffe' # 0x8F -> UNDEFINED
'\u049a' # 0x90 -> CYRILLIC CAPITAL LETTER KA WITH DESCENDER
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\ufffe' # 0x98 -> UNDEFINED
'\u2122' # 0x99 -> TRADE MARK SIGN
'\ufffe' # 0x9A -> UNDEFINED
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\ufffe' # 0x9C -> UNDEFINED
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\ufffe' # 0x9F -> UNDEFINED
'\ufffe' # 0xA0 -> UNDEFINED
'\u04ef' # 0xA1 -> CYRILLIC SMALL LETTER U WITH MACRON
'\u04ee' # 0xA2 -> CYRILLIC CAPITAL LETTER U WITH MACRON
'\u0451' # 0xA3 -> CYRILLIC SMALL LETTER IO
'\xa4' # 0xA4 -> CURRENCY SIGN
'\u04e3' # 0xA5 -> CYRILLIC SMALL LETTER I WITH MACRON
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\ufffe' # 0xA8 -> UNDEFINED
'\ufffe' # 0xA9 -> UNDEFINED
'\ufffe' # 0xAA -> UNDEFINED
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\ufffe' # 0xAF -> UNDEFINED
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\u0401' # 0xB3 -> CYRILLIC CAPITAL LETTER IO
'\ufffe' # 0xB4 -> UNDEFINED
'\u04e2' # 0xB5 -> CYRILLIC CAPITAL LETTER I WITH MACRON
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\ufffe' # 0xB8 -> UNDEFINED
'\u2116' # 0xB9 -> NUMERO SIGN
'\ufffe' # 0xBA -> UNDEFINED
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\ufffe' # 0xBC -> UNDEFINED
'\ufffe' # 0xBD -> UNDEFINED
'\ufffe' # 0xBE -> UNDEFINED
'\xa9' # 0xBF -> COPYRIGHT SIGN
'\u044e' # 0xC0 -> CYRILLIC SMALL LETTER YU
'\u0430' # 0xC1 -> CYRILLIC SMALL LETTER A
'\u0431' # 0xC2 -> CYRILLIC SMALL LETTER BE
'\u0446' # 0xC3 -> CYRILLIC SMALL LETTER TSE
'\u0434' # 0xC4 -> CYRILLIC SMALL LETTER DE
'\u0435' # 0xC5 -> CYRILLIC SMALL LETTER IE
'\u0444' # 0xC6 -> CYRILLIC SMALL LETTER EF
'\u0433' # 0xC7 -> CYRILLIC SMALL LETTER GHE
'\u0445' # 0xC8 -> CYRILLIC SMALL LETTER HA
'\u0438' # 0xC9 -> CYRILLIC SMALL LETTER I
'\u0439' # 0xCA -> CYRILLIC SMALL LETTER SHORT I
'\u043a' # 0xCB -> CYRILLIC SMALL LETTER KA
'\u043b' # 0xCC -> CYRILLIC SMALL LETTER EL
'\u043c' # 0xCD -> CYRILLIC SMALL LETTER EM
'\u043d' # 0xCE -> CYRILLIC SMALL LETTER EN
'\u043e' # 0xCF -> CYRILLIC SMALL LETTER O
'\u043f' # 0xD0 -> CYRILLIC SMALL LETTER PE
'\u044f' # 0xD1 -> CYRILLIC SMALL LETTER YA
'\u0440' # 0xD2 -> CYRILLIC SMALL LETTER ER
'\u0441' # 0xD3 -> CYRILLIC SMALL LETTER ES
'\u0442' # 0xD4 -> CYRILLIC SMALL LETTER TE
'\u0443' # 0xD5 -> CYRILLIC SMALL LETTER U
'\u0436' # 0xD6 -> CYRILLIC SMALL LETTER ZHE
'\u0432' # 0xD7 -> CYRILLIC SMALL LETTER VE
'\u044c' # 0xD8 -> CYRILLIC SMALL LETTER SOFT SIGN
'\u044b' # 0xD9 -> CYRILLIC SMALL LETTER YERU
'\u0437' # 0xDA -> CYRILLIC SMALL LETTER ZE
'\u0448' # 0xDB -> CYRILLIC SMALL LETTER SHA
'\u044d' # 0xDC -> CYRILLIC SMALL LETTER E
'\u0449' # 0xDD -> CYRILLIC SMALL LETTER SHCHA
'\u0447' # 0xDE -> CYRILLIC SMALL LETTER CHE
'\u044a' # 0xDF -> CYRILLIC SMALL LETTER HARD SIGN
'\u042e' # 0xE0 -> CYRILLIC CAPITAL LETTER YU
'\u0410' # 0xE1 -> CYRILLIC CAPITAL LETTER A
'\u0411' # 0xE2 -> CYRILLIC CAPITAL LETTER BE
'\u0426' # 0xE3 -> CYRILLIC CAPITAL LETTER TSE
'\u0414' # 0xE4 -> CYRILLIC CAPITAL LETTER DE
'\u0415' # 0xE5 -> CYRILLIC CAPITAL LETTER IE
'\u0424' # 0xE6 -> CYRILLIC CAPITAL LETTER EF
'\u0413' # 0xE7 -> CYRILLIC CAPITAL LETTER GHE
'\u0425' # 0xE8 -> CYRILLIC CAPITAL LETTER HA
'\u0418' # 0xE9 -> CYRILLIC CAPITAL LETTER I
'\u0419' # 0xEA -> CYRILLIC CAPITAL LETTER SHORT I
'\u041a' # 0xEB -> CYRILLIC CAPITAL LETTER KA
'\u041b' # 0xEC -> CYRILLIC CAPITAL LETTER EL
'\u041c' # 0xED -> CYRILLIC CAPITAL LETTER EM
'\u041d' # 0xEE -> CYRILLIC CAPITAL LETTER EN
'\u041e' # 0xEF -> CYRILLIC CAPITAL LETTER O
'\u041f' # 0xF0 -> CYRILLIC CAPITAL LETTER PE
'\u042f' # 0xF1 -> CYRILLIC CAPITAL LETTER YA
'\u0420' # 0xF2 -> CYRILLIC CAPITAL LETTER ER
'\u0421' # 0xF3 -> CYRILLIC CAPITAL LETTER ES
'\u0422' # 0xF4 -> CYRILLIC CAPITAL LETTER TE
'\u0423' # 0xF5 -> CYRILLIC CAPITAL LETTER U
'\u0416' # 0xF6 -> CYRILLIC CAPITAL LETTER ZHE
'\u0412' # 0xF7 -> CYRILLIC CAPITAL LETTER VE
'\u042c' # 0xF8 -> CYRILLIC CAPITAL LETTER SOFT SIGN
'\u042b' # 0xF9 -> CYRILLIC CAPITAL LETTER YERU
'\u0417' # 0xFA -> CYRILLIC CAPITAL LETTER ZE
'\u0428' # 0xFB -> CYRILLIC CAPITAL LETTER SHA
'\u042d' # 0xFC -> CYRILLIC CAPITAL LETTER E
'\u0429' # 0xFD -> CYRILLIC CAPITAL LETTER SHCHA
'\u0427' # 0xFE -> CYRILLIC CAPITAL LETTER CHE
'\u042a' # 0xFF -> CYRILLIC CAPITAL LETTER HARD SIGN
)
### Encoding table
# Inverse mapping (character -> byte) derived from decoding_table at import time.
encoding_table = codecs.charmap_build(decoding_table)
| mit |
yufengg/tensorflow | tensorflow/python/platform/flags_test.py | 79 | 3046 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for our flags implementation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
import unittest
from tensorflow.python.platform import app
from tensorflow.python.platform import flags
# Define one flag of each supported type; the command line assembled under
# __main__ overrides several of them before the tests run.
flags.DEFINE_string("string_foo", "default_val", "HelpString")
flags.DEFINE_integer("int_foo", 42, "HelpString")
flags.DEFINE_float("float_foo", 42.0, "HelpString")
flags.DEFINE_boolean("bool_foo", True, "HelpString")
flags.DEFINE_boolean("bool_negation", True, "HelpString")
flags.DEFINE_boolean("bool-dash-negation", True, "HelpString")
flags.DEFINE_boolean("bool_a", False, "HelpString")
flags.DEFINE_boolean("bool_c", False, "HelpString")
flags.DEFINE_boolean("bool_d", True, "HelpString")
flags.DEFINE_bool("bool_e", True, "HelpString")

FLAGS = flags.FLAGS
class FlagsTest(unittest.TestCase):
    """Tests for the TensorFlow flags wrapper: defaults, reassignment, and
    command-line parsing of string/bool/int/float flags."""

    def testString(self):
        """String flags expose their default and accept reassignment."""
        res = FLAGS.string_foo
        self.assertEqual(res, "default_val")
        FLAGS.string_foo = "bar"
        self.assertEqual("bar", FLAGS.string_foo)

    def testBool(self):
        """Boolean flags expose their default and accept reassignment."""
        res = FLAGS.bool_foo
        self.assertTrue(res)
        FLAGS.bool_foo = False
        self.assertFalse(FLAGS.bool_foo)

    def testBoolCommandLines(self):
        """Boolean flags honor the command line assembled under __main__."""
        # Specified on command line with no args, sets to True,
        # even if default is False.
        self.assertEqual(True, FLAGS.bool_a)

        # --no before the flag forces it to False, even if the
        # default is True
        self.assertEqual(False, FLAGS.bool_negation)

        # --bool_flag=True sets to True
        self.assertEqual(True, FLAGS.bool_c)

        # --bool_flag=False sets to False
        self.assertEqual(False, FLAGS.bool_d)

    def testInt(self):
        """Integer flags expose their default and accept reassignment."""
        res = FLAGS.int_foo
        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
        self.assertEqual(res, 42)
        FLAGS.int_foo = -1
        self.assertEqual(-1, FLAGS.int_foo)

    def testFloat(self):
        """Float flags expose their default and accept reassignment."""
        res = FLAGS.float_foo
        # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
        self.assertEqual(42.0, res)
        FLAGS.float_foo = -1.0
        self.assertEqual(-1.0, FLAGS.float_foo)
def main(_):
    """Run FlagsTest directly.

    unittest.main() tries to interpret the unknown flags, so use the
    direct runner/loader functions instead.
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(FlagsTest)
    unittest.TextTestRunner().run(suite)
if __name__ == "__main__":
    # Test command lines: injected into sys.argv before app.run() parses flags,
    # so testBoolCommandLines sees the expected overrides.
    sys.argv.extend([
        "--bool_a", "--nobool_negation", "--bool_c=True", "--bool_d=False",
        "and_argument"
    ])
    app.run()
| apache-2.0 |
UnrememberMe/pants | tests/python/pants_test/util/test_osutil.py | 1 | 1440 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import logging
from pants.util.osutil import OS_ALIASES, known_os_names, normalize_os_name
from pants_test.base_test import BaseTest
class OsutilTest(BaseTest):
    """Tests for OS-name normalization helpers in pants.util.osutil."""

    def test_alias_normalization(self):
        """Every declared alias must normalize to its canonical OS name."""
        for normal_os, aliases in OS_ALIASES.items():
            for alias in aliases:
                self.assertEqual(normal_os, normalize_os_name(alias))

    def test_keys_in_aliases(self):
        """Canonical names themselves must be part of the known-name set."""
        for key in OS_ALIASES.keys():
            self.assertIn(key, known_os_names())

    def test_no_warnings_on_known_names(self):
        """Normalizing a known name must not log any warning."""
        for name in known_os_names():
            with self.captured_logging(logging.WARNING) as captured:
                normalize_os_name(name)
                # Fixed typo in the failure message: 'Recieved' -> 'Received'.
                self.assertEqual(0, len(captured.warnings()),
                                 'Received unexpected warnings: {}'.format(captured.warnings()))

    def test_warnings_on_unknown_names(self):
        """Normalizing an unknown name must log exactly one warning."""
        name = 'I really hope no one ever names an operating system with this string.'
        with self.captured_logging(logging.WARNING) as captured:
            normalize_os_name(name)
            self.assertEqual(1, len(captured.warnings()),
                             'Expected exactly one warning, but got: {}'.format(captured.warnings()))
| apache-2.0 |
dydek/django | tests/null_fk/tests.py | 352 | 2982 | from __future__ import unicode_literals
from django.db.models import Q
from django.test import TestCase
from .models import (
Comment, Forum, Item, Post, PropertyValue, SystemDetails, SystemInfo,
)
class NullFkTests(TestCase):
    """Regression tests for traversing nullable foreign keys."""

    def test_null_fk(self):
        """select_related() must LEFT JOIN through NULL FKs (refs #7369, #7530, #7716)."""
        d = SystemDetails.objects.create(details='First details')
        s = SystemInfo.objects.create(system_name='First forum', system_details=d)
        f = Forum.objects.create(system_info=s, forum_name='First forum')
        p = Post.objects.create(forum=f, title='First Post')
        c1 = Comment.objects.create(post=p, comment_text='My first comment')
        c2 = Comment.objects.create(comment_text='My second comment')

        # Starting from comment, make sure that a .select_related(...) with a specified
        # set of fields will properly LEFT JOIN multiple levels of NULLs (and the things
        # that come after the NULLs, or else data that should exist won't). Regression
        # test for #7369.
        c = Comment.objects.select_related().get(id=c1.id)
        self.assertEqual(c.post, p)
        # assertIsNone gives a clearer failure message than assertEqual(..., None)
        # and matches the idiom already used below.
        self.assertIsNone(Comment.objects.select_related().get(id=c2.id).post)

        self.assertQuerysetEqual(
            Comment.objects.select_related('post__forum__system_info').all(),
            [
                (c1.id, 'My first comment', '<Post: First Post>'),
                (c2.id, 'My second comment', 'None')
            ],
            transform=lambda c: (c.id, c.comment_text, repr(c.post))
        )

        # Regression test for #7530, #7716.
        self.assertIsNone(Comment.objects.select_related('post').filter(post__isnull=True)[0].post)

        self.assertQuerysetEqual(
            Comment.objects.select_related('post__forum__system_info__system_details'),
            [
                (c1.id, 'My first comment', '<Post: First Post>'),
                (c2.id, 'My second comment', 'None')
            ],
            transform=lambda c: (c.id, c.comment_text, repr(c.post))
        )

    def test_combine_isnull(self):
        """Combining Q objects across a nullable relation (refs #15823)."""
        item = Item.objects.create(title='Some Item')
        pv = PropertyValue.objects.create(label='Some Value')
        item.props.create(key='a', value=pv)
        item.props.create(key='b')  # value=NULL
        q1 = Q(props__key='a', props__value=pv)
        q2 = Q(props__key='b', props__value__isnull=True)

        # Each of these individually should return the item.
        self.assertEqual(Item.objects.get(q1), item)
        self.assertEqual(Item.objects.get(q2), item)

        # Logically, qs1 and qs2, and qs3 and qs4 should be the same.
        qs1 = Item.objects.filter(q1) & Item.objects.filter(q2)
        qs2 = Item.objects.filter(q2) & Item.objects.filter(q1)
        qs3 = Item.objects.filter(q1) | Item.objects.filter(q2)
        qs4 = Item.objects.filter(q2) | Item.objects.filter(q1)

        # Regression test for #15823.
        self.assertEqual(list(qs1), list(qs2))
        self.assertEqual(list(qs3), list(qs4))
| bsd-3-clause |
wbinventor/openmc | openmc/summary.py | 1 | 7606 | from collections.abc import Iterable
import re
import warnings
import numpy as np
import h5py
import openmc
import openmc.checkvalue as cv
from openmc.region import Region
_VERSION_SUMMARY = 6
class Summary(object):
    """Summary of geometry, materials, and tallies used in a simulation.

    Attributes
    ----------
    date_and_time : str
        Date and time when simulation began
    geometry : openmc.Geometry
        The geometry reconstructed from the summary file
    materials : openmc.Materials
        The materials reconstructed from the summary file
    nuclides : dict
        Dictionary whose keys are nuclide names and values are atomic weight
        ratios.
    macroscopics : list
        Names of macroscopic data sets
    version: tuple of int
        Version of OpenMC

    """

    def __init__(self, filename):
        if not filename.endswith(('.h5', '.hdf5')):
            msg = 'Unable to open "{0}" which is not an HDF5 summary file'
            # Bug fix: the placeholder was never substituted, so the error
            # previously contained a literal "{0}" instead of the filename.
            raise ValueError(msg.format(filename))

        self._f = h5py.File(filename, 'r')
        cv.check_filetype_version(self._f, 'summary', _VERSION_SUMMARY)

        self._geometry = openmc.Geometry()
        # The _fast_* dicts index reconstructed objects by integer ID so the
        # linking pass in _finalize_geometry can do O(1) lookups.
        self._fast_materials = {}
        self._fast_surfaces = {}
        self._fast_cells = {}
        self._fast_universes = {}
        self._fast_lattices = {}

        self._materials = openmc.Materials()
        self._nuclides = {}
        self._macroscopics = []

        self._read_nuclides()
        self._read_macroscopics()
        with warnings.catch_warnings():
            # Reconstructed objects reuse IDs already present in the file.
            warnings.simplefilter("ignore", openmc.IDWarning)
            self._read_geometry()

    @property
    def date_and_time(self):
        return self._f.attrs['date_and_time'].decode()

    @property
    def geometry(self):
        return self._geometry

    @property
    def materials(self):
        return self._materials

    @property
    def nuclides(self):
        return self._nuclides

    @property
    def macroscopics(self):
        return self._macroscopics

    @property
    def version(self):
        return tuple(self._f.attrs['openmc_version'])

    def _read_nuclides(self):
        """Populate the name -> atomic weight ratio mapping, if present."""
        if 'nuclides/names' in self._f:
            names = self._f['nuclides/names'][()]
            awrs = self._f['nuclides/awrs'][()]
            for name, awr in zip(names, awrs):
                self._nuclides[name.decode()] = awr

    def _read_macroscopics(self):
        """Collect the names of macroscopic data sets, if present."""
        if 'macroscopics/names' in self._f:
            names = self._f['macroscopics/names'][()]
            for name in names:
                # Bug fix: this used to *assign* (self._macroscopics = ...),
                # replacing the list with a plain string on every iteration
                # so only the last name survived. Append instead.
                self._macroscopics.append(name.decode())

    def _read_geometry(self):
        # DAGMC geometries carry no CSG description to reconstruct.
        if "dagmc" in self._f['geometry'].attrs.keys():
            return
        # Read in and initialize the Materials and Geometry
        self._read_materials()
        self._read_surfaces()
        cell_fills = self._read_cells()
        self._read_universes()
        self._read_lattices()
        self._finalize_geometry(cell_fills)

    def _read_materials(self):
        for group in self._f['materials'].values():
            material = openmc.Material.from_hdf5(group)

            # Add the material to the Materials collection
            self.materials.append(material)

            # Store in the dictionary of materials for fast queries
            self._fast_materials[material.id] = material

    def _read_surfaces(self):
        for group in self._f['geometry/surfaces'].values():
            surface = openmc.Surface.from_hdf5(group)
            self._fast_surfaces[surface.id] = surface

    def _read_cells(self):
        """Create Cell objects; return a dict of each cell's fill spec.

        Fills cannot be resolved yet because universes/lattices are read
        afterwards, so {cell_id: (fill_type, fill_id)} is returned for
        _finalize_geometry to link up.
        """
        # Initialize dictionary for each Cell's fill
        cell_fills = {}

        for key, group in self._f['geometry/cells'].items():
            cell_id = int(key.lstrip('cell '))
            name = group['name'][()].decode() if 'name' in group else ''
            fill_type = group['fill_type'][()].decode()

            if fill_type == 'material':
                fill = group['material'][()]
            elif fill_type == 'universe':
                fill = group['fill'][()]
            else:
                fill = group['lattice'][()]

            region = group['region'][()].decode() if 'region' in group else ''

            # Create this Cell
            cell = openmc.Cell(cell_id=cell_id, name=name)

            if fill_type == 'universe':
                if 'translation' in group:
                    translation = group['translation'][()]
                    translation = np.asarray(translation, dtype=np.float64)
                    cell.translation = translation

                if 'rotation' in group:
                    rotation = group['rotation'][()]
                    # np.int is a removed alias for the builtin int.
                    rotation = np.asarray(rotation, dtype=int)
                    # NOTE: assigns the private attribute, bypassing any
                    # validation the rotation property setter would do.
                    cell._rotation = rotation

            elif fill_type == 'material':
                cell.temperature = group['temperature'][()]

            # Store Cell fill information for after Universe/Lattice creation
            cell_fills[cell.id] = (fill_type, fill)

            # Generate Region object given infix expression
            if region:
                cell.region = Region.from_expression(region, self._fast_surfaces)

            # Add the Cell to the global dictionary of all Cells
            self._fast_cells[cell.id] = cell

        return cell_fills

    def _read_universes(self):
        for group in self._f['geometry/universes'].values():
            universe = openmc.Universe.from_hdf5(group, self._fast_cells)
            self._fast_universes[universe.id] = universe

    def _read_lattices(self):
        for group in self._f['geometry/lattices'].values():
            lattice = openmc.Lattice.from_hdf5(group, self._fast_universes)
            self._fast_lattices[lattice.id] = lattice

    def _finalize_geometry(self, cell_fills):
        """Resolve cell fills and identify the root universe."""
        # Keep track of universes that are used as fills. That way, we can
        # determine which universe is NOT used as a fill (and hence is the root
        # universe)
        fill_univ_ids = set()

        # Iterate over all Cells and add fill Materials, Universes and Lattices
        for cell_id, (fill_type, fill_id) in cell_fills.items():
            # Retrieve the object corresponding to the fill type and ID
            if fill_type == 'material':
                if isinstance(fill_id, Iterable):
                    # A material ID <= 0 denotes a void region.
                    fill = [self._fast_materials[mat] if mat > 0 else None
                            for mat in fill_id]
                else:
                    fill = self._fast_materials[fill_id] if fill_id > 0 else None
            elif fill_type == 'universe':
                fill = self._fast_universes[fill_id]
                fill_univ_ids.add(fill_id)
            else:
                fill = self._fast_lattices[fill_id]
                for idx in fill._natural_indices:
                    univ = fill.get_universe(idx)
                    fill_univ_ids.add(univ.id)
                if fill.outer is not None:
                    fill_univ_ids.add(fill.outer.id)

            # Set the fill for the Cell
            self._fast_cells[cell_id].fill = fill

        # Determine root universe for geometry: the one never used as a fill.
        non_fill = set(self._fast_universes.keys()) - fill_univ_ids
        self.geometry.root_universe = self._fast_universes[non_fill.pop()]

    def add_volume_information(self, volume_calc):
        """Add volume information to the geometry within the summary file

        Parameters
        ----------
        volume_calc : openmc.VolumeCalculation
            Results from a stochastic volume calculation

        """
        self.geometry.add_volume_information(volume_calc)
| mit |
spidercensus/py-junos-eznc | setup.py | 3 | 1830 | from setuptools import setup, find_packages
import sys
# Parse requirements: one requirement per line, blank lines dropped.
# Use a context manager so the file handle is closed deterministically
# (the old bare open() leaked it).
with open('requirements.txt') as req_file:
    req_lines = [line.strip() for line in req_file]
install_reqs = list(filter(None, req_lines))

if sys.version_info[:2] == (2, 6):
    # importlib only joined the stdlib in Python 2.7.
    install_reqs.append('importlib>=1.0.3')
# Package metadata for junos-eznc; install_reqs is built above from
# requirements.txt.
setup(
    name="junos-eznc",
    namespace_packages=['jnpr'],
    version="2.0.2-dev",
    author="Jeremy Schulman, Nitin Kumar",
    author_email="jnpr-community-netdev@juniper.net",
    description=("Junos 'EZ' automation for non-programmers"),
    license="Apache 2.0",
    keywords="Junos NETCONF networking automation",
    url="http://www.github.com/Juniper/py-junos-eznc",
    # The importable packages live under lib/, not the repo root.
    package_dir={'': 'lib'},
    packages=find_packages('lib'),
    # Ship the YAML table/view definitions alongside the code.
    package_data={
        'jnpr.junos.op': ['*.yml'],
        'jnpr.junos.cfgro': ['*.yml'],
        'jnpr.junos.resources': ['*.yml']
    },
    install_requires=install_reqs,
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'Intended Audience :: Information Technology',
        'Intended Audience :: System Administrators',
        'Intended Audience :: Telecommunications Industry',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.4',
        'Topic :: Software Development :: Libraries',
        'Topic :: Software Development :: Libraries :: Application Frameworks',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Networking',
        'Topic :: Text Processing :: Markup :: XML'
    ],
)
| apache-2.0 |
glatard/nipype | nipype/interfaces/spm/base.py | 9 | 18367 | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""The spm module provides basic functions for interfacing with SPM tools.
In order to use the standalone MCR version of spm, you need to ensure that
the following commands are executed at the beginning of your script::
from nipype.interfaces import spm
matlab_cmd = '/path/to/run_spm8.sh /path/to/Compiler_Runtime/v713/ script'
spm.SPMCommand.set_mlab_paths(matlab_cmd=matlab_cmd, use_mcr=True)
you can test by calling::
spm.SPMCommand().version
"""
__docformat__ = 'restructuredtext'
# Standard library imports
import os
from copy import deepcopy
# Third-party imports
from nibabel import load
import numpy as np
from scipy.io import savemat
from nipype.external import six
# Local imports
from ..base import (BaseInterface, traits, isdefined, InputMultiPath,
BaseInterfaceInputSpec, Directory, Undefined)
from ..matlab import MatlabCommand
from ...utils import spm_docs as sd
from ... import logging
logger = logging.getLogger('interface')
def func_is_3d(in_file):
    """Return True when the functional image (or the first of a list) is 3D.

    A 4D image with a single volume in the fourth dimension also counts
    as 3D.
    """
    if isinstance(in_file, list):
        # Lists are judged by their first entry.
        return func_is_3d(in_file[0])
    shape = load(in_file).get_shape()
    return len(shape) == 3 or (len(shape) == 4 and shape[3] == 1)
def get_first_3dfile(in_files):
    """Return the first session's list of 3D files, or None if input is 4D.

    If ``in_files`` is a list of lists (one list per session), the first
    inner list is returned; otherwise ``in_files`` itself is returned.
    """
    if not func_is_3d(in_files):
        return None
    if isinstance(in_files[0], list):
        return in_files[0]
    return in_files
def scans_for_fname(fname):
    """Expand a NIfTI filename into a numpy object array of SPM scan specs.

    Each entry has the form ``'<file>,<volume>'``. A list input yields one
    ``'<file>,1'`` entry per file; a 3D file yields a single entry; a 4D
    file yields one entry per volume.

    Opens images so will fail if they are not found.
    """
    if isinstance(fname, list):
        entries = ['%s,1' % f for f in fname]
        return np.array(entries, dtype=object)

    img = load(fname)
    if len(img.get_shape()) == 3:
        return np.array(('%s,1' % fname,), dtype=object)

    # 4D image: one spec per volume, 1-based as SPM expects.
    n_scans = img.get_shape()[3]
    entries = ['%s,%d' % (fname, vol + 1) for vol in range(n_scans)]
    return np.array(entries, dtype=object)
def scans_for_fnames(fnames, keep4d=False, separate_sessions=False):
    """Converts a list of files to a concatenated numpy array for each
    volume.

    keep4d : boolean
        keeps the entries of the numpy array as 4d files instead of
        extracting the individual volumes.
    separate_sessions: boolean
        if 4d nifti files are being used, then separate_sessions
        ensures a cell array per session is created in the structure.
    """
    flist = None
    if not isinstance(fnames[0], list):
        # Flat list of 3D files: treat the whole list as a single session.
        if func_is_3d(fnames[0]):
            fnames = [fnames]
    if separate_sessions or keep4d:
        # One object-array slot per session (or per 4D file).
        flist = np.zeros((len(fnames),), dtype=object)
    for i, f in enumerate(fnames):
        if separate_sessions:
            if keep4d:
                if isinstance(f, list):
                    flist[i] = np.array(f, dtype=object)
                else:
                    flist[i] = np.array([f], dtype=object)
            else:
                flist[i] = scans_for_fname(f)
        else:
            if keep4d:
                flist[i] = f
            else:
                # No session separation: concatenate every session's
                # volume specs into one flat array.
                scans = scans_for_fname(f)
                if flist is None:
                    flist = scans
                else:
                    flist = np.concatenate((flist, scans))
    return flist
class Info(object):
    """Handles SPM version information
    """
    @staticmethod
    def version(matlab_cmd=None, paths=None, use_mcr=None):
        """Returns the path to the SPM directory in the Matlab path
        If path not found, returns None.

        Parameters
        ----------
        matlab_cmd: str
            Sets the default matlab command. If None, the value of the
            environment variable SPMMCRCMD will be used if set and use_mcr
            is True or the environment variable FORCE_SPMMCR is set.
            If one of FORCE_SPMMCR or SPMMCRCMD is not set, the existence
            of the environment variable MATLABCMD is checked and its value
            is used as the matlab command if possible.
            If none of the above was successful, the fallback value of
            'matlab -nodesktop -nosplash' will be used.
        paths : str
        use_mcr : bool

        Returns
        -------
        spm_path : string representing path to SPM directory

            returns None of path not found
        """
        # FORCE_SPMMCR forces the standalone (compiled) SPM runtime.
        if use_mcr or 'FORCE_SPMMCR' in os.environ:
            use_mcr = True
            if matlab_cmd is None:
                try:
                    matlab_cmd = os.environ['SPMMCRCMD']
                except KeyError:
                    pass
        if matlab_cmd is None:
            try:
                matlab_cmd = os.environ['MATLABCMD']
            except KeyError:
                matlab_cmd = 'matlab -nodesktop -nosplash'
        mlab = MatlabCommand(matlab_cmd=matlab_cmd)
        mlab.inputs.mfile = False
        if paths:
            mlab.inputs.paths = paths
        if use_mcr:
            # The MCR runtime does not accept interactive-session flags,
            # so clear them and force m-file execution.
            mlab.inputs.nodesktop = Undefined
            mlab.inputs.nosplash = Undefined
            mlab.inputs.single_comp_thread = Undefined
            mlab.inputs.mfile = True
            mlab.inputs.uses_mcr = True
        # Probe script: prints 'NIPYPE path:...|name:...|release:...'
        # which is parsed below.
        mlab.inputs.script = """
if isempty(which('spm')),
throw(MException('SPMCheck:NotFound','SPM not in matlab path'));
end;
spm_path = spm('dir');
[name, version] = spm('ver');
fprintf(1, 'NIPYPE path:%s|name:%s|release:%s', spm_path, name, version);
exit;
        """
        try:
            out = mlab.run()
        except (IOError, RuntimeError), e:
            # if no Matlab at all -- exception could be raised
            # No Matlab -- no spm
            logger.debug(str(e))
            return None
        else:
            out = sd._strip_header(out.runtime.stdout)
        # Parse the 'key:value|key:value|...' probe output into a dict.
        out_dict = {}
        for part in out.split('|'):
            key, val = part.split(':')
            out_dict[key] = val
        return out_dict
def no_spm():
    """Return True when SPM is NOT installed (or Matlab is disabled).

    Used with nosetests skipif to skip tests that will fail if spm is
    not installed.
    """
    # Return the boolean expression directly instead of the redundant
    # if/else returning True/False literals.
    return Info.version() is None or 'NIPYPE_NO_MATLAB' in os.environ
class SPMCommandInputSpec(BaseInterfaceInputSpec):
    # Inputs common to every SPM-based interface.
    matlab_cmd = traits.Str(desc='matlab command to use')
    paths = InputMultiPath(Directory(), desc='Paths to add to matlabpath')
    # Default to generating an .m file rather than piping code to matlab.
    mfile = traits.Bool(True, desc='Run m-code using m-file',
                        usedefault=True)
    use_mcr = traits.Bool(desc='Run m-code using SPM MCR')
    # min_ver='8': the v8 job struct layout only exists from SPM8 onward.
    use_v8struct = traits.Bool(True, min_ver='8', usedefault=True,
                               desc=('Generate SPM8 and higher compatible jobs')
                               )
class SPMCommand(BaseInterface):
    """Extends `BaseInterface` class to implement SPM specific interfaces.

    WARNING: Pseudo prototype class, meant to be subclassed
    """
    input_spec = SPMCommandInputSpec
    _additional_metadata = ['field']

    # Subclasses override these with the SPM batch job type/name.
    _jobtype = 'basetype'
    _jobname = 'basename'

    # Class-level defaults shared by all instances; set via set_mlab_paths().
    _matlab_cmd = None
    _paths = None
    _use_mcr = None

    def __init__(self, **inputs):
        super(SPMCommand, self).__init__(**inputs)
        # Rebuild the MatlabCommand whenever one of these inputs changes.
        self.inputs.on_trait_change(self._matlab_cmd_update, ['matlab_cmd',
                                                              'mfile',
                                                              'paths',
                                                              'use_mcr'])
        self._find_mlab_cmd_defaults()
        self._check_mlab_inputs()
        self._matlab_cmd_update()

    @classmethod
    def set_mlab_paths(cls, matlab_cmd=None, paths=None, use_mcr=None):
        """Set class-wide defaults for the matlab command, paths and MCR use."""
        cls._matlab_cmd = matlab_cmd
        cls._paths = paths
        cls._use_mcr = use_mcr

    def _find_mlab_cmd_defaults(self):
        # check if the user has set environment variables to enforce
        # the standalone (MCR) version of SPM
        if self._use_mcr or 'FORCE_SPMMCR' in os.environ:
            self._use_mcr = True
            if self._matlab_cmd is None:
                try:
                    self._matlab_cmd = os.environ['SPMMCRCMD']
                except KeyError:
                    pass

    def _matlab_cmd_update(self):
        # MatlabCommand has to be created here,
        # because matlab_cmb is not a proper input
        # and can be set only during init
        self.mlab = MatlabCommand(matlab_cmd=self.inputs.matlab_cmd,
                                  mfile=self.inputs.mfile,
                                  paths=self.inputs.paths)
        self.mlab.inputs.script_file = 'pyscript_%s.m' % \
            self.__class__.__name__.split('.')[-1].lower()
        if isdefined(self.inputs.use_mcr) and self.inputs.use_mcr:
            # The MCR runtime rejects interactive flags; force m-file mode.
            self.mlab.inputs.nodesktop = Undefined
            self.mlab.inputs.nosplash = Undefined
            self.mlab.inputs.single_comp_thread = Undefined
            self.mlab.inputs.uses_mcr = True
            self.mlab.inputs.mfile = True

    @property
    def version(self):
        """Return the SPM version string (e.g. '8.5236'), or None."""
        version_dict = Info.version(matlab_cmd=self.inputs.matlab_cmd,
                                    paths=self.inputs.paths,
                                    use_mcr=self.inputs.use_mcr)
        if version_dict:
            return '.'.join((version_dict['name'].split('SPM')[-1],
                             version_dict['release']))
        return version_dict

    @property
    def jobtype(self):
        return self._jobtype

    @property
    def jobname(self):
        return self._jobname

    def _check_mlab_inputs(self):
        # Instance inputs win; fall back to class-level defaults.
        if not isdefined(self.inputs.matlab_cmd) and self._matlab_cmd:
            self.inputs.matlab_cmd = self._matlab_cmd
        if not isdefined(self.inputs.paths) and self._paths:
            self.inputs.paths = self._paths
        if not isdefined(self.inputs.use_mcr) and self._use_mcr:
            self.inputs.use_mcr = self._use_mcr

    def _run_interface(self, runtime):
        """Executes the SPM function using MATLAB."""
        self.mlab.inputs.script = self._make_matlab_command(
            deepcopy(self._parse_inputs()))
        results = self.mlab.run()
        runtime.returncode = results.runtime.returncode
        if self.mlab.inputs.uses_mcr:
            # The MCR wrapper reports skipped execution in stdout rather
            # than via the return code.
            if 'Skipped' in results.runtime.stdout:
                self.raise_exception(runtime)
        runtime.stdout = results.runtime.stdout
        runtime.stderr = results.runtime.stderr
        runtime.merged = results.runtime.merged
        return runtime

    def _list_outputs(self):
        """Determine the expected outputs based on inputs."""
        raise NotImplementedError

    def _format_arg(self, opt, spec, val):
        """Convert input to appropriate format for SPM."""
        if spec.is_trait_type(traits.Bool):
            # SPM batch files expect 0/1, not true/false.
            return int(val)
        else:
            return val

    def _parse_inputs(self, skip=()):
        """Build the nested job dict from all inputs carrying 'field' metadata."""
        spmdict = {}
        metadata = dict(field=lambda t: t is not None)
        for name, spec in self.inputs.traits(**metadata).items():
            if skip and name in skip:
                continue
            value = getattr(self.inputs, name)
            if not isdefined(value):
                continue
            field = spec.field
            if '.' in field:
                # Dotted field names create nested dicts, e.g. 'a.b.c'.
                fields = field.split('.')
                dictref = spmdict
                for f in fields[:-1]:
                    if f not in dictref.keys():
                        dictref[f] = {}
                    dictref = dictref[f]
                dictref[fields[-1]] = self._format_arg(name, spec, value)
            else:
                spmdict[field] = self._format_arg(name, spec, value)
        return [spmdict]

    def _reformat_dict_for_savemat(self, contents):
        """Encloses a dict representation within hierarchical lists.

        In order to create an appropriate SPM job structure, a Python
        dict storing the job needs to be modified so that each dict
        embedded in dict needs to be enclosed as a list element.

        Examples
        --------
        >>> a = SPMCommand()._reformat_dict_for_savemat(dict(a=1,
        ...                                                  b=dict(c=2, d=3)))
        >>> a == [{'a': 1, 'b': [{'c': 2, 'd': 3}]}]
        True

        """
        newdict = {}
        try:
            for key, value in contents.items():
                if isinstance(value, dict):
                    if value:
                        newdict[key] = self._reformat_dict_for_savemat(value)
                    # if value is None, skip
                else:
                    newdict[key] = value
            return [newdict]
        except TypeError:
            # NOTE: falls through returning None after reporting the error.
            print('Requires dict input')

    def _generate_job(self, prefix='', contents=None):
        """Recursive function to generate spm job specification as a string

        Parameters
        ----------
        prefix : string
            A string that needs to get
        contents : dict
            A non-tuple Python structure containing spm job
            information gets converted to an appropriate sequence of
            matlab commands.

        """
        jobstring = ''
        if contents is None:
            return jobstring
        if isinstance(contents, list):
            for i, value in enumerate(contents):
                if prefix.endswith(")"):
                    # Extend an existing index: foo(1) -> foo(1,2).
                    newprefix = "%s,%d)" % (prefix[:-1], i + 1)
                else:
                    newprefix = "%s(%d)" % (prefix, i + 1)
                jobstring += self._generate_job(newprefix, value)
            return jobstring
        if isinstance(contents, dict):
            for key, value in contents.items():
                newprefix = "%s.%s" % (prefix, key)
                jobstring += self._generate_job(newprefix, value)
            return jobstring
        if isinstance(contents, np.ndarray):
            if contents.dtype == np.dtype(object):
                # Object arrays become matlab cell arrays.
                if prefix:
                    jobstring += "%s = {...\n" % (prefix)
                else:
                    jobstring += "{...\n"
                for i, val in enumerate(contents):
                    if isinstance(val, np.ndarray):
                        jobstring += self._generate_job(prefix=None,
                                                        contents=val)
                    elif isinstance(val, six.string_types):
                        jobstring += '\'%s\';...\n' % (val)
                    else:
                        jobstring += '%s;...\n' % str(val)
                jobstring += '};\n'
            else:
                # Structured (record) arrays become matlab struct arrays.
                for i, val in enumerate(contents):
                    for field in val.dtype.fields:
                        if prefix:
                            newprefix = "%s(%d).%s" % (prefix, i + 1, field)
                        else:
                            newprefix = "(%d).%s" % (i + 1, field)
                        jobstring += self._generate_job(newprefix,
                                                        val[field])
            return jobstring
        if isinstance(contents, six.string_types):
            jobstring += "%s = '%s';\n" % (prefix, contents)
            return jobstring
        jobstring += "%s = %s;\n" % (prefix, str(contents))
        return jobstring

    def _make_matlab_command(self, contents, postscript=None):
        """Generates a mfile to build job structure

        Parameters
        ----------
        contents : list
            a list of dicts generated by _parse_inputs
            in each subclass
        cwd : string
            default os.getcwd()

        Returns
        -------
        mscript : string
            contents of a script called by matlab

        """
        cwd = os.getcwd()
        mscript = """
%% Generated by nipype.interfaces.spm
if isempty(which('spm')),
throw(MException('SPMCheck:NotFound', 'SPM not in matlab path'));
end
[name, version] = spm('ver');
fprintf('SPM version: %s Release: %s\\n',name, version);
fprintf('SPM path: %s\\n', which('spm'));
spm('Defaults','fMRI');

if strcmp(name, 'SPM8') || strcmp(name(1:5), 'SPM12'),
spm_jobman('initcfg');
spm_get_defaults('cmdline', 1);
end\n
"""
        if self.mlab.inputs.mfile:
            if isdefined(self.inputs.use_v8struct) and self.inputs.use_v8struct:
                mscript += self._generate_job('jobs{1}.spm.%s.%s' %
                                              (self.jobtype, self.jobname),
                                              contents[0])
            else:
                # Pre-v8 job struct: some job names index with parentheses,
                # the rest with curly brackets.
                if self.jobname in ['st', 'smooth', 'preproc', 'preproc8',
                                    'fmri_spec', 'fmri_est', 'factorial_design',
                                    'defs']:
                    # parentheses
                    mscript += self._generate_job('jobs{1}.%s{1}.%s(1)' %
                                                  (self.jobtype, self.jobname),
                                                  contents[0])
                else:
                    #curly brackets
                    mscript += self._generate_job('jobs{1}.%s{1}.%s{1}' %
                                                  (self.jobtype, self.jobname),
                                                  contents[0])
        else:
            # Bug fix: the method is named _reformat_dict_for_savemat; the
            # old call to self.reformat_dict_for_savemat raised
            # AttributeError whenever mfile was False.
            jobdef = {'jobs': [{self.jobtype:
                                [{self.jobname:
                                  self._reformat_dict_for_savemat(contents[0])}]
                                }]}
            savemat(os.path.join(cwd, 'pyjobs_%s.mat' % self.jobname), jobdef)
            mscript += "load pyjobs_%s;\n\n" % self.jobname
        mscript += """
spm_jobman(\'run\', jobs);\n
"""
        if self.inputs.use_mcr:
            mscript += """
if strcmp(name, 'SPM8') || strcmp(name(1:5), 'SPM12'),
close(\'all\', \'force\');
end;
"""
        if postscript is not None:
            mscript += postscript
        return mscript
| bsd-3-clause |
kingvuplus/b-p | lib/python/Screens/BhDevice.py | 1 | 12361 | from Screens.Screen import Screen
from Screens.MessageBox import MessageBox
from Screens.ChoiceBox import ChoiceBox
from Screens.Standby import TryQuitMainloop
from Screens.Console import Console
from Components.ActionMap import ActionMap
from Components.Sources.List import List
from Components.Label import Label
from Components.Pixmap import Pixmap
from Components.ConfigList import ConfigListScreen
from Components.config import getConfigListEntry, ConfigSelection, NoSave
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import fileExists, pathExists, createDir, resolveFilename, SCOPE_CURRENT_SKIN
from os import system, listdir, remove as os_remove, rename as os_rename, stat as os_stat
from enigma import eTimer
import stat
class DeliteDevicesPanel(Screen):
    """Screen that lists attached block devices (parsed from `blkid` output)
    together with size, model and current fstab mountpoint."""

    def __init__(self, session):
        Screen.__init__(self, session)
        self['key_red'] = Label(_('Mountpoints'))
        self['key_yellow'] = Label(_('Cancel'))
        self['lab1'] = Label(_('Wait please while scanning your devices...'))
        self.list = []
        self['list'] = List(self.list)
        self['actions'] = ActionMap(['WizardActions', 'ColorActions'], {'back': self.close,
         'red': self.mapSetup,
         'yellow': self.close})
        # Defer the (slow) device scan so the screen paints first.
        self.activityTimer = eTimer()
        self.activityTimer.timeout.get().append(self.updateList)
        self.gO()

    def gO(self):
        # Make sure the standard mount directories exist before scanning.
        paths = ['/media/hdd',
         '/media/usb',
         '/media/downloads',
         '/media/music',
         '/media/personal',
         '/media/photo',
         '/media/video']
        for path in paths:
            if not pathExists(path):
                createDir(path)

        self.activityTimer.start(1)

    def updateList(self):
        """Parse `blkid` output and populate the device list."""
        self.activityTimer.stop()
        self.list = []
        self.conflist = []
        rc = system('blkid > /tmp/blkid.log')
        f = open('/tmp/blkid.log', 'r')
        for line in f.readlines():
            # Only real disks (/dev/sdXN); skip flash/mtd and other entries.
            if line.find('/dev/sd') == -1:
                continue
            parts = line.strip().split()
            device = parts[0][5:-2]
            partition = parts[0][5:-1]
            # NOTE(review): assumes every matching line contains UUID="...";
            # if not, find() returns -1 and a garbage uuid is extracted.
            pos = line.find('UUID') + 6
            end = line.find('"', pos)
            uuid = line[pos:end]
            dtype = self.get_Dtype(device)
            category = dtype[0]
            png = LoadPixmap(dtype[1])
            size = self.get_Dsize(device, partition)
            model = self.get_Dmodel(device)
            mountpoint = self.get_Dpoint(uuid)
            name = '%s: %s' % (category, model)
            description = _(' device: %s size: %s\n mountpoint: %s' % (parts[0], size, mountpoint))
            self.list.append((name, description, png))
            description = '%s %s %s' % (name, size, partition)
            self.conflist.append((description, uuid))

        # Bug fix: the file handle was never closed before removing the file.
        f.close()
        self['list'].list = self.list
        self['lab1'].hide()
        os_remove('/tmp/blkid.log')

    def get_Dpoint(self, uuid):
        """Return the fstab mountpoint for *uuid*, or 'NOT MAPPED'."""
        point = 'NOT MAPPED'
        f = open('/etc/fstab', 'r')
        for line in f.readlines():
            if line.find(uuid) != -1:
                parts = line.strip().split()
                point = parts[1]
                break

        f.close()
        return point

    def get_Dmodel(self, device):
        """Return 'vendor model' from sysfs, or 'Generic' if unavailable."""
        model = 'Generic'
        filename = '/sys/block/%s/device/vendor' % device
        if fileExists(filename):
            vendor = file(filename).read().strip()
            filename = '/sys/block/%s/device/model' % device
            mod = file(filename).read().strip()
            model = '%s %s' % (vendor, mod)
        return model

    def get_Dsize(self, device, partition):
        """Return the partition size as a 'X.YYY GB' string (512-byte sectors)."""
        size = '0'
        filename = '/sys/block/%s/%s/size' % (device, partition)
        if fileExists(filename):
            size = int(file(filename).read().strip())
            cap = size / 1000 * 512 / 1000
            size = '%d.%03d GB' % (cap / 1000, cap % 1000)
        return size

    def get_Dtype(self, device):
        """Classify *device* as USB or HARD DISK and pick the matching icon."""
        pixpath = resolveFilename(SCOPE_CURRENT_SKIN, '')
        if pixpath == '/usr/share/enigma2/' or pixpath == '/usr/share/enigma2/./':
            pixpath = '/usr/share/enigma2/skin_default/'
        name = 'USB'
        pix = pixpath + 'icons/dev_usb.png'
        filename = '/sys/block/%s/removable' % device
        if fileExists(filename):
            if file(filename).read().strip() == '0':
                name = 'HARD DISK'
                pix = pixpath + 'icons/dev_hdd.png'
        return (name, pix)

    def mapSetup(self):
        self.session.openWithCallback(self.close, DeliteSetupDevicePanelConf, self.conflist)
class DeliteSetupDevicePanelConf(Screen, ConfigListScreen):
    """Config screen mapping each detected device UUID to a /media mountpoint,
    persisting the choices as UUID= lines in /etc/fstab."""

    def __init__(self, session, devices):
        # devices: list of (description, uuid) tuples from DeliteDevicesPanel.
        Screen.__init__(self, session)
        self.list = []
        ConfigListScreen.__init__(self, self.list)
        self['key_red'] = Label(_('Save'))
        self['key_green'] = Label(_('Cancel'))
        self['Linconn'] = Label(_('Wait please while scanning your box devices...'))
        self['actions'] = ActionMap(['WizardActions', 'ColorActions'], {'red': self.savePoints,
         'green': self.close,
         'back': self.close})
        self.devices = devices
        self.updateList()

    def updateList(self):
        # One mountpoint selector per device, preloaded from /etc/fstab.
        self.list = []
        for device in self.devices:
            item = NoSave(ConfigSelection(default='Not mapped', choices=self.get_Choices()))
            item.value = self.get_currentPoint(device[1])
            res = getConfigListEntry(device[0], item, device[1])
            self.list.append(res)

        self['config'].list = self.list
        self['config'].l.setList(self.list)
        self['Linconn'].hide()

    def get_currentPoint(self, uuid):
        """Return the mountpoint currently assigned to *uuid* in /etc/fstab."""
        point = 'Not mapped'
        f = open('/etc/fstab', 'r')
        for line in f.readlines():
            if line.find(uuid) != -1:
                parts = line.strip().split()
                point = parts[1].strip()
                break

        f.close()
        return point

    def get_Choices(self):
        """Return the selectable mountpoints: 'Not mapped' plus /media/* dirs."""
        choices = [('Not mapped', 'Not mapped')]
        folders = listdir('/media')
        for f in folders:
            if f == 'net':
                continue
            c = '/media/' + f
            choices.append((c, c))

        return choices

    def savePoints(self):
        """Rewrite /etc/fstab: keep non-UUID lines, regenerate UUID mappings."""
        f = open('/etc/fstab', 'r')
        out = open('/etc/fstab.tmp', 'w')
        for line in f.readlines():
            # Drop old UUID lines (and short/blank lines); they are rebuilt below.
            if line.find('UUID') != -1 or len(line) < 6:
                continue
            out.write(line)

        for x in self['config'].list:
            if x[1].value != 'Not mapped':
                line = 'UUID=%s %s auto defaults 0 0\n' % (x[2], x[1].value)
                out.write(line)

        out.write('\n')
        f.close()
        out.close()
        # Atomic-ish replace of the live fstab.
        os_rename('/etc/fstab.tmp', '/etc/fstab')
        message = _('Devices changes need a system restart to take effects.\nRestart your Box now?')
        self.session.openWithCallback(self.restBo, MessageBox, message, MessageBox.TYPE_YESNO)

    def restBo(self, answer):
        # Callback from the restart MessageBox: 2 = restart enigma2.
        if answer is True:
            self.session.open(TryQuitMainloop, 2)
        else:
            self.close()
class BlackPoleSwap(Screen):
skin = '\n\t<screen position="center,center" size="420,240" title="Black Hole Swap File Manager">\n\t\t<widget name="lab1" position="10,20" size="400,150" font="Regular;20" transparent="1"/>\n\t\t<ePixmap pixmap="skin_default/buttons/red.png" position="0,190" size="140,40" alphatest="on" />\n\t\t<ePixmap pixmap="skin_default/buttons/green.png" position="140,190" size="140,40" alphatest="on" />\n\t\t<ePixmap pixmap="skin_default/buttons/yellow.png" position="280,190" size="140,40" alphatest="on" />\n\t\t<widget name="key_red" position="0,190" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#9f1313" transparent="1" />\n\t\t<widget name="key_green" position="140,190" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#1f771f" transparent="1" />\n\t\t<widget name="key_yellow" position="280,190" zPosition="1" size="140,40" font="Regular;20" halign="center" valign="center" backgroundColor="#a08500" transparent="1" />\n\t</screen>'
def __init__(self, session):
Screen.__init__(self, session)
self['lab1'] = Label(_('Swap status: disabled'))
self['key_red'] = Label(_('Create'))
self['key_green'] = Label(_('Remove'))
self['key_yellow'] = Label(_('Close'))
self['actions'] = ActionMap(['WizardActions', 'ColorActions'], {'back': self.close,
'red': self.keyRed,
'green': self.keyGreen,
'yellow': self.close})
self.onLayoutFinish.append(self.updateSwap)
def updateSwap(self):
self.swap_file = ''
swapinfo = _('Swap status: disabled')
f = open('/proc/swaps', 'r')
for line in f.readlines():
if line.find('swapfile') != -1:
parts = line.split()
self.swap_file = parts[0].strip()
size = int(parts[2].strip()) / 1024
swapinfo = _('Swap status: active\nSwap file: %s \nSwap size: %d M \nSwap used: %s Kb') % (self.swap_file, size, parts[3].strip())
f.close()
self['lab1'].setText(swapinfo)
def keyGreen(self):
if self.swap_file:
cmd = 'swapoff %s' % self.swap_file
rc = system(cmd)
try:
out = open('/etc/init.d/bh_swap', 'w')
strview = '#!/bin/sh\n\nexit 0'
out.write(strview)
out.close()
system('chmod 0755 /etc/init.d/bh_swap')
except:
pass
self.updateSwap()
else:
self.session.open(MessageBox, _('Swap already disabled.'), MessageBox.TYPE_INFO)
def keyRed(self):
if self.swap_file:
self.session.open(MessageBox, _('Swap file is active.\nRemove it before to create a new swap space.'), MessageBox.TYPE_INFO)
else:
options = []
f = open('/proc/mounts', 'r')
for line in f.readlines():
if line.find('/media/sd') != -1:
continue
elif line.find('/media/') != -1:
if line.find(' ext') != -1:
parts = line.split()
options.append([parts[1].strip(), parts[1].strip()])
f.close()
if len(options) == 0:
self.session.open(MessageBox, _('Sorry no valid device found.\nBe sure your device is Linux formatted and mapped.\nPlease use Black Hole format wizard and Black Hole device manager to prepare and map your usb stick.'), MessageBox.TYPE_INFO)
else:
self.session.openWithCallback(self.selectSize, ChoiceBox, title='Select the Swap File device:', list=options)
def selectSize(self, device):
    """After a target device was chosen, ask for the desired swap size."""
    if not device:
        # Dialog was cancelled.
        return
    self.new_swap = device[1] + '/swapfile'
    size_choices = [
        ['16 Mega', '16384'],
        ['32 Mega', '32768'],
        ['64 Mega', '65536'],
        ['128 Mega', '131072'],
        ['256 Mega', '262144'],
        ['512 MB', '524288'],
        ['1 GB', '1048576'],
        ['2 GB', '2097152'],
    ]
    self.session.openWithCallback(self.swapOn, ChoiceBox, title=_('Select the Swap File Size:'), list=size_choices)
def swapOn(self, size):
    """Create, format and activate the swap file, then persist it at boot."""
    if not size:
        # Dialog was cancelled.
        return
    # Pre-allocate the file with dd; size[1] is the size in 1 KiB blocks.
    cmd = 'dd if=/dev/zero of=%s bs=1024 count=%s 2>/dev/null' % (self.new_swap, size[1])
    rc = system(cmd)
    if rc == 0:
        system('mkswap %s' % self.new_swap)
        system('swapon %s' % self.new_swap)
        # Recreate the init script so the swap file is re-enabled on boot.
        # 'with' ensures the handle is closed (original leaked it on error).
        with open('/etc/init.d/bh_swap', 'w') as out:
            out.write('#!/bin/sh\nmkswap ' + self.new_swap + '\nswapon ' + self.new_swap + '\nexit 0')
        system('chmod 0755 /etc/init.d/bh_swap')
        self.session.open(MessageBox, _('Swap File created.'), MessageBox.TYPE_INFO)
        self.updateSwap()
    else:
        self.session.open(MessageBox, _('Swap File creation Failed. Check for available space.'), MessageBox.TYPE_INFO)
| gpl-2.0 |
edgarRd/incubator-airflow | tests/contrib/sensors/test_aws_redshift_cluster_sensor.py | 5 | 3429 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import unittest
import boto3
from airflow import configuration
from airflow.contrib.sensors.aws_redshift_cluster_sensor import AwsRedshiftClusterSensor
try:
from moto import mock_redshift
except ImportError:
mock_redshift = None
class TestAwsRedshiftClusterSensor(unittest.TestCase):
    """Tests for AwsRedshiftClusterSensor.poke against a moto-mocked Redshift.

    The sensor construction was duplicated verbatim in every test; it is
    factored into ``_make_sensor`` so each test only states what varies.
    """

    def setUp(self):
        configuration.load_test_config()

    @staticmethod
    def _create_cluster():
        """Create the mocked 'test_cluster' and sanity-check the moto mock."""
        client = boto3.client('redshift', region_name='us-east-1')
        client.create_cluster(
            ClusterIdentifier='test_cluster',
            NodeType='dc1.large',
            MasterUsername='admin',
            MasterUserPassword='mock_password'
        )
        if len(client.describe_clusters()['Clusters']) == 0:
            raise ValueError('AWS not properly mocked')

    @staticmethod
    def _make_sensor(cluster_identifier, target_status):
        """Build a sensor with the parameters shared by all tests."""
        return AwsRedshiftClusterSensor(task_id='test_cluster_sensor',
                                        poke_interval=1,
                                        timeout=5,
                                        aws_conn_id='aws_default',
                                        cluster_identifier=cluster_identifier,
                                        target_status=target_status)

    @unittest.skipIf(mock_redshift is None, 'mock_redshift package not present')
    @mock_redshift
    def test_poke(self):
        # An existing cluster reports the expected 'available' status.
        self._create_cluster()
        op = self._make_sensor('test_cluster', 'available')
        self.assertTrue(op.poke(None))

    @unittest.skipIf(mock_redshift is None, 'mock_redshift package not present')
    @mock_redshift
    def test_poke_false(self):
        # An unknown cluster can never reach 'available'.
        self._create_cluster()
        op = self._make_sensor('test_cluster_not_found', 'available')
        self.assertFalse(op.poke(None))

    @unittest.skipIf(mock_redshift is None, 'mock_redshift package not present')
    @mock_redshift
    def test_poke_cluster_not_found(self):
        # A missing cluster matches the special 'cluster_not_found' status.
        self._create_cluster()
        op = self._make_sensor('test_cluster_not_found', 'cluster_not_found')
        self.assertTrue(op.poke(None))
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
technologiescollege/s2a_fr | s2a/Python/Lib/test/test_htmlparser.py | 50 | 24838 | """Tests for HTMLParser.py."""
import HTMLParser
import pprint
import unittest
from test import test_support
class EventCollector(HTMLParser.HTMLParser):
    """HTMLParser subclass that records every handler call as an event tuple.

    Each event is a tuple whose first element names the handler (e.g.
    'starttag', 'data', 'comment') and whose remaining elements are the
    handler's arguments.  Tests compare the recorded event list against an
    expected list.
    """

    def __init__(self):
        self.events = []
        # Bound-method shortcut used by all the handlers below.
        self.append = self.events.append
        HTMLParser.HTMLParser.__init__(self)

    def get_events(self):
        # Normalize the list of events so that buffer artefacts don't
        # separate runs of contiguous characters.
        L = []
        prevtype = None
        for event in self.events:
            type = event[0]
            if type == prevtype == "data":
                # Merge adjacent 'data' events into one.
                L[-1] = ("data", L[-1][1] + event[1])
            else:
                L.append(event)
            prevtype = type
        self.events = L
        return L

    # structure markup

    def handle_starttag(self, tag, attrs):
        self.append(("starttag", tag, attrs))

    def handle_startendtag(self, tag, attrs):
        self.append(("startendtag", tag, attrs))

    def handle_endtag(self, tag):
        self.append(("endtag", tag))

    # all other markup

    def handle_comment(self, data):
        self.append(("comment", data))

    def handle_charref(self, data):
        self.append(("charref", data))

    def handle_data(self, data):
        self.append(("data", data))

    def handle_decl(self, data):
        self.append(("decl", data))

    def handle_entityref(self, data):
        self.append(("entityref", data))

    def handle_pi(self, data):
        self.append(("pi", data))

    def unknown_decl(self, decl):
        self.append(("unknown decl", decl))
class EventCollectorExtra(EventCollector):
    """EventCollector that additionally records the raw start-tag text."""

    def handle_starttag(self, tag, attrs):
        EventCollector.handle_starttag(self, tag, attrs)
        self.append(("starttag_text", self.get_starttag_text()))
class TestCaseBase(unittest.TestCase):
    """Shared helpers for feeding markup to a parser and checking events."""

    def _run_check(self, source, expected_events, collector=EventCollector):
        # 'source' may be a single string or a list of chunks fed one at a
        # time (the latter exercises the parser's internal buffering).
        parser = collector()
        for s in source:
            parser.feed(s)
        parser.close()
        events = parser.get_events()
        if events != expected_events:
            self.fail("received events did not match expected events\n"
                      "Expected:\n" + pprint.pformat(expected_events) +
                      "\nReceived:\n" + pprint.pformat(events))

    def _run_check_extra(self, source, events):
        self._run_check(source, events, EventCollectorExtra)

    def _parse_error(self, source):
        # Assert that parsing 'source' raises HTMLParseError.
        def parse(source=source):
            parser = HTMLParser.HTMLParser()
            parser.feed(source)
            parser.close()
        self.assertRaises(HTMLParser.HTMLParseError, parse)
class HTMLParserTestCase(TestCaseBase):
    """Behavioural tests for HTMLParser's tokenization of (often broken) HTML.

    NOTE(review): several entity references inside the test literals appear
    to have been HTML-decoded in transit (e.g. '&#32;' -> ' ', '&#x201C;' ->
    a typographic quote) -- verify against the upstream CPython file before
    relying on these exact expectations.
    """

    def test_processing_instruction_only(self):
        self._run_check("<?processing instruction>", [
            ("pi", "processing instruction"),
            ])
        self._run_check("<?processing instruction ?>", [
            ("pi", "processing instruction ?"),
            ])

    def test_simple_html(self):
        # Triple-quoted literal lines intentionally kept unindented: their
        # leading whitespace is part of the parsed input.
        self._run_check("""
<!DOCTYPE html PUBLIC 'foo'>
<HTML>&entity; 
<!--comment1a
-></foo><bar><<?pi?></foo<bar
comment1b-->
<Img sRc='Bar' isMAP>sample
text
“
<!--comment2a-- --comment2b-->
</Html>
""", [
            ("data", "\n"),
            ("decl", "DOCTYPE html PUBLIC 'foo'"),
            ("data", "\n"),
            ("starttag", "html", []),
            ("entityref", "entity"),
            ("charref", "32"),
            ("data", "\n"),
            ("comment", "comment1a\n-></foo><bar><<?pi?></foo<bar\ncomment1b"),
            ("data", "\n"),
            ("starttag", "img", [("src", "Bar"), ("ismap", None)]),
            ("data", "sample\ntext\n"),
            ("charref", "x201C"),
            ("data", "\n"),
            ("comment", "comment2a-- --comment2b"),
            ("data", "\n"),
            ("endtag", "html"),
            ("data", "\n"),
            ])

    def test_unclosed_entityref(self):
        self._run_check("&entityref foo", [
            ("entityref", "entityref"),
            ("data", " foo"),
            ])

    def test_bad_nesting(self):
        # Strangely, this *is* supposed to test that overlapping
        # elements are allowed.  HTMLParser is more geared toward
        # lexing the input that parsing the structure.
        self._run_check("<a><b></a></b>", [
            ("starttag", "a", []),
            ("starttag", "b", []),
            ("endtag", "a"),
            ("endtag", "b"),
            ])

    def test_bare_ampersands(self):
        self._run_check("this text & contains & ampersands &", [
            ("data", "this text & contains & ampersands &"),
            ])

    def test_bare_pointy_brackets(self):
        self._run_check("this < text > contains < bare>pointy< brackets", [
            ("data", "this < text > contains < bare>pointy< brackets"),
            ])

    def test_illegal_declarations(self):
        self._run_check('<!spacer type="block" height="25">',
                        [('comment', 'spacer type="block" height="25"')])

    def test_starttag_end_boundary(self):
        self._run_check("""<a b='<'>""", [("starttag", "a", [("b", "<")])])
        self._run_check("""<a b='>'>""", [("starttag", "a", [("b", ">")])])

    def test_buffer_artefacts(self):
        # Splitting the input at every possible position must not change
        # the resulting events.
        output = [("starttag", "a", [("b", "<")])]
        self._run_check(["<a b='<'>"], output)
        self._run_check(["<a ", "b='<'>"], output)
        self._run_check(["<a b", "='<'>"], output)
        self._run_check(["<a b=", "'<'>"], output)
        self._run_check(["<a b='<", "'>"], output)
        self._run_check(["<a b='<'", ">"], output)

        output = [("starttag", "a", [("b", ">")])]
        self._run_check(["<a b='>'>"], output)
        self._run_check(["<a ", "b='>'>"], output)
        self._run_check(["<a b", "='>'>"], output)
        self._run_check(["<a b=", "'>'>"], output)
        self._run_check(["<a b='>", "'>"], output)
        self._run_check(["<a b='>'", ">"], output)

        output = [("comment", "abc")]
        self._run_check(["", "<!--abc-->"], output)
        self._run_check(["<", "!--abc-->"], output)
        self._run_check(["<!", "--abc-->"], output)
        self._run_check(["<!-", "-abc-->"], output)
        self._run_check(["<!--", "abc-->"], output)
        self._run_check(["<!--a", "bc-->"], output)
        self._run_check(["<!--ab", "c-->"], output)
        self._run_check(["<!--abc", "-->"], output)
        self._run_check(["<!--abc-", "->"], output)
        self._run_check(["<!--abc--", ">"], output)
        self._run_check(["<!--abc-->", ""], output)

    def test_starttag_junk_chars(self):
        self._run_check("</>", [])
        self._run_check("</$>", [('comment', '$')])
        self._run_check("</", [('data', '</')])
        self._run_check("</a", [('data', '</a')])
        # XXX this might be wrong
        self._run_check("<a<a>", [('data', '<a'), ('starttag', 'a', [])])
        self._run_check("</a<a>", [('endtag', 'a<a')])
        self._run_check("<!", [('data', '<!')])
        self._run_check("<a", [('data', '<a')])
        self._run_check("<a foo='bar'", [('data', "<a foo='bar'")])
        self._run_check("<a foo='bar", [('data', "<a foo='bar")])
        self._run_check("<a foo='>'", [('data', "<a foo='>'")])
        self._run_check("<a foo='>", [('data', "<a foo='>")])

    def test_valid_doctypes(self):
        # from http://www.w3.org/QA/2002/04/valid-dtd-list.html
        dtds = ['HTML',  # HTML5 doctype
                ('HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" '
                 '"http://www.w3.org/TR/html4/strict.dtd"'),
                ('HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" '
                 '"http://www.w3.org/TR/html4/loose.dtd"'),
                ('html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" '
                 '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"'),
                ('html PUBLIC "-//W3C//DTD XHTML 1.0 Frameset//EN" '
                 '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd"'),
                ('math PUBLIC "-//W3C//DTD MathML 2.0//EN" '
                 '"http://www.w3.org/Math/DTD/mathml2/mathml2.dtd"'),
                ('html PUBLIC "-//W3C//DTD '
                 'XHTML 1.1 plus MathML 2.0 plus SVG 1.1//EN" '
                 '"http://www.w3.org/2002/04/xhtml-math-svg/xhtml-math-svg.dtd"'),
                ('svg PUBLIC "-//W3C//DTD SVG 1.1//EN" '
                 '"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"'),
                'html PUBLIC "-//IETF//DTD HTML 2.0//EN"',
                'html PUBLIC "-//W3C//DTD HTML 3.2 Final//EN"']
        for dtd in dtds:
            self._run_check("<!DOCTYPE %s>" % dtd,
                            [('decl', 'DOCTYPE ' + dtd)])

    def test_slashes_in_starttag(self):
        self._run_check('<a foo="var"/>', [('startendtag', 'a', [('foo', 'var')])])
        html = ('<img width=902 height=250px '
                'src="/sites/default/files/images/homepage/foo.jpg" '
                '/*what am I doing here*/ />')
        expected = [(
            'startendtag', 'img',
            [('width', '902'), ('height', '250px'),
             ('src', '/sites/default/files/images/homepage/foo.jpg'),
             ('*what', None), ('am', None), ('i', None),
             ('doing', None), ('here*', None)]
        )]
        self._run_check(html, expected)
        html = ('<a / /foo/ / /=/ / /bar/ / />'
                '<a / /foo/ / /=/ / /bar/ / >')
        expected = [
            ('startendtag', 'a', [('foo', None), ('=', None), ('bar', None)]),
            ('starttag', 'a', [('foo', None), ('=', None), ('bar', None)])
        ]
        self._run_check(html, expected)
        # see issue #14538
        html = ('<meta><meta / ><meta // ><meta / / >'
                '<meta/><meta /><meta //><meta//>')
        expected = [
            ('starttag', 'meta', []), ('starttag', 'meta', []),
            ('starttag', 'meta', []), ('starttag', 'meta', []),
            ('startendtag', 'meta', []), ('startendtag', 'meta', []),
            ('startendtag', 'meta', []), ('startendtag', 'meta', []),
        ]
        self._run_check(html, expected)

    def test_declaration_junk_chars(self):
        self._run_check("<!DOCTYPE foo $ >", [('decl', 'DOCTYPE foo $ ')])

    def test_startendtag(self):
        self._run_check("<p/>", [
            ("startendtag", "p", []),
            ])
        self._run_check("<p></p>", [
            ("starttag", "p", []),
            ("endtag", "p"),
            ])
        self._run_check("<p><img src='foo' /></p>", [
            ("starttag", "p", []),
            ("startendtag", "img", [("src", "foo")]),
            ("endtag", "p"),
            ])

    def test_invalid_end_tags(self):
        # A collection of broken end tags. <br> is used as separator.
        # see http://www.w3.org/TR/html5/tokenization.html#end-tag-open-state
        # and #13993
        html = ('<br></label</p><br></div end tmAd-leaderBoard><br></<h4><br>'
                '</li class="unit"><br></li\r\n\t\t\t\t\t\t</ul><br></><br>')
        expected = [('starttag', 'br', []),
                    # < is part of the name, / is discarded, p is an attribute
                    ('endtag', 'label<'),
                    ('starttag', 'br', []),
                    # text and attributes are discarded
                    ('endtag', 'div'),
                    ('starttag', 'br', []),
                    # comment because the first char after </ is not a-zA-Z
                    ('comment', '<h4'),
                    ('starttag', 'br', []),
                    # attributes are discarded
                    ('endtag', 'li'),
                    ('starttag', 'br', []),
                    # everything till ul (included) is discarded
                    ('endtag', 'li'),
                    ('starttag', 'br', []),
                    # </> is ignored
                    ('starttag', 'br', [])]
        self._run_check(html, expected)

    def test_broken_invalid_end_tag(self):
        # This is technically wrong (the "> shouldn't be included in the 'data')
        # but is probably not worth fixing it (in addition to all the cases of
        # the previous test, it would require a full attribute parsing).
        # see #13993
        html = '<b>This</b attr=">"> confuses the parser'
        expected = [('starttag', 'b', []),
                    ('data', 'This'),
                    ('endtag', 'b'),
                    ('data', '"> confuses the parser')]
        self._run_check(html, expected)

    def test_get_starttag_text(self):
        s = """<foo:bar   \n   one="1"\ttwo=2   >"""
        self._run_check_extra(s, [
            ("starttag", "foo:bar", [("one", "1"), ("two", "2")]),
            ("starttag_text", s)])

    def test_cdata_content(self):
        # Inside <script>/<style>, everything is passed through as raw data.
        contents = [
            '<!-- not a comment --> ¬-an-entity-ref;',
            "<not a='start tag'>",
            '<a href="" /> <p> <span></span>',
            'foo = "</scr" + "ipt>";',
            'foo = "</SCRIPT" + ">";',
            'foo = <\n/script> ',
            '<!-- document.write("</scr" + "ipt>"); -->',
            ('\n//<![CDATA[\n'
             'document.write(\'<s\'+\'cript type="text/javascript" '
             'src="http://www.example.org/r=\'+new '
             'Date().getTime()+\'"><\\/s\'+\'cript>\');\n//]]>'),
            '\n<!-- //\nvar foo = 3.14;\n// -->\n',
            'foo = "</sty" + "le>";',
            u'<!-- \u2603 -->',
            # these two should be invalid according to the HTML 5 spec,
            # section 8.1.2.2
            #'foo = </\nscript>',
            #'foo = </ script>',
        ]
        elements = ['script', 'style', 'SCRIPT', 'STYLE', 'Script', 'Style']
        for content in contents:
            for element in elements:
                element_lower = element.lower()
                s = u'<{element}>{content}</{element}>'.format(element=element,
                                                               content=content)
                self._run_check(s, [("starttag", element_lower, []),
                                    ("data", content),
                                    ("endtag", element_lower)])

    def test_cdata_with_closing_tags(self):
        # see issue #13358
        # make sure that HTMLParser calls handle_data only once for each CDATA.
        # The normal event collector normalizes the events in get_events,
        # so we override it to return the original list of events.
        class Collector(EventCollector):
            def get_events(self):
                return self.events

        # Literal continuation lines kept unindented: whitespace is data.
        content = """<!-- not a comment --> ¬-an-entity-ref;
<a href="" /> </p><p> & <span></span></style>
'</script' + '>' </html> </head> </scripter>!"""
        for element in [' script', 'script ', ' script ',
                        '\nscript', 'script\n', '\nscript\n']:
            s = u'<script>{content}</{element}>'.format(element=element,
                                                        content=content)
            self._run_check(s, [("starttag", "script", []),
                                ("data", content),
                                ("endtag", "script")],
                            collector=Collector)

    def test_malformatted_charref(self):
        self._run_check("<p>&#bad;</p>", [
            ("starttag", "p", []),
            ("data", "&#bad;"),
            ("endtag", "p"),
            ])

    def test_unescape_function(self):
        parser = HTMLParser.HTMLParser()
        self.assertEqual(parser.unescape('&#bad;'), '&#bad;')
        self.assertEqual(parser.unescape('&'), '&')
class AttributesTestCase(TestCaseBase):
    """Tests focused on attribute parsing, plus comment/condcom handling.

    NOTE(review): several entity references inside the test literals appear
    to have been HTML-decoded in transit (e.g. '&amp;' -> '&', '&euro;' ->
    a euro sign) -- verify against the upstream CPython file before relying
    on these exact expectations.
    """

    def test_attr_syntax(self):
        output = [
          ("starttag", "a", [("b", "v"), ("c", "v"), ("d", "v"), ("e", None)])
        ]
        self._run_check("""<a b='v' c="v" d=v e>""", output)
        self._run_check("""<a  b = 'v' c = "v" d = v e>""", output)
        self._run_check("""<a\nb\n=\n'v'\nc\n=\n"v"\nd\n=\nv\ne>""", output)
        self._run_check("""<a\tb\t=\t'v'\tc\t=\t"v"\td\t=\tv\te>""", output)

    def test_attr_values(self):
        self._run_check("""<a b='xxx\n\txxx' c="yyy\t\nyyy" d='\txyz\n'>""",
                        [("starttag", "a", [("b", "xxx\n\txxx"),
                                            ("c", "yyy\t\nyyy"),
                                            ("d", "\txyz\n")])])
        self._run_check("""<a b='' c="">""",
                        [("starttag", "a", [("b", ""), ("c", "")])])
        # Regression test for SF patch #669683.
        self._run_check("<e a=rgb(1,2,3)>",
                        [("starttag", "e", [("a", "rgb(1,2,3)")])])
        # Regression test for SF bug #921657.
        self._run_check(
            "<a href=mailto:xyz@example.com>",
            [("starttag", "a", [("href", "mailto:xyz@example.com")])])

    def test_attr_nonascii(self):
        # see issue 7311
        self._run_check(
            u"<img src=/foo/bar.png alt=\u4e2d\u6587>",
            [("starttag", "img", [("src", "/foo/bar.png"),
                                  ("alt", u"\u4e2d\u6587")])])
        self._run_check(
            u"<a title='\u30c6\u30b9\u30c8' href='\u30c6\u30b9\u30c8.html'>",
            [("starttag", "a", [("title", u"\u30c6\u30b9\u30c8"),
                                ("href", u"\u30c6\u30b9\u30c8.html")])])
        self._run_check(
            u'<a title="\u30c6\u30b9\u30c8" href="\u30c6\u30b9\u30c8.html">',
            [("starttag", "a", [("title", u"\u30c6\u30b9\u30c8"),
                                ("href", u"\u30c6\u30b9\u30c8.html")])])

    def test_attr_entity_replacement(self):
        self._run_check(
            "<a b='&><"''>",
            [("starttag", "a", [("b", "&><\"'")])])

    def test_attr_funky_names(self):
        self._run_check(
            "<a a.b='v' c:d=v e-f=v>",
            [("starttag", "a", [("a.b", "v"), ("c:d", "v"), ("e-f", "v")])])
        self._run_check(
            "<a $><b $=%><c \=/>",
            [("starttag", "a", [("$", None)]),
             ("starttag", "b", [("$", "%")]),
             ("starttag", "c", [("\\", "/")])])

    def test_entityrefs_in_attributes(self):
        self._run_check(
            "<html foo='€&aa&unsupported;'>",
            [("starttag", "html", [("foo", u"\u20AC&aa&unsupported;")])])

    def test_entities_in_attribute_value(self):
        # see #1200313
        for entity in ['&', '&', '&', '&']:
            self._run_check('<a href="%s">' % entity,
                            [("starttag", "a", [("href", "&")])])
            self._run_check("<a href='%s'>" % entity,
                            [("starttag", "a", [("href", "&")])])
            self._run_check("<a href=%s>" % entity,
                            [("starttag", "a", [("href", "&")])])

    def test_malformed_attributes(self):
        # see #13357
        html = (
            "<a href=test'style='color:red;bad1'>test - bad1</a>"
            "<a href=test'+style='color:red;ba2'>test - bad2</a>"
            "<a href=test' style='color:red;bad3'>test - bad3</a>"
            "<a href = test' style='color:red;bad4'  >test - bad4</a>"
        )
        expected = [
            ('starttag', 'a', [('href', "test'style='color:red;bad1'")]),
            ('data', 'test - bad1'), ('endtag', 'a'),
            ('starttag', 'a', [('href', "test'+style='color:red;ba2'")]),
            ('data', 'test - bad2'), ('endtag', 'a'),
            ('starttag', 'a', [('href', u"test'\xa0style='color:red;bad3'")]),
            ('data', 'test - bad3'), ('endtag', 'a'),
            ('starttag', 'a', [('href', u"test'\xa0style='color:red;bad4'")]),
            ('data', 'test - bad4'), ('endtag', 'a')
        ]
        self._run_check(html, expected)

    def test_malformed_adjacent_attributes(self):
        # see #12629
        self._run_check('<x><y z=""o"" /></x>',
                        [('starttag', 'x', []),
                         ('startendtag', 'y', [('z', ''), ('o""', None)]),
                         ('endtag', 'x')])
        self._run_check('<x><y z="""" /></x>',
                        [('starttag', 'x', []),
                         ('startendtag', 'y', [('z', ''), ('""', None)]),
                         ('endtag', 'x')])

    # see #755670 for the following 3 tests
    def test_adjacent_attributes(self):
        self._run_check('<a width="100%"cellspacing=0>',
                        [("starttag", "a",
                          [("width", "100%"), ("cellspacing", "0")])])
        self._run_check('<a id="foo"class="bar">',
                        [("starttag", "a",
                          [("id", "foo"), ("class", "bar")])])

    def test_missing_attribute_value(self):
        self._run_check('<a v=>',
                        [("starttag", "a", [("v", "")])])

    def test_javascript_attribute_value(self):
        self._run_check("<a href=javascript:popup('/popup/help.html')>",
                        [("starttag", "a",
                          [("href", "javascript:popup('/popup/help.html')")])])

    def test_end_tag_in_attribute_value(self):
        # see #1745761
        self._run_check("<a href='http://www.example.org/\">;'>spam</a>",
                        [("starttag", "a",
                          [("href", "http://www.example.org/\">;")]),
                         ("data", "spam"), ("endtag", "a")])

    def test_comments(self):
        html = ("<!-- I'm a valid comment -->"
                '<!--me too!-->'
                '<!------>'
                '<!---->'
                '<!----I have many hyphens---->'
                '<!-- I have a > in the middle -->'
                '<!-- and I have -- in the middle! -->')
        expected = [('comment', " I'm a valid comment "),
                    ('comment', 'me too!'),
                    ('comment', '--'),
                    ('comment', ''),
                    ('comment', '--I have many hyphens--'),
                    ('comment', ' I have a > in the middle '),
                    ('comment', ' and I have -- in the middle! ')]
        self._run_check(html, expected)

    def test_broken_comments(self):
        html = ('<! not really a comment >'
                '<! not a comment either -->'
                '<! -- close enough -->'
                '<!><!<-- this was an empty comment>'
                '<!!! another bogus comment !!!>')
        expected = [
            ('comment', ' not really a comment '),
            ('comment', ' not a comment either --'),
            ('comment', ' -- close enough --'),
            ('comment', ''),
            ('comment', '<-- this was an empty comment'),
            ('comment', '!! another bogus comment !!!'),
        ]
        self._run_check(html, expected)

    def test_condcoms(self):
        html = ('<!--[if IE & !(lte IE 8)]>aren\'t<![endif]-->'
                '<!--[if IE 8]>condcoms<![endif]-->'
                '<!--[if lte IE 7]>pretty?<![endif]-->')
        expected = [('comment', "[if IE & !(lte IE 8)]>aren't<![endif]"),
                    ('comment', '[if IE 8]>condcoms<![endif]'),
                    ('comment', '[if lte IE 7]>pretty?<![endif]')]
        self._run_check(html, expected)

    def test_broken_condcoms(self):
        # these condcoms are missing the '--' after '<!' and before the '>'
        html = ('<![if !(IE)]>broken condcom<![endif]>'
                '<![if ! IE]><link href="favicon.tiff"/><![endif]>'
                '<![if !IE 6]><img src="firefox.png" /><![endif]>'
                '<![if !ie 6]><b>foo</b><![endif]>'
                '<![if (!IE)|(lt IE 9)]><img src="mammoth.bmp" /><![endif]>')
        # According to the HTML5 specs sections "8.2.4.44 Bogus comment state"
        # and "8.2.4.45 Markup declaration open state", comment tokens should
        # be emitted instead of 'unknown decl', but calling unknown_decl
        # provides more flexibility.
        # See also Lib/_markupbase.py:parse_declaration
        expected = [
            ('unknown decl', 'if !(IE)'),
            ('data', 'broken condcom'),
            ('unknown decl', 'endif'),
            ('unknown decl', 'if ! IE'),
            ('startendtag', 'link', [('href', 'favicon.tiff')]),
            ('unknown decl', 'endif'),
            ('unknown decl', 'if !IE 6'),
            ('startendtag', 'img', [('src', 'firefox.png')]),
            ('unknown decl', 'endif'),
            ('unknown decl', 'if !ie 6'),
            ('starttag', 'b', []),
            ('data', 'foo'),
            ('endtag', 'b'),
            ('unknown decl', 'endif'),
            ('unknown decl', 'if (!IE)|(lt IE 9)'),
            ('startendtag', 'img', [('src', 'mammoth.bmp')]),
            ('unknown decl', 'endif')
        ]
        self._run_check(html, expected)
def test_main():
    """Entry point used by Python's regression-test machinery."""
    test_support.run_unittest(HTMLParserTestCase, AttributesTestCase)


if __name__ == "__main__":
    test_main()
| gpl-3.0 |
gavin-feng/odoo | openerp/http.py | 7 | 60957 | # -*- coding: utf-8 -*-
#----------------------------------------------------------
# OpenERP HTTP layer
#----------------------------------------------------------
import ast
import collections
import contextlib
import datetime
import errno
import functools
import getpass
import inspect
import logging
import mimetypes
import os
import pprint
import random
import re
import sys
import tempfile
import threading
import time
import traceback
import urlparse
import warnings
from zlib import adler32
import babel.core
import psycopg2
import simplejson
import werkzeug.contrib.sessions
import werkzeug.datastructures
import werkzeug.exceptions
import werkzeug.local
import werkzeug.routing
import werkzeug.wrappers
import werkzeug.wsgi
from werkzeug.wsgi import wrap_file
try:
import psutil
except ImportError:
psutil = None
import openerp
from openerp import SUPERUSER_ID
from openerp.service import security, model as service_model
from openerp.tools.func import lazy_property
from openerp.tools import ustr
# Module logger, plus dedicated loggers that trace RPC requests/responses
# when DEBUG is enabled on them (used by dispatch_rpc below).
_logger = logging.getLogger(__name__)
rpc_request = logging.getLogger(__name__ + '.rpc.request')
rpc_response = logging.getLogger(__name__ + '.rpc.response')

# 1 week cache for statics as advised by Google Page Speed
STATIC_CACHE = 60 * 60 * 24 * 7

#----------------------------------------------------------
# RequestHandler
#----------------------------------------------------------
# Thread local global request object
_request_stack = werkzeug.local.LocalStack()

request = _request_stack()
"""
A global proxy that always redirect to the current request object.
"""
def replace_request_password(args):
    """Return ``args`` as a tuple with the password argument masked.

    The password is always the 3rd positional argument of an RPC request;
    it is replaced by ``'*'`` so logs can be forwarded safely for
    diagnostics/debugging.  Shorter argument sequences are returned as a
    tuple, unchanged.
    """
    if len(args) <= 2:
        return tuple(args)
    return tuple(args[:2]) + ('*',) + tuple(args[3:])
# don't trigger debugger for those exceptions, they carry user-facing warnings
# and indications, they're not necessarily indicative of anything being
# *broken*
# (checked by dispatch_rpc and WebRequest._handle_exception)
NO_POSTMORTEM = (openerp.osv.orm.except_orm,
                 openerp.exceptions.AccessError,
                 openerp.exceptions.AccessDenied,
                 openerp.exceptions.Warning,
                 openerp.exceptions.RedirectWarning)
def dispatch_rpc(service_name, method, params):
    """ Handle a RPC call.

    This is pure Python code, the actual marshalling (from/to XML-RPC) is done
    in a upper layer.
    """
    try:
        # Only pay for timing/memory bookkeeping when one of the RPC debug
        # loggers is enabled.
        rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
        rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
        if rpc_request_flag or rpc_response_flag:
            start_time = time.time()
            start_rss, start_vms = 0, 0
            if psutil:
                start_rss, start_vms = psutil.Process(os.getpid()).get_memory_info()
            # NOTE(review): 'rpc_request' is a logger object and therefore
            # always truthy; this condition was presumably meant to test
            # 'rpc_request_flag' -- confirm against upstream before changing.
            if rpc_request and rpc_response_flag:
                openerp.netsvc.log(rpc_request, logging.DEBUG, '%s.%s' % (service_name, method), replace_request_password(params))

        threading.current_thread().uid = None
        threading.current_thread().dbname = None
        # Route the call to the matching service dispatcher.
        if service_name == 'common':
            dispatch = openerp.service.common.dispatch
        elif service_name == 'db':
            dispatch = openerp.service.db.dispatch
        elif service_name == 'object':
            dispatch = openerp.service.model.dispatch
        elif service_name == 'report':
            dispatch = openerp.service.report.dispatch
        else:
            # Fall back to handlers registered by server-wide modules.
            dispatch = openerp.service.wsgi_server.rpc_handlers.get(service_name)
        result = dispatch(method, params)

        if rpc_request_flag or rpc_response_flag:
            end_time = time.time()
            end_rss, end_vms = 0, 0
            if psutil:
                end_rss, end_vms = psutil.Process(os.getpid()).get_memory_info()
            logline = '%s.%s time:%.3fs mem: %sk -> %sk (diff: %sk)' % (service_name, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
            if rpc_response_flag:
                openerp.netsvc.log(rpc_response, logging.DEBUG, logline, result)
            else:
                openerp.netsvc.log(rpc_request, logging.DEBUG, logline, replace_request_password(params), depth=1)

        return result
    except NO_POSTMORTEM:
        # User-facing warnings: re-raise without invoking the debugger.
        raise
    except openerp.exceptions.DeferredException, e:
        _logger.exception(openerp.tools.exception_to_unicode(e))
        openerp.tools.debugger.post_mortem(openerp.tools.config, e.traceback)
        raise
    except Exception, e:
        _logger.exception(openerp.tools.exception_to_unicode(e))
        openerp.tools.debugger.post_mortem(openerp.tools.config, sys.exc_info())
        raise
def local_redirect(path, query=None, keep_hash=False, forward_debug=True, code=303):
    """Redirect to a local ``path``, optionally appending a query string.

    When ``forward_debug`` is set and the current request is in debug mode,
    a ``debug`` parameter is propagated to the target URL.  With
    ``keep_hash`` the redirect preserves ``location.hash`` (see
    redirect_with_hash); otherwise a plain werkzeug redirect is returned.
    """
    params = query or {}
    if forward_debug and request and request.debug:
        params['debug'] = None
    url = path
    if params:
        url = path + '?' + werkzeug.url_encode(params)
    if keep_hash:
        return redirect_with_hash(url, code)
    return werkzeug.utils.redirect(url, code)
def redirect_with_hash(url, code=303):
    """Redirect while preserving ``location.hash``.

    Most IE and Safari versions decided not to preserve location.hash upon
    redirect, and even if IE10 pretends to support it, it still fails
    inexplicably in case of multiple redirects (and we do have some).
    See extensive test page at http://greenbytes.de/tech/tc/httpredirects/

    Firefox handles it natively, so it gets a real HTTP redirect; every
    other browser gets a tiny HTML page that redirects via JavaScript and
    re-appends the hash.
    """
    if request.httprequest.user_agent.browser in ('firefox',):
        return werkzeug.utils.redirect(url, code)
    return "<html><head><script>window.location = '%s' + location.hash;</script></head></html>" % url
class WebRequest(object):
""" Parent class for all Odoo Web request types, mostly deals with
initialization and setup of the request object (the dispatching itself has
to be handled by the subclasses)
:param httprequest: a wrapped werkzeug Request object
:type httprequest: :class:`werkzeug.wrappers.BaseRequest`
.. attribute:: httprequest
the original :class:`werkzeug.wrappers.Request` object provided to the
request
.. attribute:: params
:class:`~collections.Mapping` of request parameters, not generally
useful as they're provided directly to the handler method as keyword
arguments
"""
def __init__(self, httprequest):
self.httprequest = httprequest
self.httpresponse = None
self.httpsession = httprequest.session
self.disable_db = False
self.uid = None
self.endpoint = None
self.auth_method = None
self._cr = None
# prevents transaction commit, use when you catch an exception during handling
self._failed = None
# set db/uid trackers - they're cleaned up at the WSGI
# dispatching phase in openerp.service.wsgi_server.application
if self.db:
threading.current_thread().dbname = self.db
if self.session.uid:
threading.current_thread().uid = self.session.uid
@lazy_property
def env(self):
"""
The :class:`~openerp.api.Environment` bound to current request.
Raises a :class:`RuntimeError` if the current requests is not bound
to a database.
"""
if not self.db:
return RuntimeError('request not bound to a database')
return openerp.api.Environment(self.cr, self.uid, self.context)
@lazy_property
def context(self):
"""
:class:`~collections.Mapping` of context values for the current
request
"""
return dict(self.session.context)
@lazy_property
def lang(self):
self.session._fix_lang(self.context)
return self.context["lang"]
@lazy_property
def session(self):
"""
a :class:`OpenERPSession` holding the HTTP session data for the
current http session
"""
return self.httprequest.session
@property
def cr(self):
"""
:class:`~openerp.sql_db.Cursor` initialized for the current method
call.
Accessing the cursor when the current request uses the ``none``
authentication will raise an exception.
"""
# can not be a lazy_property because manual rollback in _call_function
# if already set (?)
if not self.db:
return RuntimeError('request not bound to a database')
if not self._cr:
self._cr = self.registry.cursor()
return self._cr
def __enter__(self):
_request_stack.push(self)
return self
def __exit__(self, exc_type, exc_value, traceback):
_request_stack.pop()
if self._cr:
if exc_type is None and not self._failed:
self._cr.commit()
self._cr.close()
# just to be sure no one tries to re-use the request
self.disable_db = True
self.uid = None
def set_handler(self, endpoint, arguments, auth):
# is this needed ?
arguments = dict((k, v) for k, v in arguments.iteritems()
if not k.startswith("_ignored_"))
endpoint.arguments = arguments
self.endpoint = endpoint
self.auth_method = auth
def _handle_exception(self, exception):
"""Called within an except block to allow converting exceptions
to abitrary responses. Anything returned (except None) will
be used as response."""
self._failed = exception # prevent tx commit
if not isinstance(exception, NO_POSTMORTEM) \
and not isinstance(exception, werkzeug.exceptions.HTTPException):
openerp.tools.debugger.post_mortem(
openerp.tools.config, sys.exc_info())
raise
def _call_function(self, *args, **kwargs):
request = self
if self.endpoint.routing['type'] != self._request_type:
msg = "%s, %s: Function declared as capable of handling request of type '%s' but called with a request of type '%s'"
params = (self.endpoint.original, self.httprequest.path, self.endpoint.routing['type'], self._request_type)
_logger.error(msg, *params)
raise werkzeug.exceptions.BadRequest(msg % params)
kwargs.update(self.endpoint.arguments)
# Backward for 7.0
if self.endpoint.first_arg_is_req:
args = (request,) + args
# Correct exception handling and concurency retry
@service_model.check
def checked_call(___dbname, *a, **kw):
# The decorator can call us more than once if there is an database error. In this
# case, the request cursor is unusable. Rollback transaction to create a new one.
if self._cr:
self._cr.rollback()
return self.endpoint(*a, **kw)
if self.db:
return checked_call(self.db, *args, **kwargs)
return self.endpoint(*args, **kwargs)
    @property
    def debug(self):
        """ Indicates whether the current request is in "debug" mode
        """
        # True when the URL carries a ``debug`` query-string argument.
        return 'debug' in self.httprequest.args
    @contextlib.contextmanager
    def registry_cr(self):
        # Deprecated accessor kept for backward compatibility only.
        warnings.warn('please use request.registry and request.cr directly', DeprecationWarning)
        yield (self.registry, self.cr)
    @lazy_property
    def session_id(self):
        """
        Opaque identifier for the :class:`OpenERPSession` instance of
        the current request.

        .. deprecated:: 8.0

            Use the ``sid`` attribute on :attr:`.session`
        """
        return self.session.sid
    @property
    def registry(self):
        """
        The registry to the database linked to this request. Can be ``None``
        if the current request uses the ``none`` authentication.

        .. deprecated:: 8.0

            use :attr:`.env`
        """
        # Looked up lazily so requests without a database never touch it.
        return openerp.modules.registry.RegistryManager.get(self.db) if self.db else None
    @property
    def db(self):
        """
        The database linked to this request. Can be ``None``
        if the current request uses the ``none`` authentication.
        """
        # ``disable_db`` is set once the request is finalized (__exit__),
        # guarding against re-use.
        return self.session.db if not self.disable_db else None
    @lazy_property
    def httpsession(self):
        """ HTTP session data

        .. deprecated:: 8.0

            Use :attr:`.session` instead.
        """
        return self.session
def route(route=None, **kw):
    """
    Decorator marking the decorated method as being a handler for
    requests. The method must be part of a subclass of ``Controller``.

    :param route: string or array. The route part that will determine which
                  http requests will match the decorated method. Can be a
                  single string or an array of strings. See werkzeug's routing
                  documentation for the format of route expression (
                  http://werkzeug.pocoo.org/docs/routing/ ).
    :param type: The type of request, can be ``'http'`` or ``'json'``.
    :param auth: The type of authentication method, can be one of the following:

                 * ``user``: The user must be authenticated and the current request
                   will perform using the rights of the user.
                 * ``admin``: The user may not be authenticated and the current request
                   will perform using the admin user.
                 * ``none``: The method is always active, even if there is no
                   database. Mainly used by the framework and authentication
                   modules. There request code will not have any facilities to access
                   the database nor have any configuration indicating the current
                   database nor the current user.
    :param methods: A sequence of http methods this route applies to. If not
                    specified, all methods are allowed.
    :param cors: The Access-Control-Allow-Origin cors directive value.
    """
    routing = kw.copy()
    # idiomatic membership test (was: ``not 'type' in routing``)
    assert 'type' not in routing or routing['type'] in ("http", "json")
    def decorator(f):
        if route:
            # Normalize to a list of route expressions.
            if isinstance(route, list):
                routes = route
            else:
                routes = [route]
            routing['routes'] = routes
        @functools.wraps(f)
        def response_wrap(*args, **kw):
            response = f(*args, **kw)
            # JSON handlers and explicit Response objects pass through
            # untouched; everything else is coerced into a Response.
            if isinstance(response, Response) or f.routing_type == 'json':
                return response

            if isinstance(response, basestring):
                return Response(response)

            if isinstance(response, werkzeug.exceptions.HTTPException):
                response = response.get_response(request.httprequest.environ)
            if isinstance(response, werkzeug.wrappers.BaseResponse):
                response = Response.force_type(response)
                response.set_default()
                return response

            _logger.warn("<function %s.%s> returns an invalid response type for an http request" % (f.__module__, f.__name__))
            return response
        # Expose routing metadata for the metaclass / routing_map builder.
        response_wrap.routing = routing
        response_wrap.original_func = f
        return response_wrap
    return decorator
class JsonRequest(WebRequest):
    """ Request handler for `JSON-RPC 2
    <http://www.jsonrpc.org/specification>`_ over HTTP

    * ``method`` is ignored
    * ``params`` must be a JSON object (not an array) and is passed as keyword
      arguments to the handler method
    * the handler method's result is returned as JSON-RPC ``result`` and
      wrapped in the `JSON-RPC Response
      <http://www.jsonrpc.org/specification#response_object>`_

    Successful request::

      --> {"jsonrpc": "2.0",
           "method": "call",
           "params": {"context": {},
                      "arg1": "val1" },
           "id": null}

      <-- {"jsonrpc": "2.0",
           "result": { "res1": "val1" },
           "id": null}

    Request producing an error::

      --> {"jsonrpc": "2.0",
           "method": "call",
           "params": {"context": {},
                      "arg1": "val1" },
           "id": null}

      <-- {"jsonrpc": "2.0",
           "error": {"code": 1,
                     "message": "End user error message.",
                     "data": {"code": "codestring",
                              "debug": "traceback" } },
           "id": null}

    """
    _request_type = "json"

    def __init__(self, *args):
        super(JsonRequest, self).__init__(*args)

        self.jsonp_handler = None

        args = self.httprequest.args
        jsonp = args.get('jsonp')
        self.jsonp = jsonp
        request = None
        request_id = args.get('id')

        # JSONP cannot carry a POST body cross-domain, so it is split in
        # two steps: a POST that stashes the payload in the session, then
        # a GET (by request id) that executes it.
        if jsonp and self.httprequest.method == 'POST':
            # jsonp 2 steps step1 POST: save call
            def handler():
                self.session['jsonp_request_%s' % (request_id,)] = self.httprequest.form['r']
                self.session.modified = True
                headers=[('Content-Type', 'text/plain; charset=utf-8')]
                r = werkzeug.wrappers.Response(request_id, headers=headers)
                return r
            self.jsonp_handler = handler
            return
        elif jsonp and args.get('r'):
            # jsonp method GET
            request = args.get('r')
        elif jsonp and request_id:
            # jsonp 2 steps step2 GET: run and return result
            request = self.session.pop('jsonp_request_%s' % (request_id,), '{}')
        else:
            # regular jsonrpc2
            request = self.httprequest.stream.read()

        # Read POST content or POST Form Data named "request"
        try:
            self.jsonrequest = simplejson.loads(request)
        except simplejson.JSONDecodeError:
            msg = 'Invalid JSON data: %r' % (request,)
            _logger.error('%s: %s', self.httprequest.path, msg)
            raise werkzeug.exceptions.BadRequest(msg)

        self.params = dict(self.jsonrequest.get("params", {}))
        self.context = self.params.pop('context', dict(self.session.context))

    def _json_response(self, result=None, error=None):
        # Build the JSON-RPC 2.0 envelope around *result* or *error*.
        response = {
            'jsonrpc': '2.0',
            'id': self.jsonrequest.get('id')
            }
        if error is not None:
            response['error'] = error
        if result is not None:
            response['result'] = result

        if self.jsonp:
            # When using jsonp we are called from another host; some
            # browsers (IE and Safari) do not allow third-party cookies,
            # so http sessions must be managed manually via session_id.
            response['session_id'] = self.session_id
            mime = 'application/javascript'
            body = "%s(%s);" % (self.jsonp, simplejson.dumps(response),)
        else:
            mime = 'application/json'
            body = simplejson.dumps(response)

        return Response(
            body, headers=[('Content-Type', mime),
                           ('Content-Length', len(body))])

    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
        be used as response."""
        try:
            return super(JsonRequest, self)._handle_exception(exception)
        except Exception:
            # Expected business errors (Warning / expired session) are not
            # logged as server failures.
            if not isinstance(exception, (openerp.exceptions.Warning, SessionExpiredException)):
                _logger.exception("Exception during JSON request handling.")
            error = {
                    'code': 200,
                    'message': "Odoo Server Error",
                    'data': serialize_exception(exception)
            }
            if isinstance(exception, AuthenticationError):
                error['code'] = 100
                error['message'] = "Odoo Session Invalid"
            if isinstance(exception, SessionExpiredException):
                error['code'] = 100
                error['message'] = "Odoo Session Expired"
            return self._json_response(error=error)

    def dispatch(self):
        # Step-1 JSONP POST only stores the payload; nothing to execute.
        if self.jsonp_handler:
            return self.jsonp_handler()
        try:
            # Optional RPC tracing (timings + memory) when the dedicated
            # loggers are enabled at DEBUG level.
            rpc_request_flag = rpc_request.isEnabledFor(logging.DEBUG)
            rpc_response_flag = rpc_response.isEnabledFor(logging.DEBUG)
            if rpc_request_flag or rpc_response_flag:
                endpoint = self.endpoint.method.__name__
                model = self.params.get('model')
                method = self.params.get('method')
                args = self.params.get('args', [])

                start_time = time.time()
                _, start_vms = 0, 0
                if psutil:
                    _, start_vms = psutil.Process(os.getpid()).get_memory_info()
                if rpc_request and rpc_response_flag:
                    rpc_request.debug('%s: %s %s, %s',
                        endpoint, model, method, pprint.pformat(args))

            result = self._call_function(**self.params)

            if rpc_request_flag or rpc_response_flag:
                end_time = time.time()
                _, end_vms = 0, 0
                if psutil:
                    _, end_vms = psutil.Process(os.getpid()).get_memory_info()
                logline = '%s: %s %s: time:%.3fs mem: %sk -> %sk (diff: %sk)' % (
                    endpoint, model, method, end_time - start_time, start_vms / 1024, end_vms / 1024, (end_vms - start_vms)/1024)
                if rpc_response_flag:
                    rpc_response.debug('%s, %s', logline, pprint.pformat(result))
                else:
                    rpc_request.debug(logline)

            return self._json_response(result)
        except Exception, e:
            return self._handle_exception(e)
def serialize_exception(e):
    """Build a JSON-serializable description of exception *e* suitable
    for the ``data`` field of a JSON-RPC error response.
    """
    serialized = {
        "name": type(e).__module__ + "." + type(e).__name__ if type(e).__module__ else type(e).__name__,
        "debug": traceback.format_exc(),
        "message": ustr(e),
        "arguments": to_jsonable(e.args),
    }
    # Tag well-known OpenERP exception classes so the client can choose
    # an appropriate presentation; first match wins.
    for klass, label in ((openerp.osv.osv.except_osv, "except_osv"),
                         (openerp.exceptions.Warning, "warning"),
                         (openerp.exceptions.AccessError, "access_error"),
                         (openerp.exceptions.AccessDenied, "access_denied")):
        if isinstance(e, klass):
            serialized["exception_type"] = label
            break
    return serialized
def to_jsonable(o):
    """Recursively convert *o* into JSON-serializable primitives.

    Scalars are returned unchanged, lists/tuples and dicts are converted
    element-wise (dict keys coerced to unicode), anything else falls back
    to openerp's ``ustr`` string coercion.
    """
    # Idiom: one isinstance call with a tuple instead of a chain of
    # or-ed isinstance checks (py2: also accept unicode/long).
    if o is None or isinstance(o, (str, unicode, int, long, bool, float)):
        return o
    if isinstance(o, (list, tuple)):
        return [to_jsonable(x) for x in o]
    if isinstance(o, dict):
        return {u"%s" % k: to_jsonable(v) for k, v in o.items()}
    return ustr(o)
def jsonrequest(f):
    """
    .. deprecated:: 8.0
        Use the :func:`~openerp.http.route` decorator instead.
    """
    # Map the legacy handler name onto a route; "index" serves the root.
    base = "" if f.__name__ == "index" else f.__name__.lstrip('/')
    urls = [base, base + "/<path:_ignored_path>"]
    return route(urls, type="json", auth="user", combine=True)(f)
class HttpRequest(WebRequest):
    """ Handler for the ``http`` request type.

    matched routing parameters, query string parameters, form_ parameters
    and files are passed to the handler method as keyword arguments.

    In case of name conflict, routing parameters have priority.

    The handler method's result can be:

    * a falsy value, in which case the HTTP response will be an
      `HTTP 204`_ (No Content)
    * a werkzeug Response object, which is returned as-is
    * a ``str`` or ``unicode``, will be wrapped in a Response object and
      interpreted as HTML

    .. _form: http://www.w3.org/TR/html401/interact/forms.html#h-17.13.4.2
    .. _HTTP 204: http://tools.ietf.org/html/rfc7231#section-6.3.5
    """
    _request_type = "http"

    def __init__(self, *args):
        super(HttpRequest, self).__init__(*args)
        # Merge query string, form data and uploaded files into a single
        # params dict; later updates win on key conflicts.
        params = self.httprequest.args.to_dict()
        params.update(self.httprequest.form.to_dict())
        params.update(self.httprequest.files.to_dict())
        params.pop('session_id', None)
        self.params = params

    def _handle_exception(self, exception):
        """Called within an except block to allow converting exceptions
        to arbitrary responses. Anything returned (except None) will
        be used as response."""
        try:
            return super(HttpRequest, self)._handle_exception(exception)
        except SessionExpiredException:
            # Redirect to the login page instead of erroring out, unless
            # the caller explicitly opted out with ``noredirect``.
            if not request.params.get('noredirect'):
                query = werkzeug.urls.url_encode({
                    'redirect': request.httprequest.url,
                })
                return werkzeug.utils.redirect('/web/login?%s' % query)
        except werkzeug.exceptions.HTTPException, e:
            return e

    def dispatch(self):
        # Answer CORS preflight requests directly for routes declaring a
        # ``cors`` directive.
        if request.httprequest.method == 'OPTIONS' and request.endpoint and request.endpoint.routing.get('cors'):
            headers = {
                'Access-Control-Max-Age': 60 * 60 * 24,
                'Access-Control-Allow-Headers': 'Origin, X-Requested-With, Content-Type, Accept'
            }
            return Response(status=200, headers=headers)

        r = self._call_function(**self.params)
        if not r:
            r = Response(status=204)  # no content
        return r

    def make_response(self, data, headers=None, cookies=None):
        """ Helper for non-HTML responses, or HTML responses with custom
        response headers or cookies.

        While handlers can just return the HTML markup of a page they want to
        send as a string if non-HTML data is returned they need to create a
        complete response object, or the returned data will not be correctly
        interpreted by the clients.

        :param basestring data: response body
        :param headers: HTTP headers to set on the response
        :type headers: ``[(name, value)]``
        :param collections.Mapping cookies: cookies to set on the client
        """
        response = Response(data, headers=headers)
        if cookies:
            for k, v in cookies.iteritems():
                response.set_cookie(k, v)
        return response

    def render(self, template, qcontext=None, lazy=True, **kw):
        """ Lazy render of a QWeb template.

        The actual rendering of the given template will occur at then end of
        the dispatching. Meanwhile, the template and/or qcontext can be
        altered or even replaced by a static response.

        :param basestring template: template to render
        :param dict qcontext: Rendering context to use
        :param bool lazy: whether the template rendering should be deferred
                          until the last possible moment
        :param kw: forwarded to werkzeug's Response object
        """
        response = Response(template=template, qcontext=qcontext, **kw)
        if not lazy:
            return response.render()
        return response

    def not_found(self, description=None):
        """ Shortcut for a `HTTP 404
        <http://tools.ietf.org/html/rfc7231#section-6.5.4>`_ (Not Found)
        response
        """
        return werkzeug.exceptions.NotFound(description)
def httprequest(f):
    """
    .. deprecated:: 8.0
        Use the :func:`~openerp.http.route` decorator instead.
    """
    # Map the legacy handler name onto a route; "index" serves the root.
    base = "" if f.__name__ == "index" else f.__name__.lstrip('/')
    urls = [base, base + "/<path:_ignored_path>"]
    return route(urls, type="http", auth="user", combine=True)(f)
#----------------------------------------------------------
# Controller and route registration
#----------------------------------------------------------
# module name -> imported python module (or None when not importable)
addons_module = {}
# module name -> parsed __openerp__.py manifest dict
addons_manifest = {}
# module name -> list of (dotted class name, class) controller entries
controllers_per_module = collections.defaultdict(list)
class ControllerType(type):
    """Metaclass registering Controller subclasses and propagating routing
    metadata (request type, 7.0-style first argument) onto their methods.
    """
    def __init__(cls, name, bases, attrs):
        super(ControllerType, cls).__init__(name, bases, attrs)

        # flag old-style methods with req as first argument
        for k, v in attrs.items():
            if inspect.isfunction(v) and hasattr(v, 'original_func'):
                # Set routing type on original functions
                routing_type = v.routing.get('type')
                parent = [claz for claz in bases if isinstance(claz, ControllerType) and hasattr(claz, k)]
                parent_routing_type = getattr(parent[0], k).original_func.routing_type if parent else routing_type or 'http'
                # An override may not change the request type declared by
                # the parent; keep the parent's type and warn.
                if routing_type is not None and routing_type is not parent_routing_type:
                    routing_type = parent_routing_type
                    _logger.warn("Subclass re-defines <function %s.%s.%s> with different type than original."
                                    " Will use original type: %r" % (cls.__module__, cls.__name__, k, parent_routing_type))
                v.original_func.routing_type = routing_type or parent_routing_type

                spec = inspect.getargspec(v.original_func)
                first_arg = spec.args[1] if len(spec.args) >= 2 else None
                if first_arg in ["req", "request"]:
                    v._first_arg_is_req = True

        # store the controller in the controllers list
        name_class = ("%s.%s" % (cls.__module__, cls.__name__), cls)
        class_path = name_class[0].split(".")
        if not class_path[:2] == ["openerp", "addons"]:
            module = ""
        else:
            # we want to know all modules that have controllers
            module = class_path[2]
        # but we only store controllers directly inheriting from Controller
        if not "Controller" in globals() or not Controller in bases:
            return
        controllers_per_module[module].append(name_class)
class Controller(object):
    """Base class for web controllers; subclassing it triggers
    registration through :class:`ControllerType`."""
    __metaclass__ = ControllerType
class EndPoint(object):
    """Callable wrapper pairing a controller method with its routing dict."""
    def __init__(self, method, routing):
        self.routing = routing
        self.arguments = {}
        self.method = method
        # Unwrap the undecorated function when the method was produced by
        # the ``route`` decorator.
        self.original = getattr(method, 'original_func', method)

    @property
    def first_arg_is_req(self):
        # Backward compatibility for 7.0-style handlers taking ``req``.
        return getattr(self.method, '_first_arg_is_req', False)

    def __call__(self, *args, **kwargs):
        return self.method(*args, **kwargs)
def routing_map(modules, nodb_only, converters=None):
    """Build the werkzeug routing map for the controllers of *modules*.

    :param modules: module names whose controllers should be mapped
    :param bool nodb_only: only map routes with ``auth="none"`` (used for
                           the no-database fallback map)
    :param converters: optional custom werkzeug URL converters
    """
    routing_map = werkzeug.routing.Map(strict_slashes=False, converters=converters)

    def get_subclasses(klass):
        # Only keep the most-derived subclasses that live in one of the
        # requested addon modules.
        def valid(c):
            return c.__module__.startswith('openerp.addons.') and c.__module__.split(".")[2] in modules
        subclasses = klass.__subclasses__()
        result = []
        for subclass in subclasses:
            if valid(subclass):
                result.extend(get_subclasses(subclass))
        if not result and valid(klass):
            result = [klass]
        return result

    uniq = lambda it: collections.OrderedDict((id(x), x) for x in it).values()

    for module in modules:
        if module not in controllers_per_module:
            continue

        for _, cls in controllers_per_module[module]:
            subclasses = uniq(c for c in get_subclasses(cls) if c is not cls)
            if subclasses:
                # Merge extensions into a synthetic class so overridden
                # handlers win while keeping the original routes.
                name = "%s (extended by %s)" % (cls.__name__, ', '.join(sub.__name__ for sub in subclasses))
                cls = type(name, tuple(reversed(subclasses)), {})

            o = cls()
            members = inspect.getmembers(o, inspect.ismethod)
            for _, mv in members:
                if hasattr(mv, 'routing'):
                    routing = dict(type='http', auth='user', methods=None, routes=None)
                    methods_done = list()
                    # update routing attributes from subclasses(auth, methods...)
                    for claz in reversed(mv.im_class.mro()):
                        fn = getattr(claz, mv.func_name, None)
                        if fn and hasattr(fn, 'routing') and fn not in methods_done:
                            methods_done.append(fn)
                            routing.update(fn.routing)
                    if not nodb_only or routing['auth'] == "none":
                        assert routing['routes'], "Method %r has not route defined" % mv
                        endpoint = EndPoint(mv, routing)
                        for url in routing['routes']:
                            if routing.get("combine", False):
                                # deprecated v7 declaration
                                url = o._cp_path.rstrip('/') + '/' + url.lstrip('/')
                                if url.endswith("/") and len(url) > 1:
                                    url = url[: -1]
                            xtra_keys = 'defaults subdomain build_only strict_slashes redirect_to alias host'.split()
                            kw = {k: routing[k] for k in xtra_keys if k in routing}
                            routing_map.add(werkzeug.routing.Rule(url, endpoint=endpoint, methods=routing['methods'], **kw))
    return routing_map
#----------------------------------------------------------
# HTTP Sessions
#----------------------------------------------------------
class AuthenticationError(Exception):
    """Raised when a login attempt against the server fails."""
    pass
class SessionExpiredException(Exception):
    """Raised when the HTTP session no longer carries valid credentials."""
    pass
class Service(object):
    """
    .. deprecated:: 8.0
        Use :func:`dispatch_rpc` instead.
    """
    def __init__(self, session, service_name):
        self.service_name = service_name
        self.session = session

    def __getattr__(self, method):
        # Each attribute access yields a proxy that performs an RPC call
        # for that method name when invoked.
        def proxy_method(*args):
            return dispatch_rpc(self.service_name, method, args)
        return proxy_method
class Model(object):
    """
    .. deprecated:: 8.0
        Use the registry and cursor in :data:`request` instead.
    """
    def __init__(self, session, model):
        self.session = session
        self.model = model
        self.proxy = self.session.proxy('object')

    def __getattr__(self, method):
        self.session.assert_valid()
        def proxy(*args, **kw):
            # Can't provide any retro-compatibility for this case, so we check it and raise an Exception
            # to tell the programmer to adapt his code
            if not request.db or not request.uid or self.session.db != request.db \
                or self.session.uid != request.uid:
                raise Exception("Trying to use Model with badly configured database or user.")

            # Private model methods are never exposed.
            if method.startswith('_'):
                raise Exception("Access denied")
            mod = request.registry[self.model]
            meth = getattr(mod, method)
            # make sure to instantiate an environment
            cr = request.env.cr
            result = meth(cr, request.uid, *args, **kw)
            # reorder read: results come back in the order of the
            # requested ids, dropping missing records
            if method == "read":
                if isinstance(result, list) and len(result) > 0 and "id" in result[0]:
                    index = {}
                    for r in result:
                        index[r['id']] = r
                    result = [index[x] for x in args[0] if x in index]
            return result
        return proxy
class OpenERPSession(werkzeug.contrib.sessions.Session):
    """Werkzeug session storing OpenERP authentication state (db, uid,
    login, password, context).

    Attribute access is proxied onto the underlying dict once ``inited``
    is set, so ``session.db`` and ``session['db']`` are interchangeable.
    """
    def __init__(self, *args, **kwargs):
        self.inited = False
        self.modified = False
        self.rotate = False
        super(OpenERPSession, self).__init__(*args, **kwargs)
        self.inited = True
        self._default_values()
        self.modified = False

    def __getattr__(self, attr):
        # Unknown attributes resolve to session items (None when absent).
        return self.get(attr, None)
    def __setattr__(self, k, v):
        # After __init__, assignments to names that are not real instance
        # attributes are stored as session items instead.
        if getattr(self, "inited", False):
            try:
                object.__getattribute__(self, k)
            except:
                return self.__setitem__(k, v)
        object.__setattr__(self, k, v)

    def authenticate(self, db, login=None, password=None, uid=None):
        """
        Authenticate the current user with the given db, login and
        password. If successful, store the authentication parameters in the
        current session and request.

        :param uid: If not None, that user id will be used instead the login
                    to authenticate the user.
        """

        if uid is None:
            wsgienv = request.httprequest.environ
            env = dict(
                base_location=request.httprequest.url_root.rstrip('/'),
                HTTP_HOST=wsgienv['HTTP_HOST'],
                REMOTE_ADDR=wsgienv['REMOTE_ADDR'],
            )
            uid = dispatch_rpc('common', 'authenticate', [db, login, password, env])
        else:
            security.check(db, uid, password)
        # Rotate the session id on (re-)login to prevent session fixation.
        self.rotate = True
        self.db = db
        self.uid = uid
        self.login = login
        self.password = password
        request.uid = uid
        request.disable_db = False

        if uid: self.get_context()
        return uid

    def check_security(self):
        """
        Check the current authentication parameters to know if those are still
        valid. This method should be called at each request. If the
        authentication fails, a :exc:`SessionExpiredException` is raised.
        """
        if not self.db or not self.uid:
            raise SessionExpiredException("Session expired")
        security.check(self.db, self.uid, self.password)

    def logout(self, keep_db=False):
        for k in self.keys():
            if not (keep_db and k == 'db'):
                del self[k]
        self._default_values()
        # New session id after logout, same fixation concern as login.
        self.rotate = True

    def _default_values(self):
        # Ensure the authentication keys always exist in the session.
        self.setdefault("db", None)
        self.setdefault("uid", None)
        self.setdefault("login", None)
        self.setdefault("password", None)
        self.setdefault("context", {})

    def get_context(self):
        """
        Re-initializes the current user's session context (based on his
        preferences) by calling res.users.get_context() with the old context.

        :returns: the new context
        """
        assert self.uid, "The user needs to be logged-in to initialize his context"
        self.context = request.registry.get('res.users').context_get(request.cr, request.uid) or {}
        self.context['uid'] = self.uid
        self._fix_lang(self.context)
        return self.context

    def _fix_lang(self, context):
        """ OpenERP provides languages which may not make sense and/or may not
        be understood by the web client's libraries.

        Fix those here.

        :param dict context: context to fix
        """
        lang = context.get('lang')

        # inane OpenERP locale
        if lang == 'ar_AR':
            lang = 'ar'

        # lang to lang_REGION (datejs only handles lang_REGION, no bare langs)
        if lang in babel.core.LOCALE_ALIASES:
            lang = babel.core.LOCALE_ALIASES[lang]

        context['lang'] = lang or 'en_US'

    # Deprecated to be removed in 9

    """
        Damn properties for retro-compatibility. All of that is deprecated,
        all of that.
    """
    @property
    def _db(self):
        return self.db
    @_db.setter
    def _db(self, value):
        self.db = value
    @property
    def _uid(self):
        return self.uid
    @_uid.setter
    def _uid(self, value):
        self.uid = value
    @property
    def _login(self):
        return self.login
    @_login.setter
    def _login(self, value):
        self.login = value
    @property
    def _password(self):
        return self.password
    @_password.setter
    def _password(self, value):
        self.password = value

    def send(self, service_name, method, *args):
        """
        .. deprecated:: 8.0
            Use :func:`dispatch_rpc` instead.
        """
        return dispatch_rpc(service_name, method, args)

    def proxy(self, service):
        """
        .. deprecated:: 8.0
            Use :func:`dispatch_rpc` instead.
        """
        return Service(self, service)

    def assert_valid(self, force=False):
        """
        .. deprecated:: 8.0
            Use :meth:`check_security` instead.

        Ensures this session is valid (logged into the openerp server)
        """
        if self.uid and not force:
            return
        # TODO use authenticate instead of login
        self.uid = self.proxy("common").login(self.db, self.login, self.password)
        if not self.uid:
            raise AuthenticationError("Authentication failure")

    def ensure_valid(self):
        """
        .. deprecated:: 8.0
            Use :meth:`check_security` instead.
        """
        if self.uid:
            try:
                self.assert_valid(True)
            except Exception:
                self.uid = None

    def execute(self, model, func, *l, **d):
        """
        .. deprecated:: 8.0
            Use the registry and cursor in :data:`request` instead.
        """
        model = self.model(model)
        r = getattr(model, func)(*l, **d)
        return r

    def exec_workflow(self, model, id, signal):
        """
        .. deprecated:: 8.0
            Use the registry and cursor in :data:`request` instead.
        """
        self.assert_valid()
        r = self.proxy('object').exec_workflow(self.db, self.uid, self.password, model, signal, id)
        return r

    def model(self, model):
        """
        .. deprecated:: 8.0
            Use the registry and cursor in :data:`request` instead.

        Get an RPC proxy for the object ``model``, bound to this session.

        :param model: an OpenERP model name
        :type model: str
        :rtype: a model object
        """

        if not self.db:
            raise SessionExpiredException("Session expired")

        return Model(self, model)

    def save_action(self, action):
        """
        This method store an action object in the session and returns an integer
        identifying that action. The method get_action() can be used to get
        back the action.

        :param the_action: The action to save in the session.
        :type the_action: anything
        :return: A key identifying the saved action.
        :rtype: integer
        """
        saved_actions = self.setdefault('saved_actions', {"next": 1, "actions": {}})
        # we don't allow more than 10 stored actions
        if len(saved_actions["actions"]) >= 10:
            # drop the oldest (smallest-keyed) action
            del saved_actions["actions"][min(saved_actions["actions"])]
        key = saved_actions["next"]
        saved_actions["actions"][key] = action
        saved_actions["next"] = key + 1
        self.modified = True
        return key

    def get_action(self, key):
        """
        Gets back a previously saved action. This method can return None if the action
        was saved since too much time (this case should be handled in a smart way).

        :param key: The key given by save_action()
        :type key: integer
        :return: The saved action or None.
        :rtype: anything
        """
        saved_actions = self.get('saved_actions', {})
        return saved_actions.get("actions", {}).get(key)
def session_gc(session_store):
    """Probabilistically purge session files untouched for over a week.

    Runs on roughly 0.1% of calls so the directory scan cost is amortized
    across requests.
    """
    if random.random() >= 0.001:
        return
    # we keep session one week
    cutoff = time.time() - 60 * 60 * 24 * 7
    for fname in os.listdir(session_store.path):
        path = os.path.join(session_store.path, fname)
        try:
            if os.path.getmtime(path) < cutoff:
                os.unlink(path)
        except OSError:
            # file may vanish concurrently; best-effort cleanup
            pass
#----------------------------------------------------------
# WSGI Layer
#----------------------------------------------------------
# Add potentially missing (older ubuntu) font mime types
mimetypes.add_type('application/font-woff', '.woff')
mimetypes.add_type('application/vnd.ms-fontobject', '.eot')
mimetypes.add_type('application/x-font-ttf', '.ttf')
class Response(werkzeug.wrappers.Response):
    """ Response object passed through controller route chain.

    In addition to the :class:`werkzeug.wrappers.Response` parameters, this
    class's constructor can take the following additional parameters
    for QWeb Lazy Rendering.

    :param basestring template: template to render
    :param dict qcontext: Rendering context to use
    :param int uid: User id to use for the ir.ui.view render call,
                    ``None`` to use the request's user (the default)

    these attributes are available as parameters on the Response object and
    can be altered at any time before rendering

    Also exposes all the attributes and methods of
    :class:`werkzeug.wrappers.Response`.
    """
    default_mimetype = 'text/html'
    def __init__(self, *args, **kw):
        # Extract QWeb-specific kwargs before handing over to werkzeug.
        template = kw.pop('template', None)
        qcontext = kw.pop('qcontext', None)
        uid = kw.pop('uid', None)
        super(Response, self).__init__(*args, **kw)
        self.set_default(template, qcontext, uid)

    def set_default(self, template=None, qcontext=None, uid=None):
        self.template = template
        self.qcontext = qcontext or dict()
        self.uid = uid
        # Support for Cross-Origin Resource Sharing
        if request.endpoint and 'cors' in request.endpoint.routing:
            self.headers.set('Access-Control-Allow-Origin', request.endpoint.routing['cors'])
            methods = 'GET, POST'
            if request.endpoint.routing['type'] == 'json':
                methods = 'POST'
            elif request.endpoint.routing.get('methods'):
                methods = ', '.join(request.endpoint.routing['methods'])
            self.headers.set('Access-Control-Allow-Methods', methods)

    @property
    def is_qweb(self):
        # True while a template is pending rendering.
        return self.template is not None

    def render(self):
        """ Renders the Response's template, returns the result
        """
        view_obj = request.registry["ir.ui.view"]
        uid = self.uid or request.uid or openerp.SUPERUSER_ID
        return view_obj.render(
            request.cr, uid, self.template, self.qcontext,
            context=request.context)

    def flatten(self):
        """ Forces the rendering of the response's template, sets the result
        as response body and unsets :attr:`.template`
        """
        self.response.append(self.render())
        self.template = None
class DisableCacheMiddleware(object):
    """WSGI middleware that strips caching headers when the referer's
    query string indicates debug mode, and always drops Last-Modified.
    """
    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        def start_wrapped(status, headers):
            referer = environ.get('HTTP_REFERER', '')
            parsed = urlparse.urlparse(referer)
            debug = parsed.query.count('debug') >= 1

            unwanted_keys = ['Last-Modified']
            new_headers = []
            if debug:
                # In debug mode, disable client-side caching entirely.
                new_headers = [('Cache-Control', 'no-cache')]
                unwanted_keys += ['Expires', 'Etag', 'Cache-Control']

            kept = [(k, v) for k, v in headers if k not in unwanted_keys]
            start_response(status, new_headers + kept)
        return self.app(environ, start_wrapped)
class Root(object):
"""Root WSGI application for the OpenERP Web Client.
"""
def __init__(self):
self._loaded = False
@lazy_property
def session_store(self):
# Setup http sessions
path = openerp.tools.config.session_dir
_logger.debug('HTTP sessions stored in: %s', path)
return werkzeug.contrib.sessions.FilesystemSessionStore(path, session_class=OpenERPSession)
@lazy_property
def nodb_routing_map(self):
_logger.info("Generating nondb routing")
return routing_map([''] + openerp.conf.server_wide_modules, True)
def __call__(self, environ, start_response):
""" Handle a WSGI request
"""
if not self._loaded:
self._loaded = True
self.load_addons()
return self.dispatch(environ, start_response)
def load_addons(self):
""" Load all addons from addons path containing static files and
controllers and configure them. """
# TODO should we move this to ir.http so that only configured modules are served ?
statics = {}
for addons_path in openerp.modules.module.ad_paths:
for module in sorted(os.listdir(str(addons_path))):
if module not in addons_module:
manifest_path = os.path.join(addons_path, module, '__openerp__.py')
path_static = os.path.join(addons_path, module, 'static')
if os.path.isfile(manifest_path) and os.path.isdir(path_static):
manifest = ast.literal_eval(open(manifest_path).read())
manifest['addons_path'] = addons_path
_logger.debug("Loading %s", module)
if 'openerp.addons' in sys.modules:
m = __import__('openerp.addons.' + module)
else:
m = None
addons_module[module] = m
addons_manifest[module] = manifest
statics['/%s/static' % module] = path_static
if statics:
_logger.info("HTTP Configuring static files")
app = werkzeug.wsgi.SharedDataMiddleware(self.dispatch, statics, cache_timeout=STATIC_CACHE)
self.dispatch = DisableCacheMiddleware(app)
def setup_session(self, httprequest):
# recover or create session
session_gc(self.session_store)
sid = httprequest.args.get('session_id')
explicit_session = True
if not sid:
sid = httprequest.headers.get("X-Openerp-Session-Id")
if not sid:
sid = httprequest.cookies.get('session_id')
explicit_session = False
if sid is None:
httprequest.session = self.session_store.new()
else:
httprequest.session = self.session_store.get(sid)
return explicit_session
def setup_db(self, httprequest):
db = httprequest.session.db
# Check if session.db is legit
if db:
if db not in db_filter([db], httprequest=httprequest):
_logger.warn("Logged into database '%s', but dbfilter "
"rejects it; logging session out.", db)
httprequest.session.logout()
db = None
if not db:
httprequest.session.db = db_monodb(httprequest)
def setup_lang(self, httprequest):
if not "lang" in httprequest.session.context:
lang = httprequest.accept_languages.best or "en_US"
lang = babel.core.LOCALE_ALIASES.get(lang, lang).replace('-', '_')
httprequest.session.context["lang"] = lang
def get_request(self, httprequest):
# deduce type of request
if httprequest.args.get('jsonp'):
return JsonRequest(httprequest)
if httprequest.mimetype in ("application/json", "application/json-rpc"):
return JsonRequest(httprequest)
else:
return HttpRequest(httprequest)
def get_response(self, httprequest, result, explicit_session):
if isinstance(result, Response) and result.is_qweb:
try:
result.flatten()
except(Exception), e:
if request.db:
result = request.registry['ir.http']._handle_exception(e)
else:
raise
if isinstance(result, basestring):
response = Response(result, mimetype='text/html')
else:
response = result
if httprequest.session.should_save:
if httprequest.session.rotate:
self.session_store.delete(httprequest.session)
httprequest.session.sid = self.session_store.generate_key()
httprequest.session.modified = True
self.session_store.save(httprequest.session)
# We must not set the cookie if the session id was specified using a http header or a GET parameter.
# There are two reasons to this:
# - When using one of those two means we consider that we are overriding the cookie, which means creating a new
# session on top of an already existing session and we don't want to create a mess with the 'normal' session
# (the one using the cookie). That is a special feature of the Session Javascript class.
# - It could allow session fixation attacks.
if not explicit_session and hasattr(response, 'set_cookie'):
response.set_cookie('session_id', httprequest.session.sid, max_age=90 * 24 * 60 * 60)
return response
def dispatch(self, environ, start_response):
    """
    Performs the actual WSGI dispatching for the application.
    """
    try:
        httprequest = werkzeug.wrappers.Request(environ)
        httprequest.app = self
        # Session/db/lang setup must happen before building the request
        # wrapper, which reads from the session.
        explicit_session = self.setup_session(httprequest)
        self.setup_db(httprequest)
        self.setup_lang(httprequest)
        request = self.get_request(httprequest)
        def _dispatch_nodb():
            # Fallback dispatching on the database-less routing map
            # (used when no db is selected or the registry is broken).
            try:
                func, arguments = self.nodb_routing_map.bind_to_environ(request.httprequest.environ).match()
            except werkzeug.exceptions.HTTPException, e:
                return request._handle_exception(e)
            request.set_handler(func, arguments, "none")
            result = request.dispatch()
            return result
        with request:
            db = request.session.db
            if db:
                openerp.modules.registry.RegistryManager.check_registry_signaling(db)
                try:
                    with openerp.tools.mute_logger('openerp.sql_db'):
                        ir_http = request.registry['ir.http']
                except (AttributeError, psycopg2.OperationalError):
                    # psycopg2 error or attribute error while constructing
                    # the registry. That means the database probably does
                    # not exists anymore or the code doesnt match the db.
                    # Log the user out and fall back to nodb
                    request.session.logout()
                    result = _dispatch_nodb()
                else:
                    result = ir_http._dispatch()
                    openerp.modules.registry.RegistryManager.signal_caches_change(db)
            else:
                result = _dispatch_nodb()
            response = self.get_response(httprequest, result, explicit_session)
        return response(environ, start_response)
    except werkzeug.exceptions.HTTPException, e:
        # Let werkzeug HTTP exceptions render themselves as the response.
        return e(environ, start_response)
def get_db_router(self, db):
    """Return the routing map to use for *db*.

    With a database bound, delegate to its ``ir.http`` routing map;
    otherwise fall back to the database-less map.
    """
    if db:
        return request.registry['ir.http'].routing_map()
    return self.nodb_routing_map
def db_list(force=False, httprequest=None):
    """List available databases via the ``db`` RPC service, filtered
    down to the ones visible for this request's host (see db_filter)."""
    all_dbs = dispatch_rpc("db", "list", [force])
    return db_filter(all_dbs, httprequest=httprequest)
def db_filter(dbs, httprequest=None):
    """Keep only the databases whose name matches the configured
    ``dbfilter`` pattern, after substituting ``%h`` (host) and ``%d``
    (first domain component, skipping a leading ``www``)."""
    httprequest = httprequest or request.httprequest
    host = httprequest.environ.get('HTTP_HOST', '').split(':')[0]
    domain, _, rest = host.partition('.')
    if domain == "www" and rest:
        domain = rest.partition('.')[0]
    pattern = openerp.tools.config['dbfilter'].replace('%h', host).replace('%d', domain)
    return [name for name in dbs if re.match(pattern, name)]
def db_monodb(httprequest=None):
    """
    Magic function to find the current database.

    Implementation details:

    * Magic
    * More magic

    Returns ``None`` if the magic is not magic enough.
    """
    httprequest = httprequest or request.httprequest
    candidates = db_list(True, httprequest)
    # Prefer the database already bound to the session when it still exists.
    session_db = httprequest.session.db
    if session_db in candidates:
        return session_db
    # With a single visible database there is no ambiguity.
    if len(candidates) == 1:
        return candidates[0]
    return None
def send_file(filepath_or_fp, mimetype=None, as_attachment=False, filename=None, mtime=None,
              add_etags=True, cache_timeout=STATIC_CACHE, conditional=True):
    """This is a modified version of Flask's send_file()

    Sends the contents of a file to the client. This will use the
    most efficient method available and configured. By default it will
    try to use the WSGI server's file_wrapper support.

    By default it will try to guess the mimetype for you, but you can
    also explicitly provide one. For extra security you probably want
    to send certain files as attachment (HTML for instance). The mimetype
    guessing requires a `filename` or an `attachment_filename` to be
    provided.

    Please never pass filenames to this function from user sources without
    checking them first.

    :param filepath_or_fp: the filename of the file to send.
                           Alternatively a file object might be provided
                           in which case `X-Sendfile` might not work and
                           fall back to the traditional method. Make sure
                           that the file pointer is positioned at the start
                           of data to send before calling :func:`send_file`.
    :param mimetype: the mimetype of the file if provided, otherwise
                     auto detection happens.
    :param as_attachment: set to `True` if you want to send this file with
                          a ``Content-Disposition: attachment`` header.
    :param filename: the filename for the attachment if it differs from the file's filename or
                     if using file object without 'name' attribute (eg: E-tags with StringIO).
    :param mtime: last modification time to use for contitional response.
    :param add_etags: set to `False` to disable attaching of etags.
    :param conditional: set to `False` to disable conditional responses.
    :param cache_timeout: the timeout in seconds for the headers.
    """
    # Accept either a path (open it ourselves) or an already-open file object.
    if isinstance(filepath_or_fp, (str, unicode)):
        if not filename:
            filename = os.path.basename(filepath_or_fp)
        file = open(filepath_or_fp, 'rb')
        if not mtime:
            mtime = os.path.getmtime(filepath_or_fp)
    else:
        file = filepath_or_fp
        if not filename:
            filename = getattr(file, 'name', None)
    # Measure the payload size by seeking to the end, then rewind.
    file.seek(0, 2)
    size = file.tell()
    file.seek(0)
    if mimetype is None and filename:
        mimetype = mimetypes.guess_type(filename)[0]
    if mimetype is None:
        mimetype = 'application/octet-stream'
    headers = werkzeug.datastructures.Headers()
    if as_attachment:
        if filename is None:
            raise TypeError('filename unavailable, required for sending as attachment')
        headers.add('Content-Disposition', 'attachment', filename=filename)
        headers['Content-Length'] = size
    # direct_passthrough lets the WSGI server stream the wrapped file itself.
    data = wrap_file(request.httprequest.environ, file)
    rv = Response(data, mimetype=mimetype, headers=headers,
                  direct_passthrough=True)
    # String mtimes (server datetime format) are parsed; unparsable ones are
    # dropped rather than breaking the response.
    if isinstance(mtime, str):
        try:
            server_format = openerp.tools.misc.DEFAULT_SERVER_DATETIME_FORMAT
            mtime = datetime.datetime.strptime(mtime.split('.')[0], server_format)
        except Exception:
            mtime = None
    if mtime is not None:
        rv.last_modified = mtime
    rv.cache_control.public = True
    if cache_timeout:
        rv.cache_control.max_age = cache_timeout
        rv.expires = int(time.time() + cache_timeout)
    if add_etags and filename and mtime:
        rv.set_etag('odoo-%s-%s-%s' % (
            mtime,
            size,
            adler32(
                filename.encode('utf-8') if isinstance(filename, unicode)
                else filename
            ) & 0xffffffff
        ))
        if conditional:
            rv = rv.make_conditional(request.httprequest)
            # make sure we don't send x-sendfile for servers that
            # ignore the 304 status code for x-sendfile.
            if rv.status_code == 304:
                rv.headers.pop('x-sendfile', None)
    return rv
#----------------------------------------------------------
# RPC controller
#----------------------------------------------------------
class CommonController(Controller):
    """RPC endpoints shared by all clients; no database-specific auth."""

    @route('/jsonrpc', type='json', auth="none")
    def jsonrpc(self, service, method, args):
        """ Method used by client APIs to contact OpenERP. """
        return dispatch_rpc(service, method, args)

    @route('/gen_session_id', type='json', auth="none")
    def gen_session_id(self):
        # Hand out a fresh, unused session id from the shared session store.
        nsession = root.session_store.new()
        return nsession.sid
# register main wsgi handler
# Single Root application instance exposed to the OpenERP WSGI server stack.
root = Root()
openerp.service.wsgi_server.register_wsgi_handler(root)
# vim:et:ts=4:sw=4:
| agpl-3.0 |
hobson/pug-dj | pug/dj/miner/management/commands/inspect_dbs.py | 1 | 4927 | import sys
import os
import re
from StringIO import StringIO
from django.conf import settings
from django.core.management import call_command
# 1. turn this into a management command `inspectdbs`
# 2. create a Model mixin that stores attributes in the Meta class per this:
# http://stackoverflow.com/questions/1088431/adding-attributes-into-django-models-meta-class
from pug.nlp.util import make_name
def inspect_dbs(output_dir='.', db_names=None, db_aliases=None, alias_prefix='SEC_', db_alias_maker=make_name, verbosity=1):
    """Run Django's ``inspectdb`` for each configured database and write a
    post-processed ``models_<alias>.py`` file per database into *output_dir*.

    Each output line is rewritten by the first matching rule in
    ``inspect_dbs.seds`` (defined below): optional ``before``/``after``
    text is injected and an optional regex substitution applied.
    """
    db_names = db_names or settings.INSPECT_DB_NAMES
    db_aliases = db_aliases or [alias_prefix + db_alias_maker(name) for name in db_names]
    for db_name, db_alias in zip(db_names, db_aliases):
        fn = os.path.join(os.path.realpath(output_dir), 'models_%s.py' % db_alias)
        if verbosity > 0:
            sys.stderr.write('Writing model definitions to file %r for db_alias %r.\n' % (fn, db_alias))
        # Capture inspectdb output in memory so it can be edited line by line.
        models_py_buffer = StringIO()
        call_command('inspectdb', database=db_alias, verbosity=0, traceback=False, interactive=False, stdout=models_py_buffer)
        models_py_buffer.seek(0)
        with open(fn, 'w') as fp:
            line = models_py_buffer.readline()
            # NOTE(review): `fp` is always truthy here, so the loop really
            # only terminates when the buffer is exhausted.
            while line and fp:
                if verbosity > 2:
                    sys.stderr.write('READ: %r\n' % line)
                seditted_lines = line
                for sed in inspect_dbs.seds:
                    if sed['regex'].match(line):
                        seditted_lines = sed.get('before', '').format(**{'db_name': db_name, 'alias_prefix': alias_prefix}) or ''
                        seditted_lines += line if sed.get('sub', None) is None else sed['regex'].sub(sed['sub'], line)
                        seditted_lines += sed.get('after', '').format(**{'db_name': db_name, 'alias_prefix': alias_prefix}) or ''
                        if verbosity > 1:
                            print 'WAS: %r' % line
                            print ' IS: %r' % seditted_lines
                        break; # stop processing the regexes if one already matched this line
                if verbosity > 2:
                    sys.stderr.write('WRITING: %r\n' % seditted_lines)
                # TODO: Add a multi-line edit that deals with multiple primary_key=True fields
                # * delete second and subsequent primary_key=True arguments within the same Model
                # * add a unique_together constraint on all the primary_keys that were originally there
                fp.write(seditted_lines)
                line = models_py_buffer.readline()
# Line-rewriting rules applied by inspect_dbs() to `inspectdb` output.
# Each rule carries a 'regex' to match, an optional 'sub' replacement, and
# optional 'before'/'after' text (str.format()-ed with db_name/alias_prefix)
# injected around the matched line.  Only the FIRST matching rule applies.
inspect_dbs.seds = [
    {
        'regex': re.compile(r'^from django[.]\w+\simport\smodels(\s*[#].*)?$'),
        'after': '\nfrom pug import decorators\n',
    },
    {
        'regex': re.compile(r'^class\s+\w+\(models[.]Model\):(\s*[#].*)?$'),
        'before': "\n@decorators.represent\n@decorators.dbname(db_name='{db_name}', alias_prefix='{alias_prefix}')\n",
    },
    {
        'regex': re.compile(r'^(\s+\w+\s*=\s*models[.])AutoField\(\)'),
        'sub': r"\1IntegerField(primary_key=True)",
    },
    { # not strictly necessary, but since this is intended for read-only databases, probably a good idea to change AutoFields to IntegerFields, even if already a primary_key
        'regex': re.compile(r'^(\s+\w+\s*=\s*models[.])AutoField\((.*)(primary_key\=True)(.*)\)'),
        'sub': r"\1IntegerField(\2\3\4)",
    },
    { # any AutoFields not yet turned to IntegerFields need to have their primary_key set
        'regex': re.compile(r'^(\s+\w+\s*=\s*models[.])AutoField\((.+)\)'),
        'sub': r"\1IntegerField(\2, primary_key=True)",
    },
    {
        'regex': re.compile(r'^(\s+\w+\s*=\s*models[.])BooleanField\((.+)\)'),
        'sub': r"\1NullBooleanField(\2)",
    },
    { # no need to do anything if a primary_key argument is set
        'regex': re.compile(r'^\s+id\s*=\s*models[.]\w+Field\(.*primary_key\=True.*\)'),
    },
    { # need to set PK for fields named id, FIXME: but should really only do this if another PK isn't labeled as such already
        'regex': re.compile(r'^(\s+)id(\s*=\s*models[.]\w+)Field\(([^)\n]*)([^p)\s\n][^r)\s\n][^i)\s\n][^)\s\n]*[^u)\s\n][^e)\s\n][^)\s\n]*)\)$'),
        'sub': r"\1id\2Field(\3, primary_key=True)",
    },
    # { # need to rename id fields that are not the primary_key
    #     'regex': re.compile(r'^(\s+)id(\s*=\s*models[.]\w+)Field\((.*)\)'),
    #     'sub': r"\1id_\2Field(\3, db_column='id')",
    # },
    { # no primary keys should be allowed to be null
        'regex': re.compile(r'null[=]True(.*)primary_key[=]True'),
        'sub': r"null=False\1primary_key=True",
    },
    # { # have to get rid of the useless ''self'' FK reference
    #     'regex': re.compile(r"^class\s+(\w+)\(models[.]Model\):(\s*[#].*)?\n(.*)[']{2}self[']{2}"),
    #     'sub': r"^class \1(models.Model):\2\n\3'\1'",
    # }
]
| mit |
bpow/gemini | gemini/scripts/gemini_install.py | 2 | 18433 | #!/usr/bin/env python
"""Installer for gemini: a lightweight db framework for disease and population genetics.
https://github.com/arq5x/gemini
Handles installation of:
- Required third party software
- Required Python libraries
- Gemini application
- Associated data files
Requires: Python 2.7 (or 2.6 and argparse), git, and compilers (gcc, g++)
Run gemini_install.py -h for usage.
"""
import argparse
import platform
import os
import shutil
import subprocess
import sys
import urllib2
import urllib
# Download locations for requirements files, source repos and the
# Miniconda installer (the %s/%s slots are OS name and "_64" suffix).
remotes = {"requirements_pip": "https://raw.github.com/arq5x/gemini/master/requirements.txt",
           "requirements_conda": "",
           "versioned_installations": "https://raw.githubusercontent.com/arq5x/gemini/master/versioning/",
           "cloudbiolinux": "https://github.com/chapmanb/cloudbiolinux.git",
           "gemini": "https://github.com/arq5x/gemini.git",
           "anaconda": "http://repo.continuum.io/miniconda/Miniconda-3.7.0-%s-x86%s.sh"}
# Overrides for the development ("unstable") install.
remotes_dev = remotes.copy()
remotes_dev.update({
    "requirements_pip": "https://raw.github.com/arq5x/gemini/dev/requirements.txt",
    "gemini": "git+https://github.com/arq5x/gemini.git@dev",
    "requirements_conda": "https://raw.githubusercontent.com/arq5x/gemini/dev/versioning/unstable/requirements_conda.txt",
})
# Overrides for the "bp" (brentp fork) test install.
# BUG FIX: the original did ``remotes_bp = remotes_dev`` (an alias, not a
# copy), so updating the bp mapping silently rewrote the unstable one too.
remotes_bp = remotes_dev.copy()
remotes_bp.update({
    "requirements_pip": "https://raw.github.com/brentp/gemini/dev/requirements.txt",
    "gemini": "git+https://github.com/brentp/gemini.git@dev",
    "requirements_conda": "https://raw.githubusercontent.com/brentp/gemini/dev/versioning/unstable/requirements_conda.txt",
})
def main(args, remotes=remotes):
    """Drive the full install: dependency checks, isolated Anaconda python,
    gemini itself, optional third-party tools, data files and test suite.
    All work happens in a temporary directory that is removed at the end.
    """
    check_dependencies()
    clean_env_variables()
    work_dir = os.path.join(os.getcwd(), "tmpgemini_install")
    if not os.path.exists(work_dir):
        os.makedirs(work_dir)
    os.chdir(work_dir)
    # Select the remote URL set for development-style installs and fetch
    # their requirements files locally.
    if args.gemini_version in ("unstable", "bp"):
        if args.gemini_version == "unstable":
            remotes = remotes_dev
        else:
            remotes = remotes_bp
        requirements_pip = remotes['requirements_pip']
        requirements_conda = remotes['requirements_conda']
        urllib.urlretrieve(requirements_pip, filename='_pip_dev.txt')
        urllib.urlretrieve(requirements_conda, filename='_conda_dev.txt')
        # quick hack to support testing installs:
        if args.gemini_version == "bp":
            for f in ('_pip_dev.txt', '_conda_dev.txt'):
                contents = open(f).read().replace('arq5x', 'brentp')
                with open(f, 'w') as fh:
                    fh.write(contents)
        remotes.update({'requirements_pip': '_pip_dev.txt', 'requirements_conda': '_conda_dev.txt'})
    elif args.gemini_version != 'latest':
        # Pinned version: point at the versioned requirements files and make
        # sure they actually exist before doing any work.
        requirements_pip = os.path.join(remotes['versioned_installations'],
                                        args.gemini_version, 'requirements_pip.txt')
        requirements_conda = os.path.join(remotes['versioned_installations'],
                                          args.gemini_version, 'requirements_conda.txt')
        try:
            urllib2.urlopen(requirements_pip)
        except:
            sys.exit('Gemini version %s could not be found. Try the latest version.' % args.gemini_version)
        remotes.update({'requirements_pip': requirements_pip, 'requirements_conda': requirements_conda})
    print "Installing isolated base python installation"
    make_dirs(args)
    anaconda = install_anaconda_python(args, remotes)
    print "Installing gemini..."
    install_conda_pkgs(anaconda, remotes, args)
    gemini = install_gemini(anaconda, remotes, args.datadir, args.tooldir, args.sudo)
    if args.install_tools:
        cbl = get_cloudbiolinux(remotes["cloudbiolinux"])
        fabricrc = write_fabricrc(cbl["fabricrc"], args.tooldir, args.datadir, args.sudo)
        print "Installing associated tools..."
        install_tools(gemini["fab"], cbl["tool_fabfile"], fabricrc)
    os.chdir(work_dir)
    install_data(gemini["python"], gemini["data_script"], args)
    os.chdir(work_dir)
    test_script = install_testbase(args.datadir, remotes["gemini"], gemini)
    print "Finished: gemini, tools and data installed"
    print " Tools installed in:\n %s" % args.tooldir
    print " Data installed in:\n %s" % args.datadir
    print " Run tests with:\n cd %s && bash %s" % (os.path.dirname(test_script),
                                                   os.path.basename(test_script))
    print " NOTE: be sure to add %s/bin to your PATH." % args.tooldir
    print " NOTE: Install data files for GERP_bp & CADD_scores (not installed by default).\n "
    shutil.rmtree(work_dir)
def install_gemini(anaconda, remotes, datadir, tooldir, use_sudo):
    """Install gemini plus python dependencies inside isolated Anaconda environment.

    Returns a dict of paths to the installed fab/python/gemini entry points
    and the bundled install-data.py script.
    """
    # Work around issue with distribute where asks for 'distribute==0.0'
    # try:
    #     subprocess.check_call([anaconda["easy_install"], "--upgrade", "distribute"])
    # except subprocess.CalledProcessError:
    #     try:
    #         subprocess.check_call([anaconda["pip"], "install", "--upgrade", "distribute"])
    #     except subprocess.CalledProcessError:
    #         pass
    # Ensure latest version of fabric for running CloudBioLinux
    subprocess.check_call([anaconda["pip"], "install", "fabric>=1.7.0"])
    # allow downloads excluded in recent pip (1.5 or greater) versions
    try:
        p = subprocess.Popen([anaconda["pip"], "--version"], stdout=subprocess.PIPE)
        pip_version = p.communicate()[0].split()[1]
    except:
        pip_version = ""
    pip_compat = []
    if pip_version >= "1.5":
        for req in ["python-graph-core", "python-graph-dot"]:
            pip_compat += ["--allow-external", req, "--allow-unverified", req]
    # Set PIP SSL certificate to installed conda certificate to avoid SSL errors
    cert_file = os.path.join(anaconda["dir"], "ssl", "cert.pem")
    if os.path.exists(cert_file):
        os.environ["PIP_CERT"] = cert_file
    subprocess.check_call([anaconda["pip"], "install"] + pip_compat + ["-r", remotes["requirements_pip"]])
    python_bin = os.path.join(anaconda["dir"], "bin", "python")
    _cleanup_problem_files(anaconda["dir"])
    _add_missing_inits(python_bin)
    # Symlink user-facing commands from the tool directory into the
    # isolated Anaconda bin/ (replacing any stale links).
    for final_name, ve_name in [("gemini", "gemini"), ("gemini_python", "python"),
                                ("gemini_pip", "pip")]:
        final_script = os.path.join(tooldir, "bin", final_name)
        ve_script = os.path.join(anaconda["dir"], "bin", ve_name)
        sudo_cmd = ["sudo"] if use_sudo else []
        if os.path.lexists(final_script):
            subprocess.check_call(sudo_cmd + ["rm", "-f", final_script])
        else:
            subprocess.check_call(sudo_cmd + ["mkdir", "-p", os.path.dirname(final_script)])
        cmd = ["ln", "-s", ve_script, final_script]
        subprocess.check_call(sudo_cmd + cmd)
    # Locate the installed gemini package to find its data install script.
    library_loc = check_output("%s -c 'import gemini; print gemini.__file__'" % python_bin,
                               shell=True)
    return {"fab": os.path.join(anaconda["dir"], "bin", "fab"),
            "data_script": os.path.join(os.path.dirname(library_loc.strip()), "install-data.py"),
            "python": python_bin,
            "cmd": os.path.join(anaconda["dir"], "bin", "gemini")}
def install_conda_pkgs(anaconda, remotes, args):
    """Install gemini's binary dependencies with conda: either from the
    version-pinned requirements file or from the hard-coded 'latest' list.
    """
    if args.gemini_version != 'latest':
        pkgs = ["--file", remotes['requirements_conda']]
    else:
        pkgs = ["bcolz", "conda", "cython", "ipyparallel",
                "jinja2", "nose", "numexpr", "numpy", "openssl", "pip", "pybedtools",
                "pycrypto", "pyparsing", "python-graph-core", "python-graph-dot",
                "pyyaml", "pyzmq", "pandas", "scipy"]
        # These packages are unavailable/broken on 32-bit platforms.
        if platform.architecture()[0] != "32bit":
            pkgs += ["bx-python", "pysam", "ipython-cluster-helper"]
    channels = ["-c", "https://conda.binstar.org/bcbio"]
    subprocess.check_call([anaconda["conda"], "install", "--yes"] + channels + pkgs)
def install_anaconda_python(args, remotes):
    """Provide isolated installation of Anaconda python.
    http://docs.continuum.io/anaconda/index.html

    Returns a dict of paths to conda/pip/easy_install and the install dir.
    """
    anaconda_dir = os.path.join(args.datadir, "anaconda")
    bindir = os.path.join(anaconda_dir, "bin")
    conda = os.path.join(bindir, "conda")
    # Pick the Miniconda installer matching OS and architecture.
    if platform.mac_ver()[0]:
        distribution = "macosx"
    else:
        distribution = "linux"
    if platform.architecture()[0] == "32bit":
        arch = ""
    else:
        arch = "_64"
    # (Re)install when the directory is missing or conda itself is gone;
    # a half-installed directory is wiped first.
    if not os.path.exists(anaconda_dir) or not os.path.exists(conda):
        if os.path.exists(anaconda_dir):
            shutil.rmtree(anaconda_dir)
        url = remotes["anaconda"] % ("MacOSX" if distribution == "macosx" else "Linux", arch)
        if not os.path.exists(os.path.basename(url)):
            subprocess.check_call(["wget", url])
        subprocess.check_call("bash %s -b -p %s" %
                              (os.path.basename(url), anaconda_dir), shell=True)
    return {"conda": conda,
            "pip": os.path.join(bindir, "pip"),
            "easy_install": os.path.join(bindir, "easy_install"),
            "dir": anaconda_dir}
def _add_missing_inits(python_bin):
    """pip/setuptools strips __init__.py files with namespace declarations.
    I have no idea why, but this adds them back, skipping if compiled into an egg.
    """
    # Ask the isolated python where pygraph lives, then re-create the
    # namespace-package __init__.py one directory above it if missing.
    library_loc = check_output("%s -c 'import pygraph.classes.graph; "
                               "print pygraph.classes.graph.__file__'" % python_bin,
                               shell=True)
    pygraph_init = os.path.normpath(os.path.join(os.path.dirname(library_loc.strip()), os.pardir,
                                                 "__init__.py"))
    if not os.path.exists(pygraph_init):
        # A missing directory means the package is zipped into an egg: skip.
        if os.path.isdir(os.path.dirname(pygraph_init)):
            with open(pygraph_init, "w") as out_handle:
                out_handle.write("__import__('pkg_resources').declare_namespace(__name__)\n")
def _cleanup_problem_files(venv_dir):
"""Remove problem bottle items in PATH which conflict with site-packages
"""
for cmd in ["bottle.py", "bottle.pyc"]:
bin_cmd = os.path.join(venv_dir, "bin", cmd)
if os.path.exists(bin_cmd):
os.remove(bin_cmd)
def install_tools(fab_cmd, fabfile, fabricrc):
    """Install 3rd party tools used by Gemini using a custom CloudBioLinux flavor.

    Writes a minimal flavor (main.yaml/custom.yaml) listing the wanted
    tools, then runs CloudBioLinux's fabfile against localhost.
    """
    tools = ["grabix"]
    flavor_dir = os.path.join(os.getcwd(), "gemini-flavor")
    if not os.path.exists(flavor_dir):
        os.makedirs(flavor_dir)
    with open(os.path.join(flavor_dir, "main.yaml"), "w") as out_handle:
        out_handle.write("packages:\n")
        out_handle.write(" - bio_nextgen\n")
        out_handle.write("libraries:\n")
    with open(os.path.join(flavor_dir, "custom.yaml"), "w") as out_handle:
        out_handle.write("bio_nextgen:\n")
        for tool in tools:
            out_handle.write(" - %s\n" % tool)
    cmd = [fab_cmd, "-f", fabfile, "-H", "localhost", "-c", fabricrc,
           "install_biolinux:target=custom,flavor=%s" % flavor_dir]
    subprocess.check_call(cmd)
def install_data(python_cmd, data_script, args):
    """Install biological data used by gemini.

    When sharing the Anaconda directory with another project, data goes
    into a gemini-specific subdirectory; --nodata skips the download.
    """
    data_dir = os.path.join(args.datadir, "gemini_data") if args.sharedpy else args.datadir
    cmd = [python_cmd, data_script, data_dir]
    if args.install_data:
        print "Installing gemini data..."
    else:
        cmd.append("--nodata")
    subprocess.check_call(cmd)
def install_testbase(datadir, repo, gemini):
    """Clone or update gemini code so we have the latest test suite.

    Returns the path of the master test script inside the checkout.
    """
    gemini_dir = os.path.join(datadir, "gemini")
    cur_dir = os.getcwd()
    needs_git = True
    # Try updating an existing checkout; on failure, stash it aside so its
    # data/ directory can be recovered after a fresh clone.
    if os.path.exists(gemini_dir):
        os.chdir(gemini_dir)
        try:
            subprocess.check_call(["git", "pull", "origin", "master", "--tags"])
            needs_git = False
        except:
            os.chdir(cur_dir)
            shutil.move(gemini_dir, "gtmp")
    branch = None
    if needs_git:
        os.chdir(os.path.split(gemini_dir)[0])
        # Strip pip's "git+" prefix and honor an "@dev"-style branch suffix.
        if repo.startswith("git+"):
            repo = repo[4:]
        if repo.endswith("@dev"):
            url, branch = repo.rsplit("@", 1)
            subprocess.check_call(["git", "clone", "-b", branch, url])
        else:
            subprocess.check_call(["git", "clone", repo])
        os.makedirs(os.path.join(gemini_dir, "data"))
        # Restore previously-downloaded test data from the stashed checkout.
        if os.path.exists(os.path.join(cur_dir, "gtmp", "data")):
            for f in os.listdir(os.path.join(cur_dir, "gtmp", "data")):
                shutil.move(os.path.join(cur_dir, "gtmp", "data", f), os.path.join(gemini_dir, "data"))
            #shutil.move(os.path.join(cur_dir, "gtmp"), gemini_dir)
            shutil.rmtree(os.path.join(cur_dir, "gtmp", "data"))
    os.chdir(gemini_dir)
    if branch is None: # otherwise, we use the test structure at current head.
        _update_testdir_revision(gemini["cmd"])
    os.chdir(cur_dir)
    return os.path.join(gemini_dir, "master-test.sh")
def _update_testdir_revision(gemini_cmd):
    """Update test directory to be in sync with a tagged installed version or development.

    Asks the installed gemini for its version, looks for a matching git
    tag in the current checkout and checks it out; otherwise resets the
    checkout to HEAD.
    """
    # FIX: the bare ``except:`` clauses also swallowed KeyboardInterrupt
    # and SystemExit; narrow them to Exception (still covers a missing
    # command, empty output from split()[1], etc.).
    try:
        p = subprocess.Popen([gemini_cmd, "--version"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        gversion = p.communicate()[0].split()[1]
    except Exception:
        gversion = ""
    tag = ""
    if gversion:
        try:
            p = subprocess.Popen("git tag -l | grep %s" % gversion, stdout=subprocess.PIPE, shell=True)
            tag = p.communicate()[0].strip()
        except Exception:
            tag = ""
    if tag:
        # FIX: removed a stray no-op ``pass`` that followed this call.
        subprocess.check_call(["git", "checkout", "tags/%s" % tag])
    else:
        subprocess.check_call(["git", "reset", "--hard", "HEAD"])
def write_fabricrc(base_file, tooldir, datadir, use_sudo):
    """Copy a CloudBioLinux fabricrc into the current directory, rewriting
    the install/data/sudo settings for this run.

    Returns the path of the rewritten copy (cwd + original basename).
    """
    out_file = os.path.join(os.getcwd(), os.path.basename(base_file))
    # (prefix to match, replacement line) — first match wins, mirroring the
    # original if/elif chain; unmatched lines pass through untouched.
    replacements = [
        ("system_install", "system_install = %s\n" % tooldir),
        ("local_install", "local_install = %s/install\n" % tooldir),
        ("data_files", "data_files = %s\n" % datadir),
        ("use_sudo", "use_sudo = %s\n" % use_sudo),
        ("edition", "edition = minimal\n"),
        ("#galaxy_home", "galaxy_home = %s\n" % os.path.join(datadir, "galaxy")),
    ]
    with open(base_file) as in_handle, open(out_file, "w") as out_handle:
        for line in in_handle:
            for prefix, replacement in replacements:
                if line.startswith(prefix):
                    line = replacement
                    break
            out_handle.write(line)
    return out_file
def make_dirs(args):
    """Create the tool and data directories (with sudo when requested) and
    hand ownership to the invoking user so later non-sudo steps can write.
    """
    sudo_cmd = ["sudo"] if args.sudo else []
    for dname in [args.datadir, args.tooldir]:
        if not os.path.exists(dname):
            subprocess.check_call(sudo_cmd + ["mkdir", "-p", dname])
            # chown back to the shell user, since sudo mkdir creates as root.
            username = check_output("echo $USER", shell=True).strip()
            subprocess.check_call(sudo_cmd + ["chown", username, dname])
def get_cloudbiolinux(repo):
    """Ensure a cloudbiolinux checkout exists under the current directory
    and return the paths of its fabricrc config and tool fabfile."""
    checkout = os.path.join(os.getcwd(), "cloudbiolinux")
    if not os.path.exists(checkout):
        # First run: clone the repository into the working directory.
        subprocess.check_call(["git", "clone", repo])
    return {
        "fabricrc": os.path.join(checkout, "config", "fabricrc.txt"),
        "tool_fabfile": os.path.join(checkout, "fabfile.py"),
    }
def clean_env_variables():
    """Adjust environmental variables which can cause conflicts with installed anaconda python.
    """
    for name in ("PYTHONPATH", "PYTHONHOME"):
        os.environ.pop(name, None)
    # https://docs.python.org/2/using/cmdline.html#envvar-PYTHONNOUSERSITE
    os.environ["PYTHONNOUSERSITE"] = "1"
def check_dependencies():
    """Ensure required tools for installation are present.

    Raises OSError naming the missing command and its download URL.
    """
    print "Checking required dependencies..."
    for cmd, url in [("git", "http://git-scm.com/"),
                     ("wget", "http://www.gnu.org/software/wget/"),
                     ("curl", "http://curl.haxx.se/")]:
        try:
            retcode = subprocess.call([cmd, "--version"], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        except OSError:
            # Command not found when spawning: treat like shell's 127.
            retcode = 127
        if retcode == 127:
            raise OSError("gemini requires %s (%s)" % (cmd, url))
        else:
            print " %s found" % cmd
def check_output(*popenargs, **kwargs):
    """python2.6 compatible version of check_output.
    Thanks to:
    https://github.com/stackforge/bindep/blob/master/bindep/support_py26.py
    """
    if 'stdout' in kwargs:
        raise ValueError('stdout argument not allowed, it will be overridden.')
    proc = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs)
    stdout_data, _ = proc.communicate()
    status = proc.poll()
    if status:
        command = kwargs.get("args")
        if command is None:
            command = popenargs[0]
        raise subprocess.CalledProcessError(status, command, output=stdout_data)
    return stdout_data
# Command-line entry point: parse install locations and option flags, then
# run the full installer (printing help when invoked without arguments).
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Automated installer for gemini framework.")
    parser.add_argument("tooldir", help="Directory to install 3rd party software tools",
                        type=os.path.abspath)
    parser.add_argument("datadir", help="Directory to install gemini data files",
                        type=os.path.abspath)
    parser.add_argument("--gemini-version", dest="gemini_version", default="latest",
                        help="Install one specific gemini version with a fixed dependency chain.")
    parser.add_argument("--nosudo", help="Specify we cannot use sudo for commands",
                        dest="sudo", action="store_false", default=True)
    parser.add_argument("--notools", help="Do not install tool dependencies",
                        dest="install_tools", action="store_false", default=True)
    parser.add_argument("--nodata", help="Do not install data dependencies",
                        dest="install_data", action="store_false", default=True)
    parser.add_argument("--sharedpy", help=("Indicate we share an Anaconda Python directory with "
                                            "another project. Creates unique gemini data directory."),
                        action="store_true", default=False)
    if len(sys.argv) == 1:
        parser.print_help()
    else:
        main(parser.parse_args())
| mit |
uwafsl/MissionPlanner | Lib/site-packages/numpy/core/machar.py | 83 | 10658 | """
Machine arithmetics - determine the parameters of the
floating-point arithmetic system
"""
# Author: Pearu Peterson, September 2003
__all__ = ['MachAr']
from numpy.core.fromnumeric import any
from numpy.core.numeric import seterr
# Need to speed this up...especially for longfloat
class MachAr(object):
    """
    Diagnosing machine parameters.

    Attributes
    ----------
    ibeta : int
        Radix in which numbers are represented.
    it : int
        Number of base-`ibeta` digits in the floating point mantissa M.
    machep : int
        Exponent of the smallest (most negative) power of `ibeta` that,
        added to 1.0, gives something different from 1.0
    eps : float
        Floating-point number ``beta**machep`` (floating point precision)
    negep : int
        Exponent of the smallest power of `ibeta` that, substracted
        from 1.0, gives something different from 1.0.
    epsneg : float
        Floating-point number ``beta**negep``.
    iexp : int
        Number of bits in the exponent (including its sign and bias).
    minexp : int
        Smallest (most negative) power of `ibeta` consistent with there
        being no leading zeros in the mantissa.
    xmin : float
        Floating point number ``beta**minexp`` (the smallest [in
        magnitude] usable floating value).
    maxexp : int
        Smallest (positive) power of `ibeta` that causes overflow.
    xmax : float
        ``(1-epsneg) * beta**maxexp`` (the largest [in magnitude]
        usable floating value).
    irnd : int
        In ``range(6)``, information on what kind of rounding is done
        in addition, and on how underflow is handled.
    ngrd : int
        Number of 'guard digits' used when truncating the product
        of two mantissas to fit the representation.
    epsilon : float
        Same as `eps`.
    tiny : float
        Same as `xmin`.
    huge : float
        Same as `xmax`.
    precision : float
        ``- int(-log10(eps))``
    resolution : float
        ``- 10**(-precision)``

    Parameters
    ----------
    float_conv : function, optional
        Function that converts an integer or integer array to a float
        or float array. Default is `float`.
    int_conv : function, optional
        Function that converts a float or float array to an integer or
        integer array. Default is `int`.
    float_to_float : function, optional
        Function that converts a float array to float. Default is `float`.
        Note that this does not seem to do anything useful in the current
        implementation.
    float_to_str : function, optional
        Function that converts a single float to a string. Default is
        ``lambda v:'%24.16e' %v``.
    title : str, optional
        Title that is printed in the string representation of `MachAr`.

    See Also
    --------
    finfo : Machine limits for floating point types.
    iinfo : Machine limits for integer types.

    References
    ----------
    .. [1] Press, Teukolsky, Vetterling and Flannery,
           "Numerical Recipes in C++," 2nd ed,
           Cambridge University Press, 2002, p. 31.

    """
    def __init__(self, float_conv=float,int_conv=int,
                 float_to_float=float,
                 float_to_str = lambda v:'%24.16e' % v,
                 title = 'Python floating point number'):
        """
        float_conv - convert integer to float (array)
        int_conv   - convert float (array) to integer
        float_to_float - convert float array to float
        float_to_str - convert array float to str
        title        - description of used floating point numbers
        """
        # We ignore all errors here because we are purposely triggering
        # underflow to detect the properties of the runninng arch.
        saverrstate = seterr(under='ignore')
        try:
            self._do_init(float_conv, int_conv, float_to_float, float_to_str, title)
        finally:
            # Always restore the caller's error state.
            seterr(**saverrstate)

    def _do_init(self, float_conv, int_conv, float_to_float, float_to_str, title):
        """Probe the floating point type and populate all attributes.

        Each ``for .. else: raise`` below caps the probe loops at
        ``max_iterN`` iterations: the ``else`` only runs when a loop never
        hit ``break``, i.e. the detection failed to converge.
        """
        max_iterN = 10000
        msg = "Did not converge after %d tries with %s"
        one = float_conv(1)
        two = one + one
        zero = one - one
        # Do we really need to do this? Aren't they 2 and 2.0?
        # Determine ibeta and beta
        a = one
        for _ in xrange(max_iterN):
            a = a + a
            temp = a + one
            temp1 = temp - a
            if any(temp1 - one != zero):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        b = one
        for _ in xrange(max_iterN):
            b = b + b
            temp = a + b
            itemp = int_conv(temp-a)
            if any(itemp != 0):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        ibeta = itemp
        beta = float_conv(ibeta)
        # Determine it and irnd
        it = -1
        b = one
        for _ in xrange(max_iterN):
            it = it + 1
            b = b * beta
            temp = b + one
            temp1 = temp - b
            if any(temp1 - one != zero):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        betah = beta / two
        a = one
        for _ in xrange(max_iterN):
            a = a + a
            temp = a + one
            temp1 = temp - a
            if any(temp1 - one != zero):
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        temp = a + betah
        irnd = 0
        if any(temp-a != zero):
            irnd = 1
        tempa = a + beta
        temp = tempa + betah
        if irnd==0 and any(temp-tempa != zero):
            irnd = 2
        # Determine negep and epsneg
        negep = it + 3
        betain = one / beta
        a = one
        for i in range(negep):
            a = a * betain
        b = a
        for _ in xrange(max_iterN):
            temp = one - a
            if any(temp-one != zero):
                break
            a = a * beta
            negep = negep - 1
            # Prevent infinite loop on PPC with gcc 4.0:
            if negep < 0:
                raise RuntimeError, "could not determine machine tolerance " \
                                    "for 'negep', locals() -> %s" % (locals())
        else:
            raise RuntimeError, msg % (_, one.dtype)
        negep = -negep
        epsneg = a
        # Determine machep and eps
        machep = - it - 3
        a = b
        for _ in xrange(max_iterN):
            temp = one + a
            if any(temp-one != zero):
                break
            a = a * beta
            machep = machep + 1
        else:
            raise RuntimeError, msg % (_, one.dtype)
        eps = a
        # Determine ngrd
        ngrd = 0
        temp = one + eps
        if irnd==0 and any(temp*one - one != zero):
            ngrd = 1
        # Determine iexp
        i = 0
        k = 1
        z = betain
        t = one + eps
        nxres = 0
        for _ in xrange(max_iterN):
            y = z
            z = y*y
            a = z*one # Check here for underflow
            temp = z*t
            if any(a+a == zero) or any(abs(z)>=y):
                break
            temp1 = temp * betain
            if any(temp1*beta == z):
                break
            i = i + 1
            k = k + k
        else:
            raise RuntimeError, msg % (_, one.dtype)
        if ibeta != 10:
            iexp = i + 1
            mx = k + k
        else:
            iexp = 2
            iz = ibeta
            while k >= iz:
                iz = iz * ibeta
                iexp = iexp + 1
            mx = iz + iz - 1
        # Determine minexp and xmin
        for _ in xrange(max_iterN):
            xmin = y
            y = y * betain
            a = y * one
            temp = y * t
            if any(a+a != zero) and any(abs(y) < xmin):
                k = k + 1
                temp1 = temp * betain
                if any(temp1*beta == y) and any(temp != y):
                    nxres = 3
                    xmin = y
                    break
            else:
                break
        else:
            raise RuntimeError, msg % (_, one.dtype)
        minexp = -k
        # Determine maxexp, xmax
        if mx <= k + k - 3 and ibeta != 10:
            mx = mx + mx
            iexp = iexp + 1
        maxexp = mx + minexp
        irnd = irnd + nxres
        if irnd >= 2:
            maxexp = maxexp - 2
        i = maxexp + minexp
        if ibeta == 2 and not i:
            maxexp = maxexp - 1
        if i > 20:
            maxexp = maxexp - 1
        if any(a != y):
            maxexp = maxexp - 2
        xmax = one - epsneg
        if any(xmax*one != xmax):
            xmax = one - beta*epsneg
        xmax = xmax / (xmin*beta*beta*beta)
        i = maxexp + minexp + 3
        for j in range(i):
            if ibeta==2:
                xmax = xmax + xmax
            else:
                xmax = xmax * beta
        # Publish the detected parameters as instance attributes, keeping
        # pre-formatted string versions for __str__.
        self.ibeta = ibeta
        self.it = it
        self.negep = negep
        self.epsneg = float_to_float(epsneg)
        self._str_epsneg = float_to_str(epsneg)
        self.machep = machep
        self.eps = float_to_float(eps)
        self._str_eps = float_to_str(eps)
        self.ngrd = ngrd
        self.iexp = iexp
        self.minexp = minexp
        self.xmin = float_to_float(xmin)
        self._str_xmin = float_to_str(xmin)
        self.maxexp = maxexp
        self.xmax = float_to_float(xmax)
        self._str_xmax = float_to_str(xmax)
        self.irnd = irnd
        self.title = title
        # Commonly used parameters
        self.epsilon = self.eps
        self.tiny = self.xmin
        self.huge = self.xmax
        import math
        self.precision = int(-math.log10(float_to_float(self.eps)))
        ten = two + two + two + two + two
        resolution = ten ** (-self.precision)
        self.resolution = float_to_float(resolution)
        self._str_resolution = float_to_str(resolution)

    def __str__(self):
        # Human-readable summary, filled from the instance's attributes.
        return '''\
Machine parameters for %(title)s
---------------------------------------------------------------------
ibeta=%(ibeta)s it=%(it)s iexp=%(iexp)s ngrd=%(ngrd)s irnd=%(irnd)s
machep=%(machep)s eps=%(_str_eps)s (beta**machep == epsilon)
negep =%(negep)s epsneg=%(_str_epsneg)s (beta**epsneg)
minexp=%(minexp)s xmin=%(_str_xmin)s (beta**minexp == tiny)
maxexp=%(maxexp)s xmax=%(_str_xmax)s ((1-epsneg)*beta**maxexp == huge)
---------------------------------------------------------------------
''' % self.__dict__
if __name__ == '__main__':
print MachAr()
| gpl-3.0 |
machinelearningnanodegree/stanford-cs231 | solutions/levin/assignment2/FullyConectedNets/batchnormalization.py | 1 | 14980 | import sys
import os
from astropy.units import ys
sys.path.insert(0, os.path.abspath('..'))
import random
import numpy as np
import matplotlib.pyplot as plt
from assignment2.cs231n.layers import affine_forward
from assignment2.cs231n.layers import affine_backward
from assignment2.cs231n.layers import relu_forward
from assignment2.cs231n.layers import relu_backward
from assignment2.cs231n.layers import svm_loss
from assignment2.cs231n.layers import softmax_loss
from assignment2.cs231n.classifiers.fc_net import *
from assignment2.cs231n.data_utils import get_CIFAR10_data
from assignment2.cs231n.gradient_check import eval_numerical_gradient, eval_numerical_gradient_array
from assignment2.cs231n.solver import Solver
from assignment2.cs231n.layer_utils import affine_relu_forward, affine_relu_backward
from assignment2.cs231n.data_utils import load_CIFAR10
from assignment2.cs231n.optim import sgd_momentum
from assignment2.cs231n.optim import rmsprop
from assignment2.cs231n.optim import adam
import time
class BatchNormalization(object):
def __init__(self):
return
def rel_error(self, x, y):
""" returns relative error """
return np.max(np.abs(x - y) / (np.maximum(1e-8, np.abs(x) + np.abs(y))))
def test_batch_norm_forward_train_time(self):
# Check the training-time forward pass by checking means and variances
# of features both before and after batch normalization
# Simulate the forward pass for a two-layer network
N, D1, D2, D3 = 200, 50, 60, 3
X = np.random.randn(N, D1)
W1 = np.random.randn(D1, D2)
W2 = np.random.randn(D2, D3)
a = np.maximum(0, X.dot(W1)).dot(W2)
print 'Before batch normalization:'
print ' means: ', a.mean(axis=0)
print ' stds: ', a.std(axis=0)
# Means should be close to zero and stds close to one
print 'After batch normalization (gamma=1, beta=0)'
a_norm, _ = batchnorm_forward(a, np.ones(D3), np.zeros(D3), {'mode': 'train'})
print ' mean: ', a_norm.mean(axis=0)
print ' std: ', a_norm.std(axis=0)
# Now means should be close to beta and stds close to gamma
gamma = np.asarray([1.0, 2.0, 3.0])
beta = np.asarray([11.0, 12.0, 13.0])
a_norm, _ = batchnorm_forward(a, gamma, beta, {'mode': 'train'})
print 'After batch normalization (nontrivial gamma, beta)'
print ' means: ', a_norm.mean(axis=0)
print ' stds: ', a_norm.std(axis=0)
return
def test_batch_norm_forward_test_time(self):
# Check the test-time forward pass by running the training-time
# forward pass many times to warm up the running averages, and then
# checking the means and variances of activations after a test-time
# forward pass.
N, D1, D2, D3 = 200, 50, 60, 3
W1 = np.random.randn(D1, D2)
W2 = np.random.randn(D2, D3)
bn_param = {'mode': 'train'}
gamma = np.ones(D3)
beta = np.zeros(D3)
for t in xrange(50):
X = np.random.randn(N, D1)
a = np.maximum(0, X.dot(W1)).dot(W2)
batchnorm_forward(a, gamma, beta, bn_param)
bn_param['mode'] = 'test'
X = np.random.randn(N, D1)
a = np.maximum(0, X.dot(W1)).dot(W2)
a_norm, _ = batchnorm_forward(a, gamma, beta, bn_param)
# Means should be close to zero and stds close to one, but will be
# noisier than training-time forward passes.
print 'After batch normalization (test-time):'
print ' means: ', a_norm.mean(axis=0)
print ' stds: ', a_norm.std(axis=0)
return
def get_CIFAR10_data(self, num_training=49000, num_validation=1000, num_test=1000):
"""
Load the CIFAR-10 dataset from disk and perform preprocessing to prepare
it for the two-layer neural net classifier. These are the same steps as
we used for the SVM, but condensed to a single function.
"""
# Load the raw CIFAR-10 data
cifar10_dir = '../../assignment1/cs231n/datasets/cifar-10-batches-py'
X_train, y_train, X_test, y_test = load_CIFAR10(cifar10_dir)
# Subsample the data
mask = range(num_training, num_training + num_validation)
X_val = X_train[mask]
y_val = y_train[mask]
mask = range(num_training)
X_train = X_train[mask]
y_train = y_train[mask]
mask = range(num_test)
X_test = X_test[mask]
y_test = y_test[mask]
# Normalize the data: subtract the mean image
mean_image = np.mean(X_train, axis=0)
X_train -= mean_image
X_val -= mean_image
X_test -= mean_image
# Reshape data to rows
X_train = X_train.reshape(num_training, -1)
X_val = X_val.reshape(num_validation, -1)
X_test = X_test.reshape(num_test, -1)
print 'Train data shape: ', X_train.shape
print 'Train labels shape: ', y_train.shape
print 'Validation data shape: ', X_val.shape
print 'Validation labels shape: ', y_val.shape
print 'Test data shape: ', X_test.shape
print 'Test labels shape: ', y_test.shape
# self.X_train = X_train
# self.y_train = y_train
# self.X_val = X_val
# self.y_val = y_val
# self.X_test = X_test
# self.y_test = y_test
self.data = {
'X_train': X_train,
'y_train': y_train,
'X_val': X_val,
'y_val': y_val}
return X_train, y_train, X_val, y_val,X_test,y_test
def backnorm_backward(self):
# Gradient check batchnorm backward pass
N, D = 4, 5
x = 5 * np.random.randn(N, D) + 12
gamma = np.random.randn(D)
beta = np.random.randn(D)
dout = np.random.randn(N, D)
bn_param = {'mode': 'train'}
fx = lambda x: batchnorm_forward(x, gamma, beta, bn_param)[0]
fg = lambda a: batchnorm_forward(x, gamma, beta, bn_param)[0]
fb = lambda b: batchnorm_forward(x, gamma, beta, bn_param)[0]
dx_num = eval_numerical_gradient_array(fx, x, dout)
da_num = eval_numerical_gradient_array(fg, gamma, dout)
db_num = eval_numerical_gradient_array(fb, beta, dout)
_, cache = batchnorm_forward(x, gamma, beta, bn_param)
dx, dgamma, dbeta = batchnorm_backward(dout, cache)
print 'dx error: ', self.rel_error(dx_num, dx)
print 'dgamma error: ', self.rel_error(da_num, dgamma)
print 'dbeta error: ', self.rel_error(db_num, dbeta)
return
def analytical_backward(self):
N, D = 100, 500
x = 5 * np.random.randn(N, D) + 12
gamma = np.random.randn(D)
beta = np.random.randn(D)
dout = np.random.randn(N, D)
bn_param = {'mode': 'train'}
out, cache = batchnorm_forward(x, gamma, beta, bn_param)
t1 = time.time()
dx1, dgamma1, dbeta1 = batchnorm_backward(dout, cache)
t2 = time.time()
dx2, dgamma2, dbeta2 = batchnorm_backward_alt(dout, cache)
t3 = time.time()
print 'dx difference: ', self.rel_error(dx1, dx2)
print 'dgamma difference: ', self.rel_error(dgamma1, dgamma2)
print 'dbeta difference: ', self.rel_error(dbeta1, dbeta2)
print 'speedup: %.2fx' % ((t2 - t1) / (t3 - t2))
return
def check_network_withbatchnorm(self):
N, D, H1, H2, C = 2, 15, 20, 30, 10
X = np.random.randn(N, D)
y = np.random.randint(C, size=(N,))
for reg in [0, 3.14]:
print 'Running check with reg = ', reg
model = FullyConnectedNet([H1, H2], input_dim=D, num_classes=C,
reg=reg, weight_scale=5e-2, dtype=np.float64,
use_batchnorm=True)
loss, grads = model.loss(X, y)
print 'Initial loss: ', loss
for name in sorted(grads):
f = lambda _: model.loss(X, y)[0]
grad_num = eval_numerical_gradient(f, model.params[name], verbose=False, h=1e-5)
print '%s relative error: %.2e' % (name, self.rel_error(grad_num, grads[name]))
if reg == 0: print
return
def batch_norm_with_deep(self):
# Try training a very deep net with batchnorm
data = self.data
hidden_dims = [100, 100, 100, 100, 100]
num_train = 1000
small_data = {
'X_train': data['X_train'][:num_train],
'y_train': data['y_train'][:num_train],
'X_val': data['X_val'],
'y_val': data['y_val'],
}
weight_scale = 2e-2
bn_model = FullyConnectedNet(hidden_dims, weight_scale=weight_scale, use_batchnorm=True)
model = FullyConnectedNet(hidden_dims, weight_scale=weight_scale, use_batchnorm=False)
bn_solver = Solver(bn_model, small_data,
num_epochs=10, batch_size=50,
update_rule='adam',
optim_config={
'learning_rate': 1e-3,
},
verbose=True, print_every=200)
bn_solver.train()
solver = Solver(model, small_data,
num_epochs=10, batch_size=50,
update_rule='adam',
optim_config={
'learning_rate': 1e-3,
},
verbose=True, print_every=200)
solver.train()
plt.subplot(3, 1, 1)
plt.title('Training loss')
plt.xlabel('Iteration')
plt.subplot(3, 1, 2)
plt.title('Training accuracy')
plt.xlabel('Epoch')
plt.subplot(3, 1, 3)
plt.title('Validation accuracy')
plt.xlabel('Epoch')
plt.subplot(3, 1, 1)
plt.plot(solver.loss_history, 'o', label='baseline')
plt.plot(bn_solver.loss_history, 'o', label='batchnorm')
plt.subplot(3, 1, 2)
plt.plot(solver.train_acc_history, '-o', label='baseline')
plt.plot(bn_solver.train_acc_history, '-o', label='batchnorm')
plt.subplot(3, 1, 3)
plt.plot(solver.val_acc_history, '-o', label='baseline')
plt.plot(bn_solver.val_acc_history, '-o', label='batchnorm')
for i in [1, 2, 3]:
plt.subplot(3, 1, i)
plt.legend(loc='upper center', ncol=4)
plt.gcf().set_size_inches(15, 15)
plt.show()
return
def weight_initialization_batch_norm(self):
# Try training a very deep net with batchnorm
data = self.data
hidden_dims = [50, 50, 50, 50, 50, 50, 50]
num_train = 1000
small_data = {
'X_train': data['X_train'][:num_train],
'y_train': data['y_train'][:num_train],
'X_val': data['X_val'],
'y_val': data['y_val'],
}
bn_solvers = {}
solvers = {}
weight_scales = np.logspace(-4, 0, num=20)
for i, weight_scale in enumerate(weight_scales):
print 'Running weight scale %d / %d' % (i + 1, len(weight_scales))
bn_model = FullyConnectedNet(hidden_dims, weight_scale=weight_scale, use_batchnorm=True)
model = FullyConnectedNet(hidden_dims, weight_scale=weight_scale, use_batchnorm=False)
bn_solver = Solver(bn_model, small_data,
num_epochs=10, batch_size=50,
update_rule='adam',
optim_config={
'learning_rate': 1e-3,
},
verbose=False, print_every=200)
bn_solver.train()
bn_solvers[weight_scale] = bn_solver
solver = Solver(model, small_data,
num_epochs=10, batch_size=50,
update_rule='adam',
optim_config={
'learning_rate': 1e-3,
},
verbose=False, print_every=200)
solver.train()
solvers[weight_scale] = solver
# Plot results of weight scale experiment
best_train_accs, bn_best_train_accs = [], []
best_val_accs, bn_best_val_accs = [], []
final_train_loss, bn_final_train_loss = [], []
for ws in weight_scales:
best_train_accs.append(max(solvers[ws].train_acc_history))
bn_best_train_accs.append(max(bn_solvers[ws].train_acc_history))
best_val_accs.append(max(solvers[ws].val_acc_history))
bn_best_val_accs.append(max(bn_solvers[ws].val_acc_history))
final_train_loss.append(np.mean(solvers[ws].loss_history[-100:]))
bn_final_train_loss.append(np.mean(bn_solvers[ws].loss_history[-100:]))
plt.subplot(3, 1, 1)
plt.title('Best val accuracy vs weight initialization scale')
plt.xlabel('Weight initialization scale')
plt.ylabel('Best val accuracy')
plt.semilogx(weight_scales, best_val_accs, '-o', label='baseline')
plt.semilogx(weight_scales, bn_best_val_accs, '-o', label='batchnorm')
plt.legend(ncol=2, loc='lower right')
plt.subplot(3, 1, 2)
plt.title('Best train accuracy vs weight initialization scale')
plt.xlabel('Weight initialization scale')
plt.ylabel('Best training accuracy')
plt.semilogx(weight_scales, best_train_accs, '-o', label='baseline')
plt.semilogx(weight_scales, bn_best_train_accs, '-o', label='batchnorm')
plt.legend()
plt.subplot(3, 1, 3)
plt.title('Final training loss vs weight initialization scale')
plt.xlabel('Weight initialization scale')
plt.ylabel('Final training loss')
plt.semilogx(weight_scales, final_train_loss, '-o', label='baseline')
plt.semilogx(weight_scales, bn_final_train_loss, '-o', label='batchnorm')
plt.legend()
plt.gcf().set_size_inches(10, 15)
plt.show()
return
def run(self):
self.get_CIFAR10_data()
# self.test_batch_norm_forward_train_time()
# self.test_batch_norm_forward_test_time()
# self.backnorm_backward()
# self.analytical_backward()
# self.check_network_withbatchnorm()
# self.batch_norm_with_deep()
self.weight_initialization_batch_norm()
return
if __name__ == "__main__":
obj= BatchNormalization()
obj.run() | mit |
redhat-openstack/rally | tests/unit/task/test_scenario.py | 6 | 10463 | # Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import traceback
import mock
import six
from rally import consts
from rally import exceptions
from rally.task import context
from rally.task import scenario
from rally.task import validation
from tests.unit import fakes
from tests.unit import test
class ScenarioConfigureTestCase(test.TestCase):
def test_configure(self):
@scenario.configure("test_configure", "testing")
def some_func():
pass
self.assertEqual("test_configure", some_func.get_name())
self.assertEqual("testing", some_func.get_namespace())
some_func.unregister()
def test_configure_default_name(self):
@scenario.configure(namespace="testing", context={"any": 42})
def some_func():
pass
self.assertIsNone(some_func._meta_get("name"))
self.assertEqual("testing", some_func.get_namespace())
self.assertEqual({"any": 42}, some_func._meta_get("default_context"))
some_func.unregister()
def test_configure_cls(self):
class ScenarioPluginCls(scenario.Scenario):
@scenario.configure(namespace="any", context={"any": 43})
def some(self):
pass
self.assertEqual("ScenarioPluginCls.some",
ScenarioPluginCls.some.get_name())
self.assertEqual("any", ScenarioPluginCls.some.get_namespace())
self.assertEqual({"any": 43},
ScenarioPluginCls.some._meta_get("default_context"))
ScenarioPluginCls.some.unregister()
class ScenarioTestCase(test.TestCase):
def test__validate_helper(self):
validators = [
mock.MagicMock(return_value=validation.ValidationResult(True)),
mock.MagicMock(return_value=validation.ValidationResult(True))
]
clients = mock.MagicMock()
config = {"a": 1, "b": 2}
deployment = mock.MagicMock()
scenario.Scenario._validate_helper(validators, clients, config,
deployment)
for validator in validators:
validator.assert_called_with(config, clients=clients,
deployment=deployment)
def test__validate_helper_somethingwent_wrong(self):
validator = mock.MagicMock()
validator.side_effect = Exception()
self.assertRaises(exceptions.InvalidScenarioArgument,
scenario.Scenario._validate_helper,
[validator], "cl", "config", "deployment")
validator.assert_called_once_with("config", clients="cl",
deployment="deployment")
def test__validate_helper__no_valid(self):
validators = [
mock.MagicMock(return_value=validation.ValidationResult(True)),
mock.MagicMock(
return_value=validation.ValidationResult(is_valid=False)
)
]
clients = mock.MagicMock()
args = {"a": 1, "b": 2}
self.assertRaises(exceptions.InvalidScenarioArgument,
scenario.Scenario._validate_helper,
validators, clients, args, "fake_uuid")
@mock.patch("rally.task.scenario.Scenario.get")
def test_validate__no_validators(self, mock_scenario_get):
class Testing(fakes.FakeScenario):
@scenario.configure()
def validate__no_validators(self):
pass
mock_scenario_get.return_value = Testing.validate__no_validators
scenario.Scenario.validate("Testing.validate__no_validators",
{"a": 1, "b": 2})
mock_scenario_get.assert_called_once_with(
"Testing.validate__no_validators")
Testing.validate__no_validators.unregister()
@mock.patch("rally.task.scenario.Scenario._validate_helper")
@mock.patch("rally.task.scenario.Scenario.get")
def test_validate__admin_validators(self, mock_scenario_get,
mock_scenario__validate_helper):
class Testing(fakes.FakeScenario):
@scenario.configure(namespace="testing")
def validate_admin_validators(self):
pass
mock_scenario_get.return_value = Testing.validate_admin_validators
validators = [mock.MagicMock(), mock.MagicMock()]
for validator in validators:
validator.permission = consts.EndpointPermission.ADMIN
Testing.validate_admin_validators._meta_set(
"validators", validators)
deployment = mock.MagicMock()
args = {"a": 1, "b": 2}
scenario.Scenario.validate("Testing.validate_admin_validators",
args, admin="admin", deployment=deployment)
mock_scenario__validate_helper.assert_called_once_with(
validators, "admin", args, deployment)
Testing.validate_admin_validators.unregister()
@mock.patch("rally.task.scenario.Scenario._validate_helper")
@mock.patch("rally.task.scenario.Scenario.get")
def test_validate_user_validators(self, mock_scenario_get,
mock_scenario__validate_helper):
class Testing(fakes.FakeScenario):
@scenario.configure()
def validate_user_validators(self):
pass
mock_scenario_get.return_value = Testing.validate_user_validators
validators = [mock.MagicMock(), mock.MagicMock()]
for validator in validators:
validator.permission = consts.EndpointPermission.USER
Testing.validate_user_validators._meta_set("validators", validators)
args = {"a": 1, "b": 2}
scenario.Scenario.validate(
"Testing.validate_user_validators", args, users=["u1", "u2"])
mock_scenario__validate_helper.assert_has_calls([
mock.call(validators, "u1", args, None),
mock.call(validators, "u2", args, None)
])
Testing.validate_user_validators.unregister()
def test_sleep_between_invalid_args(self):
self.assertRaises(exceptions.InvalidArgumentsException,
scenario.Scenario().sleep_between, 15, 5)
self.assertRaises(exceptions.InvalidArgumentsException,
scenario.Scenario().sleep_between, -1, 0)
self.assertRaises(exceptions.InvalidArgumentsException,
scenario.Scenario().sleep_between, 0, -2)
def test_sleep_between(self):
scenario_inst = scenario.Scenario()
scenario_inst.sleep_between(0.001, 0.002)
self.assertTrue(0.001 <= scenario_inst.idle_duration() <= 0.002)
def test_sleep_beetween_multi(self):
scenario_inst = scenario.Scenario()
scenario_inst.sleep_between(0.001, 0.001)
scenario_inst.sleep_between(0.004, 0.004)
self.assertEqual(scenario_inst.idle_duration(), 0.005)
@mock.patch("rally.task.scenario.time.sleep")
@mock.patch("rally.task.scenario.random.uniform")
def test_sleep_between_internal(self, mock_uniform, mock_sleep):
scenario_inst = scenario.Scenario()
mock_uniform.return_value = 1.5
scenario_inst.sleep_between(1, 2)
mock_sleep.assert_called_once_with(mock_uniform.return_value)
self.assertEqual(scenario_inst.idle_duration(),
mock_uniform.return_value)
def test_scenario_context_are_valid(self):
for s in scenario.Scenario.get_all():
try:
context.ContextManager.validate(s._meta_get("default_context"))
except Exception:
print(traceback.format_exc())
self.assertTrue(False,
"Scenario `%s` has wrong context" % scenario)
def test_RESOURCE_NAME_PREFIX(self):
self.assertIsInstance(scenario.Scenario.RESOURCE_NAME_PREFIX,
six.string_types)
def test_RESOURCE_NAME_LENGTH(self):
self.assertIsInstance(scenario.Scenario.RESOURCE_NAME_LENGTH, int)
self.assertTrue(scenario.Scenario.RESOURCE_NAME_LENGTH > 4)
def test_generate_random_name(self):
set_by_length = lambda lst: set(map(len, lst))
len_by_prefix = (lambda lst, prefix:
len([i.startswith(prefix) for i in lst]))
range_num = 50
# Defaults
result = [scenario.Scenario._generate_random_name()
for i in range(range_num)]
self.assertEqual(len(result), len(set(result)))
self.assertEqual(
set_by_length(result),
set([(len(
scenario.Scenario.RESOURCE_NAME_PREFIX) +
scenario.Scenario.RESOURCE_NAME_LENGTH)]))
self.assertEqual(
len_by_prefix(result, scenario.Scenario.RESOURCE_NAME_PREFIX),
range_num)
# Custom prefix
prefix = "another_prefix_"
result = [scenario.Scenario._generate_random_name(prefix)
for i in range(range_num)]
self.assertEqual(len(result), len(set(result)))
self.assertEqual(
set_by_length(result),
set([len(prefix) + scenario.Scenario.RESOURCE_NAME_LENGTH]))
self.assertEqual(
len_by_prefix(result, prefix), range_num)
# Custom length
name_length = 12
result = [
scenario.Scenario._generate_random_name(length=name_length)
for i in range(range_num)]
self.assertEqual(len(result), len(set(result)))
self.assertEqual(
set_by_length(result),
set([len(
scenario.Scenario.RESOURCE_NAME_PREFIX) + name_length]))
self.assertEqual(
len_by_prefix(result, scenario.Scenario.RESOURCE_NAME_PREFIX),
range_num)
| apache-2.0 |
cogeorg/black_rhino | examples/firesales_simple/networkx/linalg/graphmatrix.py | 35 | 5008 | """
Adjacency matrix and incidence matrix of graphs.
"""
# Copyright (C) 2004-2011 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import networkx as nx
__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)',
'Pieter Swart (swart@lanl.gov)',
'Dan Schult(dschult@colgate.edu)'])
__all__ = ['incidence_matrix',
'adj_matrix', 'adjacency_matrix',
]
def incidence_matrix(G, nodelist=None, edgelist=None,
oriented=False, weight=None):
"""Return incidence matrix of G.
The incidence matrix assigns each row to a node and each column to an edge.
For a standard incidence matrix a 1 appears wherever a row's node is
incident on the column's edge. For an oriented incidence matrix each
edge is assigned an orientation (arbitrarily for undirected and aligning to
direction for directed). A -1 appears for the tail of an edge and 1
for the head of the edge. The elements are zero otherwise.
Parameters
----------
G : graph
A NetworkX graph
nodelist : list, optional (default= all nodes in G)
The rows are ordered according to the nodes in nodelist.
If nodelist is None, then the ordering is produced by G.nodes().
edgelist : list, optional (default= all edges in G)
The columns are ordered according to the edges in edgelist.
If edgelist is None, then the ordering is produced by G.edges().
oriented: bool, optional (default=False)
If True, matrix elements are +1 or -1 for the head or tail node
respectively of each edge. If False, +1 occurs at both nodes.
weight : string or None, optional (default=None)
The edge data key used to provide each value in the matrix.
If None, then each edge has weight 1. Edge weights, if used,
should be positive so that the orientation can provide the sign.
Returns
-------
A : NumPy matrix
The incidence matrix of G.
Notes
-----
For MultiGraph/MultiDiGraph, the edges in edgelist should be
(u,v,key) 3-tuples.
"Networks are the best discrete model for so many problems in
applied mathematics" [1]_.
References
----------
.. [1] Gil Strang, Network applications: A = incidence matrix,
http://academicearth.org/lectures/network-applications-incidence-matrix
"""
try:
import numpy as np
except ImportError:
raise ImportError(
"incidence_matrix() requires numpy: http://scipy.org/ ")
if nodelist is None:
nodelist = G.nodes()
if edgelist is None:
if G.is_multigraph():
edgelist = G.edges(keys=True)
else:
edgelist = G.edges()
A = np.zeros((len(nodelist),len(edgelist)))
node_index = dict( (node,i) for i,node in enumerate(nodelist) )
for ei,e in enumerate(edgelist):
(u,v) = e[:2]
if u == v: continue # self loops give zero column
try:
ui = node_index[u]
vi = node_index[v]
except KeyError:
raise NetworkXError('node %s or %s in edgelist '
'but not in nodelist"%(u,v)')
if weight is None:
wt = 1
else:
if G.is_multigraph():
ekey = e[2]
wt = G[u][v][ekey].get(weight,1)
else:
wt = G[u][v].get(weight,1)
if oriented:
A[ui,ei] = -wt
A[vi,ei] = wt
else:
A[ui,ei] = wt
A[vi,ei] = wt
return np.asmatrix(A)
def adjacency_matrix(G, nodelist=None, weight='weight'):
"""Return adjacency matrix of G.
Parameters
----------
G : graph
A NetworkX graph
nodelist : list, optional
The rows and columns are ordered according to the nodes in nodelist.
If nodelist is None, then the ordering is produced by G.nodes().
weight : string or None, optional (default='weight')
The edge data key used to provide each value in the matrix.
If None, then each edge has weight 1.
Returns
-------
A : numpy matrix
Adjacency matrix representation of G.
Notes
-----
If you want a pure Python adjacency matrix representation try
networkx.convert.to_dict_of_dicts which will return a
dictionary-of-dictionaries format that can be addressed as a
sparse matrix.
For MultiGraph/MultiDiGraph, the edges weights are summed.
See to_numpy_matrix for other options.
See Also
--------
to_numpy_matrix
to_dict_of_dicts
"""
return nx.to_numpy_matrix(G,nodelist=nodelist,weight=weight)
adj_matrix=adjacency_matrix
# fixture for nose tests
def setup_module(module):
from nose import SkipTest
try:
import numpy
except:
raise SkipTest("NumPy not available")
| gpl-3.0 |
intfrr/powerline-shell | segments/hg.py | 20 | 1246 | import os
import subprocess
def get_hg_status():
has_modified_files = False
has_untracked_files = False
has_missing_files = False
output = subprocess.Popen(['hg', 'status'],
stdout=subprocess.PIPE).communicate()[0]
for line in output.split('\n'):
if line == '':
continue
elif line[0] == '?':
has_untracked_files = True
elif line[0] == '!':
has_missing_files = True
else:
has_modified_files = True
return has_modified_files, has_untracked_files, has_missing_files
def add_hg_segment():
branch = os.popen('hg branch 2> /dev/null').read().rstrip()
if len(branch) == 0:
return False
bg = Color.REPO_CLEAN_BG
fg = Color.REPO_CLEAN_FG
has_modified_files, has_untracked_files, has_missing_files = get_hg_status()
if has_modified_files or has_untracked_files or has_missing_files:
bg = Color.REPO_DIRTY_BG
fg = Color.REPO_DIRTY_FG
extra = ''
if has_untracked_files:
extra += '+'
if has_missing_files:
extra += '!'
branch += (' ' + extra if extra != '' else '')
return powerline.append(' %s ' % branch, fg, bg)
add_hg_segment()
| mit |
wkrzemien/DIRAC | DataManagementSystem/Service/FileCatalogProxyHandler.py | 9 | 3580 | ########################################################################
# $HeadURL $
# File: FileCatalogProxyHandler.py
########################################################################
"""
:mod: FileCatalogProxyHandler
.. module: FileCatalogProxyHandler
:synopsis: This is a service which represents a DISET proxy to the File
Catalog
"""
## imports
import os
from types import StringTypes, DictType, TupleType
## from DIRAC
from DIRAC import gLogger, S_OK, S_ERROR
from DIRAC.Core.DISET.RequestHandler import RequestHandler
from DIRAC.Resources.Catalog.FileCatalogFactory import FileCatalogFactory
from DIRAC.Core.Utilities.Subprocess import pythonCall
from DIRAC.FrameworkSystem.Client.ProxyManagerClient import gProxyManager
__RCSID__ = "$Id$"
def initializeFileCatalogProxyHandler( serviceInfo ):
""" service initalisation """
return S_OK()
class FileCatalogProxyHandler( RequestHandler ):
"""
.. class:: FileCatalogProxyHandler
"""
types_callProxyMethod = [ StringTypes, StringTypes, TupleType, DictType ]
def export_callProxyMethod( self, fcName, methodName, args, kargs ):
""" A generic method to call methods of the Storage Element.
"""
res = pythonCall( 120, self.__proxyWrapper, fcName, methodName, args, kargs )
if res['OK']:
return res['Value']
else:
return res
def __proxyWrapper( self, fcName, methodName, args, kwargs ):
""" The wrapper will obtain the client proxy and set it up in the environment.
The required functionality is then executed and returned to the client.
:param self: self reference
:param str name: fcn name
:param tuple args: fcn args
:param dict kwargs: fcn keyword args
"""
result = self.__prepareSecurityDetails()
if not result['OK']:
return result
proxyLocation =result['Value']
try:
result = FileCatalogFactory().createCatalog( fcName )
if result['OK']:
fileCatalog = result['Value']
method = getattr( fileCatalog, methodName )
else:
return result
except AttributeError, error:
errStr = "%s proxy: no method named %s" % ( fcName, methodName )
gLogger.exception( errStr, methodName, error )
return S_ERROR( errStr )
try:
result = method( *args, **kwargs )
if os.path.exists(proxyLocation):
os.remove(proxyLocation)
return result
except Exception, error:
if os.path.exists(proxyLocation):
os.remove(proxyLocation)
errStr = "%s proxy: Exception while performing %s" % ( fcName, methodName )
gLogger.exception( errStr, error )
return S_ERROR( errStr )
def __prepareSecurityDetails( self, vomsFlag = True ):
""" Obtains the connection details for the client """
try:
credDict = self.getRemoteCredentials()
clientDN = credDict[ 'DN' ]
clientUsername = credDict['username']
clientGroup = credDict['group']
gLogger.debug( "Getting proxy for %s@%s (%s)" % ( clientUsername, clientGroup, clientDN ) )
if vomsFlag:
result = gProxyManager.downloadVOMSProxyToFile( clientDN, clientGroup )
else:
result = gProxyManager.downloadProxyToFile( clientDN, clientGroup )
if not result['OK']:
return result
gLogger.debug( "Updating environment." )
os.environ['X509_USER_PROXY'] = result['Value']
return result
except Exception, error:
exStr = "__getConnectionDetails: Failed to get client connection details."
gLogger.exception( exStr, '', error )
return S_ERROR( exStr )
| gpl-3.0 |
hoatle/odoo | addons/hr_timesheet/__init__.py | 410 | 1104 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_timesheet
import wizard
import report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
bigdatauniversity/edx-platform | lms/djangoapps/lti_provider/models.py | 33 | 5750 | """
Database models for the LTI provider feature.
This app uses migrations. If you make changes to this model, be sure to create
an appropriate migration file and check it in at the same time as your model
changes. To do that,
1. Go to the edx-platform dir
2. ./manage.py lms schemamigration lti_provider --auto "description" --settings=devstack
"""
from django.contrib.auth.models import User
from django.db import models
import logging
from xmodule_django.models import CourseKeyField, UsageKeyField
from provider.utils import short_token, long_token
log = logging.getLogger("edx.lti_provider")
class LtiConsumer(models.Model):
    """
    Database model representing an LTI consumer. This model stores the consumer
    specific settings, such as the OAuth key/secret pair and any LTI fields
    that must be persisted.
    """
    consumer_name = models.CharField(max_length=255, unique=True)
    # OAuth credentials; defaults are generated by provider.utils.short_token.
    consumer_key = models.CharField(max_length=32, unique=True, db_index=True, default=short_token)
    consumer_secret = models.CharField(max_length=32, unique=True, default=short_token)
    # tool_consumer_instance_guid from the LTI launch; optional, so it may be
    # filled in later by get_or_supplement.
    instance_guid = models.CharField(max_length=255, blank=True, null=True, unique=True)

    @staticmethod
    def get_or_supplement(instance_guid, consumer_key):
        """
        The instance_guid is the best way to uniquely identify an LTI consumer.
        However according to the LTI spec, the instance_guid field is optional
        and so cannot be relied upon to be present.

        This method first attempts to find an LtiConsumer by instance_guid.
        Failing that, it tries to find a record with a matching consumer_key.
        This can be the case if the LtiConsumer record was created as the result
        of an LTI launch with no instance_guid.

        If the instance_guid is now present, the LtiConsumer model will be
        supplemented with the instance_guid, to more concretely identify the
        consumer.

        In practice, nearly all major LTI consumers provide an instance_guid, so
        the fallback mechanism of matching by consumer key should be rarely
        required.

        Raises LtiConsumer.DoesNotExist if no record matches either identifier.
        """
        consumer = None
        if instance_guid:
            try:
                consumer = LtiConsumer.objects.get(instance_guid=instance_guid)
            except LtiConsumer.DoesNotExist:
                # The consumer may not exist, or its record may not have a guid
                pass
        # Search by consumer key instead of instance_guid. If there is no
        # consumer with a matching key, the LTI launch does not have permission
        # to access the content. (DoesNotExist propagates to the caller here.)
        if not consumer:
            consumer = LtiConsumer.objects.get(
                consumer_key=consumer_key,
            )
        # Add the instance_guid field to the model if it's not there already.
        if instance_guid and not consumer.instance_guid:
            consumer.instance_guid = instance_guid
            consumer.save()
        return consumer
class OutcomeService(models.Model):
    """
    Model for a single outcome service associated with an LTI consumer. Note
    that a given consumer may have more than one outcome service URL over its
    lifetime, so we need to store the outcome service separately from the
    LtiConsumer model.

    An outcome service can be identified in two ways, depending on the
    information provided by an LTI launch. The ideal way to identify the service
    is by instance_guid, which should uniquely identify a consumer. However that
    field is optional in the LTI launch, and so if it is missing we can fall
    back on the consumer key (which should be created uniquely for each consumer
    although we don't have a technical way to guarantee that).

    Some LTI-specified fields use the prefix lis_; this refers to the IMS
    Learning Information Services standard from which LTI inherits some
    properties
    """
    # URL the grade-passback result is POSTed to; unique per service.
    lis_outcome_service_url = models.CharField(max_length=255, unique=True)
    lti_consumer = models.ForeignKey(LtiConsumer)
class GradedAssignment(models.Model):
    """
    Model representing a single launch of a graded assignment by an individual
    user. There will be a row created here only if the LTI consumer may require
    a result to be returned from the LTI launch (determined by the presence of
    the lis_result_sourcedid parameter in the launch POST). There will be only
    one row created for a given usage/consumer combination; repeated launches of
    the same content by the same user from the same LTI consumer will not add
    new rows to the table.

    Some LTI-specified fields use the prefix lis_; this refers to the IMS
    Learning Information Services standard from which LTI inherits some
    properties
    """
    user = models.ForeignKey(User, db_index=True)
    course_key = CourseKeyField(max_length=255, db_index=True)
    usage_key = UsageKeyField(max_length=255, db_index=True)
    outcome_service = models.ForeignKey(OutcomeService)
    # Opaque identifier the consumer expects back in grade-passback calls.
    lis_result_sourcedid = models.CharField(max_length=255, db_index=True)
    # Monotonic counter used to order outcome updates.
    version_number = models.IntegerField(default=0)

    class Meta(object):
        unique_together = ('outcome_service', 'lis_result_sourcedid')
class LtiUser(models.Model):
    """
    Model mapping the identity of an LTI user to an account on the edX platform.
    The LTI user_id field is guaranteed to be unique per LTI consumer (per
    to the LTI spec), so we guarantee a unique mapping from LTI to edX account
    by using the lti_consumer/lti_user_id tuple.
    """
    lti_consumer = models.ForeignKey(LtiConsumer)
    # user_id supplied by the consumer; only unique in combination with it.
    lti_user_id = models.CharField(max_length=255)
    edx_user = models.OneToOneField(User)

    class Meta(object):
        unique_together = ('lti_consumer', 'lti_user_id')
| agpl-3.0 |
gdub/django | tests/introspection/models.py | 103 | 1079 | from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Reporter(models.Model):
    """Sample model exercising several field types and unique_together."""
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    email = models.EmailField()
    facebook_user_id = models.BigIntegerField(null=True)
    raw_data = models.BinaryField(null=True)
    small_int = models.SmallIntegerField()

    class Meta:
        unique_together = ('first_name', 'last_name')

    def __str__(self):
        # Human-readable identity: "First Last".
        return "%s %s" % (self.first_name, self.last_name)
@python_2_unicode_compatible
class Article(models.Model):
    """Sample model with FKs (including self-referential) and index_together."""
    headline = models.CharField(max_length=100)
    pub_date = models.DateField()
    body = models.TextField(default='')
    reporter = models.ForeignKey(Reporter)
    # Self-referential FK: an article may respond to another article.
    response_to = models.ForeignKey('self', null=True)

    def __str__(self):
        return self.headline

    class Meta:
        ordering = ('headline',)
        index_together = [
            ["headline", "pub_date"],
        ]
| bsd-3-clause |
yoava333/servo | tests/wpt/css-tests/css-text-decor-3_dev/html/support/generate-text-emphasis-ruby-tests.py | 829 | 3042 | #!/usr/bin/env python
# - * - coding: UTF-8 - * -
"""
This script generates tests text-emphasis-ruby-001 ~ 004 which tests
emphasis marks with ruby in four directions. It outputs a list of all
tests it generated in the format of Mozilla reftest.list to the stdout.
"""
from __future__ import unicode_literals
TEST_FILE = 'text-emphasis-ruby-{:03}{}.html'
TEST_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Test: text-emphasis and ruby, {wm}, {pos}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<link rel="help" href="https://drafts.csswg.org/css-text-decor-3/#text-emphasis-position-property">
<meta name="assert" content="emphasis marks are drawn outside the ruby">
<link rel="match" href="text-emphasis-ruby-{index:03}-ref.html">
<p>Pass if the emphasis marks are outside the ruby:</p>
<div style="line-height: 5; writing-mode: {wm}; ruby-position: {ruby_pos}; text-emphasis-position: {posval}">ルビ<span style="text-emphasis: circle">と<ruby>圏<rt>けん</rt>点<rt>てん</rt></ruby>を</span>同時</div>
'''
REF_FILE = 'text-emphasis-ruby-{:03}-ref.html'
REF_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Reference: text-emphasis and ruby, {wm}, {pos}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<style> rtc {{ font-variant-east-asian: inherit; }} </style>
<p>Pass if the emphasis marks are outside the ruby:</p>
<div style="line-height: 5; writing-mode: {wm}; ruby-position: {posval}">ルビ<ruby>と<rtc>●</rtc>圏<rt>けん</rt><rtc>●</rtc>点<rt>てん</rt><rtc>●</rtc>を<rtc>●</rtc></ruby>同時</div>
'''
TEST_CASES = [
('top', 'horizontal-tb', 'over', [
('horizontal-tb', 'over right')]),
('bottom', 'horizontal-tb', 'under', [
('horizontal-tb', 'under right')]),
('right', 'vertical-rl', 'over', [
('vertical-rl', 'over right'),
('vertical-lr', 'over right')]),
('left', 'vertical-rl', 'under', [
('vertical-rl', 'over left'),
('vertical-lr', 'over left')]),
]
SUFFIXES = ['', 'a']
def write_file(filename, content):
    """Write *content* to *filename*, UTF-8 encoded, in binary mode."""
    encoded = content.encode('UTF-8')
    with open(filename, 'wb') as sink:
        sink.write(encoded)
print("# START tests from {}".format(__file__))
idx = 0
# Each case yields one shared reference file plus one test file per
# writing-mode subtest; suffixes '', 'a', ... distinguish the test files.
for pos, ref_wm, ruby_pos, subtests in TEST_CASES:
    idx += 1
    ref_file = REF_FILE.format(idx)
    ref_content = REF_TEMPLATE.format(pos=pos, wm=ref_wm, posval=ruby_pos)
    write_file(ref_file, ref_content)
    suffix = iter(SUFFIXES)
    for wm, posval in subtests:
        test_file = TEST_FILE.format(idx, next(suffix))
        test_content = TEST_TEMPLATE.format(
            wm=wm, pos=pos, index=idx, ruby_pos=ruby_pos, posval=posval)
        write_file(test_file, test_content)
        # Emit a Mozilla reftest.list "==" line pairing test and reference.
        print("== {} {}".format(test_file, ref_file))
print("# END tests from {}".format(__file__))
| mpl-2.0 |
tomasreimers/tensorflow-emscripten | tensorflow/contrib/rnn/python/ops/gru_ops.py | 31 | 5438 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrapper for the Block GRU Op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.rnn.python.ops import core_rnn_cell
from tensorflow.contrib.util import loader
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.platform import resource_loader
_gru_ops_so = loader.load_op_library(
resource_loader.get_path_to_datafile("_gru_ops.so"))
@ops.RegisterGradient("GRUBlockCell")
def _GRUBlockCellGrad(op, *grad):
  r"""Gradient for GRUBlockCell.

  Args:
    op: Op for which the gradient is defined.
    *grad: Gradients of the optimization function wrt output
      for the Op.

  Returns:
    d_x: Gradients wrt to x
    d_h: Gradients wrt to h
    d_w_ru: Gradients wrt to w_ru
    d_w_c: Gradients wrt to w_c
    d_b_ru: Gradients wrt to b_ru
    d_b_c: Gradients wrt to b_c

  Mathematics behind the Gradients below:
  ```
  d_c_bar = d_h \circ (1-u) \circ (1-c \circ c)
  d_u_bar = d_h \circ (h-c) \circ u \circ (1-u)

  d_r_bar_u_bar = [d_r_bar d_u_bar]

  [d_x_component_1 d_h_prev_component_1] = d_r_bar_u_bar * w_ru^T

  [d_x_component_2 d_h_prevr] = d_c_bar * w_c^T

  d_x = d_x_component_1 + d_x_component_2

  d_h_prev = d_h_prev_component_1 + d_h_prevr \circ r + u
  ```
  Below calculation is performed in the python wrapper for the Gradients
  (not in the gradient kernel.)
  ```
  d_w_ru = x_h_prevr^T * d_c_bar

  d_w_c = x_h_prev^T * d_r_bar_u_bar

  d_b_ru = sum of d_r_bar_u_bar along axis = 0

  d_b_c = sum of d_c_bar along axis = 0
  ```
  """
  x, h_prev, w_ru, w_c, b_ru, b_c = op.inputs
  r, u, c, _ = op.outputs
  # Only the gradient wrt the final output h is consumed; the incoming
  # gradients for r, u and c are ignored.
  _, _, _, d_h = grad

  # The C++ kernel produces the input/state/pre-activation gradients.
  d_x, d_h_prev, d_c_bar, d_r_bar_u_bar = _gru_ops_so.gru_block_cell_grad(
      x, h_prev, w_ru, w_c, b_ru, b_c, r, u, c, d_h)

  # Weight and bias gradients are assembled here in Python (see docstring).
  x_h_prev = array_ops.concat([x, h_prev], 1)
  d_w_ru = math_ops.matmul(x_h_prev, d_r_bar_u_bar, transpose_a=True)
  d_b_ru = nn_ops.bias_add_grad(d_r_bar_u_bar)

  x_h_prevr = array_ops.concat([x, h_prev * r], 1)
  d_w_c = math_ops.matmul(x_h_prevr, d_c_bar, transpose_a=True)
  d_b_c = nn_ops.bias_add_grad(d_c_bar)

  return d_x, d_h_prev, d_w_ru, d_w_c, d_b_ru, d_b_c
class GRUBlockCell(core_rnn_cell.RNNCell):
  r"""Block GRU cell implementation.

  The implementation is based on:  http://arxiv.org/abs/1406.1078
  Computes the LSTM cell forward propagation for 1 time step.

  This kernel op implements the following mathematical equations:

  Biases are initialized with:

  * `b_ru` - constant_initializer(1.0)
  * `b_c` - constant_initializer(0.0)

  ```
  x_h_prev = [x, h_prev]

  [r_bar u_bar] = x_h_prev * w_ru + b_ru

  r = sigmoid(r_bar)
  u = sigmoid(u_bar)

  h_prevr = h_prev \circ r

  x_h_prevr = [x h_prevr]

  c_bar = x_h_prevr * w_c + b_c
  c = tanh(c_bar)

  h = (1-u) \circ c + u \circ h_prev
  ```
  """

  def __init__(self, cell_size):
    """Initialize the Block GRU cell.

    Args:
      cell_size: int, GRU cell size.
    """
    self._cell_size = cell_size

  @property
  def state_size(self):
    # GRU state is a single vector of cell_size units.
    return self._cell_size

  @property
  def output_size(self):
    return self._cell_size

  def __call__(self, x, h_prev, scope=None):
    """GRU cell.

    Args:
      x: input tensor; second dimension must be statically known.
      h_prev: previous state; second dimension must equal cell_size.
      scope: optional VariableScope name (defaults to the class name).

    Returns:
      A (output, new_state) pair; for GRU these are the same tensor.

    Raises:
      ValueError: if the static input or state size cannot be validated.
    """
    with vs.variable_scope(scope or type(self).__name__):
      input_size = x.get_shape().with_rank(2)[1]

      # Check if the input size exist.
      if input_size is None:
        raise ValueError("Expecting input_size to be set.")

      # Check cell_size == state_size from h_prev.
      cell_size = h_prev.get_shape().with_rank(2)[1]
      if cell_size != self._cell_size:
        raise ValueError("Shape of h_prev[1] incorrect: cell_size %i vs %s" %
                         (self._cell_size, cell_size))

      if cell_size is None:
        raise ValueError("cell_size from `h_prev` should not be None.")

      # Gate weights/bias (reset+update concatenated); b_ru starts at 1.0
      # so gates are initially open.
      w_ru = vs.get_variable("w_ru", [input_size + self._cell_size,
                                      self._cell_size * 2])
      b_ru = vs.get_variable(
          "b_ru", [self._cell_size * 2],
          initializer=init_ops.constant_initializer(1.0))
      # Candidate-activation weights/bias.
      w_c = vs.get_variable("w_c",
                            [input_size + self._cell_size, self._cell_size])
      b_c = vs.get_variable(
          "b_c", [self._cell_size],
          initializer=init_ops.constant_initializer(0.0))
      _gru_block_cell = _gru_ops_so.gru_block_cell  # pylint: disable=invalid-name
      _, _, _, new_h = _gru_block_cell(
          x=x, h_prev=h_prev, w_ru=w_ru, w_c=w_c, b_ru=b_ru, b_c=b_c)

      return new_h, new_h
| apache-2.0 |
hkwi/twink | twink/ofp4/parse.py | 1 | 34533 |
from __future__ import absolute_import
import struct
from collections import namedtuple
from . import *
# Keep references to the builtins before parser locals named ``len``/``type``
# shadow them inside the functions below.
_len = len
_type = type
def _align(length):
return (length+7)//8*8
class _pos(object):
offset = 0
def _cursor(offset):
	"""Coerce *offset* (an int or an existing _pos) into a _pos cursor."""
	if isinstance(offset, _pos):
		return offset
	if isinstance(offset, int):
		pos = _pos()
		pos.offset = offset
		return pos
	raise ValueError(offset)
def _unpack(fmt, msg, offset):
	"""struct.unpack_from with network byte order, advancing the cursor."""
	cur = _cursor(offset)
	if fmt[0] != "!":
		fmt = "!" + fmt
	values = struct.unpack_from(fmt, msg, cur.offset)
	cur.offset += struct.calcsize(fmt)
	return values
def from_bitmap(uint32_t_list):
	"""Expand a list of uint32 bitmap words into the sorted list of set bit positions."""
	positions = []
	for word_index, word in enumerate(uint32_t_list):
		for bit in range(32):
			if word & (1 << bit):
				positions.append(32 * word_index + bit)
	return positions
def parse(message, offset=0):
	"""Parse one OpenFlow 1.3 message at *offset* into its namedtuple form.

	Returns None when *message* is None; asserts the header version is 4.
	"""
	if message is None:
		return None
	cursor = _cursor(offset)
	header = ofp_header(message, cursor.offset)
	assert header.version == 4
	msg_type = header.type
	if msg_type == OFPT_HELLO:
		return ofp_hello(message, cursor)
	if msg_type == OFPT_ERROR:
		return ofp_error_msg(message, cursor)
	if msg_type == OFPT_FEATURES_REPLY:
		return ofp_switch_features(message, cursor)
	if msg_type in (OFPT_SET_CONFIG, OFPT_GET_CONFIG_REPLY):
		return ofp_switch_config(message, cursor)
	if msg_type == OFPT_PACKET_IN:
		return ofp_packet_in(message, cursor)
	if msg_type == OFPT_FLOW_REMOVED:
		return ofp_flow_removed(message, cursor)
	if msg_type == OFPT_PORT_STATUS:
		return ofp_port_status(message, cursor)
	if msg_type == OFPT_PACKET_OUT:
		return ofp_packet_out(message, cursor)
	if msg_type == OFPT_FLOW_MOD:
		return ofp_flow_mod(message, cursor)
	if msg_type == OFPT_GROUP_MOD:
		return ofp_group_mod(message, cursor)
	if msg_type == OFPT_PORT_MOD:
		return ofp_port_mod(message, cursor)
	if msg_type == OFPT_TABLE_MOD:
		return ofp_table_mod(message, cursor)
	if msg_type == OFPT_MULTIPART_REQUEST:
		return ofp_multipart_request(message, cursor)
	if msg_type == OFPT_MULTIPART_REPLY:
		return ofp_multipart_reply(message, cursor)
	if msg_type == OFPT_EXPERIMENTER:
		return ofp_experimenter_(message, cursor)
	if msg_type == OFPT_QUEUE_GET_CONFIG_REQUEST:
		return ofp_queue_get_config_request(message, cursor)
	if msg_type == OFPT_QUEUE_GET_CONFIG_REPLY:
		return ofp_queue_get_config_reply(message, cursor)
	if msg_type in (OFPT_SET_ASYNC, OFPT_GET_ASYNC_REPLY):
		return ofp_async_config(message, cursor)
	if msg_type == OFPT_METER_MOD:
		return ofp_meter_mod(message, cursor)
	# Remaining types (ECHO_REQUEST/REPLY, FEATURES_REQUEST,
	# BARRIER_REQUEST/REPLY, GET_ASYNC_REQUEST) are header + raw payload.
	return ofp_(message, cursor)
# 7.1
def ofp_header(message, offset):
	"""Parse the common 8-byte OpenFlow header; asserts version 4 (OF 1.3)."""
	cursor = _cursor(offset)
	fields = _unpack("BBHI", message, cursor)
	assert fields[0] == 4
	return namedtuple("ofp_header", "version type length xid")(*fields)
def ofp_(message, offset):
	"""Generic message form: header plus the raw remaining payload bytes."""
	cursor = _cursor(offset)
	start = cursor.offset
	header = ofp_header(message, cursor)
	data = message[cursor.offset:start + header.length]
	cursor.offset = start + header.length
	return namedtuple("ofp_", "header,data")(header, data)
# 7.2.1 and 7.3.5.7
def ofp_port(message, offset):
	"""Parse an ofp_port structure; the name is NUL-stripped and UTF-8 decoded."""
	cursor = _cursor(offset)
	fields = list(_unpack("I4x6s2x16sII6I", message, cursor))
	fields[2] = fields[2].partition(b"\0")[0].decode("UTF-8")
	field_names = ("port_no hw_addr name config state "
		"curr advertised supported peer curr_speed max_speed")
	return namedtuple("ofp_port", field_names)(*fields)
# 7.2.2
def ofp_packet_queue(message, offset):
	"""Parse ofp_packet_queue and its trailing queue property list."""
	cursor = _cursor(offset)
	start = cursor.offset
	(queue_id, port, len) = _unpack("IIH6x", message, cursor)
	properties = []
	while cursor.offset < start + len:
		kind = ofp_queue_prop_header(message, cursor.offset).property
		if kind == OFPQT_MIN:
			properties.append(ofp_queue_prop_min_rate(message, cursor))
		elif kind == OFPQT_MAX:
			properties.append(ofp_queue_prop_max_rate(message, cursor))
		elif kind == OFPQT_EXPERIMENTER:
			properties.append(ofp_queue_prop_experimenter(message, cursor))
		else:
			raise ValueError(ofp_queue_prop_header(message, cursor.offset))
	assert cursor.offset == start + len
	return namedtuple("ofp_packet_queue",
		"queue_id port len properties")(queue_id, port, len, properties)
def ofp_queue_prop_header(message, offset):
	"""Common queue property header: (property, len)."""
	fields = _unpack("HH4x", message, offset)
	return namedtuple("ofp_queue_prop_header", "property len")(*fields)
def ofp_queue_prop_min_rate(message, offset):
	"""OFPQT_MIN queue property: header plus the guaranteed rate."""
	cursor = _cursor(offset)
	prop_header = ofp_queue_prop_header(message, cursor)
	(rate,) = _unpack("H6x", message, cursor)
	return namedtuple("ofp_queue_prop_min_rate",
		"prop_header rate")(prop_header, rate)
def ofp_queue_prop_max_rate(message, offset):
	"""OFPQT_MAX queue property: header plus the maximum rate."""
	cursor = _cursor(offset)
	prop_header = ofp_queue_prop_header(message, cursor)
	(rate,) = _unpack("H6x", message, cursor)
	return namedtuple("ofp_queue_prop_max_rate",
		"prop_header rate")(prop_header, rate)
def ofp_queue_prop_experimenter(message, offset):
	"""Experimenter queue property: header, experimenter id, opaque data."""
	cursor = _cursor(offset)
	start = cursor.offset
	prop_header = ofp_queue_prop_header(message, cursor)
	(experimenter,) = _unpack("I4x", message, cursor)
	data = message[cursor.offset:start + prop_header.len]
	cursor.offset = start + prop_header.len
	return namedtuple("ofp_queue_prop_experimenter",
		"prop_header experimenter data")(prop_header, experimenter, data)
# 7.2.3.1
def ofp_match(message, offset):
	"""Parse ofp_match; oxm_fields stay raw bytes, cursor skips the pad to 8."""
	cursor = _cursor(offset)
	start = cursor.offset
	(type, length) = _unpack("HH", message, cursor)
	oxm_fields = message[cursor.offset:start + length]
	cursor.offset = start + _align(length)
	return namedtuple("ofp_match",
		"type length oxm_fields")(type, length, oxm_fields)
# 7.2.3.8
def ofp_oxm_experimenter_header(message, offset):
	"""Experimenter OXM header: (oxm_header, experimenter)."""
	fields = _unpack("II", message, offset)
	return namedtuple("ofp_oxm_experimenter_header",
		"oxm_header experimenter")(*fields)
# 7.2.4
def ofp_instruction(message, offset):
	"""Common instruction header: (type, len)."""
	fields = _unpack("HH", message, offset)
	return namedtuple("ofp_instruction", "type len")(*fields)
def ofp_instruction_(message, offset):
	"""Peek at the instruction type and parse the concrete instruction."""
	cursor = _cursor(offset)
	kind = ofp_instruction(message, cursor.offset).type
	if kind == OFPIT_GOTO_TABLE:
		return ofp_instruction_goto_table(message, cursor)
	if kind == OFPIT_WRITE_METADATA:
		return ofp_instruction_write_metadata(message, cursor)
	if kind in (OFPIT_WRITE_ACTIONS, OFPIT_APPLY_ACTIONS, OFPIT_CLEAR_ACTIONS):
		return ofp_instruction_actions(message, cursor)
	if kind == OFPIT_METER:
		return ofp_instruction_meter(message, cursor)
	if kind == OFPIT_EXPERIMENTER:
		return ofp_instruction_experimenter(message, cursor)
	raise ValueError(ofp_instruction(message, cursor.offset))
def ofp_instruction_goto_table(message, offset):
	"""OFPIT_GOTO_TABLE instruction: (type, len, table_id)."""
	fields = _unpack("HHB3x", message, offset)
	return namedtuple("ofp_instruction_goto_table",
		"type len table_id")(*fields)
def ofp_instruction_write_metadata(message, offset):
	"""OFPIT_WRITE_METADATA instruction: (type, len, metadata, metadata_mask)."""
	fields = _unpack("HH4xQQ", message, offset)
	return namedtuple("ofp_instruction_write_metadata",
		"type len metadata metadata_mask")(*fields)
def ofp_instruction_actions(message, offset):
	"""OFPIT_*_ACTIONS instruction: header plus a packed list of actions."""
	cursor = _cursor(offset)
	start = cursor.offset
	(type, len) = _unpack("HH4x", message, cursor)
	actions = []
	while cursor.offset < start + len:
		actions.append(ofp_action_(message, cursor))
	assert cursor.offset == start + len
	return namedtuple("ofp_instruction_actions",
		"type,len,actions")(type, len, actions)
def ofp_instruction_meter(message, offset):
	"""OFPIT_METER instruction: (type, len, meter_id)."""
	fields = _unpack("HHI", message, offset)
	return namedtuple("ofp_instruction_meter", "type len meter_id")(*fields)
def ofp_instruction_experimenter(message, offset):
	"""OFPIT_EXPERIMENTER instruction: fixed fields plus opaque data."""
	cursor = _cursor(offset)
	start = cursor.offset
	(type, len, experimenter) = _unpack("HHI", message, cursor)
	data = message[cursor.offset:start + len]
	cursor.offset = start + len
	return namedtuple("ofp_instruction_experimenter",
		"type len experimenter data")(type, len, experimenter, data)
# 7.2.5
def ofp_action_header(message, offset):
	"""Common action header: (type, len)."""
	fields = _unpack("HH4x", message, offset)
	return namedtuple("ofp_action_header", "type,len")(*fields)
def ofp_action_(message, offset):
	"""Peek at the action type and parse the concrete action.

	Unknown action types fall back to consuming a bare 8-byte header.
	"""
	cursor = _cursor(offset)
	kind = ofp_action_header(message, cursor.offset).type
	if kind == OFPAT_OUTPUT:
		return ofp_action_output(message, cursor)
	if kind == OFPAT_GROUP:
		return ofp_action_group(message, cursor)
	if kind == OFPAT_SET_QUEUE:
		return ofp_action_set_queue(message, cursor)
	if kind == OFPAT_SET_MPLS_TTL:
		return ofp_action_mpls_ttl(message, cursor)
	if kind == OFPAT_SET_NW_TTL:
		return ofp_action_nw_ttl(message, cursor)
	if kind in (OFPAT_PUSH_VLAN, OFPAT_PUSH_MPLS, OFPAT_PUSH_PBB):
		return ofp_action_push(message, cursor)
	if kind == OFPAT_POP_MPLS:
		return ofp_action_pop_mpls(message, cursor)
	if kind == OFPAT_SET_FIELD:
		return ofp_action_set_field(message, cursor)
	if kind == OFPAT_EXPERIMENTER:
		return ofp_action_experimenter_(message, cursor)
	return ofp_action_header(message, cursor)
def ofp_action_output(message, offset):
	"""OFPAT_OUTPUT action: (type, len, port, max_len)."""
	fields = _unpack("HHIH6x", message, offset)
	return namedtuple("ofp_action_output", "type,len,port,max_len")(*fields)
def ofp_action_group(message, offset):
	"""OFPAT_GROUP action: (type, len, group_id)."""
	fields = _unpack("HHI", message, offset)
	return namedtuple("ofp_action_group", "type,len,group_id")(*fields)
def ofp_action_set_queue(message, offset):
	"""OFPAT_SET_QUEUE action: (type, len, queue_id)."""
	fields = _unpack("HHI", message, offset)
	return namedtuple("ofp_action_set_queue", "type,len,queue_id")(*fields)
def ofp_action_mpls_ttl(message, offset):
	"""Parse an OFPAT_SET_MPLS_TTL action: (type, len, mpls_ttl).

	Bug fix: the constructor was misspelled ``namedutple``, so parsing any
	OFPAT_SET_MPLS_TTL action raised NameError at runtime.
	"""
	return namedtuple("ofp_action_mpls_ttl",
		"type,len,mpls_ttl")(*_unpack("HHB3x", message, offset))
def ofp_action_nw_ttl(message, offset):
	"""OFPAT_SET_NW_TTL action: (type, len, nw_ttl)."""
	fields = _unpack("HHB3x", message, offset)
	return namedtuple("ofp_action_nw_ttl", "type,len,nw_ttl")(*fields)
def ofp_action_push(message, offset):
	"""OFPAT_PUSH_VLAN/MPLS/PBB action: (type, len, ethertype)."""
	fields = _unpack("HHH2x", message, offset)
	return namedtuple("ofp_action_push", "type,len,ethertype")(*fields)
def ofp_action_pop_mpls(message, offset):
	"""OFPAT_POP_MPLS action: (type, len, ethertype)."""
	fields = _unpack("HHH2x", message, offset)
	return namedtuple("ofp_action_pop_mpls", "type,len,ethertype")(*fields)
def ofp_action_set_field(message, offset):
	"""OFPAT_SET_FIELD action: the OXM TLV is kept as raw bytes in ``field``."""
	cursor = _cursor(offset)
	start = cursor.offset
	(type, len) = _unpack("HH", message, cursor)
	field = message[cursor.offset:start + len]
	cursor.offset = start + len
	return namedtuple("ofp_action_set_field", "type,len,field")(type, len, field)
def ofp_action_experimenter_header(message, offset):
	"""Experimenter action header: (type, len, experimenter)."""
	fields = _unpack("HHI", message, offset)
	return namedtuple("ofp_action_experimenter_header",
		"type,len,experimenter")(*fields)
def ofp_action_experimenter_(message, offset):
	"""OFPAT_EXPERIMENTER action: header fields plus trailing opaque data."""
	cursor = _cursor(offset)
	start = cursor.offset
	header = ofp_action_experimenter_header(message, cursor)
	data = message[cursor.offset:start + header.len]
	cursor.offset = start + header.len
	return namedtuple("ofp_action_experimenter_",
		"type,len,experimenter,data")(
		header.type, header.len, header.experimenter, data)
# 7.3.1
def ofp_switch_features(message, offset=0):
	"""OFPT_FEATURES_REPLY body; the trailing reserved word is read but not returned."""
	cursor = _cursor(offset)
	header = ofp_header(message, cursor)
	(datapath_id, n_buffers, n_tables,
		auxiliary_id, capabilities, _reserved) = _unpack("QIBB2xII", message, cursor)
	return namedtuple("ofp_switch_features",
		"header,datapath_id,n_buffers,n_tables,auxiliary_id,capabilities")(
		header, datapath_id, n_buffers, n_tables, auxiliary_id, capabilities)
# 7.3.2
def ofp_switch_config(message, offset):
	"""OFPT_SET_CONFIG / OFPT_GET_CONFIG_REPLY body: (flags, miss_send_len)."""
	cursor = _cursor(offset)
	header = ofp_header(message, cursor)
	(flags, miss_send_len) = _unpack("HH", message, cursor)
	return namedtuple("ofp_switch_config",
		"header,flags,miss_send_len")(header, flags, miss_send_len)
# 7.3.3
def ofp_table_mod(message, offset):
	"""OFPT_TABLE_MOD body: (table_id, config)."""
	cursor = _cursor(offset)
	header = ofp_header(message, cursor)
	(table_id, config) = _unpack("B3xI", message, cursor)
	return namedtuple("ofp_table_mod",
		"header,table_id,config")(header, table_id, config)
# 7.3.4.1
def ofp_flow_mod(message, offset):
	"""OFPT_FLOW_MOD: fixed fields, then the match, then the instruction list."""
	cursor = _cursor(offset)
	start = cursor.offset
	header = ofp_header(message, cursor)
	(cookie, cookie_mask, table_id, command,
		idle_timeout, hard_timeout, priority,
		buffer_id, out_port, out_group, flags) = _unpack(
		"QQBB3H3IH2x", message, cursor)
	match = ofp_match(message, cursor)
	instructions = _list_fetch(message, cursor, start + header.length, ofp_instruction_)
	return namedtuple("ofp_flow_mod",
		"header,cookie,cookie_mask,table_id,command,"
		"idle_timeout,hard_timeout,priority,"
		"buffer_id,out_port,out_group,flags,match,instructions")(
		header, cookie, cookie_mask, table_id, command,
		idle_timeout, hard_timeout, priority,
		buffer_id, out_port, out_group, flags, match, instructions)
# 7.3.4.2
def ofp_group_mod(message, offset):
	"""OFPT_GROUP_MOD: fixed fields plus a packed list of buckets."""
	cursor = _cursor(offset)
	start = cursor.offset
	header = ofp_header(message, cursor)
	(command, type, group_id) = _unpack("HBxI", message, cursor)
	buckets = []
	while cursor.offset < start + header.length:
		buckets.append(ofp_bucket(message, cursor))
	return namedtuple("ofp_group_mod",
		"header,command,type,group_id,buckets")(
		header, command, type, group_id, buckets)
def ofp_bucket(message, offset):
	"""Parse one group bucket: fixed fields plus its packed action list."""
	cursor = _cursor(offset)
	start = cursor.offset
	(len, weight, watch_port, watch_group) = _unpack("HHII4x", message, cursor)
	actions = []
	while cursor.offset < start + len:
		actions.append(ofp_action_(message, cursor))
	return namedtuple("ofp_bucket",
		"len weight watch_port watch_group actions")(
		len, weight, watch_port, watch_group, actions)
# 7.3.4.3
def ofp_port_mod(message, offset):
	"""Parse an OFPT_PORT_MOD message (OF1.3 section 7.3.4.3).

	Bug fixes:
	- the body was unpacked at the plain integer ``offset`` (which still
	  pointed at the ofp_header) instead of the advancing ``cursor``, so the
	  wrong bytes were read and the length assertion could never hold;
	- the format "I4x6s2xIII4x" yields five values (port_no, hw_addr,
	  config, mask, advertise) but only four names were bound, so this
	  function always raised ValueError. The ``mask`` field is now returned
	  as well.
	"""
	cursor = _cursor(offset)
	offset = cursor.offset
	header = ofp_header(message, cursor)
	(port_no, hw_addr, config, mask, advertise) = _unpack(
		"I4x6s2xIII4x", message, cursor)
	assert offset + header.length == cursor.offset
	return namedtuple("ofp_port_mod",
		"header,port_no,hw_addr,config,mask,advertise")(
		header, port_no, hw_addr, config, mask, advertise)
# 7.3.4.4
def ofp_meter_mod(message, offset):
	"""OFPT_METER_MOD: fixed fields plus a packed list of meter bands."""
	cursor = _cursor(offset)
	start = cursor.offset
	header = ofp_header(message, cursor)
	(command, flags, meter_id) = _unpack("HHI", message, cursor)
	bands = []
	while cursor.offset < start + header.length:
		bands.append(ofp_meter_band_(message, cursor))
	return namedtuple("ofp_meter_mod",
		"header,command,flags,meter_id,bands")(
		header, command, flags, meter_id, bands)
def ofp_meter_band_header(message, offset):
	"""Common meter band header: (type, len, rate, burst_size)."""
	fields = _unpack("HHII", message, offset)
	return namedtuple("ofp_meter_band_header",
		"type,len,rate,burst_size")(*fields)
def ofp_meter_band_(message, offset):
	"""Peek at the band type and parse the concrete meter band."""
	cursor = _cursor(offset)
	header = ofp_meter_band_header(message, cursor.offset)
	if header.type == OFPMBT_DROP:
		return ofp_meter_band_drop(message, cursor)
	if header.type == OFPMBT_DSCP_REMARK:
		return ofp_meter_band_dscp_remark(message, cursor)
	if header.type == OFPMBT_EXPERIMENTER:
		return ofp_meter_band_experimenter(message, cursor)
	raise ValueError(header)
def ofp_meter_band_drop(message, offset):
	"""OFPMBT_DROP meter band: (type, len, rate, burst_size)."""
	fields = _unpack("HHII4x", message, offset)
	return namedtuple("ofp_meter_band_drop",
		"type,len,rate,burst_size")(*fields)
def ofp_meter_band_dscp_remark(message, offset):
	"""OFPMBT_DSCP_REMARK meter band: adds prec_level to the common fields."""
	fields = _unpack("HHIIB3x", message, offset)
	return namedtuple("ofp_meter_band_dscp_remark",
		"type,len,rate,burst_size,prec_level")(*fields)
def ofp_meter_band_experimenter(message, offset):
	"""Parse an OFPMBT_EXPERIMENTER meter band.

	Bug fix: the fixed fields were unpacked at the plain integer ``offset``
	instead of the shared ``cursor``, so the cursor never advanced (breaking
	the caller's band-scanning loop in ofp_meter_mod) and ``data`` wrongly
	included the 16 fixed-header bytes. The cursor is now advanced past the
	whole band (``len`` covers the fixed part plus the experimenter data).
	"""
	cursor = _cursor(offset)
	offset = cursor.offset
	(type, len, rate, burst_size, experimenter) = _unpack("HH3I", message, cursor)
	data = message[cursor.offset:offset + len]
	cursor.offset = offset + len
	return namedtuple("ofp_meter_band_experimenter",
		"type,len,rate,burst_size,experimenter,data")(
		type, len, rate, burst_size, experimenter, data)
# 7.3.5
def ofp_multipart_request(message, offset=0):
	"""Parse an OFPT_MULTIPART_REQUEST message (7.3.5).

	Bug fix: the ValueError message for an unknown multipart type contained
	the typo "multiaprt"; corrected to "multipart".
	"""
	cursor = _cursor(offset)
	offset = cursor.offset
	header = ofp_header(message, cursor)
	(type, flags) = _unpack("HH4x", message, cursor)
	if type in (OFPMP_DESC, OFPMP_TABLE, OFPMP_GROUP_DESC,
			OFPMP_GROUP_FEATURES, OFPMP_METER_FEATURES, OFPMP_PORT_DESC):
		# These request types carry no body at all.
		body = ""
	elif type == OFPMP_FLOW:
		body = ofp_flow_stats_request(message, cursor)
	elif type == OFPMP_AGGREGATE:
		body = ofp_aggregate_stats_request(message, cursor)
	elif type == OFPMP_PORT_STATS:
		body = ofp_port_stats_request(message, cursor)
	elif type == OFPMP_QUEUE:
		body = ofp_queue_stats_request(message, cursor)
	elif type == OFPMP_GROUP:
		body = ofp_group_stats_request(message, cursor)
	elif type in (OFPMP_METER, OFPMP_METER_CONFIG):
		body = ofp_meter_multipart_requests(message, cursor)
	elif type == OFPMP_TABLE_FEATURES:
		# Zero or more table-features elements fill the rest of the message.
		body = []
		while cursor.offset < offset + header.length:
			body.append(ofp_table_features(message, cursor))
	elif type == OFPMP_EXPERIMENTER:
		body = message[cursor.offset:offset+header.length]
		cursor.offset = offset + header.length
	else:
		raise ValueError("multipart type=%d flags=%s" % (type, flags))
	return namedtuple("ofp_multipart_request",
		"header type flags body")(header,type,flags,body)
def ofp_multipart_reply(message, offset=0):
	"""Parse an OFPT_MULTIPART_REPLY message (7.3.5).

	Bug fix: the ValueError message for an unknown multipart type contained
	the typo "multiaprt"; corrected to "multipart".
	"""
	cursor = _cursor(offset)
	offset = cursor.offset
	header = ofp_header(message, cursor)
	(type, flags) = _unpack("HH4x", message, cursor)
	body = []
	if type == OFPMP_DESC:
		body = ofp_desc(message, cursor)
	elif type == OFPMP_FLOW:
		body = _list_fetch(message, cursor, offset + header.length, ofp_flow_stats)
	elif type == OFPMP_AGGREGATE:
		body = _list_fetch(message, cursor, offset + header.length, ofp_aggregate_stats_reply)
	elif type == OFPMP_TABLE:
		body = _list_fetch(message, cursor, offset + header.length, ofp_table_stats)
	elif type == OFPMP_PORT_STATS:
		body = _list_fetch(message, cursor, offset + header.length, ofp_port_stats)
	elif type == OFPMP_QUEUE:
		body = _list_fetch(message, cursor, offset + header.length, ofp_queue_stats)
	elif type == OFPMP_GROUP:
		body = _list_fetch(message, cursor, offset + header.length, ofp_group_stats)
	elif type == OFPMP_GROUP_DESC:
		body = _list_fetch(message, cursor, offset + header.length, ofp_group_desc)
	elif type == OFPMP_GROUP_FEATURES:
		body = ofp_group_features(message, cursor)
	elif type == OFPMP_METER:
		body = _list_fetch(message, cursor, offset + header.length, ofp_meter_stats)
	elif type == OFPMP_METER_CONFIG:
		body = _list_fetch(message, cursor, offset + header.length, ofp_meter_config)
	elif type == OFPMP_METER_FEATURES:
		body = ofp_meter_features(message, cursor)
	elif type == OFPMP_TABLE_FEATURES:
		body = _list_fetch(message, cursor, offset + header.length, ofp_table_features)
	elif type == OFPMP_PORT_DESC:
		body = _list_fetch(message, cursor, offset + header.length, ofp_port)
	elif type == OFPMP_EXPERIMENTER:
		body = ofp_experimenter_multipart_(message, cursor, offset+header.length)
	else:
		raise ValueError("multipart type=%d flags=%s" % (type, flags))
	return namedtuple("ofp_multipart_reply",
		"header type flags body")(header,type,flags,body)
def _list_fetch(message, cursor, limit, fetcher):
ret = []
while cursor.offset < limit:
ret.append(fetcher(message, cursor))
assert cursor.offset == limit
return ret
# 7.3.5.1
def ofp_desc(message, offset):
	"""OFPMP_DESC reply body: five fixed-size NUL-padded byte strings."""
	fields = _unpack("256s256s256s32s256s", message, offset)
	return namedtuple("ofp_desc",
		"mfr_desc,hw_desc,sw_desc,serial_num,dp_desc")(*fields)
# 7.3.5.2
def ofp_flow_stats_request(message, offset):
	"""OFPMP_FLOW request body: filter fields plus a match."""
	cursor = _cursor(offset)
	(table_id, out_port, out_group, cookie, cookie_mask) = _unpack(
		"B3xII4xQQ", message, cursor)
	match = ofp_match(message, cursor)
	return namedtuple("ofp_flow_stats_request",
		"table_id,out_port,out_group,cookie,cookie_mask,match")(
		table_id, out_port, out_group, cookie, cookie_mask, match)
def ofp_flow_stats(message, offset):
	"""One OFPMP_FLOW reply entry: counters, the match, then instructions."""
	cursor = _cursor(offset)
	start = cursor.offset
	(length, table_id, duration_sec, duration_nsec, priority,
		idle_timeout, hard_timeout, flags, cookie,
		packet_count, byte_count) = _unpack("HBxII4H4x3Q", message, cursor)
	match = ofp_match(message, cursor)
	instructions = _list_fetch(message, cursor, start + length, ofp_instruction_)
	return namedtuple("ofp_flow_stats",
		"length table_id duration_sec duration_nsec "
		"priority idle_timeout hard_timeout flags cookie "
		"packet_count byte_count match instructions")(
		length, table_id, duration_sec, duration_nsec, priority,
		idle_timeout, hard_timeout, flags, cookie,
		packet_count, byte_count, match, instructions)
# 7.3.5.3
def ofp_aggregate_stats_request(message, offset):
    # Same wire layout as ofp_flow_stats_request: fixed fields, then a match.
    cursor = _cursor(offset)
    offset = cursor.offset
    (table_id,out_port,out_group,cookie,cookie_mask) = _unpack("B3xII4xQQ", message, cursor)
    match = ofp_match(message, cursor)
    return namedtuple("ofp_aggregate_stats_request",
        "table_id,out_port,out_group,cookie,cookie_mask,match")(
        table_id,out_port,out_group,cookie,cookie_mask,match)
def ofp_aggregate_stats_reply(message, offset):
    # Fixed-size body: two 64-bit counters, one 32-bit count, 4 pad bytes.
    return namedtuple("ofp_aggregate_stats_reply",
        "packet_count,byte_count,flow_count")(
        *_unpack("QQI4x", message, offset))
# 7.3.5.4
def ofp_table_stats(message, offset):
    # Per-table counters; fixed-size body.
    return namedtuple("ofp_table_stats", "table_id,active_count,lookup_count,matched_count")(
        *_unpack("B3xIQQ", message, offset))
# 7.3.5.5.1
def ofp_table_features(message, offset):
    """Parse one ofp_table_features element: fixed header plus property list."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (length,table_id,name,metadata_match,metadata_write,config,max_entries) = _unpack("HB5x32sQQII", message, cursor)
    # Properties fill the remainder of the element, as given by `length`.
    properties = _list_fetch(message, cursor, offset+length, ofp_table_feature_prop_)
    # Trim the fixed-size name field at its first NUL byte.
    name = name.partition('\0')[0]
    # BUG FIX: the namedtuple was mistakenly named "ofp_table_feature_prop_header";
    # name it after this structure like every other parser in this file does.
    return namedtuple("ofp_table_features",
        "length,table_id,name,metadata_match,metadata_write,config,max_entries,properties")(
        length,table_id,name,metadata_match,metadata_write,config,max_entries,properties)
# 7.3.5.5.2
def ofp_table_feature_prop_header(message, offset):
    # Common 4-byte property header: 16-bit type, 16-bit length.
    return namedtuple("ofp_table_feature_prop_header",
        "type,length")(*_unpack("HH", message, offset))
def ofp_table_feature_prop_instructions(message, offset):
    """Parse an instructions table-feature property into instruction ids."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (type,length) = _unpack("HH", message, cursor)
    instruction_ids = []
    while cursor.offset < offset+length:
        # Peek at the instruction header without consuming it.
        # BUG FIX: the `message` argument was missing from this call.
        header = ofp_instruction(message, cursor.offset)
        if header.type == OFPIT_EXPERIMENTER:
            instruction_ids.append(ofp_instruction_experimenter(message, cursor))
        else:
            assert header.len == 4
            instruction_ids.append(header)
            # BUG FIX: consume the 4-byte id header; previously the cursor was
            # never advanced here, so a non-experimenter id looped forever.
            cursor.offset += 4
    # Skip alignment padding after the property.
    cursor.offset += _align(length)-length
    return namedtuple("ofp_table_feature_prop_instructions",
        "type,length,instruction_ids")(
        type,length,instruction_ids)
def ofp_table_feature_prop_next_tables(message, offset):
    """Parse a next-tables property: a list of one-byte table ids."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (type,length) = _unpack("HH", message, cursor)
    # BUG FIX: read the ids through `cursor` (which advances past them), not
    # the stale `offset`, which still pointed at the property header — this
    # both returned wrong ids and left the cursor misplaced.  Mirrors
    # ofp_table_feature_prop_oxm below.
    next_table_ids = _unpack("%dB" % (length-4), message, cursor)
    # Skip alignment padding after the property.
    cursor.offset += _align(length)-length
    return namedtuple("ofp_table_feature_prop_next_tables",
        "type,length,next_table_ids")(type,length,next_table_ids)
def ofp_table_feature_prop_actions(message, offset):
    """Parse an actions table-feature property into action ids."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (type,length) = _unpack("HH", message, cursor)
    action_ids = []
    while cursor.offset < offset+length:
        # Peek at the action header without consuming it.
        header = ofp_action_header(message, cursor.offset)
        if header.type == OFPAT_EXPERIMENTER:
            action_ids.append(ofp_action_experimenter(message, cursor))
        else:
            assert header.len == 4
            action_ids.append(header)
            # BUG FIX: consume the 4-byte id header; previously the cursor was
            # never advanced here, so a non-experimenter id looped forever.
            cursor.offset += 4
    # Skip alignment padding after the property.
    cursor.offset += _align(length)-length
    return namedtuple("ofp_table_feature_prop_actions",
        "type,length,action_ids")(type,length,action_ids)
def ofp_table_feature_prop_oxm(message, offset):
    """Parse an OXM table-feature property: a list of 32-bit OXM ids."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (type,length) = _unpack("HH", message, cursor)
    # One 32-bit id per 4 bytes after the 4-byte property header.
    oxm_ids = _unpack("%dI" % ((length-4)//4), message, cursor)
    # Skip alignment padding after the property.
    cursor.offset += _align(length)-length
    return namedtuple("ofp_table_feature_prop_oxm",
        "type,length,oxm_ids")(type,length,oxm_ids)
def ofp_table_feature_prop_experimenter(message, offset):
    """Parse an experimenter table-feature property; payload kept raw."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (type,length,experimenter,exp_type) = _unpack("HHII", message, cursor)
    data = message[cursor.offset:offset+length]
    # BUG FIX: advance past the variable-length payload plus its alignment
    # padding; the old code only added the padding, leaving the cursor
    # pointing inside `data` and desynchronizing subsequent parsing.
    cursor.offset = offset + _align(length)
    return namedtuple("ofp_table_feature_prop_experimenter",
        "type,length,experimenter,exp_type,data")(
        type,length,experimenter,exp_type,data)
# 7.3.5.6
def ofp_port_stats_request(message, offset):
    # Body is a single 32-bit port number plus 4 pad bytes.
    return namedtuple("ofp_port_stats_request",
        "port_no")(*_unpack("I4x", message, offset))
def ofp_port_stats(message, offset):
    """Parse one fixed-size ofp_port_stats element (twelve 64-bit counters)."""
    return namedtuple("ofp_port_stats", '''
        port_no
        rx_packets tx_packets
        rx_bytes tx_bytes
        rx_dropped tx_dropped
        rx_errors tx_errors
        rx_frame_err
        rx_over_err
        rx_crc_err
        collisions
        duration_sec duration_nsec''')(*_unpack("I3x12Q2I", message, offset))
# 7.3.5.8
def ofp_queue_stats_request(message, offset):
    # Body selects which port/queue counters to report.
    return namedtuple("ofp_queue_stats_request",
        "port_no queue_id")(*_unpack("II", message, offset))
def ofp_queue_stats(message, offset):
    """Parse one fixed-size ofp_queue_stats element."""
    return namedtuple("ofp_queue_stats", '''
        port_no queue_id
        tx_bytes tx_packets tx_errors
        duration_sec duration_nsec''')(*_unpack("2I3Q2I", message, offset))
# 7.3.5.9
def ofp_group_stats_request(message, offset):
    # Body is a single 32-bit group id plus 4 pad bytes.
    return namedtuple("ofp_group_stats_request",
        "group_id")(*_unpack("I4x", message, offset))
def ofp_group_stats(message, offset):
    """Parse one ofp_group_stats element and its per-bucket counters."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (length, group_id, ref_count, packet_count, byte_count,
        duration_sec, duration_nsec) = _unpack("H2xII4xQQII", message, cursor)
    # Bucket counters fill the remainder of the element, as given by `length`.
    bucket_stats = _list_fetch(message, cursor, offset+length, ofp_bucket_counter)
    return namedtuple("ofp_group_stats", '''
        length group_id ref_count packet_count byte_count
        duration_sec duration_nsec bucket_stats''')(
        length,group_id,ref_count,packet_count,byte_count,
        duration_sec,duration_nsec,bucket_stats)
def ofp_bucket_counter(message, offset):
    # Per-bucket 64-bit packet/byte counters.
    return namedtuple("ofp_bucket_counter",
        "packet_count byte_count")(*_unpack("QQ", message, offset))
# 7.3.5.10
def ofp_group_desc(message, offset):
    """Parse one group-description element: fixed header plus bucket list."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (length, type, group_id) = _unpack("HBxI", message, cursor)
    # Buckets fill the remainder of the element, as given by `length`.
    buckets = _list_fetch(message, cursor, offset+length, ofp_bucket)
    return namedtuple("ofp_group_desc",
        "length type group_id buckets")(
        length,type,group_id,buckets)
# 7.3.5.11
def ofp_group_features(message, offset):
    """Parse the group-features reply body."""
    cursor = _cursor(offset)
    (type,capabilities) = _unpack("II", message, cursor)
    # Four 32-bit entries each — presumably one per group type; TODO confirm.
    max_groups = _unpack("4I", message, cursor)
    actions = _unpack("4I", message, cursor)
    return namedtuple("ofp_group_features",
        "type,capabilities,max_groups,actions")(
        type,capabilities,max_groups,actions)
# 7.3.5.12
def ofp_meter_multipart_request(message, offset):
    # and 7.3.5.13
    # Body is a single 32-bit meter id plus 4 pad bytes.
    return namedtuple("ofp_meter_multipart_request",
        "meter_id")(*_unpack("I4x", message, offset))
def ofp_meter_stats(message, offset):
    """Parse one ofp_meter_stats element and its per-band statistics."""
    cursor = _cursor(offset)
    offset = cursor.offset
    # Local renamed from `len` to stop shadowing the builtin; the public
    # namedtuple field keeps its original name "len" for compatibility.
    (meter_id,length,flow_count,packet_in_count,byte_in_count,
        duration_sec,duration_nsec) = _unpack("IH6xIQQII", message, cursor)
    # Band stats fill the remainder of the element, as given by its length.
    band_stats = _list_fetch(message, cursor, offset+length, ofp_meter_band_stats)
    return namedtuple("ofp_meter_stats", '''
        meter_id len flow_count packet_in_count byte_in_count
        duration_sec duration_nsec band_stats''')(
        meter_id,length,flow_count,packet_in_count,byte_in_count,
        duration_sec,duration_nsec,band_stats)
def ofp_meter_band_stats(message, offset):
    # Per-band 64-bit packet/byte counters.
    return namedtuple("ofp_meter_band_stats",
        "packet_band_count,byte_band_count")(*_unpack("QQ", message, offset))
# 7.3.5.13
def ofp_meter_config(message, offset):
    """Parse one meter-config element: fixed header plus its band list."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (length,flags,meter_id) = _unpack("HHI", message, cursor)
    # Bands fill the remainder of the element, as given by `length`.
    bands = _list_fetch(message, cursor, offset+length, ofp_meter_band_)
    return namedtuple("ofp_meter_config",
        "length,flags,meter_id,bands")(
        length,flags,meter_id,bands)
# 7.3.5.14
def ofp_meter_features(message, offset):
    """Parse the fixed-size meter-features reply body."""
    return namedtuple("ofp_meter_features", '''
        max_meter band_types capabilities
        max_bands max_color''')(*_unpack("3IBB2x", message, offset))
# 7.3.5.15
def ofp_experimenter_multipart_header(message, offset):
    # Two 32-bit ids preceding an experimenter multipart payload.
    return namedtuple("ofp_experimenter_multipart_header",
        "experimenter,exp_type")(*_unpack("II", message, offset))
def ofp_experimenter_multipart_(message, offset, limit):
    """Parse an experimenter multipart body; raw payload runs up to *limit*."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (experimenter,exp_type) = ofp_experimenter_multipart_header(message, cursor)
    # Everything after the header up to the message end is opaque payload.
    data = message[cursor.offset:limit]
    cursor.offset = limit
    return namedtuple("ofp_experimenter_multipart_",
        "experimenter,exp_type,data")(experimenter,exp_type,data)
# 7.3.6
def ofp_queue_get_config_request(message, offset):
    """Parse an OFPT_QUEUE_GET_CONFIG_REQUEST message."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (port,) = _unpack("I4x", message, cursor)
    return namedtuple("ofp_queue_get_config_request",
        "header,port")(header,port)
def ofp_queue_get_config_reply(message, offset):
    """Parse an OFPT_QUEUE_GET_CONFIG_REPLY message with its queue list."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (port,) = _unpack("I4x", message, cursor)
    # Queues fill the remainder of the message, as given by header.length.
    queues = _list_fetch(message, cursor, offset+header.length, ofp_packet_queue)
    return namedtuple("ofp_queue_get_config_reply",
        "header,port,queues")(header,port,queues)
# 7.3.7
def ofp_packet_out(message, offset):
    """Parse an OFPT_PACKET_OUT message: header, action list, raw frame."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (buffer_id, in_port, actions_len) = _unpack("IIH6x", message, cursor)
    # Actions occupy exactly `actions_len` bytes after the fixed fields.
    actions_end = cursor.offset + actions_len
    actions = []
    while cursor.offset < actions_end:
        actions.append(ofp_action_(message, cursor))
    # Whatever remains up to the message length is the raw packet payload.
    data = message[cursor.offset:offset+header.length]
    return namedtuple("ofp_packet_out",
        "header,buffer_id,in_port,actions_len,actions,data")(
        header,buffer_id,in_port,actions_len,actions,data)
# 7.3.9
def ofp_role_request(message, offset):
    """Parse a controller role request/reply body."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (role,generation_id) = _unpack("I4xQ", message, cursor)
    return namedtuple("ofp_role_request",
        "header,role,generation_id")(header,role,generation_id)
# 7.3.10
def ofp_async_config(message, offset):
    """Parse an async-config message.

    The six 32-bit masks are grouped into three 2-tuples, one per event
    class (packet-in, port-status, flow-removed).
    """
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    p = _unpack("6I", message, cursor)
    return namedtuple("ofp_async_config",
        "header,packet_in_mask,port_status_mask,flow_removed_mask")(
        header,p[0:2],p[2:4],p[4:6])
# 7.4.1
def ofp_packet_in(message, offset):
    """Parse an OFPT_PACKET_IN message: fixed fields, match, raw frame."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (buffer_id, total_len, reason, table_id, cookie) = _unpack("IHBBQ", message, cursor)
    match = ofp_match(message, cursor)
    # 2 pad bytes separate the match from the packet data.
    _unpack("2x", message, cursor);
    data = message[cursor.offset:offset+header.length]
    return namedtuple("ofp_packet_in",
        "header,buffer_id,total_len,reason,table_id,cookie,match,data")(
        header,buffer_id,total_len,reason,table_id,cookie,match,data)
# 7.4.2
def ofp_flow_removed(message, offset):
    """Parse an OFPT_FLOW_REMOVED message."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (cookie,priority,reason,table_id,
        duration_sec,duration_nsec,
        idle_timeout,hard_timeout,packet_count,byte_count) = _unpack("QHBBIIHHQQ", message, cursor)
    match = ofp_match(message, cursor)
    return namedtuple("ofp_flow_removed",
        '''header cookie priority reason table_id
        duration_sec duration_nsec
        idle_timeout hard_timeout packet_count byte_count
        match''')(
        header,cookie,priority,reason,table_id,
        duration_sec,duration_nsec,
        idle_timeout,hard_timeout,packet_count,byte_count,
        match)
# 7.4.3
def ofp_port_status(message, offset):
    """Parse an OFPT_PORT_STATUS message: reason code plus port description."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (reason,) = _unpack("B7x", message, cursor)
    desc = ofp_port(message, cursor)
    return namedtuple("ofp_port_status",
        "header,reason,desc")(
        header,reason,desc)
# 7.4.4
def ofp_error_msg(message, offset=0):
    """Parse an OFPT_ERROR message; `data` is the raw offending payload."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (type, code) = _unpack("HH", message, cursor)
    data = message[cursor.offset:offset+header.length]
    # Position the cursor at the end of the message for the caller.
    cursor.offset = offset + header.length
    return namedtuple("ofp_error_msg",
        "header,type,code,data")(header,type,code,data)
# 7.5.1
def ofp_hello(message, offset=0):
    """Parse an OFPT_HELLO message and its hello elements."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    elements = []
    while cursor.offset < offset + header.length:
        # Peek at the element header; only type 1 (OFPHET_VERSIONBITMAP,
        # see ofp_hello_elem_versionbitmap) is understood.
        elem_header = ofp_hello_elem_header(message, cursor.offset)
        if elem_header.type == 1:
            elements.append(ofp_hello_elem_versionbitmap(message, cursor))
        else:
            raise ValueError("message offset=%d %s" % (cursor.offset, elem_header))
    assert cursor.offset == offset + header.length
    return namedtuple("ofp_hello", "header elements")(header, elements)
def ofp_hello_elem_header(message, offset):
    # Common 4-byte hello-element header: 16-bit type, 16-bit length.
    return namedtuple("ofp_hello_elem_header",
        "type length")(*_unpack("HH", message, offset))
def ofp_hello_elem_versionbitmap(message, offset):
    """Parse an OFPHET_VERSIONBITMAP hello element."""
    cursor = _cursor(offset)
    (type, length) = _unpack("HH", message, cursor)
    assert type == OFPHET_VERSIONBITMAP
    # One 32-bit bitmap word per 4 bytes after the element header.
    bitmaps = _unpack("%dI" % ((length-4)//4), message, cursor)
    # Skip alignment padding after the element.
    cursor.offset += _align(length) - length
    return namedtuple("ofp_hello_elem_versionbitmap",
        "type length bitmaps")(type,length,bitmaps)
# 7.5.4
def ofp_experimenter_header(message, offset):
    """Parse the common header of an experimenter message."""
    cursor = _cursor(offset)
    offset = cursor.offset
    header = ofp_header(message, cursor)
    (experimenter,exp_type) = _unpack("II", message, cursor)
    return namedtuple("ofp_experimenter_header",
        "header,experimenter,exp_type")(header,experimenter,exp_type)
def ofp_experimenter_(message, offset):
    """Parse an experimenter message: common header plus opaque payload."""
    cursor = _cursor(offset)
    offset = cursor.offset
    (header,experimenter,exp_type) = ofp_experimenter_header(message, cursor)
    # BUG FIX: `length` was an undefined name here (NameError at runtime);
    # the total message length lives in the parsed ofp_header.
    data = message[cursor.offset:offset+header.length]
    cursor.offset = offset+header.length
    return namedtuple("ofp_experimenter_",
        "header,experimenter,exp_type,data")(header,experimenter,exp_type,data)
| apache-2.0 |
chromium/chromium | third_party/blink/web_tests/external/wpt/webdriver/tests/get_timeouts/get.py | 42 | 1278 | from tests.support.asserts import assert_success
def get_timeouts(session):
    # Issue GET /session/{session id}/timeouts and return the raw response.
    return session.transport.send(
        "GET", "session/{session_id}/timeouts".format(**vars(session)))
def test_get_timeouts(session):
    """The endpoint returns a JSON object carrying three integer timeouts."""
    response = get_timeouts(session)
    assert_success(response)

    assert "value" in response.body
    value = response.body["value"]
    assert isinstance(value, dict)

    for field in ("script", "implicit", "pageLoad"):
        assert field in value
        assert isinstance(value[field], int)
def test_get_default_timeouts(session):
    """A fresh session reports the default timeout values (milliseconds)."""
    response = get_timeouts(session)
    assert_success(response)

    assert response.body["value"]["script"] == 30000
    assert response.body["value"]["implicit"] == 0
    assert response.body["value"]["pageLoad"] == 300000
def test_get_new_timeouts(session):
    """Timeouts set through the client (seconds) are reported in milliseconds."""
    session.timeouts.script = 60
    session.timeouts.implicit = 1
    session.timeouts.page_load = 200
    response = get_timeouts(session)
    assert_success(response)
    assert response.body["value"]["script"] == 60000
    assert response.body["value"]["implicit"] == 1000
    assert response.body["value"]["pageLoad"] == 200000
| bsd-3-clause |
dguerri/ansible-modules-core | cloud/openstack/quantum_floating_ip_associate.py | 11 | 8181 | #!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, Benno Joy <benno@ansible.com>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import time
try:
from novaclient.v1_1 import client as nova_client
try:
from neutronclient.neutron import client
except ImportError:
from quantumclient.quantum import client
from keystoneclient.v2_0 import client as ksclient
HAVE_DEPS = True
except ImportError:
HAVE_DEPS = False
DOCUMENTATION = '''
---
module: quantum_floating_ip_associate
version_added: "1.2"
author: "Benno Joy (@bennojoy)"
short_description: Associate or disassociate a particular floating IP with an instance
description:
- Associates or disassociates a specific floating IP with a particular instance
options:
login_username:
description:
- login username to authenticate to keystone
required: true
default: admin
login_password:
description:
- password of login user
required: true
default: 'yes'
login_tenant_name:
description:
- the tenant name of the login user
required: true
default: true
auth_url:
description:
- the keystone url for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
region_name:
description:
- name of the region
required: false
default: None
state:
description:
- indicates the desired state of the resource
choices: ['present', 'absent']
default: present
instance_name:
description:
- name of the instance to which the public IP should be assigned
required: true
default: None
ip_address:
description:
- floating ip that should be assigned to the instance
required: true
default: None
requirements:
- "python >= 2.6"
- "python-novaclient"
- "python-neutronclient or python-quantumclient"
- "python-keystoneclient"
'''
EXAMPLES = '''
# Associate a specific floating IP with an Instance
- quantum_floating_ip_associate:
state=present
login_username=admin
login_password=admin
login_tenant_name=admin
ip_address=1.1.1.1
instance_name=vm1
'''
def _get_ksclient(module, kwargs):
    """Authenticate against keystone; fail the Ansible module on error.

    Also stashes the client in the module-level `_os_keystone` global.
    """
    try:
        kclient = ksclient.Client(username=kwargs.get('login_username'),
                                  password=kwargs.get('login_password'),
                                  tenant_name=kwargs.get('login_tenant_name'),
                                  auth_url=kwargs.get('auth_url'))
    except Exception, e:
        module.fail_json(msg = "Error authenticating to the keystone: %s " % e.message)
    global _os_keystone
    _os_keystone = kclient
    return kclient
def _get_endpoint(module, ksclient):
    """Return the public network-service endpoint from the keystone catalog."""
    try:
        endpoint = ksclient.service_catalog.url_for(service_type='network', endpoint_type='publicURL')
    except Exception, e:
        module.fail_json(msg = "Error getting network endpoint: %s" % e.message)
    return endpoint
def _get_neutron_client(module, kwargs):
    """Build a v2.0 neutron client using a keystone token and endpoint."""
    _ksclient = _get_ksclient(module, kwargs)
    token = _ksclient.auth_token
    endpoint = _get_endpoint(module, _ksclient)
    kwargs = {
        'token': token,
        'endpoint_url': endpoint
    }
    try:
        neutron = client.Client('2.0', **kwargs)
    except Exception, e:
        module.fail_json(msg = "Error in connecting to neutron: %s " % e.message)
    return neutron
def _get_server_state(module, nova):
    """Locate the instance named by `instance_name`.

    Returns (info, server); `info` is the matching server's info dict or
    None when no instance of that name exists.  Fails the module when the
    VM exists but is not ACTIVE while state=present.
    """
    server_info = None
    server = None
    try:
        for server in nova.servers.list():
            if server:
                info = server._info
                if info['name'] == module.params['instance_name']:
                    if info['status'] != 'ACTIVE' and module.params['state'] == 'present':
                        module.fail_json(msg="The VM is available but not Active. state:" + info['status'])
                    server_info = info
                    break
    except Exception, e:
        module.fail_json(msg = "Error in getting the server list: %s" % e.message)
    return server_info, server
def _get_port_id(neutron, module, instance_id):
    """Return the first neutron port bound to *instance_id*, or None."""
    kwargs = dict(device_id = instance_id)
    try:
        ports = neutron.list_ports(**kwargs)
    except Exception, e:
        module.fail_json( msg = "Error in listing ports: %s" % e.message)
    if not ports['ports']:
        return None
    return ports['ports'][0]['id']
def _get_floating_ip_id(module, neutron):
kwargs = {
'floating_ip_address': module.params['ip_address']
}
try:
ips = neutron.list_floatingips(**kwargs)
except Exception, e:
module.fail_json(msg = "error in fetching the floatingips's %s" % e.message)
if not ips['floatingips']:
module.fail_json(msg = "Could find the ip specified in parameter, Please check")
ip = ips['floatingips'][0]['id']
if not ips['floatingips'][0]['port_id']:
state = "detached"
else:
state = "attached"
return state, ip
def _update_floating_ip(neutron, module, port_id, floating_ip_id):
    """Point the floating IP at *port_id* (None detaches it) and exit the module."""
    kwargs = {
        'port_id': port_id
    }
    try:
        result = neutron.update_floatingip(floating_ip_id, {'floatingip': kwargs})
    except Exception, e:
        module.fail_json(msg = "There was an error in updating the floating ip address: %s" % e.message)
    module.exit_json(changed = True, result = result, public_ip=module.params['ip_address'])
def main():
    """Entry point: associate or disassociate a floating IP with an instance."""
    argument_spec = openstack_argument_spec()
    argument_spec.update(dict(
        ip_address = dict(required=True),
        instance_name = dict(required=True),
        state = dict(default='present', choices=['absent', 'present'])
    ))
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAVE_DEPS:
        module.fail_json(msg='python-novaclient, python-keystoneclient, and either python-neutronclient or python-quantumclient are required')

    try:
        nova = nova_client.Client(module.params['login_username'], module.params['login_password'],
            module.params['login_tenant_name'], module.params['auth_url'], service_type='compute')
    except Exception, e:
        module.fail_json( msg = " Error in authenticating to nova: %s" % e.message)
    neutron = _get_neutron_client(module, module.params)
    state, floating_ip_id = _get_floating_ip_id(module, neutron)
    if module.params['state'] == 'present':
        if state == 'attached':
            # Already associated: nothing to do.
            module.exit_json(changed = False, result = 'attached', public_ip=module.params['ip_address'])
        server_info, server_obj = _get_server_state(module, nova)
        if not server_info:
            module.fail_json(msg = " The instance name provided cannot be found")
        port_id = _get_port_id(neutron, module, server_info['id'])
        if not port_id:
            module.fail_json(msg = "Cannot find a port for this instance, maybe fixed ip is not assigned")
        _update_floating_ip(neutron, module, port_id, floating_ip_id)

    if module.params['state'] == 'absent':
        if state == 'detached':
            # Already detached: nothing to do.
            module.exit_json(changed = False, result = 'detached')
        if state == 'attached':
            # Passing port_id=None detaches the floating IP.
            _update_floating_ip(neutron, module, None, floating_ip_id)
            module.exit_json(changed = True, result = "detached")
# this is magic, see lib/ansible/module.params['common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
eric-stanley/youtube-dl | youtube_dl/extractor/eitb.py | 130 | 1458 | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from .brightcove import BrightcoveIE
from ..utils import ExtractorError
class EitbIE(InfoExtractor):
    """Extractor for eitb.tv (Euskal Telebista / Basque public broadcaster).

    The site embeds its videos through Brightcove, so extraction delegates
    to BrightcoveIE after recovering the player URL from the page.
    """
    IE_NAME = 'eitb.tv'
    _VALID_URL = r'https?://www\.eitb\.tv/(eu/bideoa|es/video)/[^/]+/(?P<playlist_id>\d+)/(?P<chapter_id>\d+)'

    _TEST = {
        'add_ie': ['Brightcove'],
        'url': 'http://www.eitb.tv/es/video/60-minutos-60-minutos-2013-2014/2677100210001/2743577154001/lasa-y-zabala-30-anos/',
        'md5': 'edf4436247185adee3ea18ce64c47998',
        'info_dict': {
            'id': '2743577154001',
            'ext': 'mp4',
            'title': '60 minutos (Lasa y Zabala, 30 años)',
            # All videos from eitb has this description in the brightcove info
            'description': '.',
            'uploader': 'Euskal Telebista',
        },
    }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        chapter_id = mobj.group('chapter_id')
        webpage = self._download_webpage(url, chapter_id)
        # Locate the embedded Brightcove player URL in the page markup.
        bc_url = BrightcoveIE._extract_brightcove_url(webpage)
        if bc_url is None:
            raise ExtractorError('Could not extract the Brightcove url')
        # The BrightcoveExperience object doesn't contain the video id, we set
        # it manually
        bc_url += '&%40videoPlayer={0}'.format(chapter_id)
        return self.url_result(bc_url, BrightcoveIE.ie_key())
| unlicense |
ak-67/ZeroNet | src/lib/pybitcointools/test_stealth.py | 36 | 4274 | import bitcoin as bc
import sys
import unittest
class TestStealth(unittest.TestCase):
    """Exercise the stealth-address helpers of the bitcoin library.

    The fixture values (scan/spend/ephemeral key pairs, shared secret and
    derived payment keys) form one internally consistent stealth exchange.
    """

    def setUp(self):
        # Make sure the host did not lower the recursion limit below what
        # the library's recursive helpers need.
        if sys.getrecursionlimit() < 1000:
            sys.setrecursionlimit(1000)

        self.addr = 'vJmtjxSDxNPXL4RNapp9ARdqKz3uJyf1EDGjr1Fgqs9c8mYsVH82h8wvnA4i5rtJ57mr3kor1EVJrd4e5upACJd588xe52yXtzumxj'
        self.scan_pub = '025e58a31122b38c86abc119b9379fe247410aee87a533f9c07b189aef6c3c1f52'
        self.scan_priv = '3e49e7257cb31db997edb1cf8299af0f37e2663e2260e4b8033e49d39a6d02f2'
        self.spend_pub = '03616562c98e7d7b74be409a787cec3a912122f3fb331a9bee9b0b73ce7b9f50af'
        self.spend_priv = 'aa3db0cfb3edc94de4d10f873f8190843f2a17484f6021a95a7742302c744748'
        self.ephem_pub = '03403d306ec35238384c7e340393335f9bc9bb4a2e574eb4e419452c4ea19f14b0'
        self.ephem_priv = '9e63abaf8dcd5ea3919e6de0b6c544e00bf51bf92496113a01d6e369944dc091'
        self.shared_secret = 'a4047ee231f4121e3a99a3a3378542e34a384b865a9917789920e1f13ffd91c6'
        self.pay_pub = '02726112ad39cb6bf848b1b1ef30b88e35286bf99f746c2be575f96c0e02a9357c'
        self.pay_priv = '4e422fb1e5e1db6c1f6ab32a7706d368ceb385e7fab098e633c5c5949c3b97cd'
        self.testnet_addr = 'waPUuLLykSnY3itzf1AyrQZm42F7KyB7SR5zpfqmnzPXWhx9kXLzV3EcyqzDdpTwngiyCCMUqztS9S1d7XJs3JMt3MsHPDpBCudvx9'

    def test_address_encoding(self):
        # Round-trip mainnet: address -> (scan, spend) pubkeys -> address.
        sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.addr)
        self.assertEqual(sc_pub, self.scan_pub)
        self.assertEqual(sp_pub, self.spend_pub)

        stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub)
        self.assertEqual(stealth_addr2, self.addr)

        # Same round-trip with the testnet magic byte.
        magic_byte_testnet = 43
        sc_pub, sp_pub = bc.basic_stealth_address_to_pubkeys(self.testnet_addr)
        self.assertEqual(sc_pub, self.scan_pub)
        self.assertEqual(sp_pub, self.spend_pub)

        stealth_addr2 = bc.pubkeys_to_basic_stealth_address(sc_pub, sp_pub, magic_byte_testnet)
        self.assertEqual(stealth_addr2, self.testnet_addr)

    def test_shared_secret(self):
        # Sender and receiver must derive the same shared secret.
        sh_sec = bc.shared_secret_sender(self.scan_pub, self.ephem_priv)
        self.assertEqual(sh_sec, self.shared_secret)

        sh_sec2 = bc.shared_secret_receiver(self.ephem_pub, self.scan_priv)
        self.assertEqual(sh_sec2, self.shared_secret)

    def test_uncover_pay_keys(self):
        # Sender- and receiver-side derivations agree on the payment pubkey.
        pub = bc.uncover_pay_pubkey_sender(self.scan_pub, self.spend_pub, self.ephem_priv)
        pub2 = bc.uncover_pay_pubkey_receiver(self.scan_priv, self.spend_pub, self.ephem_pub)
        self.assertEqual(pub, self.pay_pub)
        self.assertEqual(pub2, self.pay_pub)

        priv = bc.uncover_pay_privkey(self.scan_priv, self.spend_priv, self.ephem_pub)
        self.assertEqual(priv, self.pay_priv)

    def test_stealth_metadata_script(self):
        nonce = int('deadbeef', 16)
        # The metadata script embeds the nonce followed by the ephemeral pubkey.
        script = bc.mk_stealth_metadata_script(self.ephem_pub, nonce)
        self.assertEqual(script[6:], 'deadbeef' + self.ephem_pub)

        eph_pub = bc.ephem_pubkey_from_tx_script(script)
        self.assertEqual(eph_pub, self.ephem_pub)

    def test_stealth_tx_outputs(self):
        nonce = int('deadbeef', 16)
        value = 10**8

        # Mainnet: output 0 is the zero-value metadata script, output 1 pays
        # the derived stealth pubkey.
        outputs = bc.mk_stealth_tx_outputs(self.addr, value, self.ephem_priv, nonce)
        self.assertEqual(outputs[0]['value'], 0)
        self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
        self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub))
        self.assertEqual(outputs[1]['value'], value)

        # Testnet variant uses the testnet address version byte (111).
        outputs = bc.mk_stealth_tx_outputs(self.testnet_addr, value, self.ephem_priv, nonce, 'testnet')
        self.assertEqual(outputs[0]['value'], 0)
        self.assertEqual(outputs[0]['script'], '6a2606deadbeef' + self.ephem_pub)
        self.assertEqual(outputs[1]['address'], bc.pubkey_to_address(self.pay_pub, 111))
        self.assertEqual(outputs[1]['value'], value)

        # Address/network mismatches must be rejected.
        self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.testnet_addr, value, self.ephem_priv, nonce, 'btc')
        self.assertRaises(Exception, bc.mk_stealth_tx_outputs, self.addr, value, self.ephem_priv, nonce, 'testnet')
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
wackymaster/QTClock | Libraries/numpy/core/tests/test_memmap.py | 26 | 4305 | from __future__ import division, absolute_import, print_function
import sys
import os
import shutil
from tempfile import NamedTemporaryFile, TemporaryFile, mktemp, mkdtemp
from numpy import memmap
from numpy import arange, allclose, asarray
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_array_equal,
dec
)
class TestMemmap(TestCase):
    """Tests for numpy.memmap: round-trips, attributes, and reference handling."""

    def setUp(self):
        # Fresh backing file and scratch directory for every test.
        self.tmpfp = NamedTemporaryFile(prefix='mmap')
        self.tempdir = mkdtemp()
        self.shape = (3, 4)
        self.dtype = 'float32'
        self.data = arange(12, dtype=self.dtype)
        self.data.resize(self.shape)

    def tearDown(self):
        self.tmpfp.close()
        shutil.rmtree(self.tempdir)

    def test_roundtrip(self):
        # Write data to file
        fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        fp[:] = self.data[:]
        del fp  # Test __del__ machinery, which handles cleanup

        # Read data back from file
        newfp = memmap(self.tmpfp, dtype=self.dtype, mode='r',
                       shape=self.shape)
        assert_(allclose(self.data, newfp))
        assert_array_equal(self.data, newfp)

    def test_open_with_filename(self):
        # memmap also accepts a path instead of an open file object.
        tmpname = mktemp('', 'mmap', dir=self.tempdir)
        fp = memmap(tmpname, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        fp[:] = self.data[:]
        del fp

    def test_unnamed_file(self):
        with TemporaryFile() as f:
            fp = memmap(f, dtype=self.dtype, shape=self.shape)
            del fp

    def test_attributes(self):
        # offset and mode passed at construction are exposed as attributes.
        offset = 1
        mode = "w+"
        fp = memmap(self.tmpfp, dtype=self.dtype, mode=mode,
                    shape=self.shape, offset=offset)
        self.assertEqual(offset, fp.offset)
        self.assertEqual(mode, fp.mode)
        del fp

    def test_filename(self):
        # The absolute path is recorded, and views inherit it.
        tmpname = mktemp('', 'mmap', dir=self.tempdir)
        fp = memmap(tmpname, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        abspath = os.path.abspath(tmpname)
        fp[:] = self.data[:]
        self.assertEqual(abspath, fp.filename)
        b = fp[:1]
        self.assertEqual(abspath, b.filename)
        del b
        del fp

    def test_filename_fileobj(self):
        # When built from a file object, filename mirrors that object's name.
        fp = memmap(self.tmpfp, dtype=self.dtype, mode="w+",
                    shape=self.shape)
        self.assertEqual(fp.filename, self.tmpfp.name)

    @dec.knownfailureif(sys.platform == 'gnu0', "This test is known to fail on hurd")
    def test_flush(self):
        fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        fp[:] = self.data[:]
        assert_equal(fp[0], self.data[0])
        fp.flush()

    def test_del(self):
        # Make sure a view does not delete the underlying mmap
        fp_base = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
                         shape=self.shape)
        fp_base[0] = 5
        fp_view = fp_base[0:1]
        assert_equal(fp_view[0], 5)
        del fp_view
        # Should still be able to access and assign values after
        # deleting the view
        assert_equal(fp_base[0], 5)
        fp_base[0] = 6
        assert_equal(fp_base[0], 6)

    def test_arithmetic_drops_references(self):
        # Results of arithmetic must not keep the source mmap alive.
        fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        tmp = (fp + 10)
        if isinstance(tmp, memmap):
            assert_(tmp._mmap is not fp._mmap)

    def test_indexing_drops_references(self):
        # Fancy indexing copies, so it must not share the mmap either.
        fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        tmp = fp[[(1, 2), (2, 3)]]
        if isinstance(tmp, memmap):
            assert_(tmp._mmap is not fp._mmap)

    def test_slicing_keeps_references(self):
        # Basic slicing returns a view sharing the same mmap.
        fp = memmap(self.tmpfp, dtype=self.dtype, mode='w+',
                    shape=self.shape)
        assert_(fp[:2, :2]._mmap is fp._mmap)

    def test_view(self):
        # Views and asarray results keep a base reference back to the memmap.
        fp = memmap(self.tmpfp, dtype=self.dtype, shape=self.shape)
        new1 = fp.view()
        new2 = new1.view()
        assert_(new1.base is fp)
        assert_(new2.base is fp)

        new_array = asarray(fp)
        assert_(new_array.base is fp)
if __name__ == "__main__":
run_module_suite()
| mit |
iniverno/RnR-LLC | simics-3.0-install/simics-3.0.31/amd64-linux/lib/python2.4/ftplib.py | 11 | 26935 | """An FTP client class and some helper functions.
Based on RFC 959: File Transfer Protocol (FTP), by J. Postel and J. Reynolds
Example:
>>> from ftplib import FTP
>>> ftp = FTP('ftp.python.org') # connect to host, default port
>>> ftp.login() # default, i.e.: user anonymous, passwd anonymous@
'230 Guest login ok, access restrictions apply.'
>>> ftp.retrlines('LIST') # list directory contents
total 9
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 .
drwxr-xr-x 8 root wheel 1024 Jan 3 1994 ..
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 bin
drwxr-xr-x 2 root wheel 1024 Jan 3 1994 etc
d-wxrwxr-x 2 ftp wheel 1024 Sep 5 13:43 incoming
drwxr-xr-x 2 root wheel 1024 Nov 17 1993 lib
drwxr-xr-x 6 1094 wheel 1024 Sep 13 19:07 pub
drwxr-xr-x 3 root wheel 1024 Jan 3 1994 usr
-rw-r--r-- 1 root root 312 Aug 1 1994 welcome.msg
'226 Transfer complete.'
>>> ftp.quit()
'221 Goodbye.'
>>>
A nice test that reveals some of the network dialogue would be:
python ftplib.py -d localhost -l -p -l
"""
#
# Changes and improvements suggested by Steve Majewski.
# Modified by Jack to work on the mac.
# Modified by Siebren to support docstrings and PASV.
#
import os
import sys
# Import SOCKS module if it exists, else standard socket module socket
try:
import SOCKS; socket = SOCKS; del SOCKS # import SOCKS as socket
from socket import getfqdn; socket.getfqdn = getfqdn; del getfqdn
except ImportError:
import socket
__all__ = ["FTP","Netrc"]
# Magic number from <socket.h>
MSG_OOB = 0x1 # Process data out of band
# The standard FTP server control port
FTP_PORT = 21
# Exception raised when an error or invalid response is received
class Error(Exception): pass
class error_reply(Error): pass # unexpected [123]xx reply
class error_temp(Error): pass # 4xx errors
class error_perm(Error): pass # 5xx errors
class error_proto(Error): pass # response does not begin with [1-5]
# All exceptions (hopefully) that may be raised here and that aren't
# (always) programming errors on our side
all_errors = (Error, socket.error, IOError, EOFError)
# Line terminators (we always output CRLF, but accept any of CRLF, CR, LF)
CRLF = '\r\n'
# The class itself
class FTP:
    '''An FTP client class.
    To create a connection, call the class using these arguments:
    host, user, passwd, acct
    These are all strings, and have default value ''.
    Then use self.connect() with optional host and port argument.
    To download a file, use ftp.retrlines('RETR ' + filename),
    or ftp.retrbinary() with slightly different arguments.
    To upload a file, use ftp.storlines() or ftp.storbinary(),
    which have an open file as argument (see their definitions
    below for details).
    The download/upload functions first issue appropriate TYPE
    and PORT or PASV commands.
    '''
    debugging = 0          # 0 = quiet, 1 = commands/responses, 2 = raw lines too
    host = ''
    port = FTP_PORT
    sock = None            # control-connection socket
    file = None            # buffered file wrapper over the control socket
    welcome = None         # greeting banner squirreled away by connect()
    passiveserver = 1      # default to PASV-style data connections
    # Initialization method (called by class instantiation).
    # Initialize host to localhost, port to standard ftp port
    # Optional arguments are host (for connect()),
    # and user, passwd, acct (for login())
    def __init__(self, host='', user='', passwd='', acct=''):
        if host:
            self.connect(host)
            if user: self.login(user, passwd, acct)
    def connect(self, host = '', port = 0):
        '''Connect to host. Arguments are:
        - host: hostname to connect to (string, default previous host)
        - port: port to connect to (integer, default previous port)'''
        if host: self.host = host
        if port: self.port = port
        msg = "getaddrinfo returns an empty list"
        # Try each resolved address until one connects (IPv4/IPv6 agnostic).
        for res in socket.getaddrinfo(self.host, self.port, 0, socket.SOCK_STREAM):
            af, socktype, proto, canonname, sa = res
            try:
                self.sock = socket.socket(af, socktype, proto)
                self.sock.connect(sa)
            except socket.error, msg:
                if self.sock:
                    self.sock.close()
                self.sock = None
                continue
            break
        if not self.sock:
            # every candidate address failed; re-raise the last error
            raise socket.error, msg
        self.af = af
        self.file = self.sock.makefile('rb')
        self.welcome = self.getresp()
        return self.welcome
    def getwelcome(self):
        '''Get the welcome message from the server.
        (this is read and squirreled away by connect())'''
        if self.debugging:
            print '*welcome*', self.sanitize(self.welcome)
        return self.welcome
    def set_debuglevel(self, level):
        '''Set the debugging level.
        The required argument level means:
        0: no debugging output (default)
        1: print commands and responses but not body text etc.
        2: also print raw lines read and sent before stripping CR/LF'''
        self.debugging = level
    debug = set_debuglevel
    def set_pasv(self, val):
        '''Use passive or active mode for data transfers.
        With a false argument, use the normal PORT mode,
        With a true argument, use the PASV command.'''
        self.passiveserver = val
    # Internal: "sanitize" a string for printing, masking passwords so
    # they never appear in debug output
    def sanitize(self, s):
        if s[:5] == 'pass ' or s[:5] == 'PASS ':
            i = len(s)
            while i > 5 and s[i-1] in '\r\n':
                i = i-1
            s = s[:5] + '*'*(i-5) + s[i:]
        return repr(s)
    # Internal: send one line to the server, appending CRLF
    def putline(self, line):
        line = line + CRLF
        if self.debugging > 1: print '*put*', self.sanitize(line)
        self.sock.sendall(line)
    # Internal: send one command to the server (through putline())
    def putcmd(self, line):
        if self.debugging: print '*cmd*', self.sanitize(line)
        self.putline(line)
    # Internal: return one line from the server, stripping CRLF.
    # Raise EOFError if the connection is closed
    def getline(self):
        line = self.file.readline()
        if self.debugging > 1:
            print '*get*', self.sanitize(line)
        if not line: raise EOFError
        if line[-2:] == CRLF: line = line[:-2]
        elif line[-1:] in CRLF: line = line[:-1]
        return line
    # Internal: get a response from the server, which may possibly
    # consist of multiple lines.  Return a single string with no
    # trailing CRLF.  If the response consists of multiple lines,
    # these are separated by '\n' characters in the string
    def getmultiline(self):
        line = self.getline()
        if line[3:4] == '-':
            # multi-line reply: keep reading until a line repeats the
            # 3-digit code without the '-' continuation marker
            code = line[:3]
            while 1:
                nextline = self.getline()
                line = line + ('\n' + nextline)
                if nextline[:3] == code and \
                   nextline[3:4] != '-':
                    break
        return line
    # Internal: get a response from the server.
    # Raise various errors if the response indicates an error
    def getresp(self):
        resp = self.getmultiline()
        if self.debugging: print '*resp*', self.sanitize(resp)
        self.lastresp = resp[:3]
        c = resp[:1]
        if c in ('1', '2', '3'):
            return resp
        if c == '4':
            raise error_temp, resp
        if c == '5':
            raise error_perm, resp
        raise error_proto, resp
    def voidresp(self):
        """Expect a response beginning with '2'."""
        resp = self.getresp()
        if resp[0] != '2':
            raise error_reply, resp
        return resp
    def abort(self):
        '''Abort a file transfer.  Uses out-of-band data.
        This does not follow the procedure from the RFC to send Telnet
        IP and Synch; that doesn't seem to work with the servers I've
        tried.  Instead, just send the ABOR command as OOB data.'''
        line = 'ABOR' + CRLF
        if self.debugging > 1: print '*put urgent*', self.sanitize(line)
        self.sock.sendall(line, MSG_OOB)
        resp = self.getmultiline()
        if resp[:3] not in ('426', '226'):
            raise error_proto, resp
    def sendcmd(self, cmd):
        '''Send a command and return the response.'''
        self.putcmd(cmd)
        return self.getresp()
    def voidcmd(self, cmd):
        """Send a command and expect a response beginning with '2'."""
        self.putcmd(cmd)
        return self.voidresp()
    def sendport(self, host, port):
        '''Send a PORT command with the current host and the given
        port number.
        '''
        # PORT wants h1,h2,h3,h4,p1,p2 -- dotted quad plus port split
        # into high and low bytes
        hbytes = host.split('.')
        pbytes = [repr(port/256), repr(port%256)]
        bytes = hbytes + pbytes
        cmd = 'PORT ' + ','.join(bytes)
        return self.voidcmd(cmd)
    def sendeprt(self, host, port):
        '''Send a EPRT command with the current host and the given port number.'''
        # RFC 2428 address-family codes: 1 = IPv4, 2 = IPv6
        af = 0
        if self.af == socket.AF_INET:
            af = 1
        if self.af == socket.AF_INET6:
            af = 2
        if af == 0:
            raise error_proto, 'unsupported address family'
        fields = ['', repr(af), host, repr(port), '']
        cmd = 'EPRT ' + '|'.join(fields)
        return self.voidcmd(cmd)
    def makeport(self):
        '''Create a new socket and send a PORT command for it.'''
        msg = "getaddrinfo returns an empty list"
        sock = None
        # Bind an ephemeral listening port in the same address family as
        # the control connection; the server will connect back to it.
        for res in socket.getaddrinfo(None, 0, self.af, socket.SOCK_STREAM, 0, socket.AI_PASSIVE):
            af, socktype, proto, canonname, sa = res
            try:
                sock = socket.socket(af, socktype, proto)
                sock.bind(sa)
            except socket.error, msg:
                if sock:
                    sock.close()
                sock = None
                continue
            break
        if not sock:
            raise socket.error, msg
        sock.listen(1)
        port = sock.getsockname()[1] # Get proper port
        host = self.sock.getsockname()[0] # Get proper host
        if self.af == socket.AF_INET:
            resp = self.sendport(host, port)
        else:
            resp = self.sendeprt(host, port)
        return sock
    def makepasv(self):
        # PASV for IPv4, EPSV (RFC 2428) otherwise
        if self.af == socket.AF_INET:
            host, port = parse227(self.sendcmd('PASV'))
        else:
            host, port = parse229(self.sendcmd('EPSV'), self.sock.getpeername())
        return host, port
    def ntransfercmd(self, cmd, rest=None):
        """Initiate a transfer over the data connection.
        If the transfer is active, send a port command and the
        transfer command, and accept the connection.  If the server is
        passive, send a pasv command, connect to it, and start the
        transfer command.  Either way, return the socket for the
        connection and the expected size of the transfer.  The
        expected size may be None if it could not be determined.
        Optional `rest' argument can be a string that is sent as the
        argument to a RESTART command.  This is essentially a server
        marker used to tell the server to skip over any data up to the
        given marker.
        """
        size = None
        if self.passiveserver:
            host, port = self.makepasv()
            af, socktype, proto, canon, sa = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM)[0]
            conn = socket.socket(af, socktype, proto)
            conn.connect(sa)
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            if resp[0] != '1':
                raise error_reply, resp
        else:
            sock = self.makeport()
            if rest is not None:
                self.sendcmd("REST %s" % rest)
            resp = self.sendcmd(cmd)
            if resp[0] != '1':
                raise error_reply, resp
            conn, sockaddr = sock.accept()
        if resp[:3] == '150':
            # this is conditional in case we received a 125
            size = parse150(resp)
        return conn, size
    def transfercmd(self, cmd, rest=None):
        """Like ntransfercmd() but returns only the socket."""
        return self.ntransfercmd(cmd, rest)[0]
    def login(self, user = '', passwd = '', acct = ''):
        '''Login, default anonymous.'''
        if not user: user = 'anonymous'
        if not passwd: passwd = ''
        if not acct: acct = ''
        if user == 'anonymous' and passwd in ('', '-'):
            # If there is no anonymous ftp password specified
            # then we'll just use anonymous@
            # We don't send any other thing because:
            # - We want to remain anonymous
            # - We want to stop SPAM
            # - We don't want to let ftp sites to discriminate by the user,
            #   host or country.
            passwd = passwd + 'anonymous@'
        resp = self.sendcmd('USER ' + user)
        # '3' replies mean "more input needed": send PASS, then ACCT
        if resp[0] == '3': resp = self.sendcmd('PASS ' + passwd)
        if resp[0] == '3': resp = self.sendcmd('ACCT ' + acct)
        if resp[0] != '2':
            raise error_reply, resp
        return resp
    def retrbinary(self, cmd, callback, blocksize=8192, rest=None):
        """Retrieve data in binary mode.
        `cmd' is a RETR command.  `callback' is a callback function is
        called for each block.  No more than `blocksize' number of
        bytes will be read from the socket.  Optional `rest' is passed
        to transfercmd().
        A new port is created for you.  Return the response code.
        """
        self.voidcmd('TYPE I')
        conn = self.transfercmd(cmd, rest)
        while 1:
            data = conn.recv(blocksize)
            if not data:
                break
            callback(data)
        conn.close()
        return self.voidresp()
    def retrlines(self, cmd, callback = None):
        '''Retrieve data in line mode.
        The argument is a RETR or LIST command.
        The callback function (2nd argument) is called for each line,
        with trailing CRLF stripped.  This creates a new port for you.
        print_line() is the default callback.'''
        if callback is None: callback = print_line
        resp = self.sendcmd('TYPE A')
        conn = self.transfercmd(cmd)
        fp = conn.makefile('rb')
        while 1:
            line = fp.readline()
            if self.debugging > 2: print '*retr*', repr(line)
            if not line:
                break
            if line[-2:] == CRLF:
                line = line[:-2]
            elif line[-1:] == '\n':
                line = line[:-1]
            callback(line)
        fp.close()
        conn.close()
        return self.voidresp()
    def storbinary(self, cmd, fp, blocksize=8192):
        '''Store a file in binary mode.'''
        self.voidcmd('TYPE I')
        conn = self.transfercmd(cmd)
        while 1:
            buf = fp.read(blocksize)
            if not buf: break
            conn.sendall(buf)
        conn.close()
        return self.voidresp()
    def storlines(self, cmd, fp):
        '''Store a file in line mode.'''
        self.voidcmd('TYPE A')
        conn = self.transfercmd(cmd)
        while 1:
            buf = fp.readline()
            if not buf: break
            # normalize each line ending to CRLF before sending
            if buf[-2:] != CRLF:
                if buf[-1] in CRLF: buf = buf[:-1]
                buf = buf + CRLF
            conn.sendall(buf)
        conn.close()
        return self.voidresp()
    def acct(self, password):
        '''Send new account name.'''
        cmd = 'ACCT ' + password
        return self.voidcmd(cmd)
    def nlst(self, *args):
        '''Return a list of files in a given directory (default the current).'''
        cmd = 'NLST'
        for arg in args:
            cmd = cmd + (' ' + arg)
        files = []
        self.retrlines(cmd, files.append)
        return files
    def dir(self, *args):
        '''List a directory in long form.
        By default list current directory to stdout.
        Optional last argument is callback function; all
        non-empty arguments before it are concatenated to the
        LIST command.  (This *should* only be used for a pathname.)'''
        cmd = 'LIST'
        func = None
        if args[-1:] and type(args[-1]) != type(''):
            args, func = args[:-1], args[-1]
        for arg in args:
            if arg:
                cmd = cmd + (' ' + arg)
        self.retrlines(cmd, func)
    def rename(self, fromname, toname):
        '''Rename a file.'''
        resp = self.sendcmd('RNFR ' + fromname)
        if resp[0] != '3':
            raise error_reply, resp
        return self.voidcmd('RNTO ' + toname)
    def delete(self, filename):
        '''Delete a file.'''
        resp = self.sendcmd('DELE ' + filename)
        if resp[:3] in ('250', '200'):
            return resp
        elif resp[:1] == '5':
            raise error_perm, resp
        else:
            raise error_reply, resp
    def cwd(self, dirname):
        '''Change to a directory.'''
        if dirname == '..':
            try:
                return self.voidcmd('CDUP')
            except error_perm, msg:
                # fall through to a plain 'CWD ..' unless the server
                # rejected CDUP as an unknown command (500)
                if msg.args[0][:3] != '500':
                    raise
        elif dirname == '':
            dirname = '.'  # does nothing, but could return error
        cmd = 'CWD ' + dirname
        return self.voidcmd(cmd)
    def size(self, filename):
        '''Retrieve the size of a file.'''
        # Note that the RFC doesn't say anything about 'SIZE'
        resp = self.sendcmd('SIZE ' + filename)
        if resp[:3] == '213':
            s = resp[3:].strip()
            try:
                return int(s)
            except (OverflowError, ValueError):
                # Python 2: fall back to long for very large sizes
                return long(s)
    def mkd(self, dirname):
        '''Make a directory, return its full pathname.'''
        resp = self.sendcmd('MKD ' + dirname)
        return parse257(resp)
    def rmd(self, dirname):
        '''Remove a directory.'''
        return self.voidcmd('RMD ' + dirname)
    def pwd(self):
        '''Return current working directory.'''
        resp = self.sendcmd('PWD')
        return parse257(resp)
    def quit(self):
        '''Quit, and close the connection.'''
        resp = self.voidcmd('QUIT')
        self.close()
        return resp
    def close(self):
        '''Close the connection without assuming anything about it.'''
        if self.file:
            self.file.close()
            self.sock.close()
            self.file = self.sock = None
# Regex for the "(NNN bytes)" size hint; compiled lazily in parse150().
_150_re = None
def parse150(resp):
    '''Parse the '150' response for a RETR request.
    Returns the expected transfer size or None; size is not guaranteed to
    be present in the 150 message.
    '''
    if resp[:3] != '150':
        raise error_reply, resp
    global _150_re
    if _150_re is None:
        # compile on first use and cache at module level
        import re
        _150_re = re.compile("150 .* \((\d+) bytes\)", re.IGNORECASE)
    m = _150_re.match(resp)
    if not m:
        return None
    s = m.group(1)
    try:
        return int(s)
    except (OverflowError, ValueError):
        # Python 2: fall back to long for sizes beyond sys.maxint
        return long(s)
_227_re = None
def parse227(resp):
'''Parse the '227' response for a PASV request.
Raises error_proto if it does not contain '(h1,h2,h3,h4,p1,p2)'
Return ('host.addr.as.numbers', port#) tuple.'''
if resp[:3] != '227':
raise error_reply, resp
global _227_re
if _227_re is None:
import re
_227_re = re.compile(r'(\d+),(\d+),(\d+),(\d+),(\d+),(\d+)')
m = _227_re.search(resp)
if not m:
raise error_proto, resp
numbers = m.groups()
host = '.'.join(numbers[:4])
port = (int(numbers[4]) << 8) + int(numbers[5])
return host, port
def parse229(resp, peer):
'''Parse the '229' response for a EPSV request.
Raises error_proto if it does not contain '(|||port|)'
Return ('host.addr.as.numbers', port#) tuple.'''
if resp[:3] != '229':
raise error_reply, resp
left = resp.find('(')
if left < 0: raise error_proto, resp
right = resp.find(')', left + 1)
if right < 0:
raise error_proto, resp # should contain '(|||port|)'
if resp[left + 1] != resp[right - 1]:
raise error_proto, resp
parts = resp[left + 1:right].split(resp[left+1])
if len(parts) != 5:
raise error_proto, resp
host = peer[0]
port = int(parts[3])
return host, port
def parse257(resp):
'''Parse the '257' response for a MKD or PWD request.
This is a response to a MKD or PWD request: a directory name.
Returns the directoryname in the 257 reply.'''
if resp[:3] != '257':
raise error_reply, resp
if resp[3:5] != ' "':
return '' # Not compliant to RFC 959, but UNIX ftpd does this
dirname = ''
i = 5
n = len(resp)
while i < n:
c = resp[i]
i = i+1
if c == '"':
if i >= n or resp[i] != '"':
break
i = i+1
dirname = dirname + c
return dirname
def print_line(line):
    '''Default retrlines callback: print a line to stdout.'''
    print line
def ftpcp(source, sourcename, target, targetname = '', type = 'I'):
    '''Copy a file from one FTP instance to another (server-to-server):
    the source's PASV data port is handed to the target via PORT, so
    the data flows directly between the two servers.'''
    if not targetname: targetname = sourcename
    type = 'TYPE ' + type
    source.voidcmd(type)
    target.voidcmd(type)
    sourcehost, sourceport = parse227(source.sendcmd('PASV'))
    target.sendport(sourcehost, sourceport)
    # RFC 959: the user must "listen" [...] BEFORE sending the
    # transfer request.
    # So: STOR before RETR, because here the target is a "user".
    treply = target.sendcmd('STOR ' + targetname)
    if treply[:3] not in ('125', '150'): raise error_proto  # RFC 959
    sreply = source.sendcmd('RETR ' + sourcename)
    if sreply[:3] not in ('125', '150'): raise error_proto  # RFC 959
    # drain the final completion replies from both sides
    source.voidresp()
    target.voidresp()
class Netrc:
    """Class to parse & provide access to 'netrc' format files.
    See the netrc(4) man page for information on the file format.
    WARNING: This class is obsolete -- use module netrc instead.
    """
    # Fallback credentials taken from a 'default' entry, if any.
    __defuser = None
    __defpasswd = None
    __defacct = None
    def __init__(self, filename=None):
        if filename is None:
            if "HOME" in os.environ:
                filename = os.path.join(os.environ["HOME"],
                                        ".netrc")
            else:
                raise IOError, \
                      "specify file to load or set $HOME"
        self.__hosts = {}
        self.__macros = {}
        fp = open(filename, "r")
        in_macro = 0
        while 1:
            line = fp.readline()
            if not line: break
            # A macro body is every non-blank line following 'macdef';
            # a blank line terminates it.
            if in_macro and line.strip():
                macro_lines.append(line)
                continue
            elif in_macro:
                self.__macros[macro_name] = tuple(macro_lines)
                in_macro = 0
            words = line.split()
            host = user = passwd = acct = None
            default = 0
            i = 0
            # Scan keyword/value pairs on this line.
            while i < len(words):
                w1 = words[i]
                if i+1 < len(words):
                    w2 = words[i + 1]
                else:
                    w2 = None
                if w1 == 'default':
                    default = 1
                elif w1 == 'machine' and w2:
                    host = w2.lower()
                    i = i + 1
                elif w1 == 'login' and w2:
                    user = w2
                    i = i + 1
                elif w1 == 'password' and w2:
                    passwd = w2
                    i = i + 1
                elif w1 == 'account' and w2:
                    acct = w2
                    i = i + 1
                elif w1 == 'macdef' and w2:
                    macro_name = w2
                    macro_lines = []
                    in_macro = 1
                    break
                i = i + 1
            if default:
                self.__defuser = user or self.__defuser
                self.__defpasswd = passwd or self.__defpasswd
                self.__defacct = acct or self.__defacct
            if host:
                if host in self.__hosts:
                    # merge with any earlier entry for the same host
                    ouser, opasswd, oacct = \
                           self.__hosts[host]
                    user = user or ouser
                    passwd = passwd or opasswd
                    acct = acct or oacct
                self.__hosts[host] = user, passwd, acct
        fp.close()
    def get_hosts(self):
        """Return a list of hosts mentioned in the .netrc file."""
        return self.__hosts.keys()
    def get_account(self, host):
        """Returns login information for the named host.
        The return value is a triple containing userid,
        password, and the accounting field.
        """
        host = host.lower()
        user = passwd = acct = None
        if host in self.__hosts:
            user, passwd, acct = self.__hosts[host]
        # fill gaps from the 'default' entry, if one was seen
        user = user or self.__defuser
        passwd = passwd or self.__defpasswd
        acct = acct or self.__defacct
        return user, passwd, acct
    def get_macros(self):
        """Return a list of all defined macro names."""
        return self.__macros.keys()
    def get_macro(self, macro):
        """Return a sequence of lines which define a named macro."""
        return self.__macros[macro]
def test():
    '''Test program.
    Usage: ftp [-d] [-r[file]] host [-l[dir]] [-d[dir]] [-p] [file] ...
    -d dir
    -l list
    -p password
    '''
    if len(sys.argv) < 2:
        print test.__doc__
        sys.exit(0)
    debugging = 0
    rcfile = None
    # each leading '-d' bumps the debug level
    while sys.argv[1] == '-d':
        debugging = debugging+1
        del sys.argv[1]
    if sys.argv[1][:2] == '-r':
        # get name of alternate ~/.netrc file:
        rcfile = sys.argv[1][2:]
        del sys.argv[1]
    host = sys.argv[1]
    ftp = FTP(host)
    ftp.set_debuglevel(debugging)
    userid = passwd = acct = ''
    try:
        netrc = Netrc(rcfile)
    except IOError:
        if rcfile is not None:
            sys.stderr.write("Could not open account file"
                             " -- using anonymous login.")
    else:
        try:
            userid, passwd, acct = netrc.get_account(host)
        except KeyError:
            # no account for host
            sys.stderr.write(
                    "No account -- using anonymous login.")
    ftp.login(userid, passwd, acct)
    # remaining arguments are per-file actions
    for file in sys.argv[2:]:
        if file[:2] == '-l':
            ftp.dir(file[2:])
        elif file[:2] == '-d':
            cmd = 'CWD'
            if file[2:]: cmd = cmd + ' ' + file[2:]
            resp = ftp.sendcmd(cmd)
        elif file == '-p':
            ftp.set_pasv(not ftp.passiveserver)
        else:
            ftp.retrbinary('RETR ' + file, \
                           sys.stdout.write, 1024)
    ftp.quit()
# Run the command-line test driver only when executed as a script.
if __name__ == '__main__':
    test()
| gpl-2.0 |
Zhongqilong/kbengine | kbe/src/lib/python/Lib/inspect.py | 63 | 104117 | """Get useful information from live Python objects.
This module encapsulates the interface provided by the internal special
attributes (co_*, im_*, tb_*, etc.) in a friendlier fashion.
It also provides some help for examining source code and class layout.
Here are some of the useful functions provided by this module:
ismodule(), isclass(), ismethod(), isfunction(), isgeneratorfunction(),
isgenerator(), istraceback(), isframe(), iscode(), isbuiltin(),
isroutine() - check object types
getmembers() - get members of an object that satisfy a given condition
getfile(), getsourcefile(), getsource() - find an object's source code
getdoc(), getcomments() - get documentation on an object
getmodule() - determine the module that an object came from
getclasstree() - arrange classes so as to represent their hierarchy
getargspec(), getargvalues(), getcallargs() - get info about function arguments
getfullargspec() - same, with support for Python-3000 features
formatargspec(), formatargvalues() - format an argument spec
getouterframes(), getinnerframes() - get info about frames
currentframe() - get the current stack frame
stack(), trace() - get info about frames on the stack or in a traceback
signature() - get a Signature object for the callable
"""
# This module is in the public domain. No warranties.
__author__ = ('Ka-Ping Yee <ping@lfw.org>',
'Yury Selivanov <yselivanov@sprymix.com>')
import ast
import importlib.machinery
import itertools
import linecache
import os
import re
import sys
import tokenize
import token
import types
import warnings
import functools
import builtins
from operator import attrgetter
from collections import namedtuple, OrderedDict
# Create constants for the compiler flags in Include/code.h
# We try to get them from dis to avoid duplication, but fall
# back to hardcoding so the dependency is optional
try:
    from dis import COMPILER_FLAG_NAMES as _flag_names
except ImportError:
    CO_OPTIMIZED, CO_NEWLOCALS = 0x1, 0x2
    CO_VARARGS, CO_VARKEYWORDS = 0x4, 0x8
    CO_NESTED, CO_GENERATOR, CO_NOFREE = 0x10, 0x20, 0x40
else:
    # mirror every flag from dis into module globals, e.g. CO_GENERATOR
    mod_dict = globals()
    for k, v in _flag_names.items():
        mod_dict["CO_" + v] = k
# See Include/object.h
TPFLAGS_IS_ABSTRACT = 1 << 20
# ----------------------------------------------------------- type-checking
def ismodule(object):
    """Return true if the object is a module.

    Module objects provide these attributes:
        __cached__      pathname to byte compiled file
        __doc__         documentation string
        __file__        filename (missing for built-in modules)
    """
    return isinstance(object, types.ModuleType)
def isclass(object):
    """Return true if the object is a class.

    Class objects provide these attributes:
        __doc__         documentation string
        __module__      name of module in which this class was defined
    """
    return isinstance(object, type)
def ismethod(object):
    """Return true if the object is a bound (instance) method.

    Instance method objects provide these attributes:
        __doc__         documentation string
        __name__        name with which this method was defined
        __func__        function object containing implementation of method
        __self__        instance to which this method is bound
    """
    return isinstance(object, types.MethodType)
def ismethoddescriptor(object):
    """Return true if the object is a method descriptor.

    But not if ismethod() or isclass() or isfunction() are true.

    This is new in Python 2.2, and, for example, is true of int.__add__.
    An object passing this test has a __get__ attribute, but not a
    __set__ attribute; beyond that the attribute set varies, though
    __name__ is usually sensible and __doc__ often is.

    Methods implemented via descriptors that also pass one of the other
    tests return false from the ismethoddescriptor() test, simply because
    the other tests promise more -- you can, e.g., count on having the
    __func__ attribute (etc) when an object passes ismethod()."""
    if isclass(object) or ismethod(object) or isfunction(object):
        # these categories are mutually exclusive with method descriptors
        return False
    klass = type(object)
    return hasattr(klass, "__get__") and not hasattr(klass, "__set__")
def isdatadescriptor(object):
    """Return true if the object is a data descriptor.

    Data descriptors have both a __get__ and a __set__ attribute.
    Examples are properties (defined in Python) and getsets and members
    (defined in C).  Typically, data descriptors will also have __name__
    and __doc__ attributes (properties, getsets, and members have both of
    these attributes), but this is not guaranteed."""
    if isclass(object) or ismethod(object) or isfunction(object):
        # these categories are mutually exclusive with data descriptors
        return False
    klass = type(object)
    return hasattr(klass, "__get__") and hasattr(klass, "__set__")
# MemberDescriptorType only exists where the implementation exposes it,
# so the predicate is defined in two variants.
if hasattr(types, 'MemberDescriptorType'):
    # CPython and equivalent
    def ismemberdescriptor(object):
        """Return true if the object is a member descriptor.

        Member descriptors are specialized descriptors defined in extension
        modules."""
        return isinstance(object, types.MemberDescriptorType)
else:
    # Other implementations: no such type, so nothing can be one
    def ismemberdescriptor(object):
        """Return true if the object is a member descriptor.

        Member descriptors are specialized descriptors defined in extension
        modules."""
        return False
# GetSetDescriptorType only exists where the implementation exposes it,
# so the predicate is defined in two variants.
if hasattr(types, 'GetSetDescriptorType'):
    # CPython and equivalent
    def isgetsetdescriptor(object):
        """Return true if the object is a getset descriptor.

        getset descriptors are specialized descriptors defined in extension
        modules."""
        return isinstance(object, types.GetSetDescriptorType)
else:
    # Other implementations: no such type, so nothing can be one
    def isgetsetdescriptor(object):
        """Return true if the object is a getset descriptor.

        getset descriptors are specialized descriptors defined in extension
        modules."""
        return False
def isfunction(object):
    """Return true if the object is a user-defined function.

    Function objects provide these attributes:
        __doc__          documentation string
        __name__         name with which this function was defined
        __code__         code object containing compiled function bytecode
        __defaults__     tuple of any default values for arguments
        __globals__      global namespace in which this function was defined
        __annotations__  dict of parameter annotations
        __kwdefaults__   dict of keyword only parameters with defaults
    """
    return isinstance(object, types.FunctionType)
def isgeneratorfunction(object):
    """Return true if the object is a user-defined generator function.

    Generator function objects provide the same attributes as functions;
    see help(isfunction) for the listing."""
    if not (isfunction(object) or ismethod(object)):
        return False
    # a generator function is flagged in its code object
    return bool(object.__code__.co_flags & CO_GENERATOR)
def isgenerator(object):
    """Return true if the object is a generator.

    Generator objects provide these attributes:
        __iter__    defined to support iteration over container
        close       raises a new GeneratorExit exception inside the
                    generator to terminate the iteration
        gi_code     code object
        gi_frame    frame object or possibly None once the generator has
                    been exhausted
        gi_running  set to 1 when generator is executing, 0 otherwise
        next        return the next item from the container
        send        resumes the generator and "sends" a value that becomes
                    the result of the current yield-expression
        throw       used to raise an exception inside the generator"""
    return isinstance(object, types.GeneratorType)
def istraceback(object):
    """Return true if the object is a traceback.

    Traceback objects provide these attributes:
        tb_frame    frame object at this level
        tb_lasti    index of last attempted instruction in bytecode
        tb_lineno   current line number in Python source code
        tb_next     next inner traceback object (called by this level)"""
    return isinstance(object, types.TracebackType)
def isframe(object):
    """Return true if the object is a frame object.

    Frame objects provide these attributes:
        f_back      next outer frame object (this frame's caller)
        f_builtins  built-in namespace seen by this frame
        f_code      code object being executed in this frame
        f_globals   global namespace seen by this frame
        f_lasti     index of last attempted instruction in bytecode
        f_lineno    current line number in Python source code
        f_locals    local namespace seen by this frame
        f_trace     tracing function for this frame, or None"""
    return isinstance(object, types.FrameType)
def iscode(object):
    """Return true if the object is a code object.

    Code objects provide these attributes:
        co_argcount     number of arguments (not including * or ** args)
        co_code         string of raw compiled bytecode
        co_consts       tuple of constants used in the bytecode
        co_filename     name of file in which this code object was created
        co_firstlineno  number of first line in Python source code
        co_flags        bitmap: 1=optimized | 2=newlocals | 4=*arg | 8=**arg
        co_lnotab       encoded mapping of line numbers to bytecode indices
        co_name         name with which this code object was defined
        co_names        tuple of names of local variables
        co_nlocals      number of local variables
        co_stacksize    virtual machine stack space required
        co_varnames     tuple of names of arguments and local variables"""
    return isinstance(object, types.CodeType)
def isbuiltin(object):
    """Return true if the object is a built-in function or method.

    Built-in functions and methods provide these attributes:
        __doc__     documentation string
        __name__    original name of this function or method
        __self__    instance to which a method is bound, or None"""
    return isinstance(object, types.BuiltinFunctionType)
def isroutine(object):
    """Return true if the object is any kind of function or method."""
    # builtins, user functions, bound methods, and method descriptors
    # all count as routines
    checks = (isbuiltin, isfunction, ismethod, ismethoddescriptor)
    return any(check(object) for check in checks)
def isabstract(object):
    """Return true if the object is an abstract base class (ABC)."""
    if not isinstance(object, type):
        return False
    # the interpreter marks ABCs with unimplemented abstract methods
    # via this type flag
    return bool(object.__flags__ & TPFLAGS_IS_ABSTRACT)
def getmembers(object, predicate=None):
    """Return all members of an object as (name, value) pairs sorted by name.
    Optionally, only return members that satisfy a given predicate."""
    if isclass(object):
        mro = (object,) + getmro(object)
    else:
        mro = ()
    results = []
    processed = set()
    names = dir(object)
    # Add any DynamicClassAttributes to the list of names if object is a class;
    # this may result in duplicate entries if, for example, a virtual
    # attribute with the same name as a DynamicClassAttribute exists
    try:
        for base in object.__bases__:
            for k, v in base.__dict__.items():
                if isinstance(v, types.DynamicClassAttribute):
                    names.append(k)
    except AttributeError:
        # not a class (no __bases__): nothing to add
        pass
    for key in names:
        # First try to get the value via getattr.  Some descriptors don't
        # like calling their __get__ (see bug #1785), so fall back to
        # looking in the __dict__.
        try:
            value = getattr(object, key)
            # handle the duplicate key
            if key in processed:
                raise AttributeError
        except AttributeError:
            # search the MRO dicts directly; the for/else 'continue'
            # skips names that cannot be resolved at all
            for base in mro:
                if key in base.__dict__:
                    value = base.__dict__[key]
                    break
            else:
                # could be a (currently) missing slot member, or a buggy
                # __dir__; discard and move on
                continue
        if not predicate or predicate(value):
            results.append((key, value))
        processed.add(key)
    results.sort(key=lambda pair: pair[0])
    return results
Attribute = namedtuple('Attribute', 'name kind defining_class object')
def classify_class_attrs(cls):
    """Return list of attribute-descriptor tuples.

    For each name in dir(cls), the return list contains a 4-tuple
    with these elements:
        0. The name (a string).
        1. The kind of attribute this is, one of these strings:
               'class method'    created via classmethod()
               'static method'   created via staticmethod()
               'property'        created via property()
               'method'          any other flavor of method or descriptor
               'data'            not a method
        2. The class which defined this attribute (a class).
        3. The object as obtained by calling getattr; if this fails, or if the
           resulting object does not live anywhere in the class' mro (including
           metaclasses) then the object is looked up in the defining class's
           dict (found by walking the mro).

    If one of the items in dir(cls) is stored in the metaclass it will now
    be discovered and not have None be listed as the class in which it was
    defined.  Any items whose home class cannot be discovered are skipped.
    """
    mro = getmro(cls)
    metamro = getmro(type(cls)) # for attributes stored in the metaclass
    metamro = tuple([cls for cls in metamro if cls not in (type, object)])
    class_bases = (cls,) + mro
    all_bases = class_bases + metamro
    names = dir(cls)
    # Add any DynamicClassAttributes to the list of names;
    # this may result in duplicate entries if, for example, a virtual
    # attribute with the same name as a DynamicClassAttribute exists.
    for base in mro:
        for k, v in base.__dict__.items():
            if isinstance(v, types.DynamicClassAttribute):
                names.append(k)
    result = []
    processed = set()
    for name in names:
        # Get the object associated with the name, and where it was defined.
        # Normal objects will be looked up with both getattr and directly in
        # its class' dict (in case getattr fails [bug #1785], and also to look
        # for a docstring).
        # For DynamicClassAttributes on the second pass we only look in the
        # class's dict.
        #
        # Getting an obj from the __dict__ sometimes reveals more than
        # using getattr.  Static and class methods are dramatic examples.
        homecls = None
        get_obj = None
        dict_obj = None
        if name not in processed:
            try:
                if name == '__dict__':
                    raise Exception("__dict__ is special, don't want the proxy")
                get_obj = getattr(cls, name)
            except Exception as exc:
                pass
            else:
                homecls = getattr(get_obj, "__objclass__", homecls)
                if homecls not in class_bases:
                    # if the resulting object does not live somewhere in the
                    # mro, drop it and search the mro manually
                    homecls = None
                    last_cls = None
                    # first look in the classes
                    for srch_cls in class_bases:
                        srch_obj = getattr(srch_cls, name, None)
                        if srch_obj == get_obj:
                            last_cls = srch_cls
                    # then check the metaclasses
                    for srch_cls in metamro:
                        try:
                            srch_obj = srch_cls.__getattr__(cls, name)
                        except AttributeError:
                            continue
                        if srch_obj == get_obj:
                            last_cls = srch_cls
                    if last_cls is not None:
                        homecls = last_cls
        for base in all_bases:
            if name in base.__dict__:
                dict_obj = base.__dict__[name]
                if homecls not in metamro:
                    homecls = base
                break
        if homecls is None:
            # unable to locate the attribute anywhere, most likely due to
            # buggy custom __dir__; discard and move on
            continue
        obj = get_obj or dict_obj
        # Classify the object or its descriptor.
        if isinstance(dict_obj, staticmethod):
            kind = "static method"
            obj = dict_obj
        elif isinstance(dict_obj, classmethod):
            kind = "class method"
            obj = dict_obj
        elif isinstance(dict_obj, property):
            kind = "property"
            obj = dict_obj
        elif isroutine(obj):
            kind = "method"
        else:
            kind = "data"
        result.append(Attribute(name, kind, homecls, obj))
        processed.add(name)
    return result
# ----------------------------------------------------------- class helpers
def getmro(cls):
    """Return tuple of base classes (including cls) in method resolution order."""
    # The C3-linearized MRO is already computed and stored on the type.
    return cls.__mro__
# -------------------------------------------------------- function helpers
def unwrap(func, *, stop=None):
    """Get the object wrapped by *func*.

    Follows the chain of :attr:`__wrapped__` attributes returning the last
    object in the chain.

    *stop* is an optional callback accepting an object in the wrapper chain
    as its sole argument that allows the unwrapping to be terminated early if
    the callback returns a true value.  If the callback never returns a true
    value, the last object in the chain is returned as usual.  For example,
    :func:`signature` uses this to stop unwrapping if any object in the
    chain has a ``__signature__`` attribute defined.

    :exc:`ValueError` is raised if a cycle is encountered.
    """
    if stop is None:
        def keep_unwrapping(candidate):
            return hasattr(candidate, '__wrapped__')
    else:
        def keep_unwrapping(candidate):
            return hasattr(candidate, '__wrapped__') and not stop(candidate)
    original = func  # remember the original func for error reporting
    # Memoise by id to tolerate non-hashable objects.
    seen_ids = {id(original)}
    while keep_unwrapping(func):
        func = func.__wrapped__
        key = id(func)
        if key in seen_ids:
            raise ValueError('wrapper loop when unwrapping {!r}'.format(original))
        seen_ids.add(key)
    return func
# -------------------------------------------------- source code extraction
def indentsize(line):
    """Return the indent size, in spaces, at the start of a line of text."""
    expanded = line.expandtabs()
    stripped = expanded.lstrip()
    return len(expanded) - len(stripped)
def getdoc(object):
    """Get the documentation string for an object.

    All tabs are expanded to spaces.  To clean up docstrings that are
    indented to line up with blocks of code, any whitespace than can be
    uniformly removed from the second line onwards is removed."""
    # getattr covers objects without a __doc__ attribute (AttributeError
    # in the original try/except maps to the None default here).
    doc = getattr(object, '__doc__', None)
    if not isinstance(doc, str):
        return None
    return cleandoc(doc)
def cleandoc(doc):
    """Clean up indentation from docstrings.

    Any whitespace that can be uniformly removed from the second line
    onwards is removed."""
    try:
        lines = doc.expandtabs().split('\n')
    except UnicodeError:
        # Undecodable content: give up rather than raise.
        return None
    # Find minimum indentation of any non-blank lines after first line.
    margin = sys.maxsize
    for line in lines[1:]:
        stripped = line.lstrip()
        if stripped:
            margin = min(margin, len(line) - len(stripped))
    # Remove indentation: the first line is special-cased because it sits
    # right after the opening quotes and carries no layout indentation.
    if lines:
        lines[0] = lines[0].lstrip()
    if margin < sys.maxsize:
        lines[1:] = [line[margin:] for line in lines[1:]]
    # Remove any trailing or leading blank lines.
    while lines and not lines[-1]:
        del lines[-1]
    while lines and not lines[0]:
        del lines[0]
    return '\n'.join(lines)
def getfile(object):
    """Work out which source or compiled file an object was defined in.

    Accepts a module, class, method, function, traceback, frame, or code
    object.  Raises TypeError when the file cannot be determined (built-in
    modules/classes, or an unsupported object type).
    """
    if ismodule(object):
        if hasattr(object, '__file__'):
            return object.__file__
        raise TypeError('{!r} is a built-in module'.format(object))
    if isclass(object):
        if hasattr(object, '__module__'):
            # Do NOT rebind `object` here: the original clobbered it with
            # the looked-up module, so the TypeError below reported the
            # module (or None) instead of the offending class.
            module = sys.modules.get(object.__module__)
            if module is not None and hasattr(module, '__file__'):
                return module.__file__
        raise TypeError('{!r} is a built-in class'.format(object))
    # Peel each wrapper layer down to the underlying code object.
    if ismethod(object):
        object = object.__func__
    if isfunction(object):
        object = object.__code__
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        return object.co_filename
    raise TypeError('{!r} is not a module, class, method, '
                    'function, traceback, frame, or code object'.format(object))
# Result type of the deprecated getmoduleinfo() below.
ModuleInfo = namedtuple('ModuleInfo', 'name suffix mode module_type')
def getmoduleinfo(path):
    """Get the module name, suffix, mode, and module type for a given file.

    Deprecated: emits a DeprecationWarning and relies on the legacy ``imp``
    module.  Returns None (implicitly) when no known suffix matches *path*.
    """
    warnings.warn('inspect.getmoduleinfo() is deprecated', DeprecationWarning,
                  2)
    with warnings.catch_warnings():
        # Importing imp itself triggers a PendingDeprecationWarning;
        # suppress it so callers only see the warning issued above.
        warnings.simplefilter('ignore', PendingDeprecationWarning)
        import imp
    filename = os.path.basename(path)
    suffixes = [(-len(suffix), suffix, mode, mtype)
                    for suffix, mode, mtype in imp.get_suffixes()]
    suffixes.sort() # try longest suffixes first, in case they overlap
    for neglen, suffix, mode, mtype in suffixes:
        if filename[neglen:] == suffix:
            return ModuleInfo(filename[:neglen], suffix, mode, mtype)
def getmodulename(path):
    """Return the module name for a given file, or None."""
    fname = os.path.basename(path)
    # Check for paths that look like an actual module file; try the
    # longest suffixes first, in case they overlap.
    ordered = sorted((-len(suffix), suffix)
                     for suffix in importlib.machinery.all_suffixes())
    for neglen, suffix in ordered:
        if fname.endswith(suffix):
            return fname[:neglen]
    return None
def getsourcefile(object):
    """Return the filename that can be used to locate an object's source.
    Return None if no way can be identified to get the source.
    """
    filename = getfile(object)
    bytecode_suffixes = (importlib.machinery.DEBUG_BYTECODE_SUFFIXES +
                         importlib.machinery.OPTIMIZED_BYTECODE_SUFFIXES)
    if any(filename.endswith(s) for s in bytecode_suffixes):
        # Map a compiled file back to its corresponding source file.
        filename = (os.path.splitext(filename)[0] +
                    importlib.machinery.SOURCE_SUFFIXES[0])
    elif any(filename.endswith(s) for s in
             importlib.machinery.EXTENSION_SUFFIXES):
        # Extension modules have no Python source at all.
        return None
    if os.path.exists(filename):
        return filename
    # only return a non-existent filename if the module has a PEP 302 loader
    if getattr(getmodule(object, filename), '__loader__', None) is not None:
        return filename
    # or it is in the linecache
    if filename in linecache.cache:
        return filename
    return None
def getabsfile(object, _filename=None):
    """Return an absolute path to the source or compiled file for an object.

    The idea is for each object to have a unique origin, so this routine
    normalizes the result as much as possible."""
    filename = _filename
    if filename is None:
        filename = getsourcefile(object) or getfile(object)
    return os.path.normcase(os.path.abspath(filename))
# Cache mapping absolute (and realpath'd) source filenames -> module names.
modulesbyfile = {}
# Cache mapping module names -> the raw __file__ they were last indexed
# under, so already-mapped modules can be skipped on later scans.
_filesbymodname = {}
def getmodule(object, _filename=None):
    """Return the module an object was defined in, or None if not found.

    *_filename* is an internal optimization: a filename already computed
    for *object*, used to consult the cache before any filesystem work.
    """
    if ismodule(object):
        return object
    if hasattr(object, '__module__'):
        return sys.modules.get(object.__module__)
    # Try the filename to modulename cache
    if _filename is not None and _filename in modulesbyfile:
        return sys.modules.get(modulesbyfile[_filename])
    # Try the cache again with the absolute file name
    try:
        file = getabsfile(object, _filename)
    except TypeError:
        return None
    if file in modulesbyfile:
        return sys.modules.get(modulesbyfile[file])
    # Update the filename to module name cache and check yet again
    # Copy sys.modules in order to cope with changes while iterating
    for modname, module in list(sys.modules.items()):
        if ismodule(module) and hasattr(module, '__file__'):
            f = module.__file__
            if f == _filesbymodname.get(modname, None):
                # Have already mapped this module, so skip it
                continue
            _filesbymodname[modname] = f
            f = getabsfile(module)
            # Always map to the name the module knows itself by
            modulesbyfile[f] = modulesbyfile[
                os.path.realpath(f)] = module.__name__
    if file in modulesbyfile:
        return sys.modules.get(modulesbyfile[file])
    # Check the main module
    main = sys.modules['__main__']
    if not hasattr(object, '__name__'):
        return None
    if hasattr(main, object.__name__):
        mainobject = getattr(main, object.__name__)
        if mainobject is object:
            return main
    # Check builtins
    builtin = sys.modules['builtins']
    if hasattr(builtin, object.__name__):
        builtinobject = getattr(builtin, object.__name__)
        if builtinobject is object:
            return builtin
    # Not found anywhere: falls through and implicitly returns None.
def findsource(object):
    """Return the entire source file and starting line number for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a list of all the lines
    in the file and the line number indexes a line in that list.  An OSError
    is raised if the source code cannot be retrieved."""
    file = getfile(object)
    sourcefile = getsourcefile(object)
    if not sourcefile and file[:1] + file[-1:] != '<>':
        # No source file, and the name is not a synthetic "<...>" one.
        raise OSError('source code not available')
    file = sourcefile if sourcefile else file

    module = getmodule(object, file)
    if module:
        # Passing module globals lets linecache consult PEP 302 loaders.
        lines = linecache.getlines(file, module.__dict__)
    else:
        lines = linecache.getlines(file)
    if not lines:
        raise OSError('could not get source code')

    if ismodule(object):
        return lines, 0

    if isclass(object):
        name = object.__name__
        pat = re.compile(r'^(\s*)class\s*' + name + r'\b')
        # make some effort to find the best matching class definition:
        # use the one with the least indentation, which is the one
        # that's most probably not inside a function definition.
        candidates = []
        for i in range(len(lines)):
            match = pat.match(lines[i])
            if match:
                # if it's at toplevel, it's already the best one
                if lines[i][0] == 'c':
                    return lines, i
                # else add whitespace to candidate list
                candidates.append((match.group(1), i))
        if candidates:
            # this will sort by whitespace, and by line number,
            # less whitespace first
            candidates.sort()
            return lines, candidates[0][1]
        else:
            raise OSError('could not find class definition')

    # Peel wrappers down to the underlying code object.
    if ismethod(object):
        object = object.__func__
    if isfunction(object):
        object = object.__code__
    if istraceback(object):
        object = object.tb_frame
    if isframe(object):
        object = object.f_code
    if iscode(object):
        if not hasattr(object, 'co_firstlineno'):
            raise OSError('could not find function definition')
        lnum = object.co_firstlineno - 1
        pat = re.compile(r'^(\s*def\s)|(.*(?<!\w)lambda(:|\s))|^(\s*@)')
        # Walk upwards from co_firstlineno so decorators are included.
        while lnum > 0:
            if pat.match(lines[lnum]): break
            lnum = lnum - 1
        return lines, lnum
    raise OSError('could not find code object')
def getcomments(object):
    """Get lines of comments immediately preceding an object's source code.

    Returns None when source can't be found.
    """
    try:
        lines, lnum = findsource(object)
    except (OSError, TypeError):
        return None

    if ismodule(object):
        # Look for a comment block at the top of the file.
        start = 0
        if lines and lines[0][:2] == '#!': start = 1  # skip a shebang line
        while start < len(lines) and lines[start].strip() in ('', '#'):
            start = start + 1
        if start < len(lines) and lines[start][:1] == '#':
            comments = []
            end = start
            while end < len(lines) and lines[end][:1] == '#':
                comments.append(lines[end].expandtabs())
                end = end + 1
            return ''.join(comments)

    # Look for a preceding block of comments at the same indentation.
    elif lnum > 0:
        indent = indentsize(lines[lnum])
        end = lnum - 1
        if end >= 0 and lines[end].lstrip()[:1] == '#' and \
            indentsize(lines[end]) == indent:
            comments = [lines[end].expandtabs().lstrip()]
            if end > 0:
                # Walk upwards collecting contiguous comment lines with
                # matching indentation; comments end up in source order.
                end = end - 1
                comment = lines[end].expandtabs().lstrip()
                while comment[:1] == '#' and indentsize(lines[end]) == indent:
                    comments[:0] = [comment]
                    end = end - 1
                    if end < 0: break
                    comment = lines[end].expandtabs().lstrip()
            # Trim bare '#' separator lines from both ends of the block.
            while comments and comments[0].strip() == '#':
                comments[:1] = []
            while comments and comments[-1].strip() == '#':
                comments[-1:] = []
            return ''.join(comments)
# Internal control-flow exception raised by BlockFinder.tokeneater to
# signal that the end of a code block has been reached.
class EndOfBlock(Exception): pass
class BlockFinder:
    """Provide a tokeneater() method to detect the end of a code block."""
    def __init__(self):
        # indent: current nesting depth relative to the block's first line.
        # islambda: True once a 'lambda' keyword starts the block.
        # started: True after the opening def/class/lambda is seen.
        # passline: skip tokens until the next NEWLINE (header continuation).
        # last: 1-based line number of the last line that belongs to the block.
        self.indent = 0
        self.islambda = False
        self.started = False
        self.passline = False
        self.last = 1

    def tokeneater(self, type, token, srowcol, erowcol, line):
        """Consume one token; raise EndOfBlock when the block has ended."""
        if not self.started:
            # look for the first "def", "class" or "lambda"
            if token in ("def", "class", "lambda"):
                if token == "lambda":
                    self.islambda = True
                self.started = True
            self.passline = True    # skip to the end of the line
        elif type == tokenize.NEWLINE:
            self.passline = False   # stop skipping when a NEWLINE is seen
            self.last = srowcol[0]
            if self.islambda:       # lambdas always end at the first NEWLINE
                raise EndOfBlock
        elif self.passline:
            pass
        elif type == tokenize.INDENT:
            self.indent = self.indent + 1
            self.passline = True
        elif type == tokenize.DEDENT:
            self.indent = self.indent - 1
            # the end of matching indent/dedent pairs end a block
            # (note that this only works for "def"/"class" blocks,
            #  not e.g. for "if: else:" or "try: finally:" blocks)
            if self.indent <= 0:
                raise EndOfBlock
        elif self.indent == 0 and type not in (tokenize.COMMENT, tokenize.NL):
            # any other token on the same indentation level end the previous
            # block as well, except the pseudo-tokens COMMENT and NL.
            raise EndOfBlock
def getblock(lines):
    """Extract the block of code at the top of the given list of lines."""
    finder = BlockFinder()
    try:
        # Feed every token to the state machine; it raises EndOfBlock
        # when the definition at the top of `lines` is complete.
        for tok in tokenize.generate_tokens(iter(lines).__next__):
            finder.tokeneater(*tok)
    except (EndOfBlock, IndentationError):
        pass
    return lines[:finder.last]
def getsourcelines(object):
    """Return a list of source lines and starting line number for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a list of the lines
    corresponding to the object and the line number indicates where in the
    original source file the first line of code was found.  An OSError is
    raised if the source code cannot be retrieved."""
    lines, lnum = findsource(object)
    if ismodule(object):
        # A module's source is the whole file, starting at line 0.
        return lines, 0
    return getblock(lines[lnum:]), lnum + 1
def getsource(object):
    """Return the text of the source code for an object.

    The argument may be a module, class, method, function, traceback, frame,
    or code object.  The source code is returned as a single string.  An
    OSError is raised if the source code cannot be retrieved."""
    lines, _ = getsourcelines(object)
    return ''.join(lines)
# --------------------------------------------------- class tree extraction
def walktree(classes, children, parent):
    """Recursive helper function for getclasstree()."""
    # Sort in place for a stable, readable ordering of siblings.
    classes.sort(key=attrgetter('__module__', '__name__'))
    results = []
    for klass in classes:
        results.append((klass, klass.__bases__))
        if klass in children:
            # Nested list of this class's subclasses follows its entry.
            results.append(walktree(children[klass], children, klass))
    return results
def getclasstree(classes, unique=False):
    """Arrange the given list of classes into a hierarchy of nested lists.

    Where a nested list appears, it contains classes derived from the class
    whose entry immediately precedes the list.  Each entry is a 2-tuple
    containing a class and a tuple of its base classes.  If the 'unique'
    argument is true, exactly one entry appears in the returned structure
    for each class in the given list.  Otherwise, classes using multiple
    inheritance and their descendants will appear multiple times."""
    children = {}
    roots = []
    for klass in classes:
        bases = klass.__bases__
        if bases:
            for parent in bases:
                siblings = children.setdefault(parent, [])
                if klass not in siblings:
                    siblings.append(klass)
                if unique and parent in classes:
                    # In unique mode, record the class under one parent only.
                    break
        elif klass not in roots:
            roots.append(klass)
    # Parents that were not in the input list become additional roots.
    roots.extend(parent for parent in children if parent not in classes)
    return walktree(roots, children, None)
# ------------------------------------------------ argument list extraction
# Result type of getargs().
Arguments = namedtuple('Arguments', 'args, varargs, varkw')
def getargs(co):
    """Get information about the arguments accepted by a code object.

    Three things are returned: (args, varargs, varkw), where
    'args' is the list of argument names. Keyword-only arguments are
    appended. 'varargs' and 'varkw' are the names of the * and **
    arguments or None."""
    # Delegate to the full variant and fold keyword-only names into args.
    args, varargs, kwonlyargs, varkw = _getfullargs(co)
    return Arguments(args + kwonlyargs, varargs, varkw)
def _getfullargs(co):
    """Get information about the arguments accepted by a code object.

    Four things are returned: (args, varargs, kwonlyargs, varkw), where
    'args' and 'kwonlyargs' are lists of argument names, and 'varargs'
    and 'varkw' are the names of the * and ** arguments or None.

    Raises TypeError if *co* is not a code object.
    """
    if not iscode(co):
        raise TypeError('{!r} is not a code object'.format(co))

    nargs = co.co_argcount
    names = co.co_varnames
    nkwargs = co.co_kwonlyargcount
    # co_varnames lists positional args first, then keyword-only args,
    # then *args/**kwargs slots, then ordinary locals.
    args = list(names[:nargs])
    kwonlyargs = list(names[nargs:nargs+nkwargs])
    # (the original also assigned an unused local `step = 0` here; removed)
    nargs += nkwargs
    varargs = None
    if co.co_flags & CO_VARARGS:
        varargs = co.co_varnames[nargs]
        nargs = nargs + 1
    varkw = None
    if co.co_flags & CO_VARKEYWORDS:
        varkw = co.co_varnames[nargs]
    return args, varargs, kwonlyargs, varkw
# Result type of the legacy getargspec().
ArgSpec = namedtuple('ArgSpec', 'args varargs keywords defaults')
def getargspec(func):
    """Get the names and default values of a function's arguments.

    A tuple of four things is returned: (args, varargs, varkw, defaults).
    'args' is a list of the argument names.
    'args' will include keyword-only argument names.
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'defaults' is an n-tuple of the default values of the last n arguments.

    Use the getfullargspec() API for Python-3000 code, as annotations
    and keyword arguments are supported. getargspec() will raise ValueError
    if the func has either annotations or keyword arguments.
    """
    # Legacy wrapper: it cannot represent kw-only args or annotations,
    # so their presence is an explicit error rather than silent data loss.
    args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = \
        getfullargspec(func)
    if kwonlyargs or ann:
        raise ValueError("Function has keyword-only arguments or annotations"
                         ", use getfullargspec() API which can support them")
    return ArgSpec(args, varargs, varkw, defaults)
# Result type of getfullargspec().
FullArgSpec = namedtuple('FullArgSpec',
    'args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, annotations')

def getfullargspec(func):
    """Get the names and default values of a callable object's arguments.

    A tuple of seven things is returned:
    (args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults annotations).
    'args' is a list of the argument names.
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'defaults' is an n-tuple of the default values of the last n arguments.
    'kwonlyargs' is a list of keyword-only argument names.
    'kwonlydefaults' is a dictionary mapping names from kwonlyargs to defaults.
    'annotations' is a dictionary mapping argument names to annotations.

    The first four items in the tuple correspond to getargspec().

    Raises TypeError when the callable's signature cannot be determined.
    """
    try:
        # Re: `skip_bound_arg=False`
        #
        # There is a notable difference in behaviour between getfullargspec
        # and Signature: the former always returns 'self' parameter for bound
        # methods, whereas the Signature always shows the actual calling
        # signature of the passed object.
        #
        # To simulate this behaviour, we "unbind" bound methods, to trick
        # inspect.signature to always return their first parameter ("self",
        # usually)

        # Re: `follow_wrapper_chains=False`
        #
        # getfullargspec() historically ignored __wrapped__ attributes,
        # so we ensure that remains the case in 3.3+

        sig = _signature_internal(func,
                                  follow_wrapper_chains=False,
                                  skip_bound_arg=False)
    except Exception as ex:
        # Most of the times 'signature' will raise ValueError.
        # But, it can also raise AttributeError, and, maybe something
        # else. So to be fully backwards compatible, we catch all
        # possible exceptions here, and reraise a TypeError.
        raise TypeError('unsupported callable') from ex

    args = []
    varargs = None
    varkw = None
    kwonlyargs = []
    annotations = {}
    # NOTE: the original assigned `defaults = ()` twice; once suffices.
    defaults = ()
    kwdefaults = {}

    if sig.return_annotation is not sig.empty:
        annotations['return'] = sig.return_annotation

    for param in sig.parameters.values():
        kind = param.kind
        name = param.name

        if kind is _POSITIONAL_ONLY:
            args.append(name)
        elif kind is _POSITIONAL_OR_KEYWORD:
            args.append(name)
            if param.default is not param.empty:
                defaults += (param.default,)
        elif kind is _VAR_POSITIONAL:
            varargs = name
        elif kind is _KEYWORD_ONLY:
            kwonlyargs.append(name)
            if param.default is not param.empty:
                kwdefaults[name] = param.default
        elif kind is _VAR_KEYWORD:
            varkw = name

        if param.annotation is not param.empty:
            annotations[name] = param.annotation

    if not kwdefaults:
        # compatibility with 'func.__kwdefaults__'
        kwdefaults = None

    if not defaults:
        # compatibility with 'func.__defaults__'
        defaults = None

    return FullArgSpec(args, varargs, varkw, defaults,
                       kwonlyargs, kwdefaults, annotations)
# Result type of getargvalues().
ArgInfo = namedtuple('ArgInfo', 'args varargs keywords locals')
def getargvalues(frame):
    """Get information about arguments passed into a particular frame.

    A tuple of four things is returned: (args, varargs, varkw, locals).
    'args' is a list of the argument names.
    'varargs' and 'varkw' are the names of the * and ** arguments or None.
    'locals' is the locals dictionary of the given frame."""
    # Argument names come from the code object; values from frame locals.
    args, varargs, varkw = getargs(frame.f_code)
    return ArgInfo(args, varargs, varkw, frame.f_locals)
def formatannotation(annotation, base_module=None):
    """Return a display string for *annotation*.

    Classes are shown by (qualified) name; names from 'builtins' or from
    *base_module* are shown unqualified.  Anything else uses repr()."""
    if not isinstance(annotation, type):
        return repr(annotation)
    if annotation.__module__ in ('builtins', base_module):
        return annotation.__name__
    return '{}.{}'.format(annotation.__module__, annotation.__name__)
def formatannotationrelativeto(object):
    """Return a one-argument formatter that renders annotations relative
    to *object*'s module (so same-module class names appear unqualified)."""
    module = getattr(object, '__module__', None)
    def render(annotation):
        return formatannotation(annotation, module)
    return render
def formatargspec(args, varargs=None, varkw=None, defaults=None,
                  kwonlyargs=(), kwonlydefaults={}, annotations={},
                  formatarg=str,
                  formatvarargs=lambda name: '*' + name,
                  formatvarkw=lambda name: '**' + name,
                  formatvalue=lambda value: '=' + repr(value),
                  formatreturns=lambda text: ' -> ' + text,
                  formatannotation=formatannotation):
    """Format an argument spec from the values returned by getargspec
    or getfullargspec.

    The first seven arguments are (args, varargs, varkw, defaults,
    kwonlyargs, kwonlydefaults, annotations).  The other five arguments
    are the corresponding optional formatting functions that are called to
    turn names and values into strings.  The last argument is an optional
    function to format the sequence of arguments."""
    def annotated(name):
        # Render a name plus its ': annotation' suffix, when present.
        text = formatarg(name)
        if name in annotations:
            text += ': ' + formatannotation(annotations[name])
        return text
    pieces = []
    firstdefault = len(args) - len(defaults) if defaults else None
    for index, name in enumerate(args):
        piece = annotated(name)
        if firstdefault is not None and index >= firstdefault:
            piece += formatvalue(defaults[index - firstdefault])
        pieces.append(piece)
    if varargs is not None:
        pieces.append(formatvarargs(annotated(varargs)))
    elif kwonlyargs:
        # Bare '*' separator: keyword-only args without a *args slot.
        pieces.append('*')
    for name in kwonlyargs:
        piece = annotated(name)
        if kwonlydefaults and name in kwonlydefaults:
            piece += formatvalue(kwonlydefaults[name])
        pieces.append(piece)
    if varkw is not None:
        pieces.append(formatvarkw(annotated(varkw)))
    result = '(' + ', '.join(pieces) + ')'
    if 'return' in annotations:
        result += formatreturns(formatannotation(annotations['return']))
    return result
def formatargvalues(args, varargs, varkw, locals,
                    formatarg=str,
                    formatvarargs=lambda name: '*' + name,
                    formatvarkw=lambda name: '**' + name,
                    formatvalue=lambda value: '=' + repr(value)):
    """Format an argument spec from the 4 values returned by getargvalues.

    The first four arguments are (args, varargs, varkw, locals).  The
    next four arguments are the corresponding optional formatting functions
    that are called to turn names and values into strings.  The ninth
    argument is an optional function to format the sequence of arguments."""
    def convert(name, locals=locals,
                formatarg=formatarg, formatvalue=formatvalue):
        # name=value rendering, resolved against the frame's locals.
        return formatarg(name) + formatvalue(locals[name])
    specs = [convert(name) for name in args]
    if varargs:
        specs.append(formatvarargs(varargs) + formatvalue(locals[varargs]))
    if varkw:
        specs.append(formatvarkw(varkw) + formatvalue(locals[varkw]))
    return '(' + ', '.join(specs) + ')'
def _missing_arguments(f_name, argnames, pos, values):
names = [repr(name) for name in argnames if name not in values]
missing = len(names)
if missing == 1:
s = names[0]
elif missing == 2:
s = "{} and {}".format(*names)
else:
tail = ", {} and {}".format(*names[-2:])
del names[-2:]
s = ", ".join(names) + tail
raise TypeError("%s() missing %i required %s argument%s: %s" %
(f_name, missing,
"positional" if pos else "keyword-only",
"" if missing == 1 else "s", s))
def _too_many(f_name, args, kwonly, varargs, defcount, given, values):
atleast = len(args) - defcount
kwonly_given = len([arg for arg in kwonly if arg in values])
if varargs:
plural = atleast != 1
sig = "at least %d" % (atleast,)
elif defcount:
plural = True
sig = "from %d to %d" % (atleast, len(args))
else:
plural = len(args) != 1
sig = str(len(args))
kwonly_sig = ""
if kwonly_given:
msg = " positional argument%s (and %d keyword-only argument%s)"
kwonly_sig = (msg % ("s" if given != 1 else "", kwonly_given,
"s" if kwonly_given != 1 else ""))
raise TypeError("%s() takes %s positional argument%s but %d%s %s given" %
(f_name, sig, "s" if plural else "", given, kwonly_sig,
"was" if given == 1 and not kwonly_given else "were"))
def getcallargs(*func_and_positional, **named):
    """Get the mapping of arguments to values.

    A dict is returned, with keys the function argument names (including the
    names of the * and ** arguments, if any), and values the respective bound
    values from 'positional' and 'named'.

    Raises TypeError in the same situations an actual call would: unexpected
    or duplicate keyword arguments, missing required arguments, or too many
    positional arguments.
    """
    # func is smuggled in through *func_and_positional so that a caller's
    # keyword named 'func' cannot collide with it.
    func = func_and_positional[0]
    positional = func_and_positional[1:]
    spec = getfullargspec(func)
    args, varargs, varkw, defaults, kwonlyargs, kwonlydefaults, ann = spec
    f_name = func.__name__
    arg2value = {}

    if ismethod(func) and func.__self__ is not None:
        # implicit 'self' (or 'cls' for classmethods) argument
        positional = (func.__self__,) + positional
    num_pos = len(positional)
    num_args = len(args)
    num_defaults = len(defaults) if defaults else 0

    # Bind positionals to the leading named parameters; overflow goes
    # into *varargs when present.
    n = min(num_pos, num_args)
    for i in range(n):
        arg2value[args[i]] = positional[i]
    if varargs:
        arg2value[varargs] = tuple(positional[n:])
    possible_kwargs = set(args + kwonlyargs)
    if varkw:
        arg2value[varkw] = {}
    for kw, value in named.items():
        if kw not in possible_kwargs:
            if not varkw:
                raise TypeError("%s() got an unexpected keyword argument %r" %
                                (f_name, kw))
            arg2value[varkw][kw] = value
            continue
        if kw in arg2value:
            raise TypeError("%s() got multiple values for argument %r" %
                            (f_name, kw))
        arg2value[kw] = value
    if num_pos > num_args and not varargs:
        _too_many(f_name, args, kwonlyargs, varargs, num_defaults,
                   num_pos, arg2value)
    if num_pos < num_args:
        # Fill remaining positional parameters from defaults, erroring on
        # required (non-defaulted) names that were never bound.
        req = args[:num_args - num_defaults]
        for arg in req:
            if arg not in arg2value:
                _missing_arguments(f_name, req, True, arg2value)
        for i, arg in enumerate(args[num_args - num_defaults:]):
            if arg not in arg2value:
                arg2value[arg] = defaults[i]
    # Keyword-only parameters: apply defaults, then report any still missing.
    missing = 0
    for kwarg in kwonlyargs:
        if kwarg not in arg2value:
            if kwonlydefaults and kwarg in kwonlydefaults:
                arg2value[kwarg] = kwonlydefaults[kwarg]
            else:
                missing += 1
    if missing:
        _missing_arguments(f_name, kwonlyargs, False, arg2value)
    return arg2value
ClosureVars = namedtuple('ClosureVars', 'nonlocals globals builtins unbound')

def getclosurevars(func):
    """
    Get the mapping of free variables to their current values.

    Returns a named tuple of dicts mapping the current nonlocal, global
    and builtin references as seen by the body of the function. A final
    set of unbound names that could not be resolved is also provided.
    """
    if ismethod(func):
        func = func.__func__
    if not isfunction(func):
        raise TypeError("'{!r}' is not a Python function".format(func))

    code = func.__code__
    # Nonlocal references are named in co_freevars and resolved by pairing
    # them positionally with the function's closure cells.
    cells = func.__closure__ or ()
    nonlocal_vars = {name: cell.cell_contents
                     for name, cell in zip(code.co_freevars, cells)}

    # Global and builtin references are named in co_names and resolved
    # by looking them up in __globals__ or __builtins__
    global_ns = func.__globals__
    builtin_ns = global_ns.get("__builtins__", builtins.__dict__)
    if ismodule(builtin_ns):
        builtin_ns = builtin_ns.__dict__
    global_vars = {}
    builtin_vars = {}
    unbound_names = set()
    for name in code.co_names:
        if name in ("None", "True", "False"):
            # Because these used to be builtins instead of keywords, they
            # may still show up as name references. We ignore them.
            continue
        if name in global_ns:
            global_vars[name] = global_ns[name]
        elif name in builtin_ns:
            builtin_vars[name] = builtin_ns[name]
        else:
            unbound_names.add(name)

    return ClosureVars(nonlocal_vars, global_vars,
                       builtin_vars, unbound_names)
# -------------------------------------------------- stack frame extraction
# Result type of getframeinfo().
Traceback = namedtuple('Traceback', 'filename lineno function code_context index')

def getframeinfo(frame, context=1):
    """Get information about a frame or traceback object.

    A tuple of five things is returned: the filename, the line number of
    the current line, the function name, a list of lines of context from
    the source code, and the index of the current line within that list.
    The optional second argument specifies the number of lines of context
    to return, which are centered around the current line.

    Raises TypeError if *frame* is neither a frame nor a traceback.
    """
    if istraceback(frame):
        # A traceback carries its own line number plus the frame it refers to.
        lineno = frame.tb_lineno
        frame = frame.tb_frame
    else:
        lineno = frame.f_lineno
    if not isframe(frame):
        raise TypeError('{!r} is not a frame or traceback object'.format(frame))

    filename = getsourcefile(frame) or getfile(frame)
    if context > 0:
        # Center the context window on the current line (0-based start).
        start = lineno - 1 - context//2
        try:
            lines, lnum = findsource(frame)
        except OSError:
            lines = index = None
        else:
            # Clamp the window so it stays inside the file.
            start = max(start, 1)
            start = max(0, min(start, len(lines) - context))
            lines = lines[start:start+context]
            index = lineno - 1 - start
    else:
        lines = index = None

    return Traceback(filename, lineno, frame.f_code.co_name, lines, index)
def getlineno(frame):
    """Get the line number from a frame object, allowing for optimization."""
    # FrameType.f_lineno is now a descriptor that grovels co_lnotab
    return frame.f_lineno
def getouterframes(frame, context=1):
    """Get a list of records for a frame and all higher (calling) frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context."""
    records = []
    current = frame
    while current:
        # Walk outward along the call chain via f_back.
        records.append((current,) + getframeinfo(current, context))
        current = current.f_back
    return records
def getinnerframes(tb, context=1):
    """Get a list of records for a traceback's frame and all lower frames.

    Each record contains a frame object, filename, line number, function
    name, a list of lines of context, and index within the context."""
    records = []
    current = tb
    while current:
        # Walk inward along the traceback chain via tb_next.
        records.append((current.tb_frame,) + getframeinfo(current, context))
        current = current.tb_next
    return records
def currentframe():
    """Return the frame of the caller or None if this is not possible."""
    # sys._getframe is CPython-specific; other implementations may omit it.
    if hasattr(sys, "_getframe"):
        return sys._getframe(1)
    return None
def stack(context=1):
    """Return a list of records for the stack above the caller's frame."""
    # _getframe(1) is the caller's frame; records proceed outward from it.
    return getouterframes(sys._getframe(1), context)
def trace(context=1):
    """Return a list of records for the stack below the current exception."""
    # Uses the traceback of the exception currently being handled.
    return getinnerframes(sys.exc_info()[2], context)
# ------------------------------------------------ static version of getattr
# Unique marker distinguishing "attribute absent" from a stored None.
_sentinel = object()

def _static_getmro(klass):
    """Return klass.__mro__ without triggering a metaclass __getattribute__."""
    return type.__dict__['__mro__'].__get__(klass)
def _check_instance(obj, attr):
    # Look up attr in the instance __dict__ only, bypassing descriptors
    # and any overridden __getattribute__; returns _sentinel when absent.
    try:
        instance_dict = object.__getattribute__(obj, "__dict__")
    except AttributeError:
        instance_dict = {}
    return dict.get(instance_dict, attr, _sentinel)
def _check_class(klass, attr):
    # Scan the static MRO for attr directly in each class __dict__,
    # skipping classes whose metaclass shadows the __dict__ descriptor.
    for entry in _static_getmro(klass):
        if _shadowed_dict(type(entry)) is _sentinel:
            entry_dict = entry.__dict__
            if attr in entry_dict:
                return entry_dict[attr]
    return _sentinel
def _is_type(obj):
    # An object is a type exactly when its static MRO can be retrieved.
    try:
        _static_getmro(obj)
    except TypeError:
        return False
    else:
        return True
def _shadowed_dict(klass):
    """Return the object shadowing the normal __dict__ descriptor on *klass*,
    or _sentinel if __dict__ is the genuine type getset descriptor."""
    dict_attr = type.__dict__["__dict__"]
    for entry in _static_getmro(klass):
        try:
            class_dict = dict_attr.__get__(entry)["__dict__"]
        except KeyError:
            pass
        else:
            # Genuine __dict__ is a getset descriptor named "__dict__"
            # owned by this very class; anything else is a shadow.
            if not (type(class_dict) is types.GetSetDescriptorType and
                    class_dict.__name__ == "__dict__" and
                    class_dict.__objclass__ is entry):
                return class_dict
    return _sentinel
def getattr_static(obj, attr, default=_sentinel):
    """Retrieve attributes without triggering dynamic lookup via the
       descriptor protocol,  __getattr__ or __getattribute__.

       Note: this function may not be able to retrieve all attributes
       that getattr can fetch (like dynamically created attributes)
       and may find attributes that getattr can't (like descriptors
       that raise AttributeError). It can also return descriptor objects
       instead of instance members in some cases. See the
       documentation for details.

       Raises AttributeError if the attribute is not found and no
       *default* was supplied.
    """
    instance_result = _sentinel
    if not _is_type(obj):
        klass = type(obj)
        dict_attr = _shadowed_dict(klass)
        # Only trust the instance __dict__ if the class doesn't shadow it
        # (a slot __dict__ member descriptor is also fine).
        if (dict_attr is _sentinel or
            type(dict_attr) is types.MemberDescriptorType):
            instance_result = _check_instance(obj, attr)
    else:
        klass = obj

    klass_result = _check_class(klass, attr)

    if instance_result is not _sentinel and klass_result is not _sentinel:
        # A data descriptor (__get__ AND __set__) on the class takes
        # precedence over an instance attribute, mirroring normal lookup.
        if (_check_class(type(klass_result), '__get__') is not _sentinel and
            _check_class(type(klass_result), '__set__') is not _sentinel):
            return klass_result

    if instance_result is not _sentinel:
        return instance_result
    if klass_result is not _sentinel:
        return klass_result

    if obj is klass:
        # for types we check the metaclass too
        for entry in _static_getmro(type(klass)):
            if _shadowed_dict(type(entry)) is _sentinel:
                try:
                    return entry.__dict__[attr]
                except KeyError:
                    pass
    if default is not _sentinel:
        return default
    raise AttributeError(attr)
# ------------------------------------------------ generator introspection
# Generator lifecycle states returned by getgeneratorstate().
GEN_CREATED = 'GEN_CREATED'
GEN_RUNNING = 'GEN_RUNNING'
GEN_SUSPENDED = 'GEN_SUSPENDED'
GEN_CLOSED = 'GEN_CLOSED'

def getgeneratorstate(generator):
    """Get current state of a generator-iterator.

    Possible states are:
      GEN_CREATED: Waiting to start execution.
      GEN_RUNNING: Currently being executed by the interpreter.
      GEN_SUSPENDED: Currently suspended at a yield expression.
      GEN_CLOSED: Execution has completed.
    """
    if generator.gi_running:
        return GEN_RUNNING
    frame = generator.gi_frame
    if frame is None:
        # The frame is discarded once the generator finishes or is closed.
        return GEN_CLOSED
    if frame.f_lasti == -1:
        # No bytecode executed yet: the generator has never been started.
        return GEN_CREATED
    return GEN_SUSPENDED
def getgeneratorlocals(generator):
    """Return the generator's current local variables as a dict.

    Keys are the local variable names, values their currently bound
    values; an exhausted/closed generator (no frame) yields {}.
    Raises TypeError when *generator* is not a Python generator.
    """
    if not isgenerator(generator):
        raise TypeError("'{!r}' is not a Python generator".format(generator))
    if getattr(generator, "gi_frame", None) is None:
        return {}
    return generator.gi_frame.f_locals
###############################################################################
### Function Signature Object (PEP 362)
###############################################################################
# Types of C-level callables that never carry user-defined signatures:
_WrapperDescriptor = type(type.__call__)  # slot wrapper descriptor
_MethodWrapper = type(all.__call__)  # bound method-wrapper
_ClassMethodWrapper = type(int.__dict__['from_bytes'])  # classmethod descriptor
_NonUserDefinedCallables = (_WrapperDescriptor,
                            _MethodWrapper,
                            _ClassMethodWrapper,
                            types.BuiltinFunctionType)
def _signature_get_user_defined_method(cls, method_name):
    """Fetch *method_name* from *cls* and return it only when it is a
    user-defined callable; returns None for missing attributes and for
    C-level callables that cannot carry a custom signature.
    """
    try:
        candidate = getattr(cls, method_name)
    except AttributeError:
        return None
    # Once '__signature__' will be added to 'C'-level
    # callables, this check won't be necessary
    if isinstance(candidate, _NonUserDefinedCallables):
        return None
    return candidate
def _signature_get_partial(wrapped_sig, partial, extra_args=()):
    # Internal helper to calculate how 'wrapped_sig' signature will
    # look like after applying a 'functools.partial' object (or alike)
    # on it.
    #
    # wrapped_sig -- Signature of the wrapped callable
    # partial     -- the functools.partial (or duck-typed) object;
    #                its .args/.keywords are consumed here
    # extra_args  -- extra leading positional args to prepend (used by
    #                partialmethod handling)
    # Raises ValueError when partial's stored arguments don't bind.
    old_params = wrapped_sig.parameters
    new_params = OrderedDict(old_params.items())
    partial_args = partial.args or ()
    partial_keywords = partial.keywords or {}
    if extra_args:
        partial_args = extra_args + partial_args
    try:
        ba = wrapped_sig.bind_partial(*partial_args, **partial_keywords)
    except TypeError as ex:
        msg = 'partial object {!r} has incorrect arguments'.format(partial)
        raise ValueError(msg) from ex
    transform_to_kwonly = False
    for param_name, param in old_params.items():
        try:
            arg_value = ba.arguments[param_name]
        except KeyError:
            pass
        else:
            if param.kind is _POSITIONAL_ONLY:
                # If positional-only parameter is bound by partial,
                # it effectively disappears from the signature
                new_params.pop(param_name)
                continue
            if param.kind is _POSITIONAL_OR_KEYWORD:
                if param_name in partial_keywords:
                    # This means that this parameter, and all parameters
                    # after it should be keyword-only (and var-positional
                    # should be removed). Here's why. Consider the following
                    # function:
                    #     foo(a, b, *args, c):
                    #         pass
                    #
                    # "partial(foo, a='spam')" will have the following
                    # signature: "(*, a='spam', b, c)". Because attempting
                    # to call that partial with "(10, 20)" arguments will
                    # raise a TypeError, saying that "a" argument received
                    # multiple values.
                    transform_to_kwonly = True
                    # Set the new default value
                    new_params[param_name] = param.replace(default=arg_value)
                else:
                    # was passed as a positional argument
                    new_params.pop(param.name)
                    continue
            if param.kind is _KEYWORD_ONLY:
                # Set the new default value
                new_params[param_name] = param.replace(default=arg_value)
        if transform_to_kwonly:
            # Every remaining POSITIONAL_OR_KEYWORD parameter is demoted to
            # KEYWORD_ONLY and pushed to the end; *args disappears entirely.
            assert param.kind is not _POSITIONAL_ONLY
            if param.kind is _POSITIONAL_OR_KEYWORD:
                new_param = new_params[param_name].replace(kind=_KEYWORD_ONLY)
                new_params[param_name] = new_param
                new_params.move_to_end(param_name)
            elif param.kind in (_KEYWORD_ONLY, _VAR_KEYWORD):
                new_params.move_to_end(param_name)
            elif param.kind is _VAR_POSITIONAL:
                new_params.pop(param.name)
    return wrapped_sig.replace(parameters=new_params.values())
def _signature_bound_method(sig):
    """Turn the signature of an unbound function into the signature its
    bound method would expose, i.e. drop the first (self/cls) parameter.

    A leading *args parameter is kept as-is; a signature that starts with
    a keyword-only or **kwargs parameter is rejected with ValueError.
    """
    params = tuple(sig.parameters.values())
    if not params or params[0].kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
        raise ValueError('invalid method signature')
    first_kind = params[0].kind
    if first_kind in (_POSITIONAL_OR_KEYWORD, _POSITIONAL_ONLY):
        # Drop first parameter: '(p1, p2[, ...])' -> '(p2[, ...])'
        params = params[1:]
    elif first_kind is not _VAR_POSITIONAL:
        # Unless we add a new parameter type we never get here
        raise ValueError('invalid argument type')
    # A var-positional first parameter is left untouched:
    # '(*args[, ...])' -> '(*args[, ...])'
    return sig.replace(parameters=params)
def _signature_is_builtin(obj):
    """Tell whether *obj* is a C-level callable that might support
    Argument Clinic's __text_signature__ protocol.
    """
    # Evaluation order matters only for exotic objects; keep the original
    # short-circuit chain. Can't use 'isinstance(obj, type)' for the last
    # check, as it would also be True for regular python classes.
    return (isbuiltin(obj)
            or ismethoddescriptor(obj)
            or isinstance(obj, _NonUserDefinedCallables)
            or obj in (type, object))
def _signature_is_functionlike(obj):
    """Duck-type test: does *obj* look like a types.FunctionType without
    being one?  Typical positives are Cython-compiled functions, which
    carry all the usual function attributes over statically compiled code.
    """
    if not callable(obj) or isclass(obj):
        # All function-like objects are obviously callables,
        # and not classes.
        return False
    name = getattr(obj, '__name__', None)
    code = getattr(obj, '__code__', None)
    # _void (rather than None) as the fallback: None is itself a valid
    # value for __defaults__/__kwdefaults__ and must stay distinguishable
    # from "attribute missing".
    defaults = getattr(obj, '__defaults__', _void)
    kwdefaults = getattr(obj, '__kwdefaults__', _void)
    annotations = getattr(obj, '__annotations__', None)
    if not isinstance(code, types.CodeType):
        return False
    if not isinstance(name, str):
        return False
    if defaults is not None and not isinstance(defaults, tuple):
        return False
    if kwdefaults is not None and not isinstance(kwdefaults, dict):
        return False
    return isinstance(annotations, dict)
def _signature_get_bound_param(spec):
# Internal helper to get first parameter name from a
# __text_signature__ of a builtin method, which should
# be in the following format: '($param1, ...)'.
# Assumptions are that the first argument won't have
# a default value or an annotation.
assert spec.startswith('($')
pos = spec.find(',')
if pos == -1:
pos = spec.find(')')
cpos = spec.find(':')
assert cpos == -1 or cpos > pos
cpos = spec.find('=')
assert cpos == -1 or cpos > pos
return spec[2:pos]
def _signature_strip_non_python_syntax(signature):
"""
Takes a signature in Argument Clinic's extended signature format.
Returns a tuple of three things:
* that signature re-rendered in standard Python syntax,
* the index of the "self" parameter (generally 0), or None if
the function does not have a "self" parameter, and
* the index of the last "positional only" parameter,
or None if the signature has no positional-only parameters.
"""
if not signature:
return signature, None, None
self_parameter = None
last_positional_only = None
lines = [l.encode('ascii') for l in signature.split('\n')]
generator = iter(lines).__next__
token_stream = tokenize.tokenize(generator)
delayed_comma = False
skip_next_comma = False
text = []
add = text.append
current_parameter = 0
OP = token.OP
ERRORTOKEN = token.ERRORTOKEN
# token stream always starts with ENCODING token, skip it
t = next(token_stream)
assert t.type == tokenize.ENCODING
for t in token_stream:
type, string = t.type, t.string
if type == OP:
if string == ',':
if skip_next_comma:
skip_next_comma = False
else:
assert not delayed_comma
delayed_comma = True
current_parameter += 1
continue
if string == '/':
assert not skip_next_comma
assert last_positional_only is None
skip_next_comma = True
last_positional_only = current_parameter - 1
continue
if (type == ERRORTOKEN) and (string == '$'):
assert self_parameter is None
self_parameter = current_parameter
continue
if delayed_comma:
delayed_comma = False
if not ((type == OP) and (string == ')')):
add(', ')
add(string)
if (string == ','):
add(' ')
clean_signature = ''.join(text)
return clean_signature, self_parameter, last_positional_only
def _signature_fromstr(cls, obj, s, skip_bound_arg=True):
    """Build a signature (an instance of *cls*) for *obj* from the
    Argument Clinic '__text_signature__' string *s*.

    cls            -- the Signature class (or subclass) to instantiate;
                      its '_parameter_cls' supplies the Parameter type
    obj            -- the callable the text signature belongs to; used
                      for error messages and __self__/__module__ lookups
    s              -- the raw '__text_signature__' string
    skip_bound_arg -- when true, drop the leading bound ('$self')
                      parameter for bound callables

    Raises ValueError when *s* cannot be parsed.
    """
    Parameter = cls._parameter_cls
    clean_signature, self_parameter, last_positional_only = \
        _signature_strip_non_python_syntax(s)
    # Re-parse the cleaned signature by wrapping it in a dummy 'def'.
    program = "def foo" + clean_signature + ": pass"
    try:
        module = ast.parse(program)
    except SyntaxError:
        module = None
    if not isinstance(module, ast.Module):
        raise ValueError("{!r} builtin has invalid signature".format(obj))
    f = module.body[0]
    parameters = []
    empty = Parameter.empty
    invalid = object()
    # From here on, 'module' refers to the callable's defining module
    # (used to resolve symbolic default values such as 'sys.maxsize').
    module = None
    module_dict = {}
    module_name = getattr(obj, '__module__', None)
    if module_name:
        module = sys.modules.get(module_name, None)
        if module:
            module_dict = module.__dict__
    sys_module_dict = sys.modules
    def parse_name(node):
        # Extract the parameter name from an ast.arg node.
        assert isinstance(node, ast.arg)
        # BUG FIX: was 'node.annotation != None'; comparison to None must
        # use identity ('is not None') per PEP 8 — same behavior, correct
        # idiom, and immune to odd __eq__ implementations.
        if node.annotation is not None:
            raise ValueError("Annotations are not currently supported")
        return node.arg
    def wrap_value(s):
        # Evaluate a symbolic default expression and re-wrap the result
        # as an AST literal node so ast.literal_eval can digest it.
        try:
            value = eval(s, module_dict)
        except NameError:
            try:
                value = eval(s, sys_module_dict)
            except NameError:
                raise RuntimeError()
        if isinstance(value, str):
            return ast.Str(value)
        if isinstance(value, (int, float)):
            return ast.Num(value)
        if isinstance(value, bytes):
            return ast.Bytes(value)
        if value in (True, False, None):
            return ast.NameConstant(value)
        raise RuntimeError()
    class RewriteSymbolics(ast.NodeTransformer):
        # Replaces plain and dotted name references inside default-value
        # expressions with their evaluated literal values.
        def visit_Attribute(self, node):
            a = []
            n = node
            while isinstance(n, ast.Attribute):
                a.append(n.attr)
                n = n.value
            if not isinstance(n, ast.Name):
                raise RuntimeError()
            a.append(n.id)
            value = ".".join(reversed(a))
            return wrap_value(value)
        def visit_Name(self, node):
            if not isinstance(node.ctx, ast.Load):
                raise ValueError()
            return wrap_value(node.id)
    def p(name_node, default_node, default=empty):
        # Append one Parameter built from the given AST nodes; 'kind' is
        # read from the enclosing scope at call time.
        name = parse_name(name_node)
        if name is invalid:
            return None
        if default_node and default_node is not _empty:
            try:
                default_node = RewriteSymbolics().visit(default_node)
                o = ast.literal_eval(default_node)
            except ValueError:
                o = invalid
            if o is invalid:
                return None
            default = o if o is not invalid else default
        parameters.append(Parameter(name, kind, default=default, annotation=empty))
    # non-keyword-only parameters
    args = reversed(f.args.args)
    defaults = reversed(f.args.defaults)
    # Pair args with their defaults right-aligned (defaults apply to the
    # trailing parameters). Renamed from 'iter' to avoid shadowing the
    # builtin of the same name.
    args_with_defaults = itertools.zip_longest(args, defaults, fillvalue=None)
    if last_positional_only is not None:
        kind = Parameter.POSITIONAL_ONLY
    else:
        kind = Parameter.POSITIONAL_OR_KEYWORD
    for i, (name, default) in enumerate(reversed(list(args_with_defaults))):
        p(name, default)
        if i == last_positional_only:
            kind = Parameter.POSITIONAL_OR_KEYWORD
    # *args
    if f.args.vararg:
        kind = Parameter.VAR_POSITIONAL
        p(f.args.vararg, empty)
    # keyword-only arguments
    kind = Parameter.KEYWORD_ONLY
    for name, default in zip(f.args.kwonlyargs, f.args.kw_defaults):
        p(name, default)
    # **kwargs
    if f.args.kwarg:
        kind = Parameter.VAR_KEYWORD
        p(f.args.kwarg, empty)
    if self_parameter is not None:
        # Possibly strip the bound argument:
        # - We *always* strip first bound argument if
        #   it is a module.
        # - We don't strip first bound argument if
        #   skip_bound_arg is False.
        assert parameters
        _self = getattr(obj, '__self__', None)
        self_isbound = _self is not None
        self_ismodule = ismodule(_self)
        if self_isbound and (self_ismodule or skip_bound_arg):
            parameters.pop(0)
        else:
            # for builtins, self parameter is always positional-only!
            p = parameters[0].replace(kind=Parameter.POSITIONAL_ONLY)
            parameters[0] = p
    return cls(parameters, return_annotation=cls.empty)
def _signature_from_builtin(cls, func, skip_bound_arg=True):
    """Build a signature (an instance of *cls*) for the builtin callable
    *func* from its '__text_signature__' attribute.

    Raises TypeError when *func* is not a builtin, ValueError when it
    carries no text signature.
    """
    if not _signature_is_builtin(func):
        raise TypeError("{!r} is not a Python builtin "
                        "function".format(func))
    text_sig = getattr(func, "__text_signature__", None)
    if not text_sig:
        raise ValueError("no signature found for builtin {!r}".format(func))
    return _signature_fromstr(cls, func, text_sig, skip_bound_arg)
def _signature_internal(obj, follow_wrapper_chains=True, skip_bound_arg=True):
    # Core dispatcher behind signature(): tries, in order — bound methods,
    # __signature__, partialmethod, plain/duck-typed functions, builtins,
    # functools.partial, classes (metaclass __call__ / __new__ / __init__ /
    # __text_signature__), and finally callable instances.
    #
    # follow_wrapper_chains -- unwrap __wrapped__ chains before inspecting
    # skip_bound_arg        -- drop the implicit first (self/cls) parameter
    #                          for bound callables
    # Raises TypeError for non-callables, ValueError when no signature can
    # be determined.
    if not callable(obj):
        raise TypeError('{!r} is not a callable object'.format(obj))
    if isinstance(obj, types.MethodType):
        # In this case we skip the first parameter of the underlying
        # function (usually `self` or `cls`).
        sig = _signature_internal(obj.__func__,
                                  follow_wrapper_chains,
                                  skip_bound_arg)
        if skip_bound_arg:
            return _signature_bound_method(sig)
        else:
            return sig
    # Was this function wrapped by a decorator?
    if follow_wrapper_chains:
        obj = unwrap(obj, stop=(lambda f: hasattr(f, "__signature__")))
    try:
        sig = obj.__signature__
    except AttributeError:
        pass
    else:
        if sig is not None:
            if not isinstance(sig, Signature):
                raise TypeError(
                    'unexpected object {!r} in __signature__ '
                    'attribute'.format(sig))
            return sig
    try:
        partialmethod = obj._partialmethod
    except AttributeError:
        pass
    else:
        if isinstance(partialmethod, functools.partialmethod):
            # Unbound partialmethod (see functools.partialmethod)
            # This means, that we need to calculate the signature
            # as if it's a regular partial object, but taking into
            # account that the first positional argument
            # (usually `self`, or `cls`) will not be passed
            # automatically (as for boundmethods)
            wrapped_sig = _signature_internal(partialmethod.func,
                                              follow_wrapper_chains,
                                              skip_bound_arg)
            sig = _signature_get_partial(wrapped_sig, partialmethod, (None,))
            # Re-attach the leading (self/cls) parameter that the (None,)
            # placeholder consumed above.
            first_wrapped_param = tuple(wrapped_sig.parameters.values())[0]
            new_params = (first_wrapped_param,) + tuple(sig.parameters.values())
            return sig.replace(parameters=new_params)
    if isfunction(obj) or _signature_is_functionlike(obj):
        # If it's a pure Python function, or an object that is duck type
        # of a Python function (Cython functions, for instance), then:
        return Signature.from_function(obj)
    if _signature_is_builtin(obj):
        return _signature_from_builtin(Signature, obj,
                                       skip_bound_arg=skip_bound_arg)
    if isinstance(obj, functools.partial):
        wrapped_sig = _signature_internal(obj.func,
                                          follow_wrapper_chains,
                                          skip_bound_arg)
        return _signature_get_partial(wrapped_sig, obj)
    sig = None
    if isinstance(obj, type):
        # obj is a class or a metaclass
        # First, let's see if it has an overloaded __call__ defined
        # in its metaclass
        call = _signature_get_user_defined_method(type(obj), '__call__')
        if call is not None:
            sig = _signature_internal(call,
                                      follow_wrapper_chains,
                                      skip_bound_arg)
        else:
            # Now we check if the 'obj' class has a '__new__' method
            new = _signature_get_user_defined_method(obj, '__new__')
            if new is not None:
                sig = _signature_internal(new,
                                          follow_wrapper_chains,
                                          skip_bound_arg)
            else:
                # Finally, we should have at least __init__ implemented
                init = _signature_get_user_defined_method(obj, '__init__')
                if init is not None:
                    sig = _signature_internal(init,
                                              follow_wrapper_chains,
                                              skip_bound_arg)
        if sig is None:
            # At this point we know, that `obj` is a class, with no user-
            # defined '__init__', '__new__', or class-level '__call__'
            for base in obj.__mro__[:-1]:
                # Since '__text_signature__' is implemented as a
                # descriptor that extracts text signature from the
                # class docstring, if 'obj' is derived from a builtin
                # class, its own '__text_signature__' may be 'None'.
                # Therefore, we go through the MRO (except the last
                # class in there, which is 'object') to find the first
                # class with non-empty text signature.
                try:
                    text_sig = base.__text_signature__
                except AttributeError:
                    pass
                else:
                    if text_sig:
                        # If 'obj' class has a __text_signature__ attribute:
                        # return a signature based on it
                        return _signature_fromstr(Signature, obj, text_sig)
            # No '__text_signature__' was found for the 'obj' class.
            # Last option is to check if its '__init__' is
            # object.__init__ or type.__init__.
            if type not in obj.__mro__:
                # We have a class (not metaclass), but no user-defined
                # __init__ or __new__ for it
                if obj.__init__ is object.__init__:
                    # Return a signature of 'object' builtin.
                    return signature(object)
    elif not isinstance(obj, _NonUserDefinedCallables):
        # An object with __call__
        # We also check that the 'obj' is not an instance of
        # _WrapperDescriptor or _MethodWrapper to avoid
        # infinite recursion (and even potential segfault)
        call = _signature_get_user_defined_method(type(obj), '__call__')
        if call is not None:
            try:
                sig = _signature_internal(call,
                                          follow_wrapper_chains,
                                          skip_bound_arg)
            except ValueError as ex:
                msg = 'no signature found for {!r}'.format(obj)
                raise ValueError(msg) from ex
    if sig is not None:
        # For classes and objects we skip the first parameter of their
        # __call__, __new__, or __init__ methods
        if skip_bound_arg:
            return _signature_bound_method(sig)
        else:
            return sig
    if isinstance(obj, types.BuiltinFunctionType):
        # Raise a nicer error message for builtins
        msg = 'no signature found for builtin function {!r}'.format(obj)
        raise ValueError(msg)
    raise ValueError('callable {!r} is not supported by signature'.format(obj))
def signature(obj):
    """Return a Signature object describing the callable *obj*."""
    sig = _signature_internal(obj)
    return sig
# Sentinel distinct from None/_empty; used as a "no argument supplied"
# default in Parameter.replace() / Signature.replace().
class _void:
    '''A private marker - used in Parameter & Signature'''
# Marker for "no default value" / "no annotation" (exposed publicly as
# Parameter.empty and Signature.empty); distinct from None, which is a
# perfectly valid default or annotation.
class _empty:
    pass
class _ParameterKind(int):
def __new__(self, *args, name):
obj = int.__new__(self, *args)
obj._name = name
return obj
def __str__(self):
return self._name
def __repr__(self):
return '<_ParameterKind: {!r}>'.format(self._name)
# The five parameter kinds defined by PEP 362, ordered so that a valid
# signature lists parameters with non-decreasing kind values.
_POSITIONAL_ONLY = _ParameterKind(0, name='POSITIONAL_ONLY')
_POSITIONAL_OR_KEYWORD = _ParameterKind(1, name='POSITIONAL_OR_KEYWORD')
_VAR_POSITIONAL = _ParameterKind(2, name='VAR_POSITIONAL')
_KEYWORD_ONLY = _ParameterKind(3, name='KEYWORD_ONLY')
_VAR_KEYWORD = _ParameterKind(4, name='VAR_KEYWORD')
class Parameter:
    '''Represents a parameter in a function signature.
    Has the following public attributes:
    * name : str
        The name of the parameter as a string.
    * default : object
        The default value for the parameter if specified. If the
        parameter has no default value, this attribute is set to
        `Parameter.empty`.
    * annotation
        The annotation for the parameter if specified. If the
        parameter has no annotation, this attribute is set to
        `Parameter.empty`.
    * kind : str
        Describes how argument values are bound to the parameter.
        Possible values: `Parameter.POSITIONAL_ONLY`,
        `Parameter.POSITIONAL_OR_KEYWORD`, `Parameter.VAR_POSITIONAL`,
        `Parameter.KEYWORD_ONLY`, `Parameter.VAR_KEYWORD`.
    '''
    __slots__ = ('_name', '_kind', '_default', '_annotation')
    # Public aliases for the module-level kind constants.
    POSITIONAL_ONLY = _POSITIONAL_ONLY
    POSITIONAL_OR_KEYWORD = _POSITIONAL_OR_KEYWORD
    VAR_POSITIONAL = _VAR_POSITIONAL
    KEYWORD_ONLY = _KEYWORD_ONLY
    VAR_KEYWORD = _VAR_KEYWORD
    empty = _empty
    def __init__(self, name, kind, *, default=_empty, annotation=_empty):
        # Validation order (kind, default, then name) is part of the
        # observable behavior: it determines which error is raised first.
        if kind not in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD,
                        _VAR_POSITIONAL, _KEYWORD_ONLY, _VAR_KEYWORD):
            raise ValueError("invalid value for 'Parameter.kind' attribute")
        self._kind = kind
        if default is not _empty:
            # *args / **kwargs can never carry a default value.
            if kind in (_VAR_POSITIONAL, _VAR_KEYWORD):
                msg = '{} parameters cannot have default values'.format(kind)
                raise ValueError(msg)
        self._default = default
        self._annotation = annotation
        if name is _empty:
            raise ValueError('name is a required attribute for Parameter')
        if not isinstance(name, str):
            raise TypeError("name must be a str, not a {!r}".format(name))
        if not name.isidentifier():
            raise ValueError('{!r} is not a valid parameter name'.format(name))
        self._name = name
    @property
    def name(self):
        return self._name
    @property
    def default(self):
        return self._default
    @property
    def annotation(self):
        return self._annotation
    @property
    def kind(self):
        return self._kind
    def replace(self, *, name=_void, kind=_void,
                annotation=_void, default=_void):
        '''Creates a customized copy of the Parameter.'''
        # _void (not None/_empty) marks "keep the current value", so that
        # None and _empty remain usable as explicit replacements.
        if name is _void:
            name = self._name
        if kind is _void:
            kind = self._kind
        if annotation is _void:
            annotation = self._annotation
        if default is _void:
            default = self._default
        return type(self)(name, kind, default=default, annotation=annotation)
    def __str__(self):
        kind = self.kind
        formatted = self._name
        # Add annotation and default value
        if self._annotation is not _empty:
            formatted = '{}:{}'.format(formatted,
                                       formatannotation(self._annotation))
        if self._default is not _empty:
            formatted = '{}={}'.format(formatted, repr(self._default))
        if kind == _VAR_POSITIONAL:
            formatted = '*' + formatted
        elif kind == _VAR_KEYWORD:
            formatted = '**' + formatted
        return formatted
    def __repr__(self):
        return '<{} at {:#x} {!r}>'.format(self.__class__.__name__,
                                           id(self), self.name)
    # NOTE: defining __eq__ without __hash__ makes instances unhashable
    # (Python sets __hash__ to None automatically).
    def __eq__(self, other):
        return (issubclass(other.__class__, Parameter) and
                self._name == other._name and
                self._kind == other._kind and
                self._default == other._default and
                self._annotation == other._annotation)
    def __ne__(self, other):
        return not self.__eq__(other)
class BoundArguments:
    '''Result of `Signature.bind` call. Holds the mapping of arguments
    to the function's parameters.
    Has the following public attributes:
    * arguments : OrderedDict
        An ordered mutable mapping of parameters' names to arguments' values.
        Does not contain arguments' default values.
    * signature : Signature
        The Signature object that created this instance.
    * args : tuple
        Tuple of positional arguments values.
    * kwargs : dict
        Dict of keyword arguments values.
    '''
    def __init__(self, signature, arguments):
        # 'arguments' is kept public and mutable; 'signature' is exposed
        # read-only via the property below.
        self.arguments = arguments
        self._signature = signature
    @property
    def signature(self):
        return self._signature
    @property
    def args(self):
        # Collect everything that can be passed positionally, stopping at
        # the first keyword-only/**kwargs parameter or the first gap.
        args = []
        for param_name, param in self._signature.parameters.items():
            if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
                break
            try:
                arg = self.arguments[param_name]
            except KeyError:
                # We're done here. Other arguments
                # will be mapped in 'BoundArguments.kwargs'
                break
            else:
                if param.kind == _VAR_POSITIONAL:
                    # *args
                    args.extend(arg)
                else:
                    # plain argument
                    args.append(arg)
        return tuple(args)
    @property
    def kwargs(self):
        # Everything not consumed by 'args' above must be passed by
        # keyword; 'kwargs_started' flips at the boundary.
        kwargs = {}
        kwargs_started = False
        for param_name, param in self._signature.parameters.items():
            if not kwargs_started:
                if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
                    kwargs_started = True
                else:
                    if param_name not in self.arguments:
                        kwargs_started = True
                        continue
            if not kwargs_started:
                continue
            try:
                arg = self.arguments[param_name]
            except KeyError:
                pass
            else:
                if param.kind == _VAR_KEYWORD:
                    # **kwargs
                    kwargs.update(arg)
                else:
                    # plain keyword argument
                    kwargs[param_name] = arg
        return kwargs
    # NOTE: defining __eq__ without __hash__ makes instances unhashable.
    def __eq__(self, other):
        return (issubclass(other.__class__, BoundArguments) and
                self.signature == other.signature and
                self.arguments == other.arguments)
    def __ne__(self, other):
        return not self.__eq__(other)
class Signature:
'''A Signature object represents the overall signature of a function.
It stores a Parameter object for each parameter accepted by the
function, as well as information specific to the function itself.
A Signature object has the following public attributes and methods:
* parameters : OrderedDict
An ordered mapping of parameters' names to the corresponding
Parameter objects (keyword-only arguments are in the same order
as listed in `code.co_varnames`).
* return_annotation : object
The annotation for the return type of the function if specified.
If the function has no annotation for its return type, this
attribute is set to `Signature.empty`.
* bind(*args, **kwargs) -> BoundArguments
Creates a mapping from positional and keyword arguments to
parameters.
* bind_partial(*args, **kwargs) -> BoundArguments
Creates a partial mapping from positional and keyword arguments
to parameters (simulating 'functools.partial' behavior.)
'''
__slots__ = ('_return_annotation', '_parameters')
_parameter_cls = Parameter
_bound_arguments_cls = BoundArguments
empty = _empty
def __init__(self, parameters=None, *, return_annotation=_empty,
__validate_parameters__=True):
'''Constructs Signature from the given list of Parameter
objects and 'return_annotation'. All arguments are optional.
'''
if parameters is None:
params = OrderedDict()
else:
if __validate_parameters__:
params = OrderedDict()
top_kind = _POSITIONAL_ONLY
kind_defaults = False
for idx, param in enumerate(parameters):
kind = param.kind
name = param.name
if kind < top_kind:
msg = 'wrong parameter order: {!r} before {!r}'
msg = msg.format(top_kind, kind)
raise ValueError(msg)
elif kind > top_kind:
kind_defaults = False
top_kind = kind
if kind in (_POSITIONAL_ONLY, _POSITIONAL_OR_KEYWORD):
if param.default is _empty:
if kind_defaults:
# No default for this parameter, but the
# previous parameter of the same kind had
# a default
msg = 'non-default argument follows default ' \
'argument'
raise ValueError(msg)
else:
# There is a default for this parameter.
kind_defaults = True
if name in params:
msg = 'duplicate parameter name: {!r}'.format(name)
raise ValueError(msg)
params[name] = param
else:
params = OrderedDict(((param.name, param)
for param in parameters))
self._parameters = types.MappingProxyType(params)
self._return_annotation = return_annotation
@classmethod
def from_function(cls, func):
'''Constructs Signature for the given python function'''
is_duck_function = False
if not isfunction(func):
if _signature_is_functionlike(func):
is_duck_function = True
else:
# If it's not a pure Python function, and not a duck type
# of pure function:
raise TypeError('{!r} is not a Python function'.format(func))
Parameter = cls._parameter_cls
# Parameter information.
func_code = func.__code__
pos_count = func_code.co_argcount
arg_names = func_code.co_varnames
positional = tuple(arg_names[:pos_count])
keyword_only_count = func_code.co_kwonlyargcount
keyword_only = arg_names[pos_count:(pos_count + keyword_only_count)]
annotations = func.__annotations__
defaults = func.__defaults__
kwdefaults = func.__kwdefaults__
if defaults:
pos_default_count = len(defaults)
else:
pos_default_count = 0
parameters = []
# Non-keyword-only parameters w/o defaults.
non_default_count = pos_count - pos_default_count
for name in positional[:non_default_count]:
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD))
# ... w/ defaults.
for offset, name in enumerate(positional[non_default_count:]):
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_POSITIONAL_OR_KEYWORD,
default=defaults[offset]))
# *args
if func_code.co_flags & CO_VARARGS:
name = arg_names[pos_count + keyword_only_count]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_POSITIONAL))
# Keyword-only parameters.
for name in keyword_only:
default = _empty
if kwdefaults is not None:
default = kwdefaults.get(name, _empty)
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_KEYWORD_ONLY,
default=default))
# **kwargs
if func_code.co_flags & CO_VARKEYWORDS:
index = pos_count + keyword_only_count
if func_code.co_flags & CO_VARARGS:
index += 1
name = arg_names[index]
annotation = annotations.get(name, _empty)
parameters.append(Parameter(name, annotation=annotation,
kind=_VAR_KEYWORD))
# Is 'func' is a pure Python function - don't validate the
# parameters list (for correct order and defaults), it should be OK.
return cls(parameters,
return_annotation=annotations.get('return', _empty),
__validate_parameters__=is_duck_function)
@classmethod
def from_builtin(cls, func):
return _signature_from_builtin(cls, func)
@property
def parameters(self):
return self._parameters
@property
def return_annotation(self):
return self._return_annotation
def replace(self, *, parameters=_void, return_annotation=_void):
'''Creates a customized copy of the Signature.
Pass 'parameters' and/or 'return_annotation' arguments
to override them in the new copy.
'''
if parameters is _void:
parameters = self.parameters.values()
if return_annotation is _void:
return_annotation = self._return_annotation
return type(self)(parameters,
return_annotation=return_annotation)
def __eq__(self, other):
if (not issubclass(type(other), Signature) or
self.return_annotation != other.return_annotation or
len(self.parameters) != len(other.parameters)):
return False
other_positions = {param: idx
for idx, param in enumerate(other.parameters.keys())}
for idx, (param_name, param) in enumerate(self.parameters.items()):
if param.kind == _KEYWORD_ONLY:
try:
other_param = other.parameters[param_name]
except KeyError:
return False
else:
if param != other_param:
return False
else:
try:
other_idx = other_positions[param_name]
except KeyError:
return False
else:
if (idx != other_idx or
param != other.parameters[param_name]):
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def _bind(self, args, kwargs, *, partial=False):
'''Private method. Don't use directly.'''
arguments = OrderedDict()
parameters = iter(self.parameters.values())
parameters_ex = ()
arg_vals = iter(args)
while True:
# Let's iterate through the positional arguments and corresponding
# parameters
try:
arg_val = next(arg_vals)
except StopIteration:
# No more positional arguments
try:
param = next(parameters)
except StopIteration:
# No more parameters. That's it. Just need to check that
# we have no `kwargs` after this while loop
break
else:
if param.kind == _VAR_POSITIONAL:
# That's OK, just empty *args. Let's start parsing
# kwargs
break
elif param.name in kwargs:
if param.kind == _POSITIONAL_ONLY:
msg = '{arg!r} parameter is positional only, ' \
'but was passed as a keyword'
msg = msg.format(arg=param.name)
raise TypeError(msg) from None
parameters_ex = (param,)
break
elif (param.kind == _VAR_KEYWORD or
param.default is not _empty):
# That's fine too - we have a default value for this
# parameter. So, lets start parsing `kwargs`, starting
# with the current parameter
parameters_ex = (param,)
break
else:
# No default, not VAR_KEYWORD, not VAR_POSITIONAL,
# not in `kwargs`
if partial:
parameters_ex = (param,)
break
else:
msg = '{arg!r} parameter lacking default value'
msg = msg.format(arg=param.name)
raise TypeError(msg) from None
else:
# We have a positional argument to process
try:
param = next(parameters)
except StopIteration:
raise TypeError('too many positional arguments') from None
else:
if param.kind in (_VAR_KEYWORD, _KEYWORD_ONLY):
# Looks like we have no parameter for this positional
# argument
raise TypeError('too many positional arguments')
if param.kind == _VAR_POSITIONAL:
# We have an '*args'-like argument, let's fill it with
# all positional arguments we have left and move on to
# the next phase
values = [arg_val]
values.extend(arg_vals)
arguments[param.name] = tuple(values)
break
if param.name in kwargs:
raise TypeError('multiple values for argument '
'{arg!r}'.format(arg=param.name))
arguments[param.name] = arg_val
# Now, we iterate through the remaining parameters to process
# keyword arguments
kwargs_param = None
for param in itertools.chain(parameters_ex, parameters):
if param.kind == _VAR_KEYWORD:
# Memorize that we have a '**kwargs'-like parameter
kwargs_param = param
continue
if param.kind == _VAR_POSITIONAL:
# Named arguments don't refer to '*args'-like parameters.
# We only arrive here if the positional arguments ended
# before reaching the last parameter before *args.
continue
param_name = param.name
try:
arg_val = kwargs.pop(param_name)
except KeyError:
# We have no value for this parameter. It's fine though,
# if it has a default value, or it is an '*args'-like
# parameter, left alone by the processing of positional
# arguments.
if (not partial and param.kind != _VAR_POSITIONAL and
param.default is _empty):
raise TypeError('{arg!r} parameter lacking default value'. \
format(arg=param_name)) from None
else:
if param.kind == _POSITIONAL_ONLY:
# This should never happen in case of a properly built
# Signature object (but let's have this check here
# to ensure correct behaviour just in case)
raise TypeError('{arg!r} parameter is positional only, '
'but was passed as a keyword'. \
format(arg=param.name))
arguments[param_name] = arg_val
if kwargs:
if kwargs_param is not None:
# Process our '**kwargs'-like parameter
arguments[kwargs_param.name] = kwargs
else:
raise TypeError('too many keyword arguments')
return self._bound_arguments_cls(self, arguments)
def bind(*args, **kwargs):
'''Get a BoundArguments object, that maps the passed `args`
and `kwargs` to the function's signature. Raises `TypeError`
if the passed arguments can not be bound.
'''
return args[0]._bind(args[1:], kwargs)
def bind_partial(*args, **kwargs):
    '''Get a BoundArguments object, that partially maps the
    passed `args` and `kwargs` to the function's signature.
    Raises `TypeError` if the passed arguments can not be bound.
    '''
    # args[0] is the Signature object; kept out of the parameter list —
    # presumably so a keyword argument named "self" can pass through **kwargs.
    signature, call_args = args[0], args[1:]
    return signature._bind(call_args, kwargs, partial=True)
def __str__(self):
    """Render the signature the way it would appear in a ``def`` statement,
    e.g. ``(a, b=1, /, *args, c, **kwargs) -> int``.
    """
    result = []
    # True while a '/' marker (end of positional-only params) is pending.
    render_pos_only_separator = False
    # True while a lone '*' may still be needed before keyword-only params.
    render_kw_only_separator = True
    for param in self.parameters.values():
        formatted = str(param)

        kind = param.kind
        if kind == _POSITIONAL_ONLY:
            render_pos_only_separator = True
        elif render_pos_only_separator:
            # It's not a positional-only parameter, and the flag
            # is set to 'True' (there were pos-only params before.)
            result.append('/')
            render_pos_only_separator = False

        if kind == _VAR_POSITIONAL:
            # OK, we have an '*args'-like parameter, so we won't need
            # a '*' to separate keyword-only arguments
            render_kw_only_separator = False
        elif kind == _KEYWORD_ONLY and render_kw_only_separator:
            # We have a keyword-only parameter to render and we haven't
            # rendered an '*args'-like parameter before, so add a '*'
            # separator to the parameters list ("foo(arg1, *, arg2)" case)
            result.append('*')
            # This condition should be only triggered once, so
            # reset the flag
            render_kw_only_separator = False

        result.append(formatted)

    if render_pos_only_separator:
        # There were only positional-only parameters, hence the
        # flag was not reset to 'False'
        result.append('/')

    rendered = '({})'.format(', '.join(result))

    if self.return_annotation is not _empty:
        anno = formatannotation(self.return_annotation)
        rendered += ' -> {}'.format(anno)

    return rendered
def _main():
    """Logic for inspecting an object given at the command line.

    Accepts a target in ``module`` or ``module:qualname`` form; prints either
    the object's source code or, with ``-d/--details``, metadata about it.
    Exits with status 2 when the module cannot be imported and status 1 for
    builtin modules (which have no source to show).
    """
    import argparse
    import importlib

    parser = argparse.ArgumentParser()
    parser.add_argument(
        'object',
        help="The object to be analysed. "
             "It supports the 'module:qualname' syntax")
    parser.add_argument(
        '-d', '--details', action='store_true',
        help='Display info about the module rather than its source code')

    args = parser.parse_args()

    target = args.object
    mod_name, has_attrs, attrs = target.partition(":")

    try:
        obj = module = importlib.import_module(mod_name)
    except Exception as exc:
        # Broad catch is deliberate: importing arbitrary user code can raise
        # anything, and this is a top-level CLI boundary.
        msg = "Failed to import {} ({}: {})".format(mod_name,
                                                    type(exc).__name__,
                                                    exc)
        print(msg, file=sys.stderr)
        # sys.exit() instead of the site-provided exit() builtin, which is
        # not guaranteed to exist (e.g. when running under ``python -S``).
        sys.exit(2)

    if has_attrs:
        # Walk dotted attribute path ("module:Class.method").
        parts = attrs.split(".")
        obj = module
        for part in parts:
            obj = getattr(obj, part)

    if module.__name__ in sys.builtin_module_names:
        print("Can't get info for builtin modules.", file=sys.stderr)
        sys.exit(1)

    if args.details:
        print('Target: {}'.format(target))
        print('Origin: {}'.format(getsourcefile(module)))
        print('Cached: {}'.format(module.__cached__))
        if obj is module:
            print('Loader: {}'.format(repr(module.__loader__)))
            if hasattr(module, '__path__'):
                print('Submodule search path: {}'.format(module.__path__))
        else:
            try:
                __, lineno = findsource(obj)
            except Exception:
                # Best effort: some objects have no retrievable source line.
                pass
            else:
                print('Line: {}'.format(lineno))

        print('\n')
    else:
        print(getsource(obj))


if __name__ == "__main__":
    _main()
| lgpl-3.0 |
arju88nair/projectCulminate | venv/lib/python3.5/site-packages/astroid/helpers.py | 3 | 5450 | # Copyright (c) 2015-2016 Cara Vinson <ceridwenv@gmail.com>
# Copyright (c) 2015-2016 Claudiu Popa <pcmanticore@gmail.com>
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
# For details: https://github.com/PyCQA/astroid/blob/master/COPYING.LESSER
"""
Various helper utilities.
"""
import six
from astroid import bases
from astroid import context as contextmod
from astroid import exceptions
from astroid import manager
from astroid import nodes
from astroid import raw_building
from astroid import scoped_nodes
from astroid import util
BUILTINS = six.moves.builtins.__name__
def _build_proxy_class(cls_name, builtins):
    """Create a stub class node named *cls_name* parented to *builtins*."""
    stub = raw_building.build_class(cls_name)
    stub.parent = builtins
    return stub
def _function_type(function, builtins):
    """Return a proxy class node describing the runtime type of *function*.

    Lambdas/functions map to ``function`` (or ``builtin_function_or_method``
    when defined in the builtins module); bound/unbound methods map to the
    appropriate method type for the running Python major version.
    """
    if isinstance(function, scoped_nodes.Lambda):
        defined_in_builtins = function.root().name == BUILTINS
        cls_name = 'builtin_function_or_method' if defined_in_builtins else 'function'
    elif isinstance(function, bases.BoundMethod):
        cls_name = 'instancemethod' if six.PY2 else 'method'
    elif isinstance(function, bases.UnboundMethod):
        # On Python 3 an "unbound method" is just a plain function.
        cls_name = 'instancemethod' if six.PY2 else 'function'
    return _build_proxy_class(cls_name, builtins)
def _object_type(node, context=None):
    """Yield a type node for every value *node* can be inferred to."""
    builtins_module = manager.AstroidManager().astroid_cache[BUILTINS]
    context = context or contextmod.InferenceContext()

    for inferred in node.infer(context=context):
        if isinstance(inferred, scoped_nodes.ClassDef):
            # New-style classes may carry an explicit metaclass; fall back
            # to the builtin ``type`` otherwise (and always for old-style).
            metaclass = inferred.metaclass() if inferred.newstyle else None
            if metaclass:
                yield metaclass
            else:
                yield builtins_module.getattr('type')[0]
        elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
            yield _function_type(inferred, builtins_module)
        elif isinstance(inferred, scoped_nodes.Module):
            yield _build_proxy_class('module', builtins_module)
        else:
            yield inferred._proxied
def object_type(node, context=None):
    """Obtain the type of the given node

    This is used to implement the ``type`` builtin, which means that it's
    used for inferring type calls, as well as used in a couple of other places
    in the inference.
    The node will be inferred first, so this function can support all
    sorts of objects, as long as they support inference.

    Returns ``util.Uninferable`` when inference fails or yields conflicting
    answers; otherwise returns the single inferred type node.
    """
    try:
        types = set(_object_type(node, context))
    except exceptions.InferenceError:
        return util.Uninferable
    if len(types) != 1:
        # Zero results or conflicting answers: the type is not deducible.
        return util.Uninferable
    # Avoid materializing a list just to grab the single element.
    return next(iter(types))
def safe_infer(node, context=None):
    """Return the inferred value for the given node.

    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred, or no node at all).
    """
    try:
        inferit = node.infer(context=context)
        value = next(inferit)
    except (exceptions.InferenceError, StopIteration):
        # Inference failed outright, or produced no result at all.  Without
        # catching StopIteration here an empty inference iterator would leak
        # the exception to the caller instead of returning None.
        return None
    try:
        next(inferit)
        return None  # None if there is ambiguity on the inferred node
    except exceptions.InferenceError:
        return None  # there is some kind of ambiguity
    except StopIteration:
        # Exactly one value was inferred: success.
        return value
def has_known_bases(klass, context=None):
    """Return True when every base class of *klass* could be inferred.

    The answer is memoized on the class node itself (``_all_bases_known``)
    so repeated queries are cheap.
    """
    try:
        return klass._all_bases_known
    except AttributeError:
        pass

    def _base_is_known(base):
        inferred = safe_infer(base, context=context)
        # TODO: check for A->B->A->B pattern in class structure too?
        return (isinstance(inferred, scoped_nodes.ClassDef)
                and inferred is not klass
                and has_known_bases(inferred, context=context))

    # all() short-circuits on the first unknown base, like the original loop.
    known = all(_base_is_known(base) for base in klass.bases)
    klass._all_bases_known = known
    return known
def _type_check(type1, type2):
    """Return True when *type1* appears in *type2*'s MRO (excluding the tail).

    Raises ``exceptions._NonDeducibleTypeHierarchy`` when either class has
    unknown bases or an invalid MRO, since no answer can be trusted then.
    """
    if not (has_known_bases(type1) and has_known_bases(type2)):
        raise exceptions._NonDeducibleTypeHierarchy

    # Old-style classes have no reliable MRO to inspect.
    if not (type1.newstyle and type2.newstyle):
        return False
    try:
        return type1 in type2.mro()[:-1]
    except exceptions.MroError:
        # The MRO is invalid.
        raise exceptions._NonDeducibleTypeHierarchy
def is_subtype(type1, type2):
    """Check if *type1* is a subtype of *type2*."""
    # _type_check(a, b) is true when ``a`` appears in ``b``'s MRO, i.e.
    # ``a`` is an ancestor (supertype) of ``b`` -- hence the swapped args.
    return _type_check(type2, type1)
def is_supertype(type1, type2):
    """Check if *type1* is a supertype of *type2*."""
    # _type_check(a, b) is true when ``a`` appears in ``b``'s MRO, i.e.
    # ``a`` is an ancestor (supertype) of ``b``.  The original docstring
    # stated the relation backwards ("type2 is a supertype of type1").
    return _type_check(type1, type2)
def class_instance_as_index(node):
    """Get the value as an index for the given instance.

    If an instance provides an __index__ method, then it can
    be used in some scenarios where an integer is expected,
    for instance when multiplying or subscripting a list.

    Returns the inferred ``nodes.Const`` integer result, or implicitly
    ``None`` when no usable ``__index__`` result can be inferred.
    """
    context = contextmod.InferenceContext()
    # Simulate calling __index__ with the instance itself as the argument.
    context.callcontext = contextmod.CallContext(args=[node])

    try:
        for inferred in node.igetattr('__index__', context=context):
            # Only bound methods can be meaningfully called on the instance.
            if not isinstance(inferred, bases.BoundMethod):
                continue

            for result in inferred.infer_call_result(node, context=context):
                # Accept only a constant integer result as a valid index.
                if (isinstance(result, nodes.Const)
                        and isinstance(result.value, int)):
                    return result
    except exceptions.InferenceError:
        # Inference failure means there is no usable index value.
        pass
| apache-2.0 |
Nowheresly/odoo | addons/payment_buckaroo/controllers/main.py | 53 | 1155 | # -*- coding: utf-8 -*-
try:
import simplejson as json
except ImportError:
import json
import logging
import pprint
import werkzeug
from openerp import http, SUPERUSER_ID
from openerp.http import request
_logger = logging.getLogger(__name__)
class BuckarooController(http.Controller):
    """HTTP endpoints Buckaroo redirects the customer to after a payment
    attempt (success, cancellation, error or rejection).

    All four routes share one handler: the posted data is fed back to the
    ``payment.transaction`` model, then the customer is redirected.
    """
    _return_url = '/payment/buckaroo/return'
    _cancel_url = '/payment/buckaroo/cancel'
    _exception_url = '/payment/buckaroo/error'
    _reject_url = '/payment/buckaroo/reject'

    @http.route([
        '/payment/buckaroo/return',
        '/payment/buckaroo/cancel',
        '/payment/buckaroo/error',
        '/payment/buckaroo/reject',
    ], type='http', auth='none')
    def buckaroo_return(self, **post):
        """Process Buckaroo's feedback and redirect the customer.

        :param post: raw POST parameters sent by Buckaroo.
        :return: werkzeug redirect to the return URL given in the feedback
                 data, or to '/' when none was supplied.
        """
        _logger.info('Buckaroo: entering form_feedback with post data %s', pprint.pformat(post))  # debug
        request.registry['payment.transaction'].form_feedback(request.cr, SUPERUSER_ID, post, 'buckaroo', context=request.context)
        # Buckaroo's parameter casing is inconsistent; normalize keys to
        # upper case before looking anything up.  Dict comprehension instead
        # of dict() over a generator (flake8-comprehensions C402).
        post = {key.upper(): value for key, value in post.items()}
        return_url = post.get('ADD_RETURNDATA') or '/'
        return werkzeug.utils.redirect(return_url)
| agpl-3.0 |
pmarques/ansible | test/lib/ansible_test/_internal/commands/sanity/bin_symlinks.py | 13 | 3587 | """Sanity test for symlinks in the bin directory."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ... import types as t
from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
)
from ...config import (
SanityConfig,
)
from ...data import (
data_context,
)
from ...payload import (
ANSIBLE_BIN_SYMLINK_MAP,
__file__ as symlink_map_full_path,
)
from ...util import (
ANSIBLE_BIN_PATH,
ANSIBLE_TEST_DATA_ROOT,
)
class BinSymlinksTest(SanityVersionNeutral):
    """Sanity test for symlinks in the bin directory."""
    # Only applies to the ansible repository itself, never to collections.
    ansible_only = True

    @property
    def can_ignore(self):  # type: () -> bool
        """True if the test supports ignore entries."""
        return False

    @property
    def no_targets(self):  # type: () -> bool
        """True if the test does not use test targets. Mutually exclusive with all_targets."""
        return True

    # noinspection PyUnusedLocal
    def test(self, args, targets):  # pylint: disable=locally-disabled, unused-argument
        """
        Verify that every entry in bin/ is a symlink matching
        ANSIBLE_BIN_SYMLINK_MAP, and that every mapped name exists in both
        bin/ and the injector directory.

        :type args: SanityConfig
        :type targets: SanityTargets
        :rtype: TestResult
        """
        bin_root = ANSIBLE_BIN_PATH
        bin_names = os.listdir(bin_root)
        # Sort for deterministic message ordering across runs.
        bin_paths = sorted(os.path.join(bin_root, path) for path in bin_names)
        injector_root = os.path.join(ANSIBLE_TEST_DATA_ROOT, 'injector')
        injector_names = os.listdir(injector_root)
        errors = []  # type: t.List[t.Tuple[str, str]]
        # Report the map's location relative to the content root so messages
        # are stable regardless of where the checkout lives.
        symlink_map_path = os.path.relpath(symlink_map_full_path, data_context().content.root)

        # Pass 1: every file actually present in bin/ must be a valid,
        # executable symlink pointing where the map says it should.
        for bin_path in bin_paths:
            if not os.path.islink(bin_path):
                errors.append((bin_path, 'not a symbolic link'))
                continue
            dest = os.readlink(bin_path)
            # os.path.exists() follows the symlink, so this checks the target.
            if not os.path.exists(bin_path):
                errors.append((bin_path, 'points to non-existent path "%s"' % dest))
                continue
            if not os.path.isfile(bin_path):
                errors.append((bin_path, 'points to non-file "%s"' % dest))
                continue
            map_dest = ANSIBLE_BIN_SYMLINK_MAP.get(os.path.basename(bin_path))
            if not map_dest:
                errors.append((bin_path, 'missing from ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % symlink_map_path))
                continue
            if dest != map_dest:
                errors.append((bin_path, 'points to "%s" instead of "%s" from ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, map_dest, symlink_map_path)))
                continue
            if not os.access(bin_path, os.X_OK):
                errors.append((bin_path, 'points to non-executable file "%s"' % dest))
                continue

        # Pass 2: every mapped name must exist in both bin/ and the injector.
        for bin_name, dest in ANSIBLE_BIN_SYMLINK_MAP.items():
            if bin_name not in bin_names:
                bin_path = os.path.join(bin_root, bin_name)
                errors.append((bin_path, 'missing symlink to "%s" defined in ANSIBLE_BIN_SYMLINK_MAP in file "%s"' % (dest, symlink_map_path)))
            if bin_name not in injector_names:
                injector_path = os.path.join(injector_root, bin_name)
                errors.append((injector_path, 'missing symlink to "python.py"'))

        messages = [SanityMessage(message=message, path=os.path.relpath(path, data_context().content.root), confidence=100) for path, message in errors]
        if errors:
            return SanityFailure(self.name, messages=messages)
        return SanitySuccess(self.name)
| gpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.