repo_name
stringlengths
5
100
path
stringlengths
4
294
copies
stringclasses
990 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
viruxel/ansible-modules-extras
cloud/centurylink/clc_loadbalancer.py
43
35413
#!/usr/bin/python # # Copyright (c) 2015 CenturyLink # # This file is part of Ansible. # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/> # DOCUMENTATION = ''' module: clc_loadbalancer short_description: Create, Delete shared loadbalancers in CenturyLink Cloud. description: - An Ansible module to Create, Delete shared loadbalancers in CenturyLink Cloud. version_added: "2.0" options: name: description: - The name of the loadbalancer required: True description: description: - A description for the loadbalancer required: False default: None alias: description: - The alias of your CLC Account required: True location: description: - The location of the datacenter where the load balancer resides in required: True method: description: -The balancing method for the load balancer pool required: False default: None choices: ['leastConnection', 'roundRobin'] persistence: description: - The persistence method for the load balancer required: False default: None choices: ['standard', 'sticky'] port: description: - Port to configure on the public-facing side of the load balancer pool required: False default: None choices: [80, 443] nodes: description: - A list of nodes that needs to be added to the load balancer pool required: False default: [] status: description: - The status of the loadbalancer required: False default: enabled choices: ['enabled', 'disabled'] state: description: - Whether to create or delete the load 
balancer pool required: False default: present choices: ['present', 'absent', 'port_absent', 'nodes_present', 'nodes_absent'] requirements: - python = 2.7 - requests >= 2.5.0 - clc-sdk author: "CLC Runner (@clc-runner)" notes: - To use this module, it is required to set the below environment variables which enables access to the Centurylink Cloud - CLC_V2_API_USERNAME, the account login id for the centurylink cloud - CLC_V2_API_PASSWORD, the account password for the centurylink cloud - Alternatively, the module accepts the API token and account alias. The API token can be generated using the CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login - CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login - CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud - Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment. ''' EXAMPLES = ''' # Note - You must set the CLC_V2_API_USERNAME And CLC_V2_API_PASSWD Environment variables before running these examples - name: Create Loadbalancer hosts: localhost connection: local tasks: - name: Actually Create things clc_loadbalancer: name: test description: test alias: TEST location: WA1 port: 443 nodes: - { 'ipAddress': '10.11.22.123', 'privatePort': 80 } state: present - name: Add node to an existing loadbalancer pool hosts: localhost connection: local tasks: - name: Actually Create things clc_loadbalancer: name: test description: test alias: TEST location: WA1 port: 443 nodes: - { 'ipAddress': '10.11.22.234', 'privatePort': 80 } state: nodes_present - name: Remove node from an existing loadbalancer pool hosts: localhost connection: local tasks: - name: Actually Create things clc_loadbalancer: name: test description: test alias: TEST location: WA1 port: 443 nodes: - { 'ipAddress': '10.11.22.234', 'privatePort': 80 } state: nodes_absent - name: Delete LoadbalancerPool hosts: localhost connection: 
local tasks: - name: Actually Delete things clc_loadbalancer: name: test description: test alias: TEST location: WA1 port: 443 nodes: - { 'ipAddress': '10.11.22.123', 'privatePort': 80 } state: port_absent - name: Delete Loadbalancer hosts: localhost connection: local tasks: - name: Actually Delete things clc_loadbalancer: name: test description: test alias: TEST location: WA1 port: 443 nodes: - { 'ipAddress': '10.11.22.123', 'privatePort': 80 } state: absent ''' RETURN = ''' changed: description: A flag indicating if any change was made or not returned: success type: boolean sample: True loadbalancer: description: The load balancer result object from CLC returned: success type: dict sample: { "description":"test-lb", "id":"ab5b18cb81e94ab9925b61d1ca043fb5", "ipAddress":"66.150.174.197", "links":[ { "href":"/v2/sharedLoadBalancers/wfad/wa1/ab5b18cb81e94ab9925b61d1ca043fb5", "rel":"self", "verbs":[ "GET", "PUT", "DELETE" ] }, { "href":"/v2/sharedLoadBalancers/wfad/wa1/ab5b18cb81e94ab9925b61d1ca043fb5/pools", "rel":"pools", "verbs":[ "GET", "POST" ] } ], "name":"test-lb", "pools":[ ], "status":"enabled" } ''' __version__ = '${version}' from time import sleep from distutils.version import LooseVersion try: import requests except ImportError: REQUESTS_FOUND = False else: REQUESTS_FOUND = True # # Requires the clc-python-sdk. 
# sudo pip install clc-sdk # try: import clc as clc_sdk from clc import APIFailedResponse except ImportError: CLC_FOUND = False clc_sdk = None else: CLC_FOUND = True class ClcLoadBalancer: clc = None def __init__(self, module): """ Construct module """ self.clc = clc_sdk self.module = module self.lb_dict = {} if not CLC_FOUND: self.module.fail_json( msg='clc-python-sdk required for this module') if not REQUESTS_FOUND: self.module.fail_json( msg='requests library is required for this module') if requests.__version__ and LooseVersion( requests.__version__) < LooseVersion('2.5.0'): self.module.fail_json( msg='requests library version should be >= 2.5.0') self._set_user_agent(self.clc) def process_request(self): """ Execute the main code path, and handle the request :return: none """ changed = False result_lb = None loadbalancer_name = self.module.params.get('name') loadbalancer_alias = self.module.params.get('alias') loadbalancer_location = self.module.params.get('location') loadbalancer_description = self.module.params.get('description') loadbalancer_port = self.module.params.get('port') loadbalancer_method = self.module.params.get('method') loadbalancer_persistence = self.module.params.get('persistence') loadbalancer_nodes = self.module.params.get('nodes') loadbalancer_status = self.module.params.get('status') state = self.module.params.get('state') if loadbalancer_description is None: loadbalancer_description = loadbalancer_name self._set_clc_credentials_from_env() self.lb_dict = self._get_loadbalancer_list( alias=loadbalancer_alias, location=loadbalancer_location) if state == 'present': changed, result_lb, lb_id = self.ensure_loadbalancer_present( name=loadbalancer_name, alias=loadbalancer_alias, location=loadbalancer_location, description=loadbalancer_description, status=loadbalancer_status) if loadbalancer_port: changed, result_pool, pool_id = self.ensure_loadbalancerpool_present( lb_id=lb_id, alias=loadbalancer_alias, location=loadbalancer_location, 
method=loadbalancer_method, persistence=loadbalancer_persistence, port=loadbalancer_port) if loadbalancer_nodes: changed, result_nodes = self.ensure_lbpool_nodes_set( alias=loadbalancer_alias, location=loadbalancer_location, name=loadbalancer_name, port=loadbalancer_port, nodes=loadbalancer_nodes) elif state == 'absent': changed, result_lb = self.ensure_loadbalancer_absent( name=loadbalancer_name, alias=loadbalancer_alias, location=loadbalancer_location) elif state == 'port_absent': changed, result_lb = self.ensure_loadbalancerpool_absent( alias=loadbalancer_alias, location=loadbalancer_location, name=loadbalancer_name, port=loadbalancer_port) elif state == 'nodes_present': changed, result_lb = self.ensure_lbpool_nodes_present( alias=loadbalancer_alias, location=loadbalancer_location, name=loadbalancer_name, port=loadbalancer_port, nodes=loadbalancer_nodes) elif state == 'nodes_absent': changed, result_lb = self.ensure_lbpool_nodes_absent( alias=loadbalancer_alias, location=loadbalancer_location, name=loadbalancer_name, port=loadbalancer_port, nodes=loadbalancer_nodes) self.module.exit_json(changed=changed, loadbalancer=result_lb) def ensure_loadbalancer_present( self, name, alias, location, description, status): """ Checks to see if a load balancer exists and creates one if it does not. 
:param name: Name of loadbalancer :param alias: Alias of account :param location: Datacenter :param description: Description of loadbalancer :param status: Enabled / Disabled :return: (changed, result, lb_id) changed: Boolean whether a change was made result: The result object from the CLC load balancer request lb_id: The load balancer id """ changed = False result = name lb_id = self._loadbalancer_exists(name=name) if not lb_id: if not self.module.check_mode: result = self.create_loadbalancer(name=name, alias=alias, location=location, description=description, status=status) lb_id = result.get('id') changed = True return changed, result, lb_id def ensure_loadbalancerpool_present( self, lb_id, alias, location, method, persistence, port): """ Checks to see if a load balancer pool exists and creates one if it does not. :param lb_id: The loadbalancer id :param alias: The account alias :param location: the datacenter the load balancer resides in :param method: the load balancing method :param persistence: the load balancing persistence type :param port: the port that the load balancer will listen on :return: (changed, group, pool_id) - changed: Boolean whether a change was made result: The result from the CLC API call pool_id: The string id of the load balancer pool """ changed = False result = port if not lb_id: return changed, None, None pool_id = self._loadbalancerpool_exists( alias=alias, location=location, port=port, lb_id=lb_id) if not pool_id: if not self.module.check_mode: result = self.create_loadbalancerpool( alias=alias, location=location, lb_id=lb_id, method=method, persistence=persistence, port=port) pool_id = result.get('id') changed = True return changed, result, pool_id def ensure_loadbalancer_absent(self, name, alias, location): """ Checks to see if a load balancer exists and deletes it if it does :param name: Name of the load balancer :param alias: Alias of account :param location: Datacenter :return: (changed, result) changed: Boolean whether a change 
was made result: The result from the CLC API Call """ changed = False result = name lb_exists = self._loadbalancer_exists(name=name) if lb_exists: if not self.module.check_mode: result = self.delete_loadbalancer(alias=alias, location=location, name=name) changed = True return changed, result def ensure_loadbalancerpool_absent(self, alias, location, name, port): """ Checks to see if a load balancer pool exists and deletes it if it does :param alias: The account alias :param location: the datacenter the load balancer resides in :param name: the name of the load balancer :param port: the port that the load balancer listens on :return: (changed, result) - changed: Boolean whether a change was made result: The result from the CLC API call """ changed = False result = None lb_exists = self._loadbalancer_exists(name=name) if lb_exists: lb_id = self._get_loadbalancer_id(name=name) pool_id = self._loadbalancerpool_exists( alias=alias, location=location, port=port, lb_id=lb_id) if pool_id: changed = True if not self.module.check_mode: result = self.delete_loadbalancerpool( alias=alias, location=location, lb_id=lb_id, pool_id=pool_id) else: result = "Pool doesn't exist" else: result = "LB Doesn't Exist" return changed, result def ensure_lbpool_nodes_set(self, alias, location, name, port, nodes): """ Checks to see if the provided list of nodes exist for the pool and set the nodes if any in the list those doesn't exist :param alias: The account alias :param location: the datacenter the load balancer resides in :param name: the name of the load balancer :param port: the port that the load balancer will listen on :param nodes: The list of nodes to be updated to the pool :return: (changed, result) - changed: Boolean whether a change was made result: The result from the CLC API call """ result = {} changed = False lb_exists = self._loadbalancer_exists(name=name) if lb_exists: lb_id = self._get_loadbalancer_id(name=name) pool_id = self._loadbalancerpool_exists( alias=alias, 
location=location, port=port, lb_id=lb_id) if pool_id: nodes_exist = self._loadbalancerpool_nodes_exists(alias=alias, location=location, lb_id=lb_id, pool_id=pool_id, nodes_to_check=nodes) if not nodes_exist: changed = True result = self.set_loadbalancernodes(alias=alias, location=location, lb_id=lb_id, pool_id=pool_id, nodes=nodes) else: result = "Pool doesn't exist" else: result = "Load balancer doesn't Exist" return changed, result def ensure_lbpool_nodes_present(self, alias, location, name, port, nodes): """ Checks to see if the provided list of nodes exist for the pool and add the missing nodes to the pool :param alias: The account alias :param location: the datacenter the load balancer resides in :param name: the name of the load balancer :param port: the port that the load balancer will listen on :param nodes: the list of nodes to be added :return: (changed, result) - changed: Boolean whether a change was made result: The result from the CLC API call """ changed = False lb_exists = self._loadbalancer_exists(name=name) if lb_exists: lb_id = self._get_loadbalancer_id(name=name) pool_id = self._loadbalancerpool_exists( alias=alias, location=location, port=port, lb_id=lb_id) if pool_id: changed, result = self.add_lbpool_nodes(alias=alias, location=location, lb_id=lb_id, pool_id=pool_id, nodes_to_add=nodes) else: result = "Pool doesn't exist" else: result = "Load balancer doesn't Exist" return changed, result def ensure_lbpool_nodes_absent(self, alias, location, name, port, nodes): """ Checks to see if the provided list of nodes exist for the pool and removes them if found any :param alias: The account alias :param location: the datacenter the load balancer resides in :param name: the name of the load balancer :param port: the port that the load balancer will listen on :param nodes: the list of nodes to be removed :return: (changed, result) - changed: Boolean whether a change was made result: The result from the CLC API call """ changed = False lb_exists = 
self._loadbalancer_exists(name=name) if lb_exists: lb_id = self._get_loadbalancer_id(name=name) pool_id = self._loadbalancerpool_exists( alias=alias, location=location, port=port, lb_id=lb_id) if pool_id: changed, result = self.remove_lbpool_nodes(alias=alias, location=location, lb_id=lb_id, pool_id=pool_id, nodes_to_remove=nodes) else: result = "Pool doesn't exist" else: result = "Load balancer doesn't Exist" return changed, result def create_loadbalancer(self, name, alias, location, description, status): """ Create a loadbalancer w/ params :param name: Name of loadbalancer :param alias: Alias of account :param location: Datacenter :param description: Description for loadbalancer to be created :param status: Enabled / Disabled :return: result: The result from the CLC API call """ result = None try: result = self.clc.v2.API.Call('POST', '/v2/sharedLoadBalancers/%s/%s' % (alias, location), json.dumps({"name": name, "description": description, "status": status})) sleep(1) except APIFailedResponse as e: self.module.fail_json( msg='Unable to create load balancer "{0}". {1}'.format( name, str(e.response_text))) return result def create_loadbalancerpool( self, alias, location, lb_id, method, persistence, port): """ Creates a pool on the provided load balancer :param alias: the account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the load balancer :param method: the load balancing method :param persistence: the load balancing persistence type :param port: the port that the load balancer will listen on :return: result: The result from the create API call """ result = None try: result = self.clc.v2.API.Call( 'POST', '/v2/sharedLoadBalancers/%s/%s/%s/pools' % (alias, location, lb_id), json.dumps( { "port": port, "method": method, "persistence": persistence })) except APIFailedResponse as e: self.module.fail_json( msg='Unable to create pool for load balancer id "{0}". 
{1}'.format( lb_id, str(e.response_text))) return result def delete_loadbalancer(self, alias, location, name): """ Delete CLC loadbalancer :param alias: Alias for account :param location: Datacenter :param name: Name of the loadbalancer to delete :return: result: The result from the CLC API call """ result = None lb_id = self._get_loadbalancer_id(name=name) try: result = self.clc.v2.API.Call( 'DELETE', '/v2/sharedLoadBalancers/%s/%s/%s' % (alias, location, lb_id)) except APIFailedResponse as e: self.module.fail_json( msg='Unable to delete load balancer "{0}". {1}'.format( name, str(e.response_text))) return result def delete_loadbalancerpool(self, alias, location, lb_id, pool_id): """ Delete the pool on the provided load balancer :param alias: The account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the load balancer :param pool_id: the id string of the load balancer pool :return: result: The result from the delete API call """ result = None try: result = self.clc.v2.API.Call( 'DELETE', '/v2/sharedLoadBalancers/%s/%s/%s/pools/%s' % (alias, location, lb_id, pool_id)) except APIFailedResponse as e: self.module.fail_json( msg='Unable to delete pool for load balancer id "{0}". {1}'.format( lb_id, str(e.response_text))) return result def _get_loadbalancer_id(self, name): """ Retrieves unique ID of loadbalancer :param name: Name of loadbalancer :return: Unique ID of the loadbalancer """ id = None for lb in self.lb_dict: if lb.get('name') == name: id = lb.get('id') return id def _get_loadbalancer_list(self, alias, location): """ Retrieve a list of loadbalancers :param alias: Alias for account :param location: Datacenter :return: JSON data for all loadbalancers at datacenter """ result = None try: result = self.clc.v2.API.Call( 'GET', '/v2/sharedLoadBalancers/%s/%s' % (alias, location)) except APIFailedResponse as e: self.module.fail_json( msg='Unable to fetch load balancers for account: {0}. 
{1}'.format( alias, str(e.response_text))) return result def _loadbalancer_exists(self, name): """ Verify a loadbalancer exists :param name: Name of loadbalancer :return: False or the ID of the existing loadbalancer """ result = False for lb in self.lb_dict: if lb.get('name') == name: result = lb.get('id') return result def _loadbalancerpool_exists(self, alias, location, port, lb_id): """ Checks to see if a pool exists on the specified port on the provided load balancer :param alias: the account alias :param location: the datacenter the load balancer resides in :param port: the port to check and see if it exists :param lb_id: the id string of the provided load balancer :return: result: The id string of the pool or False """ result = False try: pool_list = self.clc.v2.API.Call( 'GET', '/v2/sharedLoadBalancers/%s/%s/%s/pools' % (alias, location, lb_id)) except APIFailedResponse as e: return self.module.fail_json( msg='Unable to fetch the load balancer pools for for load balancer id: {0}. {1}'.format( lb_id, str(e.response_text))) for pool in pool_list: if int(pool.get('port')) == int(port): result = pool.get('id') return result def _loadbalancerpool_nodes_exists( self, alias, location, lb_id, pool_id, nodes_to_check): """ Checks to see if a set of nodes exists on the specified port on the provided load balancer :param alias: the account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the provided load balancer :param pool_id: the id string of the load balancer pool :param nodes_to_check: the list of nodes to check for :return: result: True / False indicating if the given nodes exist """ result = False nodes = self._get_lbpool_nodes(alias, location, lb_id, pool_id) for node in nodes_to_check: if not node.get('status'): node['status'] = 'enabled' if node in nodes: result = True else: result = False return result def set_loadbalancernodes(self, alias, location, lb_id, pool_id, nodes): """ Updates nodes to the provided 
pool :param alias: the account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the load balancer :param pool_id: the id string of the pool :param nodes: a list of dictionaries containing the nodes to set :return: result: The result from the CLC API call """ result = None if not lb_id: return result if not self.module.check_mode: try: result = self.clc.v2.API.Call('PUT', '/v2/sharedLoadBalancers/%s/%s/%s/pools/%s/nodes' % (alias, location, lb_id, pool_id), json.dumps(nodes)) except APIFailedResponse as e: self.module.fail_json( msg='Unable to set nodes for the load balancer pool id "{0}". {1}'.format( pool_id, str(e.response_text))) return result def add_lbpool_nodes(self, alias, location, lb_id, pool_id, nodes_to_add): """ Add nodes to the provided pool :param alias: the account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the load balancer :param pool_id: the id string of the pool :param nodes_to_add: a list of dictionaries containing the nodes to add :return: (changed, result) - changed: Boolean whether a change was made result: The result from the CLC API call """ changed = False result = {} nodes = self._get_lbpool_nodes(alias, location, lb_id, pool_id) for node in nodes_to_add: if not node.get('status'): node['status'] = 'enabled' if not node in nodes: changed = True nodes.append(node) if changed == True and not self.module.check_mode: result = self.set_loadbalancernodes( alias, location, lb_id, pool_id, nodes) return changed, result def remove_lbpool_nodes( self, alias, location, lb_id, pool_id, nodes_to_remove): """ Removes nodes from the provided pool :param alias: the account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the load balancer :param pool_id: the id string of the pool :param nodes_to_remove: a list of dictionaries containing the nodes to remove :return: (changed, result) - changed: Boolean 
whether a change was made result: The result from the CLC API call """ changed = False result = {} nodes = self._get_lbpool_nodes(alias, location, lb_id, pool_id) for node in nodes_to_remove: if not node.get('status'): node['status'] = 'enabled' if node in nodes: changed = True nodes.remove(node) if changed == True and not self.module.check_mode: result = self.set_loadbalancernodes( alias, location, lb_id, pool_id, nodes) return changed, result def _get_lbpool_nodes(self, alias, location, lb_id, pool_id): """ Return the list of nodes available to the provided load balancer pool :param alias: the account alias :param location: the datacenter the load balancer resides in :param lb_id: the id string of the load balancer :param pool_id: the id string of the pool :return: result: The list of nodes """ result = None try: result = self.clc.v2.API.Call('GET', '/v2/sharedLoadBalancers/%s/%s/%s/pools/%s/nodes' % (alias, location, lb_id, pool_id)) except APIFailedResponse as e: self.module.fail_json( msg='Unable to fetch list of available nodes for load balancer pool id: {0}. 
{1}'.format( pool_id, str(e.response_text))) return result @staticmethod def define_argument_spec(): """ Define the argument spec for the ansible module :return: argument spec dictionary """ argument_spec = dict( name=dict(required=True), description=dict(default=None), location=dict(required=True), alias=dict(required=True), port=dict(choices=[80, 443]), method=dict(choices=['leastConnection', 'roundRobin']), persistence=dict(choices=['standard', 'sticky']), nodes=dict(type='list', default=[]), status=dict(default='enabled', choices=['enabled', 'disabled']), state=dict( default='present', choices=[ 'present', 'absent', 'port_absent', 'nodes_present', 'nodes_absent']) ) return argument_spec def _set_clc_credentials_from_env(self): """ Set the CLC Credentials on the sdk by reading environment variables :return: none """ env = os.environ v2_api_token = env.get('CLC_V2_API_TOKEN', False) v2_api_username = env.get('CLC_V2_API_USERNAME', False) v2_api_passwd = env.get('CLC_V2_API_PASSWD', False) clc_alias = env.get('CLC_ACCT_ALIAS', False) api_url = env.get('CLC_V2_API_URL', False) if api_url: self.clc.defaults.ENDPOINT_URL_V2 = api_url if v2_api_token and clc_alias: self.clc._LOGIN_TOKEN_V2 = v2_api_token self.clc._V2_ENABLED = True self.clc.ALIAS = clc_alias elif v2_api_username and v2_api_passwd: self.clc.v2.SetCredentials( api_username=v2_api_username, api_passwd=v2_api_passwd) else: return self.module.fail_json( msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD " "environment variables") @staticmethod def _set_user_agent(clc): if hasattr(clc, 'SetRequestsSession'): agent_string = "ClcAnsibleModule/" + __version__ ses = requests.Session() ses.headers.update({"Api-Client": agent_string}) ses.headers['User-Agent'] += " " + agent_string clc.SetRequestsSession(ses) def main(): """ The main function. Instantiates the module and calls process_request. 
:return: none """ module = AnsibleModule(argument_spec=ClcLoadBalancer.define_argument_spec(), supports_check_mode=True) clc_loadbalancer = ClcLoadBalancer(module) clc_loadbalancer.process_request() from ansible.module_utils.basic import * # pylint: disable=W0614 if __name__ == '__main__': main()
gpl-3.0
vberaudi/scipy
scipy/optimize/tests/test_optimize.py
21
39326
""" Unit tests for optimization routines from optimize.py Authors: Ed Schofield, Nov 2005 Andrew Straw, April 2008 To run it in its simplest form:: nosetests test_optimize.py """ from __future__ import division, print_function, absolute_import import warnings import numpy as np from numpy.testing import (assert_raises, assert_allclose, assert_equal, assert_, TestCase, run_module_suite, dec, assert_almost_equal) from scipy._lib._testutils import suppressed_stdout from scipy import optimize def test_check_grad(): # Verify if check_grad is able to estimate the derivative of the # logistic function. def logit(x): return 1 / (1 + np.exp(-x)) def der_logit(x): return np.exp(-x) / (1 + np.exp(-x))**2 x0 = np.array([1.5]) r = optimize.check_grad(logit, der_logit, x0) assert_almost_equal(r, 0) r = optimize.check_grad(logit, der_logit, x0, epsilon=1e-6) assert_almost_equal(r, 0) # Check if the epsilon parameter is being considered. r = abs(optimize.check_grad(logit, der_logit, x0, epsilon=1e-1) - 0) assert_(r > 1e-7) class CheckOptimize(object): """ Base test case for a simple constrained entropy maximization problem (the machine translation example of Berger et al in Computational Linguistics, vol 22, num 1, pp 39--72, 1996.) 
""" def setUp(self): self.F = np.array([[1,1,1],[1,1,0],[1,0,1],[1,0,0],[1,0,0]]) self.K = np.array([1., 0.3, 0.5]) self.startparams = np.zeros(3, np.float64) self.solution = np.array([0., -0.524869316, 0.487525860]) self.maxiter = 1000 self.funccalls = 0 self.gradcalls = 0 self.trace = [] def func(self, x): self.funccalls += 1 if self.funccalls > 6000: raise RuntimeError("too many iterations in optimization routine") log_pdot = np.dot(self.F, x) logZ = np.log(sum(np.exp(log_pdot))) f = logZ - np.dot(self.K, x) self.trace.append(x) return f def grad(self, x): self.gradcalls += 1 log_pdot = np.dot(self.F, x) logZ = np.log(sum(np.exp(log_pdot))) p = np.exp(log_pdot - logZ) return np.dot(self.F.transpose(), p) - self.K def hess(self, x): log_pdot = np.dot(self.F, x) logZ = np.log(sum(np.exp(log_pdot))) p = np.exp(log_pdot - logZ) return np.dot(self.F.T, np.dot(np.diag(p), self.F - np.dot(self.F.T, p))) def hessp(self, x, p): return np.dot(self.hess(x), p) class CheckOptimizeParameterized(CheckOptimize): @suppressed_stdout def test_cg(self): # conjugate gradient optimization routine if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} res = optimize.minimize(self.func, self.startparams, args=(), method='CG', jac=self.grad, options=opts) params, fopt, func_calls, grad_calls, warnflag = \ res['x'], res['fun'], res['nfev'], res['njev'], res['status'] else: retval = optimize.fmin_cg(self.func, self.startparams, self.grad, (), maxiter=self.maxiter, full_output=True, disp=self.disp, retall=False) (params, fopt, func_calls, grad_calls, warnflag) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. 
assert_(self.funccalls == 9, self.funccalls) assert_(self.gradcalls == 7, self.gradcalls) # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[2:4], [[0, -0.5, 0.5], [0, -5.05700028e-01, 4.95985862e-01]], atol=1e-14, rtol=1e-7) @suppressed_stdout def test_bfgs(self): # Broyden-Fletcher-Goldfarb-Shanno optimization routine if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} res = optimize.minimize(self.func, self.startparams, jac=self.grad, method='BFGS', args=(), options=opts) params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag = ( res['x'], res['fun'], res['jac'], res['hess_inv'], res['nfev'], res['njev'], res['status']) else: retval = optimize.fmin_bfgs(self.func, self.startparams, self.grad, args=(), maxiter=self.maxiter, full_output=True, disp=self.disp, retall=False) (params, fopt, gopt, Hopt, func_calls, grad_calls, warnflag) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. assert_(self.funccalls == 10, self.funccalls) assert_(self.gradcalls == 8, self.gradcalls) # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[6:8], [[0, -5.25060743e-01, 4.87748473e-01], [0, -5.24885582e-01, 4.87530347e-01]], atol=1e-14, rtol=1e-7) @suppressed_stdout def test_bfgs_infinite(self): # Test corner case where -Inf is the minimum. See gh-2019. 
func = lambda x: -np.e**-x fprime = lambda x: -func(x) x0 = [0] olderr = np.seterr(over='ignore') try: if self.use_wrapper: opts = {'disp': self.disp} x = optimize.minimize(func, x0, jac=fprime, method='BFGS', args=(), options=opts)['x'] else: x = optimize.fmin_bfgs(func, x0, fprime, disp=self.disp) assert_(not np.isfinite(func(x))) finally: np.seterr(**olderr) @suppressed_stdout def test_powell(self): # Powell (direction set) optimization routine if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} res = optimize.minimize(self.func, self.startparams, args=(), method='Powell', options=opts) params, fopt, direc, numiter, func_calls, warnflag = ( res['x'], res['fun'], res['direc'], res['nit'], res['nfev'], res['status']) else: retval = optimize.fmin_powell(self.func, self.startparams, args=(), maxiter=self.maxiter, full_output=True, disp=self.disp, retall=False) (params, fopt, direc, numiter, func_calls, warnflag) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. # # However, some leeway must be added: the exact evaluation # count is sensitive to numerical error, and floating-point # computations are not bit-for-bit reproducible across # machines, and when using e.g. MKL, data alignment # etc. affect the rounding error. 
# assert_(self.funccalls <= 116 + 20, self.funccalls) assert_(self.gradcalls == 0, self.gradcalls) # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[34:39], [[0.72949016, -0.44156936, 0.47100962], [0.72949016, -0.44156936, 0.48052496], [1.45898031, -0.88313872, 0.95153458], [0.72949016, -0.44156936, 0.47576729], [1.72949016, -0.44156936, 0.47576729]], atol=1e-14, rtol=1e-7) @suppressed_stdout def test_neldermead(self): # Nelder-Mead simplex algorithm if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} res = optimize.minimize(self.func, self.startparams, args=(), method='Nelder-mead', options=opts) params, fopt, numiter, func_calls, warnflag = ( res['x'], res['fun'], res['nit'], res['nfev'], res['status']) else: retval = optimize.fmin(self.func, self.startparams, args=(), maxiter=self.maxiter, full_output=True, disp=self.disp, retall=False) (params, fopt, numiter, func_calls, warnflag) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. 
assert_(self.funccalls == 167, self.funccalls) assert_(self.gradcalls == 0, self.gradcalls) # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[76:78], [[0.1928968, -0.62780447, 0.35166118], [0.19572515, -0.63648426, 0.35838135]], atol=1e-14, rtol=1e-7) @suppressed_stdout def test_ncg(self): # line-search Newton conjugate gradient optimization routine if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} retval = optimize.minimize(self.func, self.startparams, method='Newton-CG', jac=self.grad, args=(), options=opts)['x'] else: retval = optimize.fmin_ncg(self.func, self.startparams, self.grad, args=(), maxiter=self.maxiter, full_output=False, disp=self.disp, retall=False) params = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. assert_(self.funccalls == 7, self.funccalls) assert_(self.gradcalls <= 22, self.gradcalls) # 0.13.0 #assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0 #assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0 #assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0 # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[3:5], [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01], [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]], atol=1e-6, rtol=1e-7) @suppressed_stdout def test_ncg_hess(self): # Newton conjugate gradient with Hessian if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} retval = optimize.minimize(self.func, self.startparams, method='Newton-CG', jac=self.grad, hess=self.hess, args=(), options=opts)['x'] else: retval = optimize.fmin_ncg(self.func, self.startparams, self.grad, fhess=self.hess, args=(), maxiter=self.maxiter, full_output=False, disp=self.disp, retall=False) params = retval assert_allclose(self.func(params), 
self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. assert_(self.funccalls == 7, self.funccalls) assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0 # assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0 # assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0 # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[3:5], [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01], [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]], atol=1e-6, rtol=1e-7) @suppressed_stdout def test_ncg_hessp(self): # Newton conjugate gradient with Hessian times a vector p. if self.use_wrapper: opts = {'maxiter': self.maxiter, 'disp': self.disp, 'return_all': False} retval = optimize.minimize(self.func, self.startparams, method='Newton-CG', jac=self.grad, hessp=self.hessp, args=(), options=opts)['x'] else: retval = optimize.fmin_ncg(self.func, self.startparams, self.grad, fhess_p=self.hessp, args=(), maxiter=self.maxiter, full_output=False, disp=self.disp, retall=False) params = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. 
assert_(self.funccalls == 7, self.funccalls) assert_(self.gradcalls <= 18, self.gradcalls) # 0.9.0 # assert_(self.gradcalls == 18, self.gradcalls) # 0.8.0 # assert_(self.gradcalls == 22, self.gradcalls) # 0.7.0 # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[3:5], [[-4.35700753e-07, -5.24869435e-01, 4.87527480e-01], [-4.35700753e-07, -5.24869401e-01, 4.87527774e-01]], atol=1e-6, rtol=1e-7) class TestOptimizeWrapperDisp(CheckOptimizeParameterized): use_wrapper = True disp = True class TestOptimizeWrapperNoDisp(CheckOptimizeParameterized): use_wrapper = True disp = False class TestOptimizeNoWrapperDisp(CheckOptimizeParameterized): use_wrapper = False disp = True class TestOptimizeNoWrapperNoDisp(CheckOptimizeParameterized): use_wrapper = False disp = False class TestOptimizeSimple(CheckOptimize): def test_bfgs_nan(self): # Test corner case where nan is fed to optimizer. See gh-2067. func = lambda x: x fprime = lambda x: np.ones_like(x) x0 = [np.nan] with np.errstate(over='ignore', invalid='ignore'): x = optimize.fmin_bfgs(func, x0, fprime, disp=False) assert_(np.isnan(func(x))) def test_bfgs_nan_return(self): # Test corner cases where fun returns NaN. See gh-4793. # First case: NaN from first call. func = lambda x: np.nan result = optimize.minimize(func, 0) assert_(np.isnan(result['fun'])) assert_(result['success'] is False) # Second case: NaN from second call. func = lambda x: 0 if x == 0 else np.nan fprime = lambda x: np.ones_like(x) # Steer away from zero. result = optimize.minimize(func, 0, jac=fprime) assert_(np.isnan(result['fun'])) assert_(result['success'] is False) def test_bfgs_numerical_jacobian(self): # BFGS with numerical jacobian and a vector epsilon parameter. 
# define the epsilon parameter using a random vector epsilon = np.sqrt(np.finfo(float).eps) * np.random.rand(len(self.solution)) params = optimize.fmin_bfgs(self.func, self.startparams, epsilon=epsilon, args=(), maxiter=self.maxiter, disp=False) assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) def test_bfgs_gh_2169(self): def f(x): if x < 0: return 1.79769313e+308 else: return x + 1./x xs = optimize.fmin_bfgs(f, [10.], disp=False) assert_allclose(xs, 1.0, rtol=1e-4, atol=1e-4) def test_l_bfgs_b(self): # limited-memory bound-constrained BFGS algorithm retval = optimize.fmin_l_bfgs_b(self.func, self.startparams, self.grad, args=(), maxiter=self.maxiter) (params, fopt, d) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) # Ensure that function call counts are 'known good'; these are from # Scipy 0.7.0. Don't allow them to increase. assert_(self.funccalls == 7, self.funccalls) assert_(self.gradcalls == 5, self.gradcalls) # Ensure that the function behaves the same; this is from Scipy 0.7.0 assert_allclose(self.trace[3:5], [[0., -0.52489628, 0.48753042], [0., -0.52489628, 0.48753042]], atol=1e-14, rtol=1e-7) def test_l_bfgs_b_numjac(self): # L-BFGS-B with numerical jacobian retval = optimize.fmin_l_bfgs_b(self.func, self.startparams, approx_grad=True, maxiter=self.maxiter) (params, fopt, d) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) def test_l_bfgs_b_funjac(self): # L-BFGS-B with combined objective function and jacobian def fun(x): return self.func(x), self.grad(x) retval = optimize.fmin_l_bfgs_b(fun, self.startparams, maxiter=self.maxiter) (params, fopt, d) = retval assert_allclose(self.func(params), self.func(self.solution), atol=1e-6) def test_minimize_l_bfgs_b(self): # Minimize with L-BFGS-B method opts = {'disp': False, 'maxiter': self.maxiter} r = optimize.minimize(self.func, self.startparams, method='L-BFGS-B', jac=self.grad, options=opts) assert_allclose(self.func(r.x), 
self.func(self.solution), atol=1e-6) # approximate jacobian ra = optimize.minimize(self.func, self.startparams, method='L-BFGS-B', options=opts) assert_allclose(self.func(ra.x), self.func(self.solution), atol=1e-6) # check that function evaluations in approximate jacobian are counted assert_(ra.nfev > r.nfev) def test_minimize_l_bfgs_b_ftol(self): # Check that the `ftol` parameter in l_bfgs_b works as expected v0 = None for tol in [1e-1, 1e-4, 1e-7, 1e-10]: opts = {'disp': False, 'maxiter': self.maxiter, 'ftol': tol} sol = optimize.minimize(self.func, self.startparams, method='L-BFGS-B', jac=self.grad, options=opts) v = self.func(sol.x) if v0 is None: v0 = v else: assert_(v < v0) assert_allclose(v, self.func(self.solution), rtol=tol) def test_minimize_l_bfgs_maxls(self): # check that the maxls is passed down to the Fortran routine sol = optimize.minimize(optimize.rosen, np.array([-1.2,1.0]), method='L-BFGS-B', jac=optimize.rosen_der, options={'disp': False, 'maxls': 1}) assert_(not sol.success) def test_custom(self): # This function comes from the documentation example. 
def custmin(fun, x0, args=(), maxfev=None, stepsize=0.1, maxiter=100, callback=None, **options): bestx = x0 besty = fun(x0) funcalls = 1 niter = 0 improved = True stop = False while improved and not stop and niter < maxiter: improved = False niter += 1 for dim in range(np.size(x0)): for s in [bestx[dim] - stepsize, bestx[dim] + stepsize]: testx = np.copy(bestx) testx[dim] = s testy = fun(testx, *args) funcalls += 1 if testy < besty: besty = testy bestx = testx improved = True if callback is not None: callback(bestx) if maxfev is not None and funcalls >= maxfev: stop = True break return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter, nfev=funcalls, success=(niter > 1)) x0 = [1.35, 0.9, 0.8, 1.1, 1.2] res = optimize.minimize(optimize.rosen, x0, method=custmin, options=dict(stepsize=0.05)) assert_allclose(res.x, 1.0, rtol=1e-4, atol=1e-4) def test_minimize_tol_parameter(self): # Check that the minimize() tol= argument does something def func(z): x, y = z return x**2*y**2 + x**4 + 1 def dfunc(z): x, y = z return np.array([2*x*y**2 + 4*x**3, 2*x**2*y]) for method in ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg', 'l-bfgs-b', 'tnc', 'cobyla', 'slsqp']: if method in ('nelder-mead', 'powell', 'cobyla'): jac = None else: jac = dfunc sol1 = optimize.minimize(func, [1, 1], jac=jac, tol=1e-10, method=method) sol2 = optimize.minimize(func, [1, 1], jac=jac, tol=1.0, method=method) assert_(func(sol1.x) < func(sol2.x), "%s: %s vs. %s" % (method, func(sol1.x), func(sol2.x))) def test_no_increase(self): # Check that the solver doesn't return a value worse than the # initial point. 
def func(x): return (x - 1)**2 def bad_grad(x): # purposefully invalid gradient function, simulates a case # where line searches start failing return 2*(x - 1) * (-1) - 2 def check(method): x0 = np.array([2.0]) f0 = func(x0) jac = bad_grad if method in ['nelder-mead', 'powell', 'cobyla']: jac = None sol = optimize.minimize(func, x0, jac=jac, method=method, options=dict(maxiter=20)) assert_equal(func(sol.x), sol.fun) dec.knownfailureif(method == 'slsqp', "SLSQP returns slightly worse")(lambda: None)() assert_(func(sol.x) <= f0) for method in ['nelder-mead', 'powell', 'cg', 'bfgs', 'newton-cg', 'l-bfgs-b', 'tnc', 'cobyla', 'slsqp']: yield check, method def test_slsqp_respect_bounds(self): # Regression test for gh-3108 def f(x): return sum((x - np.array([1., 2., 3., 4.]))**2) def cons(x): a = np.array([[-1, -1, -1, -1], [-3, -3, -2, -1]]) return np.concatenate([np.dot(a, x) + np.array([5, 10]), x]) x0 = np.array([0.5, 1., 1.5, 2.]) res = optimize.minimize(f, x0, method='slsqp', constraints={'type': 'ineq', 'fun': cons}) assert_allclose(res.x, np.array([0., 2, 5, 8])/3, atol=1e-12) def test_minimize_automethod(self): def f(x): return x**2 def cons(x): return x - 2 x0 = np.array([10.]) sol_0 = optimize.minimize(f, x0) sol_1 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}]) sol_2 = optimize.minimize(f, x0, bounds=[(5, 10)]) sol_3 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}], bounds=[(5, 10)]) sol_4 = optimize.minimize(f, x0, constraints=[{'type': 'ineq', 'fun': cons}], bounds=[(1, 10)]) for sol in [sol_0, sol_1, sol_2, sol_3, sol_4]: assert_(sol.success) assert_allclose(sol_0.x, 0, atol=1e-8) assert_allclose(sol_1.x, 2, atol=1e-8) assert_allclose(sol_2.x, 5, atol=1e-8) assert_allclose(sol_3.x, 5, atol=1e-8) assert_allclose(sol_4.x, 2, atol=1e-8) def test_minimize_coerce_args_param(self): # Regression test for gh-3503 def Y(x, c): return np.sum((x-c)**2) def dY_dx(x, c=None): return 2*(x-c) c = np.array([3, 1, 4, 1, 5, 9, 2, 
6, 5, 3, 5]) xinit = np.random.randn(len(c)) optimize.minimize(Y, xinit, jac=dY_dx, args=(c), method="BFGS") class TestLBFGSBBounds(TestCase): def setUp(self): self.bounds = ((1, None), (None, None)) self.solution = (1, 0) def fun(self, x, p=2.0): return 1.0 / p * (x[0]**p + x[1]**p) def jac(self, x, p=2.0): return x**(p - 1) def fj(self, x, p=2.0): return self.fun(x, p), self.jac(x, p) def test_l_bfgs_b_bounds(self): x, f, d = optimize.fmin_l_bfgs_b(self.fun, [0, -1], fprime=self.jac, bounds=self.bounds) assert_(d['warnflag'] == 0, d['task']) assert_allclose(x, self.solution, atol=1e-6) def test_l_bfgs_b_funjac(self): # L-BFGS-B with fun and jac combined and extra arguments x, f, d = optimize.fmin_l_bfgs_b(self.fj, [0, -1], args=(2.0, ), bounds=self.bounds) assert_(d['warnflag'] == 0, d['task']) assert_allclose(x, self.solution, atol=1e-6) def test_minimize_l_bfgs_b_bounds(self): # Minimize with method='L-BFGS-B' with bounds res = optimize.minimize(self.fun, [0, -1], method='L-BFGS-B', jac=self.jac, bounds=self.bounds) assert_(res['success'], res['message']) assert_allclose(res.x, self.solution, atol=1e-6) class TestOptimizeScalar(TestCase): def setUp(self): self.solution = 1.5 def fun(self, x, a=1.5): """Objective function""" return (x - a)**2 - 0.8 def test_brent(self): x = optimize.brent(self.fun) assert_allclose(x, self.solution, atol=1e-6) x = optimize.brent(self.fun, brack=(-3, -2)) assert_allclose(x, self.solution, atol=1e-6) x = optimize.brent(self.fun, full_output=True) assert_allclose(x[0], self.solution, atol=1e-6) x = optimize.brent(self.fun, brack=(-15, -1, 15)) assert_allclose(x, self.solution, atol=1e-6) def test_golden(self): x = optimize.golden(self.fun) assert_allclose(x, self.solution, atol=1e-6) x = optimize.golden(self.fun, brack=(-3, -2)) assert_allclose(x, self.solution, atol=1e-6) x = optimize.golden(self.fun, full_output=True) assert_allclose(x[0], self.solution, atol=1e-6) x = optimize.golden(self.fun, brack=(-15, -1, 15)) 
assert_allclose(x, self.solution, atol=1e-6) def test_fminbound(self): x = optimize.fminbound(self.fun, 0, 1) assert_allclose(x, 1, atol=1e-4) x = optimize.fminbound(self.fun, 1, 5) assert_allclose(x, self.solution, atol=1e-6) x = optimize.fminbound(self.fun, np.array([1]), np.array([5])) assert_allclose(x, self.solution, atol=1e-6) assert_raises(ValueError, optimize.fminbound, self.fun, 5, 1) def test_fminbound_scalar(self): try: optimize.fminbound(self.fun, np.zeros((1, 2)), 1) self.fail("exception not raised") except ValueError as e: assert_('must be scalar' in str(e)) x = optimize.fminbound(self.fun, 1, np.array(5)) assert_allclose(x, self.solution, atol=1e-6) def test_minimize_scalar(self): # combine all tests above for the minimize_scalar wrapper x = optimize.minimize_scalar(self.fun).x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, method='Brent') assert_(x.success) x = optimize.minimize_scalar(self.fun, method='Brent', options=dict(maxiter=3)) assert_(not x.success) x = optimize.minimize_scalar(self.fun, bracket=(-3, -2), args=(1.5, ), method='Brent').x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, method='Brent', args=(1.5,)).x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15), args=(1.5, ), method='Brent').x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, bracket=(-3, -2), args=(1.5, ), method='golden').x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, method='golden', args=(1.5,)).x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, bracket=(-15, -1, 15), args=(1.5, ), method='golden').x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, bounds=(0, 1), args=(1.5,), method='Bounded').x assert_allclose(x, 1, atol=1e-4) x = optimize.minimize_scalar(self.fun, bounds=(1, 5), args=(1.5, ), 
method='bounded').x assert_allclose(x, self.solution, atol=1e-6) x = optimize.minimize_scalar(self.fun, bounds=(np.array([1]), np.array([5])), args=(np.array([1.5]), ), method='bounded').x assert_allclose(x, self.solution, atol=1e-6) assert_raises(ValueError, optimize.minimize_scalar, self.fun, bounds=(5, 1), method='bounded', args=(1.5, )) assert_raises(ValueError, optimize.minimize_scalar, self.fun, bounds=(np.zeros(2), 1), method='bounded', args=(1.5, )) x = optimize.minimize_scalar(self.fun, bounds=(1, np.array(5)), method='bounded').x assert_allclose(x, self.solution, atol=1e-6) def test_minimize_scalar_custom(self): # This function comes from the documentation example. def custmin(fun, bracket, args=(), maxfev=None, stepsize=0.1, maxiter=100, callback=None, **options): bestx = (bracket[1] + bracket[0]) / 2.0 besty = fun(bestx) funcalls = 1 niter = 0 improved = True stop = False while improved and not stop and niter < maxiter: improved = False niter += 1 for testx in [bestx - stepsize, bestx + stepsize]: testy = fun(testx, *args) funcalls += 1 if testy < besty: besty = testy bestx = testx improved = True if callback is not None: callback(bestx) if maxfev is not None and funcalls >= maxfev: stop = True break return optimize.OptimizeResult(fun=besty, x=bestx, nit=niter, nfev=funcalls, success=(niter > 1)) res = optimize.minimize_scalar(self.fun, bracket=(0, 4), method=custmin, options=dict(stepsize=0.05)) assert_allclose(res.x, self.solution, atol=1e-6) def test_minimize_scalar_coerce_args_param(self): # Regression test for gh-3503 optimize.minimize_scalar(self.fun, args=1.5) def test_brent_negative_tolerance(): assert_raises(ValueError, optimize.brent, np.cos, tol=-.01) class TestNewtonCg(object): def test_rosenbrock(self): x0 = np.array([-1.2, 1.0]) sol = optimize.minimize(optimize.rosen, x0, jac=optimize.rosen_der, hess=optimize.rosen_hess, tol=1e-5, method='Newton-CG') assert_(sol.success, sol.message) assert_allclose(sol.x, np.array([1, 1]), rtol=1e-4) def 
test_himmelblau(self): x0 = np.array(himmelblau_x0) sol = optimize.minimize(himmelblau, x0, jac=himmelblau_grad, hess=himmelblau_hess, method='Newton-CG', tol=1e-6) assert_(sol.success, sol.message) assert_allclose(sol.x, himmelblau_xopt, rtol=1e-4) assert_allclose(sol.fun, himmelblau_min, atol=1e-4) class TestRosen(TestCase): def test_hess(self): # Compare rosen_hess(x) times p with rosen_hess_prod(x,p). See gh-1775 x = np.array([3, 4, 5]) p = np.array([2, 2, 2]) hp = optimize.rosen_hess_prod(x, p) dothp = np.dot(optimize.rosen_hess(x), p) assert_equal(hp, dothp) def himmelblau(p): """ R^2 -> R^1 test function for optimization. The function has four local minima where himmelblau(xopt) == 0. """ x, y = p a = x*x + y - 11 b = x + y*y - 7 return a*a + b*b def himmelblau_grad(p): x, y = p return np.array([4*x**3 + 4*x*y - 42*x + 2*y**2 - 14, 2*x**2 + 4*x*y + 4*y**3 - 26*y - 22]) def himmelblau_hess(p): x, y = p return np.array([[12*x**2 + 4*y - 42, 4*x + 4*y], [4*x + 4*y, 4*x + 12*y**2 - 26]]) himmelblau_x0 = [-0.27, -0.9] himmelblau_xopt = [3, 2] himmelblau_min = 0.0 def test_minimize_multiple_constraints(): # Regression test for gh-4240. 
def func(x): return np.array([25 - 0.2 * x[0] - 0.4 * x[1] - 0.33 * x[2]]) def func1(x): return np.array([x[1]]) def func2(x): return np.array([x[2]]) cons = ({'type': 'ineq', 'fun': func}, {'type': 'ineq', 'fun': func1}, {'type': 'ineq', 'fun': func2}) f = lambda x: -1 * (x[0] + x[1] + x[2]) res = optimize.minimize(f, [0, 0, 0], method='SLSQP', constraints=cons) assert_allclose(res.x, [125, 0, 0], atol=1e-10) class TestOptimizeResultAttributes(TestCase): # Test that all minimizers return an OptimizeResult containing # all the OptimizeResult attributes def setUp(self): self.x0 = [5, 5] self.func = optimize.rosen self.jac = optimize.rosen_der self.hess = optimize.rosen_hess self.hessp = optimize.rosen_hess_prod self.bounds = [(0., 10.), (0., 10.)] def test_attributes_present(self): methods = ['Nelder-Mead', 'Powell', 'CG', 'BFGS', 'Newton-CG', 'L-BFGS-B', 'TNC', 'COBYLA', 'SLSQP', 'dogleg', 'trust-ncg'] attributes = ['nit', 'nfev', 'x', 'success', 'status', 'fun', 'message'] skip = {'COBYLA': ['nit']} for method in methods: with warnings.catch_warnings(): warnings.simplefilter("ignore") res = optimize.minimize(self.func, self.x0, method=method, jac=self.jac, hess=self.hess, hessp=self.hessp) for attribute in attributes: if method in skip and attribute in skip[method]: continue assert_(hasattr(res, attribute)) class TestBrute: # Test the "brute force" method def setUp(self): self.params = (2, 3, 7, 8, 9, 10, 44, -1, 2, 26, 1, -2, 0.5) self.rranges = (slice(-4, 4, 0.25), slice(-4, 4, 0.25)) self.solution = np.array([-1.05665192, 1.80834843]) def f1(self, z, *params): x, y = z a, b, c, d, e, f, g, h, i, j, k, l, scale = params return (a * x**2 + b * x * y + c * y**2 + d*x + e*y + f) def f2(self, z, *params): x, y = z a, b, c, d, e, f, g, h, i, j, k, l, scale = params return (-g*np.exp(-((x-h)**2 + (y-i)**2) / scale)) def f3(self, z, *params): x, y = z a, b, c, d, e, f, g, h, i, j, k, l, scale = params return (-j*np.exp(-((x-k)**2 + (y-l)**2) / scale)) def func(self, z, 
*params): return self.f1(z, *params) + self.f2(z, *params) + self.f3(z, *params) @suppressed_stdout def test_brute(self): # test fmin resbrute = optimize.brute(self.func, self.rranges, args=self.params, full_output=True, finish=optimize.fmin) assert_allclose(resbrute[0], self.solution, atol=1e-3) assert_allclose(resbrute[1], self.func(self.solution, *self.params), atol=1e-3) # test minimize resbrute = optimize.brute(self.func, self.rranges, args=self.params, full_output=True, finish=optimize.minimize) assert_allclose(resbrute[0], self.solution, atol=1e-3) assert_allclose(resbrute[1], self.func(self.solution, *self.params), atol=1e-3) if __name__ == "__main__": run_module_suite()
bsd-3-clause
yongshengwang/hue
build/env/lib/python2.7/site-packages/Django-1.6.10-py2.7.egg/django/contrib/admin/validation.py
108
23236
from django.core.exceptions import ImproperlyConfigured from django.db import models from django.db.models.fields import FieldDoesNotExist from django.forms.models import BaseModelForm, BaseModelFormSet, _get_foreign_key from django.contrib.admin.util import get_fields_from_path, NotRelationField """ Does basic ModelAdmin option validation. Calls custom validation classmethod in the end if it is provided in cls. The signature of the custom validation classmethod should be: def validate(cls, model). """ __all__ = ['BaseValidator', 'InlineValidator'] class BaseValidator(object): def __init__(self): # Before we can introspect models, they need to be fully loaded so that # inter-relations are set up correctly. We force that here. models.get_apps() def validate(self, cls, model): for m in dir(self): if m.startswith('validate_'): getattr(self, m)(cls, model) def check_field_spec(self, cls, model, flds, label): """ Validate the fields specification in `flds` from a ModelAdmin subclass `cls` for the `model` model. Use `label` for reporting problems to the user. The fields specification can be a ``fields`` option or a ``fields`` sub-option from a ``fieldsets`` option component. """ for fields in flds: # The entry in fields might be a tuple. If it is a standalone # field, make it into a tuple to make processing easier. if type(fields) != tuple: fields = (fields,) for field in fields: if field in cls.readonly_fields: # Stuff can be put in fields that isn't actually a # model field if it's in readonly_fields, # readonly_fields will handle the validation of such # things. continue try: f = model._meta.get_field(field) except models.FieldDoesNotExist: # If we can't find a field on the model that matches, it could be an # extra field on the form; nothing to check so move on to the next field. 
continue if isinstance(f, models.ManyToManyField) and not f.rel.through._meta.auto_created: raise ImproperlyConfigured("'%s.%s' " "can't include the ManyToManyField field '%s' because " "'%s' manually specifies a 'through' model." % ( cls.__name__, label, field, field)) def validate_raw_id_fields(self, cls, model): " Validate that raw_id_fields only contains field names that are listed on the model. " if hasattr(cls, 'raw_id_fields'): check_isseq(cls, 'raw_id_fields', cls.raw_id_fields) for idx, field in enumerate(cls.raw_id_fields): f = get_field(cls, model, 'raw_id_fields', field) if not isinstance(f, (models.ForeignKey, models.ManyToManyField)): raise ImproperlyConfigured("'%s.raw_id_fields[%d]', '%s' must " "be either a ForeignKey or ManyToManyField." % (cls.__name__, idx, field)) def validate_fields(self, cls, model): " Validate that fields only refer to existing fields, doesn't contain duplicates. " # fields if cls.fields: # default value is None check_isseq(cls, 'fields', cls.fields) self.check_field_spec(cls, model, cls.fields, 'fields') if cls.fieldsets: raise ImproperlyConfigured('Both fieldsets and fields are specified in %s.' % cls.__name__) if len(cls.fields) > len(set(cls.fields)): raise ImproperlyConfigured('There are duplicate field(s) in %s.fields' % cls.__name__) def validate_fieldsets(self, cls, model): " Validate that fieldsets is properly formatted and doesn't contain duplicates. " from django.contrib.admin.options import flatten_fieldsets if cls.fieldsets: # default value is None check_isseq(cls, 'fieldsets', cls.fieldsets) for idx, fieldset in enumerate(cls.fieldsets): check_isseq(cls, 'fieldsets[%d]' % idx, fieldset) if len(fieldset) != 2: raise ImproperlyConfigured("'%s.fieldsets[%d]' does not " "have exactly two elements." % (cls.__name__, idx)) check_isdict(cls, 'fieldsets[%d][1]' % idx, fieldset[1]) if 'fields' not in fieldset[1]: raise ImproperlyConfigured("'fields' key is required in " "%s.fieldsets[%d][1] field options dict." 
% (cls.__name__, idx)) self.check_field_spec(cls, model, fieldset[1]['fields'], "fieldsets[%d][1]['fields']" % idx) flattened_fieldsets = flatten_fieldsets(cls.fieldsets) if len(flattened_fieldsets) > len(set(flattened_fieldsets)): raise ImproperlyConfigured('There are duplicate field(s) in %s.fieldsets' % cls.__name__) def validate_exclude(self, cls, model): " Validate that exclude is a sequence without duplicates. " if cls.exclude: # default value is None check_isseq(cls, 'exclude', cls.exclude) if len(cls.exclude) > len(set(cls.exclude)): raise ImproperlyConfigured('There are duplicate field(s) in %s.exclude' % cls.__name__) def validate_form(self, cls, model): " Validate that form subclasses BaseModelForm. " if hasattr(cls, 'form') and not issubclass(cls.form, BaseModelForm): raise ImproperlyConfigured("%s.form does not inherit from " "BaseModelForm." % cls.__name__) def validate_filter_vertical(self, cls, model): " Validate that filter_vertical is a sequence of field names. " if hasattr(cls, 'filter_vertical'): check_isseq(cls, 'filter_vertical', cls.filter_vertical) for idx, field in enumerate(cls.filter_vertical): f = get_field(cls, model, 'filter_vertical', field) if not isinstance(f, models.ManyToManyField): raise ImproperlyConfigured("'%s.filter_vertical[%d]' must be " "a ManyToManyField." % (cls.__name__, idx)) def validate_filter_horizontal(self, cls, model): " Validate that filter_horizontal is a sequence of field names. " if hasattr(cls, 'filter_horizontal'): check_isseq(cls, 'filter_horizontal', cls.filter_horizontal) for idx, field in enumerate(cls.filter_horizontal): f = get_field(cls, model, 'filter_horizontal', field) if not isinstance(f, models.ManyToManyField): raise ImproperlyConfigured("'%s.filter_horizontal[%d]' must be " "a ManyToManyField." % (cls.__name__, idx)) def validate_radio_fields(self, cls, model): " Validate that radio_fields is a dictionary of choice or foreign key fields. 
" from django.contrib.admin.options import HORIZONTAL, VERTICAL if hasattr(cls, 'radio_fields'): check_isdict(cls, 'radio_fields', cls.radio_fields) for field, val in cls.radio_fields.items(): f = get_field(cls, model, 'radio_fields', field) if not (isinstance(f, models.ForeignKey) or f.choices): raise ImproperlyConfigured("'%s.radio_fields['%s']' " "is neither an instance of ForeignKey nor does " "have choices set." % (cls.__name__, field)) if not val in (HORIZONTAL, VERTICAL): raise ImproperlyConfigured("'%s.radio_fields['%s']' " "is neither admin.HORIZONTAL nor admin.VERTICAL." % (cls.__name__, field)) def validate_prepopulated_fields(self, cls, model): " Validate that prepopulated_fields if a dictionary containing allowed field types. " # prepopulated_fields if hasattr(cls, 'prepopulated_fields'): check_isdict(cls, 'prepopulated_fields', cls.prepopulated_fields) for field, val in cls.prepopulated_fields.items(): f = get_field(cls, model, 'prepopulated_fields', field) if isinstance(f, (models.DateTimeField, models.ForeignKey, models.ManyToManyField)): raise ImproperlyConfigured("'%s.prepopulated_fields['%s']' " "is either a DateTimeField, ForeignKey or " "ManyToManyField. This isn't allowed." % (cls.__name__, field)) check_isseq(cls, "prepopulated_fields['%s']" % field, val) for idx, f in enumerate(val): get_field(cls, model, "prepopulated_fields['%s'][%d]" % (field, idx), f) def validate_ordering(self, cls, model): " Validate that ordering refers to existing fields or is random. " # ordering = None if cls.ordering: check_isseq(cls, 'ordering', cls.ordering) for idx, field in enumerate(cls.ordering): if field == '?' and len(cls.ordering) != 1: raise ImproperlyConfigured("'%s.ordering' has the random " "ordering marker '?', but contains other fields as " "well. Please either remove '?' or the other fields." 
% cls.__name__) if field == '?': continue if field.startswith('-'): field = field[1:] # Skip ordering in the format field1__field2 (FIXME: checking # this format would be nice, but it's a little fiddly). if '__' in field: continue get_field(cls, model, 'ordering[%d]' % idx, field) def validate_readonly_fields(self, cls, model): " Validate that readonly_fields refers to proper attribute or field. " if hasattr(cls, "readonly_fields"): check_isseq(cls, "readonly_fields", cls.readonly_fields) for idx, field in enumerate(cls.readonly_fields): if not callable(field): if not hasattr(cls, field): if not hasattr(model, field): try: model._meta.get_field(field) except models.FieldDoesNotExist: raise ImproperlyConfigured("%s.readonly_fields[%d], %r is not a callable or an attribute of %r or found in the model %r." % (cls.__name__, idx, field, cls.__name__, model._meta.object_name)) class ModelAdminValidator(BaseValidator): def validate_save_as(self, cls, model): " Validate save_as is a boolean. " check_type(cls, 'save_as', bool) def validate_save_on_top(self, cls, model): " Validate save_on_top is a boolean. " check_type(cls, 'save_on_top', bool) def validate_inlines(self, cls, model): " Validate inline model admin classes. " from django.contrib.admin.options import BaseModelAdmin if hasattr(cls, 'inlines'): check_isseq(cls, 'inlines', cls.inlines) for idx, inline in enumerate(cls.inlines): if not issubclass(inline, BaseModelAdmin): raise ImproperlyConfigured("'%s.inlines[%d]' does not inherit " "from BaseModelAdmin." % (cls.__name__, idx)) if not inline.model: raise ImproperlyConfigured("'model' is a required attribute " "of '%s.inlines[%d]'." % (cls.__name__, idx)) if not issubclass(inline.model, models.Model): raise ImproperlyConfigured("'%s.inlines[%d].model' does not " "inherit from models.Model." 
% (cls.__name__, idx)) inline.validate(inline.model) self.check_inline(inline, model) def check_inline(self, cls, parent_model): " Validate inline class's fk field is not excluded. " fk = _get_foreign_key(parent_model, cls.model, fk_name=cls.fk_name, can_fail=True) if hasattr(cls, 'exclude') and cls.exclude: if fk and fk.name in cls.exclude: raise ImproperlyConfigured("%s cannot exclude the field " "'%s' - this is the foreign key to the parent model " "%s.%s." % (cls.__name__, fk.name, parent_model._meta.app_label, parent_model.__name__)) def validate_list_display(self, cls, model): " Validate that list_display only contains fields or usable attributes. " if hasattr(cls, 'list_display'): check_isseq(cls, 'list_display', cls.list_display) for idx, field in enumerate(cls.list_display): if not callable(field): if not hasattr(cls, field): if not hasattr(model, field): try: model._meta.get_field(field) except models.FieldDoesNotExist: raise ImproperlyConfigured("%s.list_display[%d], %r is not a callable or an attribute of %r or found in the model %r." % (cls.__name__, idx, field, cls.__name__, model._meta.object_name)) else: # getattr(model, field) could be an X_RelatedObjectsDescriptor f = fetch_attr(cls, model, "list_display[%d]" % idx, field) if isinstance(f, models.ManyToManyField): raise ImproperlyConfigured("'%s.list_display[%d]', '%s' is a ManyToManyField which is not supported." % (cls.__name__, idx, field)) def validate_list_display_links(self, cls, model): " Validate that list_display_links is a unique subset of list_display. " if hasattr(cls, 'list_display_links'): check_isseq(cls, 'list_display_links', cls.list_display_links) for idx, field in enumerate(cls.list_display_links): if field not in cls.list_display: raise ImproperlyConfigured("'%s.list_display_links[%d]' " "refers to '%s' which is not defined in 'list_display'." 
% (cls.__name__, idx, field)) def validate_list_filter(self, cls, model): """ Validate that list_filter is a sequence of one of three options: 1: 'field' - a basic field filter, possibly w/ relationships (eg, 'field__rel') 2: ('field', SomeFieldListFilter) - a field-based list filter class 3: SomeListFilter - a non-field list filter class """ from django.contrib.admin import ListFilter, FieldListFilter if hasattr(cls, 'list_filter'): check_isseq(cls, 'list_filter', cls.list_filter) for idx, item in enumerate(cls.list_filter): if callable(item) and not isinstance(item, models.Field): # If item is option 3, it should be a ListFilter... if not issubclass(item, ListFilter): raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'" " which is not a descendant of ListFilter." % (cls.__name__, idx, item.__name__)) # ... but not a FieldListFilter. if issubclass(item, FieldListFilter): raise ImproperlyConfigured("'%s.list_filter[%d]' is '%s'" " which is of type FieldListFilter but is not" " associated with a field name." % (cls.__name__, idx, item.__name__)) else: if isinstance(item, (tuple, list)): # item is option #2 field, list_filter_class = item if not issubclass(list_filter_class, FieldListFilter): raise ImproperlyConfigured("'%s.list_filter[%d][1]'" " is '%s' which is not of type FieldListFilter." % (cls.__name__, idx, list_filter_class.__name__)) else: # item is option #1 field = item # Validate the field string try: get_fields_from_path(model, field) except (NotRelationField, FieldDoesNotExist): raise ImproperlyConfigured("'%s.list_filter[%d]' refers to '%s'" " which does not refer to a Field." % (cls.__name__, idx, field)) def validate_list_select_related(self, cls, model): " Validate that list_select_related is a boolean, a list or a tuple. 
" list_select_related = getattr(cls, 'list_select_related', None) if list_select_related: types = (bool, tuple, list) if not isinstance(list_select_related, types): raise ImproperlyConfigured("'%s.list_select_related' should be " "either a bool, a tuple or a list" % cls.__name__) def validate_list_per_page(self, cls, model): " Validate that list_per_page is an integer. " check_type(cls, 'list_per_page', int) def validate_list_max_show_all(self, cls, model): " Validate that list_max_show_all is an integer. " check_type(cls, 'list_max_show_all', int) def validate_list_editable(self, cls, model): """ Validate that list_editable is a sequence of editable fields from list_display without first element. """ if hasattr(cls, 'list_editable') and cls.list_editable: check_isseq(cls, 'list_editable', cls.list_editable) for idx, field_name in enumerate(cls.list_editable): try: field = model._meta.get_field_by_name(field_name)[0] except models.FieldDoesNotExist: raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a " "field, '%s', not defined on %s.%s." % (cls.__name__, idx, field_name, model._meta.app_label, model.__name__)) if field_name not in cls.list_display: raise ImproperlyConfigured("'%s.list_editable[%d]' refers to " "'%s' which is not defined in 'list_display'." % (cls.__name__, idx, field_name)) if field_name in cls.list_display_links: raise ImproperlyConfigured("'%s' cannot be in both '%s.list_editable'" " and '%s.list_display_links'" % (field_name, cls.__name__, cls.__name__)) if not cls.list_display_links and cls.list_display[0] in cls.list_editable: raise ImproperlyConfigured("'%s.list_editable[%d]' refers to" " the first field in list_display, '%s', which can't be" " used unless list_display_links is set." % (cls.__name__, idx, cls.list_display[0])) if not field.editable: raise ImproperlyConfigured("'%s.list_editable[%d]' refers to a " "field, '%s', which isn't editable through the admin." 
                    % (cls.__name__, idx, field_name))

    def validate_search_fields(self, cls, model):
        """Validate that search_fields is a sequence."""
        if hasattr(cls, 'search_fields'):
            check_isseq(cls, 'search_fields', cls.search_fields)

    def validate_date_hierarchy(self, cls, model):
        """Validate that date_hierarchy refers to a DateField or DateTimeField."""
        if cls.date_hierarchy:
            f = get_field(cls, model, 'date_hierarchy', cls.date_hierarchy)
            if not isinstance(f, (models.DateField, models.DateTimeField)):
                raise ImproperlyConfigured("'%s.date_hierarchy is "
                        "neither an instance of DateField nor DateTimeField."
                        % cls.__name__)


class InlineValidator(BaseValidator):
    # Validator for inline admin classes: runs every BaseValidator check
    # (via the validate_* discovery loop) plus the inline-only options below.

    def validate_fk_name(self, cls, model):
        """Validate that fk_name refers to a ForeignKey."""
        if cls.fk_name:  # default value is None
            f = get_field(cls, model, 'fk_name', cls.fk_name)
            if not isinstance(f, models.ForeignKey):
                raise ImproperlyConfigured("'%s.fk_name is not an instance of "
                        "models.ForeignKey." % cls.__name__)

    def validate_extra(self, cls, model):
        """Validate that extra is an integer."""
        check_type(cls, 'extra', int)

    def validate_max_num(self, cls, model):
        """Validate that max_num is an integer."""
        check_type(cls, 'max_num', int)

    def validate_formset(self, cls, model):
        """Validate formset is a subclass of BaseModelFormSet."""
        if hasattr(cls, 'formset') and not issubclass(cls.formset, BaseModelFormSet):
            raise ImproperlyConfigured("'%s.formset' does not inherit from "
                    "BaseModelFormSet." % cls.__name__)


# Module-level helpers shared by the validator classes above.

def check_type(cls, attr, type_):
    # None means the option is unset and is always accepted; any other
    # value must be an instance of type_.
    if getattr(cls, attr, None) is not None and not isinstance(getattr(cls, attr), type_):
        raise ImproperlyConfigured("'%s.%s' should be a %s."
                % (cls.__name__, attr, type_.__name__ ))


def check_isseq(cls, label, obj):
    # Accepts list or tuple only; `label` names the offending option in the error.
    if not isinstance(obj, (list, tuple)):
        raise ImproperlyConfigured("'%s.%s' must be a list or tuple." % (cls.__name__, label))


def check_isdict(cls, label, obj):
    if not isinstance(obj, dict):
        raise ImproperlyConfigured("'%s.%s' must be a dictionary."
                % (cls.__name__, label))


def get_field(cls, model, label, field):
    """Return the model field named ``field``, or raise ImproperlyConfigured.

    ``label`` identifies the admin option being validated and is used only
    in the error message.
    """
    try:
        return model._meta.get_field(field)
    except models.FieldDoesNotExist:
        raise ImproperlyConfigured("'%s.%s' refers to field '%s' that is missing from model '%s.%s'."
                % (cls.__name__, label, field, model._meta.app_label, model.__name__))


def fetch_attr(cls, model, label, field):
    """Return ``field`` as a model field if one exists, otherwise as a plain
    attribute of the model class; raise ImproperlyConfigured if neither.
    """
    try:
        return model._meta.get_field(field)
    except models.FieldDoesNotExist:
        # Not a model field -- fall through to attribute lookup.
        pass
    try:
        return getattr(model, field)
    except AttributeError:
        raise ImproperlyConfigured("'%s.%s' refers to '%s' that is neither a field, method or property of model '%s.%s'."
                % (cls.__name__, label, field, model._meta.app_label, model.__name__))
apache-2.0
tpsatish95/Universal-MultiDomain-Sentiment-Classifier
SentiHandlers/general.py
1
1193
# Author : Satish Palaniappan
__author__ = "Satish Palaniappan"

import pickle
import config
import sys
sys.path.append(config.basePath + config.SocialFilter)
from SocialFilter.Twokenize.twokenize import *
import re


class extractor(object):
    """General-domain sentiment scorer backed by pickled model artifacts.

    Loads a classifier, a feature selector and a vectorizer from the
    configured "general" model directory and exposes a single scoring
    entry point, ``getSentimentScore``.
    """

    def __init__(self):
        # Artifact files all live under <basePath><general> and share the
        # ".pkl" suffix; load_obj resolves each by its stem.
        self.path = config.basePath + config.general
        self.SentiModel = self.load_obj("_model")
        self.ch2 = self.load_obj("_feature_selector")
        self.vectorizer = self.load_obj("_vectorizer")

    def load_obj(self, name):
        """Unpickle and return the artifact stored at ``<path><name>.pkl``."""
        with open(self.path + name + '.pkl', 'rb') as pkl_file:
            return pickle.load(pkl_file)

    def simpleProcess(self, text):
        """Lowercase *text*, strip URLs and @/# marks, and re-tokenize it."""
        cleaned = text.lower().strip()
        cleaned = re.sub(Url_RE, "", cleaned)
        cleaned = re.sub(r"[@#]", "", cleaned)
        return u" ".join(tokenize(cleaned))

    def getSentimentScore(self, message):
        """Vectorize *message*, apply feature selection, and return the
        model's predicted sentiment score as a float."""
        features = self.vectorizer.transform([self.simpleProcess(message)])
        selected = self.ch2.transform(features)
        return float(self.SentiModel.predict(selected))
apache-2.0
thnee/ansible
lib/ansible/module_utils/network/meraki/meraki.py
10
19085
# -*- coding: utf-8 -*- # This code is part of Ansible, but is an independent component # This particular file snippet, and this file snippet only, is BSD licensed. # Modules you write using this snippet, which is embedded dynamically by Ansible # still belong to the author of the module, and may assign their own license # to the complete work. # Copyright: (c) 2018, Kevin Breit <kevin.breit@kevinbreit.net> # All rights reserved. # Redistribution and use in source and binary forms, with or without modification, # are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE # USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import time import os import re from ansible.module_utils.basic import AnsibleModule, json, env_fallback from ansible.module_utils.common.dict_transformations import camel_dict_to_snake_dict from ansible.module_utils.urls import fetch_url from ansible.module_utils.six.moves.urllib.parse import urlencode from ansible.module_utils._text import to_native, to_bytes, to_text RATE_LIMIT_RETRY_MULTIPLIER = 3 INTERNAL_ERROR_RETRY_MULTIPLIER = 3 def meraki_argument_spec(): return dict(auth_key=dict(type='str', no_log=True, fallback=(env_fallback, ['MERAKI_KEY']), required=True), host=dict(type='str', default='api.meraki.com'), use_proxy=dict(type='bool', default=False), use_https=dict(type='bool', default=True), validate_certs=dict(type='bool', default=True), output_format=dict(type='str', choices=['camelcase', 'snakecase'], default='snakecase', fallback=(env_fallback, ['ANSIBLE_MERAKI_FORMAT'])), output_level=dict(type='str', default='normal', choices=['normal', 'debug']), timeout=dict(type='int', default=30), org_name=dict(type='str', aliases=['organization']), org_id=dict(type='str'), rate_limit_retry_time=dict(type='int', default=165), internal_error_retry_time=dict(type='int', default=60) ) class RateLimitException(Exception): def __init__(self, *args, **kwargs): Exception.__init__(self, *args, **kwargs) class InternalErrorException(Exception): def __init__(self, *args, **kwargs): Exception.__init__(self, *args, **kwargs) class HTTPError(Exception): def __init__(self, *args, **kwargs): Exception.__init__(self, *args, **kwargs) def _error_report(function): def inner(self, *args, **kwargs): while True: try: response = function(self, *args, **kwargs) if self.status == 429: raise RateLimitException( "Rate limiter hit, retry {0}".format(self.retry)) elif self.status == 500: raise InternalErrorException( "Internal server error 500, retry {0}".format(self.retry)) elif self.status == 502: raise InternalErrorException( "Internal server error 502, retry {0}".format(self.retry)) 
elif self.status >= 400: raise HTTPError("HTTP error {0} - {1}".format(self.status, response)) self.retry = 0 # Needs to reset in case of future retries return response except RateLimitException as e: self.retry += 1 if self.retry <= 10: self.retry_time += self.retry * RATE_LIMIT_RETRY_MULTIPLIER time.sleep(self.retry * RATE_LIMIT_RETRY_MULTIPLIER) else: self.retry_time += 30 time.sleep(30) if self.retry_time > self.params['rate_limit_retry_time']: raise RateLimitException(e) except InternalErrorException as e: self.retry += 1 if self.retry <= 10: self.retry_time += self.retry * INTERNAL_ERROR_RETRY_MULTIPLIER time.sleep(self.retry * INTERNAL_ERROR_RETRY_MULTIPLIER) else: self.retry_time += 9 time.sleep(9) if self.retry_time > self.params['internal_error_retry_time']: raise InternalErrorException(e) except HTTPError as e: raise HTTPError(e) return inner class MerakiModule(object): def __init__(self, module, function=None): self.module = module self.params = module.params self.result = dict(changed=False) self.headers = dict() self.function = function self.orgs = None self.nets = None self.org_id = None self.net_id = None self.check_mode = module.check_mode self.key_map = {} self.request_attempts = 0 # normal output self.existing = None # info output self.config = dict() self.original = None self.proposed = dict() self.merged = None self.ignored_keys = ['id', 'organizationId'] # debug output self.filter_string = '' self.method = None self.path = None self.response = None self.status = None self.url = None # rate limiting statistics self.retry = 0 self.retry_time = 0 # If URLs need to be modified or added for specific purposes, use .update() on the url_catalog dictionary self.get_urls = {'organizations': '/organizations', 'network': '/organizations/{org_id}/networks', 'admins': '/organizations/{org_id}/admins', 'configTemplates': '/organizations/{org_id}/configTemplates', 'samlymbols': '/organizations/{org_id}/samlRoles', 'ssids': '/networks/{net_id}/ssids', 
'groupPolicies': '/networks/{net_id}/groupPolicies', 'staticRoutes': '/networks/{net_id}/staticRoutes', 'vlans': '/networks/{net_id}/vlans', 'devices': '/networks/{net_id}/devices', } # Used to retrieve only one item self.get_one_urls = {'organizations': '/organizations/{org_id}', 'network': '/networks/{net_id}', } # Module should add URLs which are required by the module self.url_catalog = {'get_all': self.get_urls, 'get_one': self.get_one_urls, 'create': None, 'update': None, 'delete': None, 'misc': None, } if self.module._debug or self.params['output_level'] == 'debug': self.module.warn('Enable debug output because ANSIBLE_DEBUG was set or output_level is set to debug.') # TODO: This should be removed as org_name isn't always required self.module.required_if = [('state', 'present', ['org_name']), ('state', 'absent', ['org_name']), ] # self.module.mutually_exclusive = [('org_id', 'org_name'), # ] self.modifiable_methods = ['POST', 'PUT', 'DELETE'] self.headers = {'Content-Type': 'application/json', 'X-Cisco-Meraki-API-Key': module.params['auth_key'], } def define_protocol(self): """Set protocol based on use_https parameters.""" if self.params['use_https'] is True: self.params['protocol'] = 'https' else: self.params['protocol'] = 'http' def sanitize_keys(self, data): if isinstance(data, dict): items = {} for k, v in data.items(): try: new = {self.key_map[k]: data[k]} items[self.key_map[k]] = self.sanitize_keys(data[k]) except KeyError: snake_k = re.sub('([a-z0-9])([A-Z])', r'\1_\2', k).lower() new = {snake_k: data[k]} items[snake_k] = self.sanitize_keys(data[k]) return items elif isinstance(data, list): items = [] for i in data: items.append(self.sanitize_keys(i)) return items elif isinstance(data, int) or isinstance(data, str) or isinstance(data, float): return data def is_update_required(self, original, proposed, optional_ignore=None): ''' Compare two data-structures ''' self.ignored_keys.append('net_id') if optional_ignore is not None: self.ignored_keys = 
self.ignored_keys + optional_ignore if isinstance(original, list): if len(original) != len(proposed): # self.fail_json(msg="Length of lists don't match") return True for a, b in zip(original, proposed): if self.is_update_required(a, b): # self.fail_json(msg="List doesn't match", a=a, b=b) return True elif isinstance(original, dict): for k, v in proposed.items(): if k not in self.ignored_keys: if k in original: if self.is_update_required(original[k], proposed[k]): return True else: # self.fail_json(msg="Key not in original", k=k) return True else: if original != proposed: # self.fail_json(msg="Fallback", original=original, proposed=proposed) return True return False def get_orgs(self): """Downloads all organizations for a user.""" response = self.request('/organizations', method='GET') if self.status != 200: self.fail_json(msg='Organization lookup failed') self.orgs = response return self.orgs def is_org_valid(self, data, org_name=None, org_id=None): """Checks whether a specific org exists and is duplicated. If 0, doesn't exist. 1, exists and not duplicated. >1 duplicated. """ org_count = 0 if org_name is not None: for o in data: if o['name'] == org_name: org_count += 1 if org_id is not None: for o in data: if o['id'] == org_id: org_count += 1 return org_count def get_org_id(self, org_name): """Returns an organization id based on organization name, only if unique. If org_id is specified as parameter, return that instead of a lookup. 
""" orgs = self.get_orgs() # self.fail_json(msg='ogs', orgs=orgs) if self.params['org_id'] is not None: if self.is_org_valid(orgs, org_id=self.params['org_id']) is True: return self.params['org_id'] org_count = self.is_org_valid(orgs, org_name=org_name) if org_count == 0: self.fail_json(msg='There are no organizations with the name {org_name}'.format(org_name=org_name)) if org_count > 1: self.fail_json(msg='There are multiple organizations with the name {org_name}'.format(org_name=org_name)) elif org_count == 1: for i in orgs: if org_name == i['name']: # self.fail_json(msg=i['id']) return str(i['id']) def get_nets(self, org_name=None, org_id=None): """Downloads all networks in an organization.""" if org_name: org_id = self.get_org_id(org_name) path = self.construct_path('get_all', org_id=org_id, function='network') r = self.request(path, method='GET') if self.status != 200: self.fail_json(msg='Network lookup failed') self.nets = r templates = self.get_config_templates(org_id) for t in templates: self.nets.append(t) return self.nets def get_net(self, org_name, net_name=None, org_id=None, data=None, net_id=None): ''' Return network information ''' if not data: if not org_id: org_id = self.get_org_id(org_name) data = self.get_nets(org_id=org_id) for n in data: if net_id: if n['id'] == net_id: return n elif net_name: if n['name'] == net_name: return n return False def get_net_id(self, org_name=None, net_name=None, data=None): """Return network id from lookup or existing data.""" if data is None: self.fail_json(msg='Must implement lookup') for n in data: if n['name'] == net_name: return n['id'] self.fail_json(msg='No network found with the name {0}'.format(net_name)) def get_config_templates(self, org_id): path = self.construct_path('get_all', function='configTemplates', org_id=org_id) response = self.request(path, 'GET') if self.status != 200: self.fail_json(msg='Unable to get configuration templates') return response def get_template_id(self, name, data): for template 
in data: if name == template['name']: return template['id'] self.fail_json(msg='No configuration template named {0} found'.format(name)) def convert_camel_to_snake(self, data): """ Converts a dictionary or list to snake case from camel case :type data: dict or list :return: Converted data structure, if list or dict """ if isinstance(data, dict): return camel_dict_to_snake_dict(data, ignore_list=('tags', 'tag')) elif isinstance(data, list): return [camel_dict_to_snake_dict(item, ignore_list=('tags', 'tag')) for item in data] else: return data def construct_params_list(self, keys, aliases=None): qs = {} for key in keys: if key in aliases: qs[aliases[key]] = self.module.params[key] else: qs[key] = self.module.params[key] return qs def encode_url_params(self, params): """Encodes key value pairs for URL""" return "?{0}".format(urlencode(params)) def construct_path(self, action, function=None, org_id=None, net_id=None, org_name=None, custom=None, params=None): """Build a path from the URL catalog. Uses function property from class for catalog lookup. 
""" built_path = None if function is None: built_path = self.url_catalog[action][self.function] else: built_path = self.url_catalog[action][function] if org_name: org_id = self.get_org_id(org_name) if custom: built_path = built_path.format(org_id=org_id, net_id=net_id, **custom) else: built_path = built_path.format(org_id=org_id, net_id=net_id) if params: built_path += self.encode_url_params(params) return built_path @_error_report def request(self, path, method=None, payload=None): """Generic HTTP method for Meraki requests.""" self.path = path self.define_protocol() if method is not None: self.method = method self.url = '{protocol}://{host}/api/v0/{path}'.format(path=self.path.lstrip('/'), **self.params) resp, info = fetch_url(self.module, self.url, headers=self.headers, data=payload, method=self.method, timeout=self.params['timeout'], use_proxy=self.params['use_proxy'], ) self.response = info['msg'] self.status = info['status'] try: return json.loads(to_native(resp.read())) except Exception: pass def exit_json(self, **kwargs): """Custom written method to exit from module.""" self.result['response'] = self.response self.result['status'] = self.status if self.retry > 0: self.module.warn("Rate limiter triggered - retry count {0}".format(self.retry)) # Return the gory details when we need it if self.params['output_level'] == 'debug': self.result['method'] = self.method self.result['url'] = self.url self.result.update(**kwargs) if self.params['output_format'] == 'camelcase': self.module.deprecate("Update your playbooks to support snake_case format instead of camelCase format.", version=2.13) else: if 'data' in self.result: try: self.result['data'] = self.convert_camel_to_snake(self.result['data']) except (KeyError, AttributeError): pass self.module.exit_json(**self.result) def fail_json(self, msg, **kwargs): """Custom written method to return info on failure.""" self.result['response'] = self.response self.result['status'] = self.status if self.params['output_level'] 
== 'debug': if self.url is not None: self.result['method'] = self.method self.result['url'] = self.url self.result.update(**kwargs) self.module.fail_json(msg=msg, **self.result)
gpl-3.0
TwinkleChawla/nova
nova/tests/functional/api_sample_tests/test_access_ips.py
16
4156
# Copyright 2012 Nebula, Inc. # Copyright 2013 IBM Corp. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo_config import cfg from nova.tests.functional.api_sample_tests import api_sample_base from nova.tests.unit.image import fake CONF = cfg.CONF CONF.import_opt('osapi_compute_extension', 'nova.api.openstack.compute.legacy_v2.extensions') class AccessIPsSampleJsonTest(api_sample_base.ApiSampleTestBaseV21): extension_name = 'os-access-ips' def _get_flags(self): f = super(AccessIPsSampleJsonTest, self)._get_flags() f['osapi_compute_extension'] = CONF.osapi_compute_extension[:] f['osapi_compute_extension'].append( 'nova.api.openstack.compute.contrib.keypairs.Keypairs') f['osapi_compute_extension'].append( 'nova.api.openstack.compute.contrib.extended_ips.Extended_ips') f['osapi_compute_extension'].append( 'nova.api.openstack.compute.contrib.extended_ips_mac.' 
'Extended_ips_mac') return f def _servers_post(self, subs): response = self._do_post('servers', 'server-post-req', subs) subs.update(self._get_regexes()) return self._verify_response('server-post-resp', subs, response, 202) def test_servers_post(self): subs = { 'image_id': fake.get_valid_image_id(), 'host': self._get_host(), 'access_ip_v4': '1.2.3.4', 'access_ip_v6': 'fe80::' } self._servers_post(subs) def test_servers_get(self): subs = { 'image_id': fake.get_valid_image_id(), 'host': self._get_host(), 'access_ip_v4': '1.2.3.4', 'access_ip_v6': 'fe80::' } uuid = self._servers_post(subs) response = self._do_get('servers/%s' % uuid) subs['hostid'] = '[a-f0-9]+' subs['id'] = uuid self._verify_response('server-get-resp', subs, response, 200) def test_servers_details(self): subs = { 'image_id': fake.get_valid_image_id(), 'host': self._get_host(), 'access_ip_v4': '1.2.3.4', 'access_ip_v6': 'fe80::' } uuid = self._servers_post(subs) response = self._do_get('servers/detail') subs['hostid'] = '[a-f0-9]+' subs['id'] = uuid self._verify_response('servers-details-resp', subs, response, 200) def test_servers_rebuild(self): subs = { 'image_id': fake.get_valid_image_id(), 'host': self._get_host(), 'access_ip_v4': '1.2.3.4', 'access_ip_v6': 'fe80::' } uuid = self._servers_post(subs) subs['access_ip_v4'] = "4.3.2.1" subs['access_ip_v6'] = '80fe::' response = self._do_post('servers/%s/action' % uuid, 'server-action-rebuild', subs) subs['hostid'] = '[a-f0-9]+' subs['id'] = uuid self._verify_response('server-action-rebuild-resp', subs, response, 202) def test_servers_update(self): subs = { 'image_id': fake.get_valid_image_id(), 'host': self._get_host(), 'access_ip_v4': '1.2.3.4', 'access_ip_v6': 'fe80::' } uuid = self._servers_post(subs) subs['access_ip_v4'] = "4.3.2.1" subs['access_ip_v6'] = '80fe::' response = self._do_put('servers/%s' % uuid, 'server-put-req', subs) subs['hostid'] = '[a-f0-9]+' subs['id'] = uuid self._verify_response('server-put-resp', subs, response, 200)
apache-2.0
N-Parsons/exercism-python
exercises/simple-cipher/simple_cipher_test.py
2
2510
import unittest import re from simple_cipher import Cipher # Tests adapted from `problem-specifications//canonical-data.json` @ v2.0.0 class SimpleCipherTest(unittest.TestCase): # Utility functions def setUp(self): try: self.assertRaisesRegex except AttributeError: self.assertRaisesRegex = self.assertRaisesRegexp def assertRaisesWithMessage(self, exception): return self.assertRaisesRegex(exception, r".+") class RandomKeyCipherTest(SimpleCipherTest): def test_can_encode(self): cipher = Cipher() plaintext = 'aaaaaaaaaa' self.assertEqual(cipher.encode(plaintext), cipher.key[:len(plaintext)]) def test_can_decode(self): cipher = Cipher() plaintext = 'aaaaaaaaaa' self.assertEqual(cipher.decode(cipher.key[:len(plaintext)]), plaintext) def test_is_reversible(self): cipher = Cipher() plaintext = 'abcdefghij' self.assertEqual(cipher.decode(cipher.encode(plaintext)), plaintext) def test_key_is_only_made_of_lowercase_letters(self): self.assertIsNotNone(re.match('^[a-z]+$', Cipher().key)) class SubstitutionCipherTest(SimpleCipherTest): def test_can_encode(self): cipher = Cipher('abcdefghij') self.assertEqual(cipher.encode('aaaaaaaaaa'), cipher.key) def test_can_decode(self): cipher = Cipher('abcdefghij') self.assertEqual(cipher.decode(cipher.key), 'aaaaaaaaaa') def test_is_reversible(self): cipher = Cipher('abcdefghij') plaintext = 'abcdefghij' self.assertEqual(cipher.decode(cipher.encode(plaintext)), plaintext) def test_can_double_shift_encode(self): plaintext = 'iamapandabear' cipher = Cipher(plaintext) self.assertEqual(cipher.encode(plaintext), 'qayaeaagaciai') def test_can_wrap_on_encode(self): cipher = Cipher('abcdefghij') self.assertEqual(cipher.encode('zzzzzzzzzz'), 'zabcdefghi') def test_can_wrap_on_decode(self): cipher = Cipher('abcdefghij') self.assertEqual(cipher.decode('zabcdefghi'), 'zzzzzzzzzz') def test_can_encode_messages_longer_than_key(self): cipher = Cipher('abc') self.assertEqual(cipher.encode('iamapandabear'), 'iboaqcnecbfcr') def 
test_can_decode_messages_longer_than_key(self): cipher = Cipher('abc') self.assertEqual(cipher.decode('iboaqcnecbfcr'), 'iamapandabear') if __name__ == '__main__': unittest.main()
mit
lorensen/VTKExamples
src/Python/Visualization/ColorSeriesPatches.py
1
7594
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Produce a HTML page called VTKColorSeriesPatches.html showing the available color series in vtkColorSeries. It also shows how to select the text color based on luminance. In this case Digital CCIR601 is used which gives less weight to the red and blue components of a color. """ from __future__ import print_function import vtk def main(): ncpt = HTMLTableMaker() res = ncpt.MakeHTMLTable() f = open("VTKColorSeriesPatches.html", "w", newline="\n") f.write(res) f.close() class HTMLToFromRGBAColor: @staticmethod def RGBToHTMLColor(rgb): """ Convert an [R, G, B] list to #RRGGBB. :param: rgb - The elements of the array rgb are unsigned chars (0..255). :return: The html color. """ hexcolor = "#" + ''.join(['{:02x}'.format(x) for x in rgb]) return hexcolor @staticmethod def HTMLColorToRGB(colorString): """ Convert #RRGGBB to a [R, G, B] list. :param: colorString a string in the form: #RRGGBB where RR, GG, BB are hexadecimal. The elements of the array rgb are unsigned chars (0..255). :return: The red, green and blue components as a list. """ colorString = colorString.strip() if colorString[0] == '#': colorString = colorString[1:] if len(colorString) != 6: raise ValueError("Input #%s is not in #RRGGBB format" % colorString) r, g, b = colorString[:2], colorString[2:4], colorString[4:] r, g, b = [int(n, 16) for n in (r, g, b)] return [r, g, b] @staticmethod def RGBToLumaCCIR601(rgb): """ RGB -> Luma conversion Digital CCIR601 (gives less weight to the R and B components) :param: rgb - The elements of the array rgb are unsigned chars (0..255). :return: The luminance. """ Y = 0.299 * rgb[0] + 0.587 * rgb[1] + 0.114 * rgb[2] return Y class ColorStructures: """ Holds the color series id, name and colors. 
""" cs_colors = dict() max_colors = 0 def __init__(self): cs = vtk.vtkColorSeries() sizes = list() for i in range(0, cs.GetNumberOfColorSchemes()): cs.SetColorScheme(i) sizes.append(cs.GetNumberOfColors()) vc = list() for j in range(0, cs.GetNumberOfColors()): vc.append(cs.GetColor(j)) self.cs_colors[i] = [cs.GetColorSchemeName(), vc] self.max_colors = max(sizes) class HTMLTableMaker: """ This class creates HTML Tables displaying all the colors in the class vtkNamedColors grouped by various categories. """ def __init__(self): self.cs = ColorStructures() self.nc = vtk.vtkNamedColors() self.htmlRGBA = HTMLToFromRGBAColor() @staticmethod def MakeHTMLStyle(): s = ' <style>\n' s += '\n' s += ' body {\n' s += ' background-color: snow\n' s += ' }\n' s += ' h1 {text-align:left;}\n' s += ' h2 {text-align:left;}\n' s += ' h3 {text-align:left;}\n' s += ' h4 {text-align:left;}\n' s += ' h5 {text-align:left;}\n' s += ' h6 {text-align:left;}\n' s += '\n' s += ' p {text-align:left;}\n' s += '\n' s += ' table {\n' s += ' font-family: arial, sans-serif;\n' s += ' border-collapse: collapse;\n' s += ' font-size: medium;\n' s += ' padding: 4px;\n' s += ' }\n' s += '\n' s += ' th {\n' s += ' background: LightSteelBlue;\n' s += ' font-size: medium;\n' s += ' }\n' s += '\n' s += ' th[colspan]:not([colspan="1"]) {\n' s += ' background: LightSteelBlue;\n' s += ' font-size: medium;\n' s += ' text-align : center;\n' s += ' vertical-align : top;\n' s += ' }\n' s += '\n' s += ' tr {\n' s += ' background: MintCream;\n' s += ' vertical-align : top;\n' s += ' }\n' s += '\n' s += ' td {\n' s += ' background: MintCream;\n' s += ' border: 1px solid #dddddd;\n' s += ' text-align: left;\n' s += ' padding: 8px;\n' s += ' font-family: monospace;\n' s += ' font-size: medium;\n' s += ' font-weight: bold;\n' s += ' }\n' s += '\n' s += ' td[colspan]:not([colspan="1"]) {\n' s += ' text-align : center;\n' s += ' }\n' s += '\n' s += ' .cour {\n' s += ' font-family: Courier;\n' s += ' }\n' s += '\n' s += ' 
html, body {\n' s += ' height: 100%;\n' s += ' }\n' s += '\n' s += ' html {\n' s += ' display: table;\n' s += ' margin: auto;\n' s += ' }\n' s += '\n' s += ' body {\n' s += ' display: table-cell;\n' s += ' vertical-align: middle;\n' s += ' }\n' s += '\n' s += ' thead {color: DarkGreen;}\n' s += ' tbody {color: MidnightBlue;}\n' s += ' tfoot {color: SaddleBrown;}\n' s += '\n' s += ' </style>\n' return s def MakeHTMLHeader(self): s = '<!DOCTYPE html>\n' s += '<html lang="en">\n' s += '<head>\n' s += '<meta charset="UTF-8" />\n' s += '<title>vtkColorSeries</title>\n' s += self.MakeHTMLStyle() s += '</head>\n' return s def MakeTableHeader(self): s = '<tr>\n' s += '<th>Index</th>\n' s += '<th colspan="' + str(self.cs.max_colors) + '">Name</th>\n' s += '</tr>\n' s += '<tr>\n' s += '<th></th>\n' s += '<th colspan="' + str(self.cs.max_colors) + '">Colors in the Series</th>\n' s += '</tr>\n' return s def MakeTD1(self, idx, name): s = '<tr>\n' s += '<td>' s += '<b>' + str(idx) + '</b>' s += '</td>\n' s += '<td colspan="' + str(self.cs.max_colors) + '">' s += '<b>' + name + '</b>' s += '</td>\n' s += '</tr>\n' return s def MakeTD2(self, rgbs): s = '<tr>\n' s += '<td></td>\n' cnt = 0 for p in rgbs: ss = '{:3d} '.format(cnt) ss = ss.replace(' ', '&#160;') y = self.htmlRGBA.RGBToLumaCCIR601(p) textColor = '#000000' # Black if y < 255 / 2.0: textColor = '#ffffff' # White s += '<td style="background:' + self.htmlRGBA.RGBToHTMLColor(p) + ';color:' s += textColor + '">' + ss + '</td>\n' cnt += 1 if cnt < self.cs.max_colors: s += '<td colspan="' + str(self.cs.max_colors - cnt) + '"> &#160; </td>\n' s += '</tr>\n' return s def MakeTable(self): res = self.MakeTableHeader() for idx, v in self.cs.cs_colors.items(): name = v[0] res += self.MakeTD1(idx, name) res += self.MakeTD2(v[1]) return res def MakeHTMLTable(self): res = self.MakeHTMLHeader() res += '<body>\n' res += '<h1>Color series available in vtkColorSeries</h1>\n' res += '<table>\n' res += self.MakeTable() res += '</table>\n' 
res += '</body>\n' return res if __name__ == "__main__": main()
apache-2.0
itmanagerro/tresting
qa/rpc-tests/test_framework/mininode.py
1
54396
#!/usr/bin/env python3 # Copyright (c) 2010 ArtForz -- public domain half-a-node # Copyright (c) 2012 Jeff Garzik # Copyright (c) 2010-2016 The Bitcoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # mininode.py - Bitcoin P2P network half-a-node # # This python code was modified from ArtForz' public domain half-a-node, as # found in the mini-node branch of http://github.com/jgarzik/pynode. # # NodeConn: an object which manages p2p connectivity to a bitcoin node # NodeConnCB: a base class that describes the interface for receiving # callbacks with network messages from a NodeConn # CBlock, CTransaction, CBlockHeader, CTxIn, CTxOut, etc....: # data structures that should map to corresponding structures in # bitcoin/primitives # msg_block, msg_tx, msg_headers, etc.: # data structures that represent network messages # ser_*, deser_*: functions that handle serialization/deserialization import struct import socket import asyncore import time import sys import random from .util import hex_str_to_bytes, bytes_to_hex_str from io import BytesIO from codecs import encode import hashlib from threading import RLock from threading import Thread import logging import copy import linuxcoin_scrypt from test_framework.siphash import siphash256 BIP0031_VERSION = 60000 MY_VERSION = 80014 # past bip-31 for ping/pong MY_SUBVERSION = b"/python-mininode-tester:0.0.3/" MAX_INV_SZ = 50000 MAX_BLOCK_SIZE = 1000000 COIN = 100000000 # 1 btc in satoshis NODE_NETWORK = (1 << 0) NODE_GETUTXO = (1 << 1) NODE_BLOOM = (1 << 2) NODE_WITNESS = (1 << 3) # Keep our own socket map for asyncore, so that we can track disconnects # ourselves (to workaround an issue with closing an asyncore socket when # using select) mininode_socket_map = dict() # One lock for synchronizing all data access between the networking thread (see # NetworkThread below) and the thread running the test logic. 
For simplicity, # NodeConn acquires this lock whenever delivering a message to to a NodeConnCB, # and whenever adding anything to the send buffer (in send_message()). This # lock should be acquired in the thread running the test logic to synchronize # access to any data shared with the NodeConnCB or NodeConn. mininode_lock = RLock() # Serialization/deserialization tools def sha256(s): return hashlib.new('sha256', s).digest() def ripemd160(s): return hashlib.new('ripemd160', s).digest() def hash256(s): return sha256(sha256(s)) def ser_compact_size(l): r = b"" if l < 253: r = struct.pack("B", l) elif l < 0x10000: r = struct.pack("<BH", 253, l) elif l < 0x100000000: r = struct.pack("<BI", 254, l) else: r = struct.pack("<BQ", 255, l) return r def deser_compact_size(f): nit = struct.unpack("<B", f.read(1))[0] if nit == 253: nit = struct.unpack("<H", f.read(2))[0] elif nit == 254: nit = struct.unpack("<I", f.read(4))[0] elif nit == 255: nit = struct.unpack("<Q", f.read(8))[0] return nit def deser_string(f): nit = deser_compact_size(f) return f.read(nit) def ser_string(s): return ser_compact_size(len(s)) + s def deser_uint256(f): r = 0 for i in range(8): t = struct.unpack("<I", f.read(4))[0] r += t << (i * 32) return r def ser_uint256(u): rs = b"" for i in range(8): rs += struct.pack("<I", u & 0xFFFFFFFF) u >>= 32 return rs def uint256_from_str(s): r = 0 t = struct.unpack("<IIIIIIII", s[:32]) for i in range(8): r += t[i] << (i * 32) return r def uint256_from_compact(c): nbytes = (c >> 24) & 0xFF v = (c & 0xFFFFFF) << (8 * (nbytes - 3)) return v def deser_vector(f, c): nit = deser_compact_size(f) r = [] for i in range(nit): t = c() t.deserialize(f) r.append(t) return r # ser_function_name: Allow for an alternate serialization function on the # entries in the vector (we use this for serializing the vector of transactions # for a witness block). 
def ser_vector(l, ser_function_name=None): r = ser_compact_size(len(l)) for i in l: if ser_function_name: r += getattr(i, ser_function_name)() else: r += i.serialize() return r def deser_uint256_vector(f): nit = deser_compact_size(f) r = [] for i in range(nit): t = deser_uint256(f) r.append(t) return r def ser_uint256_vector(l): r = ser_compact_size(len(l)) for i in l: r += ser_uint256(i) return r def deser_string_vector(f): nit = deser_compact_size(f) r = [] for i in range(nit): t = deser_string(f) r.append(t) return r def ser_string_vector(l): r = ser_compact_size(len(l)) for sv in l: r += ser_string(sv) return r def deser_int_vector(f): nit = deser_compact_size(f) r = [] for i in range(nit): t = struct.unpack("<i", f.read(4))[0] r.append(t) return r def ser_int_vector(l): r = ser_compact_size(len(l)) for i in l: r += struct.pack("<i", i) return r # Deserialize from a hex string representation (eg from RPC) def FromHex(obj, hex_string): obj.deserialize(BytesIO(hex_str_to_bytes(hex_string))) return obj # Convert a binary-serializable object to hex (eg for submission via RPC) def ToHex(obj): return bytes_to_hex_str(obj.serialize()) # Objects that map to bitcoind objects, which can be serialized/deserialized class CAddress(object): def __init__(self): self.nServices = 1 self.pchReserved = b"\x00" * 10 + b"\xff" * 2 self.ip = "0.0.0.0" self.port = 0 def deserialize(self, f): self.nServices = struct.unpack("<Q", f.read(8))[0] self.pchReserved = f.read(12) self.ip = socket.inet_ntoa(f.read(4)) self.port = struct.unpack(">H", f.read(2))[0] def serialize(self): r = b"" r += struct.pack("<Q", self.nServices) r += self.pchReserved r += socket.inet_aton(self.ip) r += struct.pack(">H", self.port) return r def __repr__(self): return "CAddress(nServices=%i ip=%s port=%i)" % (self.nServices, self.ip, self.port) MSG_WITNESS_FLAG = 1<<30 class CInv(object): typemap = { 0: "Error", 1: "TX", 2: "Block", 1|MSG_WITNESS_FLAG: "WitnessTx", 2|MSG_WITNESS_FLAG : "WitnessBlock", 4: 
"CompactBlock" } def __init__(self, t=0, h=0): self.type = t self.hash = h def deserialize(self, f): self.type = struct.unpack("<i", f.read(4))[0] self.hash = deser_uint256(f) def serialize(self): r = b"" r += struct.pack("<i", self.type) r += ser_uint256(self.hash) return r def __repr__(self): return "CInv(type=%s hash=%064x)" \ % (self.typemap[self.type], self.hash) class CBlockLocator(object): def __init__(self): self.nVersion = MY_VERSION self.vHave = [] def deserialize(self, f): self.nVersion = struct.unpack("<i", f.read(4))[0] self.vHave = deser_uint256_vector(f) def serialize(self): r = b"" r += struct.pack("<i", self.nVersion) r += ser_uint256_vector(self.vHave) return r def __repr__(self): return "CBlockLocator(nVersion=%i vHave=%s)" \ % (self.nVersion, repr(self.vHave)) class COutPoint(object): def __init__(self, hash=0, n=0): self.hash = hash self.n = n def deserialize(self, f): self.hash = deser_uint256(f) self.n = struct.unpack("<I", f.read(4))[0] def serialize(self): r = b"" r += ser_uint256(self.hash) r += struct.pack("<I", self.n) return r def __repr__(self): return "COutPoint(hash=%064x n=%i)" % (self.hash, self.n) class CTxIn(object): def __init__(self, outpoint=None, scriptSig=b"", nSequence=0): if outpoint is None: self.prevout = COutPoint() else: self.prevout = outpoint self.scriptSig = scriptSig self.nSequence = nSequence def deserialize(self, f): self.prevout = COutPoint() self.prevout.deserialize(f) self.scriptSig = deser_string(f) self.nSequence = struct.unpack("<I", f.read(4))[0] def serialize(self): r = b"" r += self.prevout.serialize() r += ser_string(self.scriptSig) r += struct.pack("<I", self.nSequence) return r def __repr__(self): return "CTxIn(prevout=%s scriptSig=%s nSequence=%i)" \ % (repr(self.prevout), bytes_to_hex_str(self.scriptSig), self.nSequence) class CTxOut(object): def __init__(self, nValue=0, scriptPubKey=b""): self.nValue = nValue self.scriptPubKey = scriptPubKey def deserialize(self, f): self.nValue = 
struct.unpack("<q", f.read(8))[0] self.scriptPubKey = deser_string(f) def serialize(self): r = b"" r += struct.pack("<q", self.nValue) r += ser_string(self.scriptPubKey) return r def __repr__(self): return "CTxOut(nValue=%i.%08i scriptPubKey=%s)" \ % (self.nValue // COIN, self.nValue % COIN, bytes_to_hex_str(self.scriptPubKey)) class CScriptWitness(object): def __init__(self): # stack is a vector of strings self.stack = [] def __repr__(self): return "CScriptWitness(%s)" % \ (",".join([bytes_to_hex_str(x) for x in self.stack])) def is_null(self): if self.stack: return False return True class CTxInWitness(object): def __init__(self): self.scriptWitness = CScriptWitness() def deserialize(self, f): self.scriptWitness.stack = deser_string_vector(f) def serialize(self): return ser_string_vector(self.scriptWitness.stack) def __repr__(self): return repr(self.scriptWitness) def is_null(self): return self.scriptWitness.is_null() class CTxWitness(object): def __init__(self): self.vtxinwit = [] def deserialize(self, f): for i in range(len(self.vtxinwit)): self.vtxinwit[i].deserialize(f) def serialize(self): r = b"" # This is different than the usual vector serialization -- # we omit the length of the vector, which is required to be # the same length as the transaction's vin vector. 
for x in self.vtxinwit: r += x.serialize() return r def __repr__(self): return "CTxWitness(%s)" % \ (';'.join([repr(x) for x in self.vtxinwit])) def is_null(self): for x in self.vtxinwit: if not x.is_null(): return False return True class CTransaction(object): def __init__(self, tx=None): if tx is None: self.nVersion = 1 self.vin = [] self.vout = [] self.wit = CTxWitness() self.nLockTime = 0 self.sha256 = None self.hash = None else: self.nVersion = tx.nVersion self.vin = copy.deepcopy(tx.vin) self.vout = copy.deepcopy(tx.vout) self.nLockTime = tx.nLockTime self.sha256 = tx.sha256 self.hash = tx.hash self.wit = copy.deepcopy(tx.wit) def deserialize(self, f): self.nVersion = struct.unpack("<i", f.read(4))[0] self.vin = deser_vector(f, CTxIn) flags = 0 if len(self.vin) == 0: flags = struct.unpack("<B", f.read(1))[0] # Not sure why flags can't be zero, but this # matches the implementation in bitcoind if (flags != 0): self.vin = deser_vector(f, CTxIn) self.vout = deser_vector(f, CTxOut) else: self.vout = deser_vector(f, CTxOut) if flags != 0: self.wit.vtxinwit = [CTxInWitness() for i in range(len(self.vin))] self.wit.deserialize(f) self.nLockTime = struct.unpack("<I", f.read(4))[0] self.sha256 = None self.hash = None def serialize_without_witness(self): r = b"" r += struct.pack("<i", self.nVersion) r += ser_vector(self.vin) r += ser_vector(self.vout) r += struct.pack("<I", self.nLockTime) return r # Only serialize with witness when explicitly called for def serialize_with_witness(self): flags = 0 if not self.wit.is_null(): flags |= 1 r = b"" r += struct.pack("<i", self.nVersion) if flags: dummy = [] r += ser_vector(dummy) r += struct.pack("<B", flags) r += ser_vector(self.vin) r += ser_vector(self.vout) if flags & 1: if (len(self.wit.vtxinwit) != len(self.vin)): # vtxinwit must have the same length as vin self.wit.vtxinwit = self.wit.vtxinwit[:len(self.vin)] for i in range(len(self.wit.vtxinwit), len(self.vin)): self.wit.vtxinwit.append(CTxInWitness()) r += 
self.wit.serialize() r += struct.pack("<I", self.nLockTime) return r # Regular serialization is without witness -- must explicitly # call serialize_with_witness to include witness data. def serialize(self): return self.serialize_without_witness() # Recalculate the txid (transaction hash without witness) def rehash(self): self.sha256 = None self.calc_sha256() # We will only cache the serialization without witness in # self.sha256 and self.hash -- those are expected to be the txid. def calc_sha256(self, with_witness=False): if with_witness: # Don't cache the result, just return it return uint256_from_str(hash256(self.serialize_with_witness())) if self.sha256 is None: self.sha256 = uint256_from_str(hash256(self.serialize_without_witness())) self.hash = encode(hash256(self.serialize())[::-1], 'hex_codec').decode('ascii') def is_valid(self): self.calc_sha256() for tout in self.vout: if tout.nValue < 0 or tout.nValue > 84000000 * COIN: return False return True def __repr__(self): return "CTransaction(nVersion=%i vin=%s vout=%s wit=%s nLockTime=%i)" \ % (self.nVersion, repr(self.vin), repr(self.vout), repr(self.wit), self.nLockTime) class CBlockHeader(object): def __init__(self, header=None): if header is None: self.set_null() else: self.nVersion = header.nVersion self.hashPrevBlock = header.hashPrevBlock self.hashMerkleRoot = header.hashMerkleRoot self.nTime = header.nTime self.nBits = header.nBits self.nNonce = header.nNonce self.sha256 = header.sha256 self.hash = header.hash self.scrypt256 = header.scrypt256 self.calc_sha256() def set_null(self): self.nVersion = 1 self.hashPrevBlock = 0 self.hashMerkleRoot = 0 self.nTime = 0 self.nBits = 0 self.nNonce = 0 self.sha256 = None self.hash = None self.scrypt256 = None def deserialize(self, f): self.nVersion = struct.unpack("<i", f.read(4))[0] self.hashPrevBlock = deser_uint256(f) self.hashMerkleRoot = deser_uint256(f) self.nTime = struct.unpack("<I", f.read(4))[0] self.nBits = struct.unpack("<I", f.read(4))[0] self.nNonce = 
struct.unpack("<I", f.read(4))[0] self.sha256 = None self.hash = None self.scrypt256 = None def serialize(self): r = b"" r += struct.pack("<i", self.nVersion) r += ser_uint256(self.hashPrevBlock) r += ser_uint256(self.hashMerkleRoot) r += struct.pack("<I", self.nTime) r += struct.pack("<I", self.nBits) r += struct.pack("<I", self.nNonce) return r def calc_sha256(self): if self.sha256 is None: r = b"" r += struct.pack("<i", self.nVersion) r += ser_uint256(self.hashPrevBlock) r += ser_uint256(self.hashMerkleRoot) r += struct.pack("<I", self.nTime) r += struct.pack("<I", self.nBits) r += struct.pack("<I", self.nNonce) self.sha256 = uint256_from_str(hash256(r)) self.hash = encode(hash256(r)[::-1], 'hex_codec').decode('ascii') self.scrypt256 = uint256_from_str(linuxcoin_scrypt.getPoWHash(r)) def rehash(self): self.sha256 = None self.scrypt256 = None self.calc_sha256() return self.sha256 def __repr__(self): return "CBlockHeader(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x)" \ % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, time.ctime(self.nTime), self.nBits, self.nNonce) class CBlock(CBlockHeader): def __init__(self, header=None): super(CBlock, self).__init__(header) self.vtx = [] def deserialize(self, f): super(CBlock, self).deserialize(f) self.vtx = deser_vector(f, CTransaction) def serialize(self, with_witness=False): r = b"" r += super(CBlock, self).serialize() if with_witness: r += ser_vector(self.vtx, "serialize_with_witness") else: r += ser_vector(self.vtx) return r # Calculate the merkle root given a vector of transaction hashes def get_merkle_root(self, hashes): while len(hashes) > 1: newhashes = [] for i in range(0, len(hashes), 2): i2 = min(i+1, len(hashes)-1) newhashes.append(hash256(hashes[i] + hashes[i2])) hashes = newhashes return uint256_from_str(hashes[0]) def calc_merkle_root(self): hashes = [] for tx in self.vtx: tx.calc_sha256() hashes.append(ser_uint256(tx.sha256)) return self.get_merkle_root(hashes) 
def calc_witness_merkle_root(self): # For witness root purposes, the hash of the # coinbase, with witness, is defined to be 0...0 hashes = [ser_uint256(0)] for tx in self.vtx[1:]: # Calculate the hashes with witness data hashes.append(ser_uint256(tx.calc_sha256(True))) return self.get_merkle_root(hashes) def is_valid(self): self.calc_sha256() target = uint256_from_compact(self.nBits) if self.scrypt256 > target: return False for tx in self.vtx: if not tx.is_valid(): return False if self.calc_merkle_root() != self.hashMerkleRoot: return False return True def solve(self): self.rehash() target = uint256_from_compact(self.nBits) while self.scrypt256 > target: self.nNonce += 1 self.rehash() def __repr__(self): return "CBlock(nVersion=%i hashPrevBlock=%064x hashMerkleRoot=%064x nTime=%s nBits=%08x nNonce=%08x vtx=%s)" \ % (self.nVersion, self.hashPrevBlock, self.hashMerkleRoot, time.ctime(self.nTime), self.nBits, self.nNonce, repr(self.vtx)) class CUnsignedAlert(object): def __init__(self): self.nVersion = 1 self.nRelayUntil = 0 self.nExpiration = 0 self.nID = 0 self.nCancel = 0 self.setCancel = [] self.nMinVer = 0 self.nMaxVer = 0 self.setSubVer = [] self.nPriority = 0 self.strComment = b"" self.strStatusBar = b"" self.strReserved = b"" def deserialize(self, f): self.nVersion = struct.unpack("<i", f.read(4))[0] self.nRelayUntil = struct.unpack("<q", f.read(8))[0] self.nExpiration = struct.unpack("<q", f.read(8))[0] self.nID = struct.unpack("<i", f.read(4))[0] self.nCancel = struct.unpack("<i", f.read(4))[0] self.setCancel = deser_int_vector(f) self.nMinVer = struct.unpack("<i", f.read(4))[0] self.nMaxVer = struct.unpack("<i", f.read(4))[0] self.setSubVer = deser_string_vector(f) self.nPriority = struct.unpack("<i", f.read(4))[0] self.strComment = deser_string(f) self.strStatusBar = deser_string(f) self.strReserved = deser_string(f) def serialize(self): r = b"" r += struct.pack("<i", self.nVersion) r += struct.pack("<q", self.nRelayUntil) r += struct.pack("<q", 
self.nExpiration) r += struct.pack("<i", self.nID) r += struct.pack("<i", self.nCancel) r += ser_int_vector(self.setCancel) r += struct.pack("<i", self.nMinVer) r += struct.pack("<i", self.nMaxVer) r += ser_string_vector(self.setSubVer) r += struct.pack("<i", self.nPriority) r += ser_string(self.strComment) r += ser_string(self.strStatusBar) r += ser_string(self.strReserved) return r def __repr__(self): return "CUnsignedAlert(nVersion %d, nRelayUntil %d, nExpiration %d, nID %d, nCancel %d, nMinVer %d, nMaxVer %d, nPriority %d, strComment %s, strStatusBar %s, strReserved %s)" \ % (self.nVersion, self.nRelayUntil, self.nExpiration, self.nID, self.nCancel, self.nMinVer, self.nMaxVer, self.nPriority, self.strComment, self.strStatusBar, self.strReserved) class CAlert(object): def __init__(self): self.vchMsg = b"" self.vchSig = b"" def deserialize(self, f): self.vchMsg = deser_string(f) self.vchSig = deser_string(f) def serialize(self): r = b"" r += ser_string(self.vchMsg) r += ser_string(self.vchSig) return r def __repr__(self): return "CAlert(vchMsg.sz %d, vchSig.sz %d)" \ % (len(self.vchMsg), len(self.vchSig)) class PrefilledTransaction(object): def __init__(self, index=0, tx = None): self.index = index self.tx = tx def deserialize(self, f): self.index = deser_compact_size(f) self.tx = CTransaction() self.tx.deserialize(f) def serialize(self, with_witness=False): r = b"" r += ser_compact_size(self.index) if with_witness: r += self.tx.serialize_with_witness() else: r += self.tx.serialize_without_witness() return r def serialize_with_witness(self): return self.serialize(with_witness=True) def __repr__(self): return "PrefilledTransaction(index=%d, tx=%s)" % (self.index, repr(self.tx)) # This is what we send on the wire, in a cmpctblock message. 
class P2PHeaderAndShortIDs(object): def __init__(self): self.header = CBlockHeader() self.nonce = 0 self.shortids_length = 0 self.shortids = [] self.prefilled_txn_length = 0 self.prefilled_txn = [] def deserialize(self, f): self.header.deserialize(f) self.nonce = struct.unpack("<Q", f.read(8))[0] self.shortids_length = deser_compact_size(f) for i in range(self.shortids_length): # shortids are defined to be 6 bytes in the spec, so append # two zero bytes and read it in as an 8-byte number self.shortids.append(struct.unpack("<Q", f.read(6) + b'\x00\x00')[0]) self.prefilled_txn = deser_vector(f, PrefilledTransaction) self.prefilled_txn_length = len(self.prefilled_txn) # When using version 2 compact blocks, we must serialize with_witness. def serialize(self, with_witness=False): r = b"" r += self.header.serialize() r += struct.pack("<Q", self.nonce) r += ser_compact_size(self.shortids_length) for x in self.shortids: # We only want the first 6 bytes r += struct.pack("<Q", x)[0:6] if with_witness: r += ser_vector(self.prefilled_txn, "serialize_with_witness") else: r += ser_vector(self.prefilled_txn) return r def __repr__(self): return "P2PHeaderAndShortIDs(header=%s, nonce=%d, shortids_length=%d, shortids=%s, prefilled_txn_length=%d, prefilledtxn=%s" % (repr(self.header), self.nonce, self.shortids_length, repr(self.shortids), self.prefilled_txn_length, repr(self.prefilled_txn)) # P2P version of the above that will use witness serialization (for compact # block version 2) class P2PHeaderAndShortWitnessIDs(P2PHeaderAndShortIDs): def serialize(self): return super(P2PHeaderAndShortWitnessIDs, self).serialize(with_witness=True) # Calculate the BIP 152-compact blocks shortid for a given transaction hash def calculate_shortid(k0, k1, tx_hash): expected_shortid = siphash256(k0, k1, tx_hash) expected_shortid &= 0x0000ffffffffffff return expected_shortid # This version gets rid of the array lengths, and reinterprets the differential # encoding into indices that can be used for 
lookup. class HeaderAndShortIDs(object): def __init__(self, p2pheaders_and_shortids = None): self.header = CBlockHeader() self.nonce = 0 self.shortids = [] self.prefilled_txn = [] self.use_witness = False if p2pheaders_and_shortids != None: self.header = p2pheaders_and_shortids.header self.nonce = p2pheaders_and_shortids.nonce self.shortids = p2pheaders_and_shortids.shortids last_index = -1 for x in p2pheaders_and_shortids.prefilled_txn: self.prefilled_txn.append(PrefilledTransaction(x.index + last_index + 1, x.tx)) last_index = self.prefilled_txn[-1].index def to_p2p(self): if self.use_witness: ret = P2PHeaderAndShortWitnessIDs() else: ret = P2PHeaderAndShortIDs() ret.header = self.header ret.nonce = self.nonce ret.shortids_length = len(self.shortids) ret.shortids = self.shortids ret.prefilled_txn_length = len(self.prefilled_txn) ret.prefilled_txn = [] last_index = -1 for x in self.prefilled_txn: ret.prefilled_txn.append(PrefilledTransaction(x.index - last_index - 1, x.tx)) last_index = x.index return ret def get_siphash_keys(self): header_nonce = self.header.serialize() header_nonce += struct.pack("<Q", self.nonce) hash_header_nonce_as_str = sha256(header_nonce) key0 = struct.unpack("<Q", hash_header_nonce_as_str[0:8])[0] key1 = struct.unpack("<Q", hash_header_nonce_as_str[8:16])[0] return [ key0, key1 ] # Version 2 compact blocks use wtxid in shortids (rather than txid) def initialize_from_block(self, block, nonce=0, prefill_list = [0], use_witness = False): self.header = CBlockHeader(block) self.nonce = nonce self.prefilled_txn = [ PrefilledTransaction(i, block.vtx[i]) for i in prefill_list ] self.shortids = [] self.use_witness = use_witness [k0, k1] = self.get_siphash_keys() for i in range(len(block.vtx)): if i not in prefill_list: tx_hash = block.vtx[i].sha256 if use_witness: tx_hash = block.vtx[i].calc_sha256(with_witness=True) self.shortids.append(calculate_shortid(k0, k1, tx_hash)) def __repr__(self): return "HeaderAndShortIDs(header=%s, nonce=%d, 
shortids=%s, prefilledtxn=%s" % (repr(self.header), self.nonce, repr(self.shortids), repr(self.prefilled_txn)) class BlockTransactionsRequest(object): def __init__(self, blockhash=0, indexes = None): self.blockhash = blockhash self.indexes = indexes if indexes != None else [] def deserialize(self, f): self.blockhash = deser_uint256(f) indexes_length = deser_compact_size(f) for i in range(indexes_length): self.indexes.append(deser_compact_size(f)) def serialize(self): r = b"" r += ser_uint256(self.blockhash) r += ser_compact_size(len(self.indexes)) for x in self.indexes: r += ser_compact_size(x) return r # helper to set the differentially encoded indexes from absolute ones def from_absolute(self, absolute_indexes): self.indexes = [] last_index = -1 for x in absolute_indexes: self.indexes.append(x-last_index-1) last_index = x def to_absolute(self): absolute_indexes = [] last_index = -1 for x in self.indexes: absolute_indexes.append(x+last_index+1) last_index = absolute_indexes[-1] return absolute_indexes def __repr__(self): return "BlockTransactionsRequest(hash=%064x indexes=%s)" % (self.blockhash, repr(self.indexes)) class BlockTransactions(object): def __init__(self, blockhash=0, transactions = None): self.blockhash = blockhash self.transactions = transactions if transactions != None else [] def deserialize(self, f): self.blockhash = deser_uint256(f) self.transactions = deser_vector(f, CTransaction) def serialize(self, with_witness=False): r = b"" r += ser_uint256(self.blockhash) if with_witness: r += ser_vector(self.transactions, "serialize_with_witness") else: r += ser_vector(self.transactions) return r def __repr__(self): return "BlockTransactions(hash=%064x transactions=%s)" % (self.blockhash, repr(self.transactions)) # Objects that correspond to messages on the wire class msg_version(object): command = b"version" def __init__(self): self.nVersion = MY_VERSION self.nServices = 1 self.nTime = int(time.time()) self.addrTo = CAddress() self.addrFrom = CAddress() 
self.nNonce = random.getrandbits(64) self.strSubVer = MY_SUBVERSION self.nStartingHeight = -1 def deserialize(self, f): self.nVersion = struct.unpack("<i", f.read(4))[0] if self.nVersion == 10300: self.nVersion = 300 self.nServices = struct.unpack("<Q", f.read(8))[0] self.nTime = struct.unpack("<q", f.read(8))[0] self.addrTo = CAddress() self.addrTo.deserialize(f) if self.nVersion >= 106: self.addrFrom = CAddress() self.addrFrom.deserialize(f) self.nNonce = struct.unpack("<Q", f.read(8))[0] self.strSubVer = deser_string(f) if self.nVersion >= 209: self.nStartingHeight = struct.unpack("<i", f.read(4))[0] else: self.nStartingHeight = None else: self.addrFrom = None self.nNonce = None self.strSubVer = None self.nStartingHeight = None def serialize(self): r = b"" r += struct.pack("<i", self.nVersion) r += struct.pack("<Q", self.nServices) r += struct.pack("<q", self.nTime) r += self.addrTo.serialize() r += self.addrFrom.serialize() r += struct.pack("<Q", self.nNonce) r += ser_string(self.strSubVer) r += struct.pack("<i", self.nStartingHeight) return r def __repr__(self): return 'msg_version(nVersion=%i nServices=%i nTime=%s addrTo=%s addrFrom=%s nNonce=0x%016X strSubVer=%s nStartingHeight=%i)' \ % (self.nVersion, self.nServices, time.ctime(self.nTime), repr(self.addrTo), repr(self.addrFrom), self.nNonce, self.strSubVer, self.nStartingHeight) class msg_verack(object): command = b"verack" def __init__(self): pass def deserialize(self, f): pass def serialize(self): return b"" def __repr__(self): return "msg_verack()" class msg_addr(object): command = b"addr" def __init__(self): self.addrs = [] def deserialize(self, f): self.addrs = deser_vector(f, CAddress) def serialize(self): return ser_vector(self.addrs) def __repr__(self): return "msg_addr(addrs=%s)" % (repr(self.addrs)) class msg_alert(object): command = b"alert" def __init__(self): self.alert = CAlert() def deserialize(self, f): self.alert = CAlert() self.alert.deserialize(f) def serialize(self): r = b"" r += 
self.alert.serialize() return r def __repr__(self): return "msg_alert(alert=%s)" % (repr(self.alert), ) class msg_inv(object): command = b"inv" def __init__(self, inv=None): if inv is None: self.inv = [] else: self.inv = inv def deserialize(self, f): self.inv = deser_vector(f, CInv) def serialize(self): return ser_vector(self.inv) def __repr__(self): return "msg_inv(inv=%s)" % (repr(self.inv)) class msg_getdata(object): command = b"getdata" def __init__(self, inv=None): self.inv = inv if inv != None else [] def deserialize(self, f): self.inv = deser_vector(f, CInv) def serialize(self): return ser_vector(self.inv) def __repr__(self): return "msg_getdata(inv=%s)" % (repr(self.inv)) class msg_getblocks(object): command = b"getblocks" def __init__(self): self.locator = CBlockLocator() self.hashstop = 0 def deserialize(self, f): self.locator = CBlockLocator() self.locator.deserialize(f) self.hashstop = deser_uint256(f) def serialize(self): r = b"" r += self.locator.serialize() r += ser_uint256(self.hashstop) return r def __repr__(self): return "msg_getblocks(locator=%s hashstop=%064x)" \ % (repr(self.locator), self.hashstop) class msg_tx(object): command = b"tx" def __init__(self, tx=CTransaction()): self.tx = tx def deserialize(self, f): self.tx.deserialize(f) def serialize(self): return self.tx.serialize_without_witness() def __repr__(self): return "msg_tx(tx=%s)" % (repr(self.tx)) class msg_witness_tx(msg_tx): def serialize(self): return self.tx.serialize_with_witness() class msg_block(object): command = b"block" def __init__(self, block=None): if block is None: self.block = CBlock() else: self.block = block def deserialize(self, f): self.block.deserialize(f) def serialize(self): return self.block.serialize() def __repr__(self): return "msg_block(block=%s)" % (repr(self.block)) # for cases where a user needs tighter control over what is sent over the wire # note that the user must supply the name of the command, and the data class msg_generic(object): def 
__init__(self, command, data=None): self.command = command self.data = data def serialize(self): return self.data def __repr__(self): return "msg_generic()" class msg_witness_block(msg_block): def serialize(self): r = self.block.serialize(with_witness=True) return r class msg_getaddr(object): command = b"getaddr" def __init__(self): pass def deserialize(self, f): pass def serialize(self): return b"" def __repr__(self): return "msg_getaddr()" class msg_ping_prebip31(object): command = b"ping" def __init__(self): pass def deserialize(self, f): pass def serialize(self): return b"" def __repr__(self): return "msg_ping() (pre-bip31)" class msg_ping(object): command = b"ping" def __init__(self, nonce=0): self.nonce = nonce def deserialize(self, f): self.nonce = struct.unpack("<Q", f.read(8))[0] def serialize(self): r = b"" r += struct.pack("<Q", self.nonce) return r def __repr__(self): return "msg_ping(nonce=%08x)" % self.nonce class msg_pong(object): command = b"pong" def __init__(self, nonce=0): self.nonce = nonce def deserialize(self, f): self.nonce = struct.unpack("<Q", f.read(8))[0] def serialize(self): r = b"" r += struct.pack("<Q", self.nonce) return r def __repr__(self): return "msg_pong(nonce=%08x)" % self.nonce class msg_mempool(object): command = b"mempool" def __init__(self): pass def deserialize(self, f): pass def serialize(self): return b"" def __repr__(self): return "msg_mempool()" class msg_sendheaders(object): command = b"sendheaders" def __init__(self): pass def deserialize(self, f): pass def serialize(self): return b"" def __repr__(self): return "msg_sendheaders()" # getheaders message has # number of entries # vector of hashes # hash_stop (hash of last desired block header, 0 to get as many as possible) class msg_getheaders(object): command = b"getheaders" def __init__(self): self.locator = CBlockLocator() self.hashstop = 0 def deserialize(self, f): self.locator = CBlockLocator() self.locator.deserialize(f) self.hashstop = deser_uint256(f) def 
serialize(self): r = b"" r += self.locator.serialize() r += ser_uint256(self.hashstop) return r def __repr__(self): return "msg_getheaders(locator=%s, stop=%064x)" \ % (repr(self.locator), self.hashstop) # headers message has # <count> <vector of block headers> class msg_headers(object): command = b"headers" def __init__(self): self.headers = [] def deserialize(self, f): # comment in bitcoind indicates these should be deserialized as blocks blocks = deser_vector(f, CBlock) for x in blocks: self.headers.append(CBlockHeader(x)) def serialize(self): blocks = [CBlock(x) for x in self.headers] return ser_vector(blocks) def __repr__(self): return "msg_headers(headers=%s)" % repr(self.headers) class msg_reject(object): command = b"reject" REJECT_MALFORMED = 1 def __init__(self): self.message = b"" self.code = 0 self.reason = b"" self.data = 0 def deserialize(self, f): self.message = deser_string(f) self.code = struct.unpack("<B", f.read(1))[0] self.reason = deser_string(f) if (self.code != self.REJECT_MALFORMED and (self.message == b"block" or self.message == b"tx")): self.data = deser_uint256(f) def serialize(self): r = ser_string(self.message) r += struct.pack("<B", self.code) r += ser_string(self.reason) if (self.code != self.REJECT_MALFORMED and (self.message == b"block" or self.message == b"tx")): r += ser_uint256(self.data) return r def __repr__(self): return "msg_reject: %s %d %s [%064x]" \ % (self.message, self.code, self.reason, self.data) # Helper function def wait_until(predicate, *, attempts=float('inf'), timeout=float('inf')): attempt = 0 elapsed = 0 while attempt < attempts and elapsed < timeout: with mininode_lock: if predicate(): return True attempt += 1 elapsed += 0.05 time.sleep(0.05) return False class msg_feefilter(object): command = b"feefilter" def __init__(self, feerate=0): self.feerate = feerate def deserialize(self, f): self.feerate = struct.unpack("<Q", f.read(8))[0] def serialize(self): r = b"" r += struct.pack("<Q", self.feerate) return r def 
__repr__(self): return "msg_feefilter(feerate=%08x)" % self.feerate class msg_sendcmpct(object): command = b"sendcmpct" def __init__(self): self.announce = False self.version = 1 def deserialize(self, f): self.announce = struct.unpack("<?", f.read(1))[0] self.version = struct.unpack("<Q", f.read(8))[0] def serialize(self): r = b"" r += struct.pack("<?", self.announce) r += struct.pack("<Q", self.version) return r def __repr__(self): return "msg_sendcmpct(announce=%s, version=%lu)" % (self.announce, self.version) class msg_cmpctblock(object): command = b"cmpctblock" def __init__(self, header_and_shortids = None): self.header_and_shortids = header_and_shortids def deserialize(self, f): self.header_and_shortids = P2PHeaderAndShortIDs() self.header_and_shortids.deserialize(f) def serialize(self): r = b"" r += self.header_and_shortids.serialize() return r def __repr__(self): return "msg_cmpctblock(HeaderAndShortIDs=%s)" % repr(self.header_and_shortids) class msg_getblocktxn(object): command = b"getblocktxn" def __init__(self): self.block_txn_request = None def deserialize(self, f): self.block_txn_request = BlockTransactionsRequest() self.block_txn_request.deserialize(f) def serialize(self): r = b"" r += self.block_txn_request.serialize() return r def __repr__(self): return "msg_getblocktxn(block_txn_request=%s)" % (repr(self.block_txn_request)) class msg_blocktxn(object): command = b"blocktxn" def __init__(self): self.block_transactions = BlockTransactions() def deserialize(self, f): self.block_transactions.deserialize(f) def serialize(self): r = b"" r += self.block_transactions.serialize() return r def __repr__(self): return "msg_blocktxn(block_transactions=%s)" % (repr(self.block_transactions)) class msg_witness_blocktxn(msg_blocktxn): def serialize(self): r = b"" r += self.block_transactions.serialize(with_witness=True) return r # This is what a callback should look like for NodeConn # Reimplement the on_* functions to provide handling for events class 
NodeConnCB(object): def __init__(self): self.verack_received = False # deliver_sleep_time is helpful for debugging race conditions in p2p # tests; it causes message delivery to sleep for the specified time # before acquiring the global lock and delivering the next message. self.deliver_sleep_time = None # Remember the services our peer has advertised self.peer_services = None def set_deliver_sleep_time(self, value): with mininode_lock: self.deliver_sleep_time = value def get_deliver_sleep_time(self): with mininode_lock: return self.deliver_sleep_time # Spin until verack message is received from the node. # Tests may want to use this as a signal that the test can begin. # This can be called from the testing thread, so it needs to acquire the # global lock. def wait_for_verack(self): while True: with mininode_lock: if self.verack_received: return time.sleep(0.05) def deliver(self, conn, message): deliver_sleep = self.get_deliver_sleep_time() if deliver_sleep is not None: time.sleep(deliver_sleep) with mininode_lock: try: getattr(self, 'on_' + message.command.decode('ascii'))(conn, message) except: print("ERROR delivering %s (%s)" % (repr(message), sys.exc_info()[0])) def on_version(self, conn, message): if message.nVersion >= 209: conn.send_message(msg_verack()) conn.ver_send = min(MY_VERSION, message.nVersion) if message.nVersion < 209: conn.ver_recv = conn.ver_send conn.nServices = message.nServices def on_verack(self, conn, message): conn.ver_recv = conn.ver_send self.verack_received = True def on_inv(self, conn, message): want = msg_getdata() for i in message.inv: if i.type != 0: want.inv.append(i) if len(want.inv): conn.send_message(want) def on_addr(self, conn, message): pass def on_alert(self, conn, message): pass def on_getdata(self, conn, message): pass def on_getblocks(self, conn, message): pass def on_tx(self, conn, message): pass def on_block(self, conn, message): pass def on_getaddr(self, conn, message): pass def on_headers(self, conn, message): pass def 
on_getheaders(self, conn, message): pass def on_ping(self, conn, message): if conn.ver_send > BIP0031_VERSION: conn.send_message(msg_pong(message.nonce)) def on_reject(self, conn, message): pass def on_close(self, conn): pass def on_mempool(self, conn): pass def on_pong(self, conn, message): pass def on_feefilter(self, conn, message): pass def on_sendheaders(self, conn, message): pass def on_sendcmpct(self, conn, message): pass def on_cmpctblock(self, conn, message): pass def on_getblocktxn(self, conn, message): pass def on_blocktxn(self, conn, message): pass # More useful callbacks and functions for NodeConnCB's which have a single NodeConn class SingleNodeConnCB(NodeConnCB): def __init__(self): NodeConnCB.__init__(self) self.connection = None self.ping_counter = 1 self.last_pong = msg_pong() def add_connection(self, conn): self.connection = conn # Wrapper for the NodeConn's send_message function def send_message(self, message): self.connection.send_message(message) def send_and_ping(self, message): self.send_message(message) self.sync_with_ping() def on_pong(self, conn, message): self.last_pong = message # Sync up with the node def sync_with_ping(self, timeout=30): def received_pong(): return (self.last_pong.nonce == self.ping_counter) self.send_message(msg_ping(nonce=self.ping_counter)) success = wait_until(received_pong, timeout=timeout) self.ping_counter += 1 return success # The actual NodeConn class # This class provides an interface for a p2p connection to a specified node class NodeConn(asyncore.dispatcher): messagemap = { b"version": msg_version, b"verack": msg_verack, b"addr": msg_addr, b"alert": msg_alert, b"inv": msg_inv, b"getdata": msg_getdata, b"getblocks": msg_getblocks, b"tx": msg_tx, b"block": msg_block, b"getaddr": msg_getaddr, b"ping": msg_ping, b"pong": msg_pong, b"headers": msg_headers, b"getheaders": msg_getheaders, b"reject": msg_reject, b"mempool": msg_mempool, b"feefilter": msg_feefilter, b"sendheaders": msg_sendheaders, b"sendcmpct": 
msg_sendcmpct, b"cmpctblock": msg_cmpctblock, b"getblocktxn": msg_getblocktxn, b"blocktxn": msg_blocktxn } MAGIC_BYTES = { "mainnet": b"\xfb\xc0\xb6\xdb", # mainnet "testnet3": b"\xfc\xc1\xb7\xdc", # testnet3 "regtest": b"\xfa\xbf\xb5\xda", # regtest } def __init__(self, dstaddr, dstport, rpc, callback, net="regtest", services=NODE_NETWORK): asyncore.dispatcher.__init__(self, map=mininode_socket_map) self.log = logging.getLogger("NodeConn(%s:%d)" % (dstaddr, dstport)) self.dstaddr = dstaddr self.dstport = dstport self.create_socket(socket.AF_INET, socket.SOCK_STREAM) self.sendbuf = b"" self.recvbuf = b"" self.ver_send = 209 self.ver_recv = 209 self.last_sent = 0 self.state = "connecting" self.network = net self.cb = callback self.disconnect = False self.nServices = 0 # stuff version msg into sendbuf vt = msg_version() vt.nServices = services vt.addrTo.ip = self.dstaddr vt.addrTo.port = self.dstport vt.addrFrom.ip = "0.0.0.0" vt.addrFrom.port = 0 self.send_message(vt, True) print('MiniNode: Connecting to Linuxcoin Node IP # ' + dstaddr + ':' \ + str(dstport)) try: self.connect((dstaddr, dstport)) except: self.handle_close() self.rpc = rpc def show_debug_msg(self, msg): self.log.debug(msg) def handle_connect(self): self.show_debug_msg("MiniNode: Connected & Listening: \n") self.state = "connected" def handle_close(self): self.show_debug_msg("MiniNode: Closing Connection to %s:%d... 
" % (self.dstaddr, self.dstport)) self.state = "closed" self.recvbuf = b"" self.sendbuf = b"" try: self.close() except: pass self.cb.on_close(self) def handle_read(self): try: t = self.recv(8192) if len(t) > 0: self.recvbuf += t self.got_data() except: pass def readable(self): return True def writable(self): with mininode_lock: length = len(self.sendbuf) return (length > 0) def handle_write(self): with mininode_lock: try: sent = self.send(self.sendbuf) except: self.handle_close() return self.sendbuf = self.sendbuf[sent:] def got_data(self): try: while True: if len(self.recvbuf) < 4: return if self.recvbuf[:4] != self.MAGIC_BYTES[self.network]: raise ValueError("got garbage %s" % repr(self.recvbuf)) if self.ver_recv < 209: if len(self.recvbuf) < 4 + 12 + 4: return command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0] checksum = None if len(self.recvbuf) < 4 + 12 + 4 + msglen: return msg = self.recvbuf[4+12+4:4+12+4+msglen] self.recvbuf = self.recvbuf[4+12+4+msglen:] else: if len(self.recvbuf) < 4 + 12 + 4 + 4: return command = self.recvbuf[4:4+12].split(b"\x00", 1)[0] msglen = struct.unpack("<i", self.recvbuf[4+12:4+12+4])[0] checksum = self.recvbuf[4+12+4:4+12+4+4] if len(self.recvbuf) < 4 + 12 + 4 + 4 + msglen: return msg = self.recvbuf[4+12+4+4:4+12+4+4+msglen] th = sha256(msg) h = sha256(th) if checksum != h[:4]: raise ValueError("got bad checksum " + repr(self.recvbuf)) self.recvbuf = self.recvbuf[4+12+4+4+msglen:] if command in self.messagemap: f = BytesIO(msg) t = self.messagemap[command]() t.deserialize(f) self.got_message(t) else: self.show_debug_msg("Unknown command: '" + command + "' " + repr(msg)) except Exception as e: print('got_data:', repr(e)) # import traceback # traceback.print_tb(sys.exc_info()[2]) def send_message(self, message, pushbuf=False): if self.state != "connected" and not pushbuf: raise IOError('Not connected, no pushbuf') self.show_debug_msg("Send %s" % repr(message)) command = 
message.command data = message.serialize() tmsg = self.MAGIC_BYTES[self.network] tmsg += command tmsg += b"\x00" * (12 - len(command)) tmsg += struct.pack("<I", len(data)) if self.ver_send >= 209: th = sha256(data) h = sha256(th) tmsg += h[:4] tmsg += data with mininode_lock: self.sendbuf += tmsg self.last_sent = time.time() def got_message(self, message): if message.command == b"version": if message.nVersion <= BIP0031_VERSION: self.messagemap[b'ping'] = msg_ping_prebip31 if self.last_sent + 30 * 60 < time.time(): self.send_message(self.messagemap[b'ping']()) self.show_debug_msg("Recv %s" % repr(message)) self.cb.deliver(self, message) def disconnect_node(self): self.disconnect = True class NetworkThread(Thread): def run(self): while mininode_socket_map: # We check for whether to disconnect outside of the asyncore # loop to workaround the behavior of asyncore when using # select disconnected = [] for fd, obj in mininode_socket_map.items(): if obj.disconnect: disconnected.append(obj) [ obj.handle_close() for obj in disconnected ] asyncore.loop(0.1, use_poll=True, map=mininode_socket_map, count=1) # An exception we can raise if we detect a potential disconnect # (p2p or rpc) before the test is complete class EarlyDisconnectError(Exception): def __init__(self, value): self.value = value def __str__(self): return repr(self.value)
mit
gundalow/ansible
test/units/module_utils/basic/test_get_file_attributes.py
30
2683
# -*- coding: utf-8 -*- # Copyright: # (c) 2017, Pierre-Louis Bonicoli <pierre-louis@libregerbil.fr> # License: GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from itertools import product from ansible.module_utils.basic import AnsibleModule import pytest DATA = ( ( '3353595900 --------------e---- /usr/lib32', {'attr_flags': 'e', 'version': '3353595900', 'attributes': ['extents']} ), # with e2fsprogs < 1.43, output isn't aligned ( '78053594 -----------I--e---- /usr/lib', {'attr_flags': 'Ie', 'version': '78053594', 'attributes': ['indexed', 'extents']} ), ( '15711607 -------A------e---- /tmp/test', {'attr_flags': 'Ae', 'version': '15711607', 'attributes': ['noatime', 'extents']} ), # with e2fsprogs >= 1.43, output is aligned ( '78053594 -----------I--e---- /usr/lib', {'attr_flags': 'Ie', 'version': '78053594', 'attributes': ['indexed', 'extents']} ), ( '15711607 -------A------e---- /tmp/test', {'attr_flags': 'Ae', 'version': '15711607', 'attributes': ['noatime', 'extents']} ), ) NO_VERSION_DATA = ( ( '--------------e---- /usr/lib32', {'attr_flags': 'e', 'attributes': ['extents']} ), ( '-----------I--e---- /usr/lib', {'attr_flags': 'Ie', 'attributes': ['indexed', 'extents']} ), ( '-------A------e---- /tmp/test', {'attr_flags': 'Ae', 'attributes': ['noatime', 'extents']} ), ) @pytest.mark.parametrize('stdin, data', product(({},), DATA), indirect=['stdin']) def test_get_file_attributes(am, stdin, mocker, data): # Test #18731 mocker.patch.object(AnsibleModule, 'get_bin_path', return_value=(0, '/usr/bin/lsattr', '')) mocker.patch.object(AnsibleModule, 'run_command', return_value=(0, data[0], '')) result = am.get_file_attributes('/path/to/file') for key, value in data[1].items(): assert key in result and result[key] == value @pytest.mark.parametrize('stdin, data', product(({},), NO_VERSION_DATA), 
indirect=['stdin']) def test_get_file_attributes_no_version(am, stdin, mocker, data): # Test #18731 mocker.patch.object(AnsibleModule, 'get_bin_path', return_value=(0, '/usr/bin/lsattr', '')) mocker.patch.object(AnsibleModule, 'run_command', return_value=(0, data[0], '')) result = am.get_file_attributes('/path/to/file', include_version=False) for key, value in data[1].items(): assert key in result and result[key] == value
gpl-3.0
huangkuan/hack
lib/gcloud/datastore/test_connection.py
7
31745
# Copyright 2014 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import unittest2 class TestConnection(unittest2.TestCase): def _getTargetClass(self): from gcloud.datastore.connection import Connection return Connection def _make_key_pb(self, project, id_=1234): from gcloud.datastore.key import Key path_args = ('Kind',) if id_ is not None: path_args += (id_,) return Key(*path_args, project=project).to_protobuf() def _make_query_pb(self, kind): from gcloud.datastore._generated import query_pb2 pb = query_pb2.Query() pb.kind.add().name = kind return pb def _makeOne(self, *args, **kw): return self._getTargetClass()(*args, **kw) def _verifyProtobufCall(self, called_with, URI, conn): self.assertEqual(called_with['uri'], URI) self.assertEqual(called_with['method'], 'POST') self.assertEqual(called_with['headers']['Content-Type'], 'application/x-protobuf') self.assertEqual(called_with['headers']['User-Agent'], conn.USER_AGENT) def test_default_url(self): klass = self._getTargetClass() conn = self._makeOne() self.assertEqual(conn.api_base_url, klass.API_BASE_URL) def test_custom_url_from_env(self): import os from gcloud._testing import _Monkey from gcloud.connection import API_BASE_URL from gcloud.environment_vars import GCD_HOST HOST = 'CURR_HOST' fake_environ = {GCD_HOST: HOST} with _Monkey(os, environ=fake_environ): conn = self._makeOne() self.assertNotEqual(conn.api_base_url, API_BASE_URL) self.assertEqual(conn.api_base_url, HOST + '/datastore') def 
test_custom_url_from_constructor(self): from gcloud.connection import API_BASE_URL HOST = object() conn = self._makeOne(api_base_url=HOST) self.assertNotEqual(conn.api_base_url, API_BASE_URL) self.assertEqual(conn.api_base_url, HOST) def test_custom_url_constructor_and_env(self): import os from gcloud._testing import _Monkey from gcloud.connection import API_BASE_URL from gcloud.environment_vars import GCD_HOST HOST1 = object() HOST2 = object() fake_environ = {GCD_HOST: HOST1} with _Monkey(os, environ=fake_environ): conn = self._makeOne(api_base_url=HOST2) self.assertNotEqual(conn.api_base_url, API_BASE_URL) self.assertNotEqual(conn.api_base_url, HOST1) self.assertEqual(conn.api_base_url, HOST2) def test_ctor_defaults(self): conn = self._makeOne() self.assertEqual(conn.credentials, None) def test_ctor_explicit(self): class Creds(object): def create_scoped_required(self): return False creds = Creds() conn = self._makeOne(creds) self.assertTrue(conn.credentials is creds) def test_http_w_existing(self): conn = self._makeOne() conn._http = http = object() self.assertTrue(conn.http is http) def test_http_wo_creds(self): import httplib2 conn = self._makeOne() self.assertTrue(isinstance(conn.http, httplib2.Http)) def test_http_w_creds(self): import httplib2 authorized = object() class Creds(object): def authorize(self, http): self._called_with = http return authorized def create_scoped_required(self): return False creds = Creds() conn = self._makeOne(creds) self.assertTrue(conn.http is authorized) self.assertTrue(isinstance(creds._called_with, httplib2.Http)) def test__request_w_200(self): PROJECT = 'PROJECT' METHOD = 'METHOD' DATA = b'DATA' conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':' + METHOD, ]) http = conn._http = Http({'status': '200'}, 'CONTENT') self.assertEqual(conn._request(PROJECT, METHOD, DATA), 'CONTENT') self._verifyProtobufCall(http._called_with, URI, conn) self.assertEqual(http._called_with['body'], 
DATA) def test__request_not_200(self): from gcloud.exceptions import BadRequest from google.rpc import status_pb2 error = status_pb2.Status() error.message = 'Entity value is indexed.' error.code = 9 # FAILED_PRECONDITION PROJECT = 'PROJECT' METHOD = 'METHOD' DATA = 'DATA' conn = self._makeOne() conn._http = Http({'status': '400'}, error.SerializeToString()) with self.assertRaises(BadRequest) as e: conn._request(PROJECT, METHOD, DATA) expected_message = '400 Entity value is indexed.' self.assertEqual(str(e.exception), expected_message) def test__rpc(self): class ReqPB(object): def SerializeToString(self): return REQPB class RspPB(object): def __init__(self, pb): self._pb = pb @classmethod def FromString(cls, pb): return cls(pb) REQPB = b'REQPB' PROJECT = 'PROJECT' METHOD = 'METHOD' conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':' + METHOD, ]) http = conn._http = Http({'status': '200'}, 'CONTENT') response = conn._rpc(PROJECT, METHOD, ReqPB(), RspPB) self.assertTrue(isinstance(response, RspPB)) self.assertEqual(response._pb, 'CONTENT') self._verifyProtobufCall(http._called_with, URI, conn) self.assertEqual(http._called_with['body'], REQPB) def test_build_api_url_w_default_base_version(self): PROJECT = 'PROJECT' METHOD = 'METHOD' conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':' + METHOD, ]) self.assertEqual(conn.build_api_url(PROJECT, METHOD), URI) def test_build_api_url_w_explicit_base_version(self): BASE = 'http://example.com/' VER = '3.1415926' PROJECT = 'PROJECT' METHOD = 'METHOD' conn = self._makeOne() URI = '/'.join([ BASE, VER, 'projects', PROJECT + ':' + METHOD, ]) self.assertEqual(conn.build_api_url(PROJECT, METHOD, BASE, VER), URI) def test_lookup_single_key_empty_response(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() conn = self._makeOne() 
URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb]) self.assertEqual(len(found), 0) self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) self.assertEqual(key_pb, keys[0]) def test_lookup_single_key_empty_response_w_eventual(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb], eventual=True) self.assertEqual(len(found), 0) self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) self.assertEqual(key_pb, keys[0]) self.assertEqual(request.read_options.read_consistency, datastore_pb2.ReadOptions.EVENTUAL) self.assertEqual(request.read_options.transaction, b'') def test_lookup_single_key_empty_response_w_eventual_and_transaction(self): PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' key_pb = self._make_key_pb(PROJECT) conn = self._makeOne() self.assertRaises(ValueError, conn.lookup, PROJECT, key_pb, eventual=True, transaction_id=TRANSACTION) def test_lookup_single_key_empty_response_w_transaction(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' key_pb = 
self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb], transaction_id=TRANSACTION) self.assertEqual(len(found), 0) self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) self.assertEqual(key_pb, keys[0]) self.assertEqual(request.read_options.transaction, TRANSACTION) def test_lookup_single_key_nonempty_response(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import entity_pb2 PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.LookupResponse() entity = entity_pb2.Entity() entity.key.CopyFrom(key_pb) rsp_pb.found.add(entity=entity) conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) (found,), missing, deferred = conn.lookup(PROJECT, [key_pb]) self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) self.assertEqual(found.key.path[0].kind, 'Kind') self.assertEqual(found.key.path[0].id, 1234) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 1) self.assertEqual(key_pb, keys[0]) def test_lookup_multiple_keys_empty_response(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) key_pb2 = self._make_key_pb(PROJECT, id_=2345) rsp_pb = 
datastore_pb2.LookupResponse() conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) found, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) self.assertEqual(len(found), 0) self.assertEqual(len(missing), 0) self.assertEqual(len(deferred), 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 2) self.assertEqual(key_pb1, keys[0]) self.assertEqual(key_pb2, keys[1]) def test_lookup_multiple_keys_w_missing(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) key_pb2 = self._make_key_pb(PROJECT, id_=2345) rsp_pb = datastore_pb2.LookupResponse() er_1 = rsp_pb.missing.add() er_1.entity.key.CopyFrom(key_pb1) er_2 = rsp_pb.missing.add() er_2.entity.key.CopyFrom(key_pb2) conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) self.assertEqual(result, []) self.assertEqual(len(deferred), 0) self.assertEqual([missed.key for missed in missing], [key_pb1, key_pb2]) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 2) self.assertEqual(key_pb1, keys[0]) self.assertEqual(key_pb2, keys[1]) def test_lookup_multiple_keys_w_deferred(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' key_pb1 = self._make_key_pb(PROJECT) key_pb2 = self._make_key_pb(PROJECT, id_=2345) rsp_pb = datastore_pb2.LookupResponse() 
rsp_pb.deferred.add().CopyFrom(key_pb1) rsp_pb.deferred.add().CopyFrom(key_pb2) conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':lookup', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) result, missing, deferred = conn.lookup(PROJECT, [key_pb1, key_pb2]) self.assertEqual(result, []) self.assertEqual(len(missing), 0) self.assertEqual([def_key for def_key in deferred], [key_pb1, key_pb2]) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) self.assertEqual(cw['uri'], URI) self.assertEqual(cw['method'], 'POST') self.assertEqual(cw['headers']['Content-Type'], 'application/x-protobuf') self.assertEqual(cw['headers']['User-Agent'], conn.USER_AGENT) rq_class = datastore_pb2.LookupRequest request = rq_class() request.ParseFromString(cw['body']) keys = list(request.keys) self.assertEqual(len(keys), 2) self.assertEqual(key_pb1, keys[0]) self.assertEqual(key_pb2, keys[1]) def test_run_query_w_eventual_no_transaction(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' CURSOR = b'\x00' q_pb = self._make_query_pb(KIND) rsp_pb = datastore_pb2.RunQueryResponse() rsp_pb.batch.end_cursor = CURSOR no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query(PROJECT, q_pb, eventual=True) self.assertEqual(pbs, []) self.assertEqual(end, CURSOR) self.assertTrue(more) self.assertEqual(skipped, 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) 
self.assertEqual(request.partition_id.namespace_id, '') self.assertEqual(request.query, q_pb) self.assertEqual(request.read_options.read_consistency, datastore_pb2.ReadOptions.EVENTUAL) self.assertEqual(request.read_options.transaction, b'') def test_run_query_wo_eventual_w_transaction(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' CURSOR = b'\x00' TRANSACTION = b'TRANSACTION' q_pb = self._make_query_pb(KIND) rsp_pb = datastore_pb2.RunQueryResponse() rsp_pb.batch.end_cursor = CURSOR no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query( PROJECT, q_pb, transaction_id=TRANSACTION) self.assertEqual(pbs, []) self.assertEqual(end, CURSOR) self.assertTrue(more) self.assertEqual(skipped, 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') self.assertEqual(request.query, q_pb) self.assertEqual( request.read_options.read_consistency, datastore_pb2.ReadOptions.READ_CONSISTENCY_UNSPECIFIED) self.assertEqual(request.read_options.transaction, TRANSACTION) def test_run_query_w_eventual_and_transaction(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' CURSOR = b'\x00' TRANSACTION = b'TRANSACTION' q_pb = self._make_query_pb(KIND) rsp_pb = datastore_pb2.RunQueryResponse() rsp_pb.batch.end_cursor = CURSOR no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more 
rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL conn = self._makeOne() self.assertRaises(ValueError, conn.run_query, PROJECT, q_pb, eventual=True, transaction_id=TRANSACTION) def test_run_query_wo_namespace_empty_result(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import query_pb2 PROJECT = 'PROJECT' KIND = 'Nonesuch' CURSOR = b'\x00' q_pb = self._make_query_pb(KIND) rsp_pb = datastore_pb2.RunQueryResponse() rsp_pb.batch.end_cursor = CURSOR no_more = query_pb2.QueryResultBatch.NO_MORE_RESULTS rsp_pb.batch.more_results = no_more rsp_pb.batch.entity_result_type = query_pb2.EntityResult.FULL conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs, end, more, skipped = conn.run_query(PROJECT, q_pb) self.assertEqual(pbs, []) self.assertEqual(end, CURSOR) self.assertTrue(more) self.assertEqual(skipped, 0) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, '') self.assertEqual(request.query, q_pb) def test_run_query_w_namespace_nonempty_result(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import entity_pb2 PROJECT = 'PROJECT' KIND = 'Kind' entity = entity_pb2.Entity() q_pb = self._make_query_pb(KIND) rsp_pb = datastore_pb2.RunQueryResponse() rsp_pb.batch.entity_results.add(entity=entity) rsp_pb.batch.entity_result_type = 1 # FULL rsp_pb.batch.more_results = 3 # NO_MORE_RESULTS conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':runQuery', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) pbs = conn.run_query(PROJECT, q_pb, 'NS')[0] self.assertEqual(len(pbs), 1) cw = http._called_with 
self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.RunQueryRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.partition_id.namespace_id, 'NS') self.assertEqual(request.query, q_pb) def test_begin_transaction(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'TRANSACTION' rsp_pb = datastore_pb2.BeginTransactionResponse() rsp_pb.transaction = TRANSACTION conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':beginTransaction', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.begin_transaction(PROJECT), TRANSACTION) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.BeginTransactionRequest request = rq_class() request.ParseFromString(cw['body']) def test_commit_wo_transaction(self): from gcloud._testing import _Monkey from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore import connection as MUT from gcloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() mutation = req_pb.mutations.add() insert = mutation.upsert insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':commit', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) # Set up mock for parsing the response. 
expected_result = object() _parsed = [] def mock_parse(response): _parsed.append(response) return expected_result with _Monkey(MUT, _parse_commit_response=mock_parse): result = conn.commit(PROJECT, req_pb, None) self.assertTrue(result is expected_result) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.transaction, b'') self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL) self.assertEqual(_parsed, [rsp_pb]) def test_commit_w_transaction(self): from gcloud._testing import _Monkey from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore import connection as MUT from gcloud.datastore.helpers import _new_value_pb PROJECT = 'PROJECT' key_pb = self._make_key_pb(PROJECT) rsp_pb = datastore_pb2.CommitResponse() req_pb = datastore_pb2.CommitRequest() mutation = req_pb.mutations.add() insert = mutation.upsert insert.key.CopyFrom(key_pb) value_pb = _new_value_pb(insert, 'foo') value_pb.string_value = u'Foo' conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':commit', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) # Set up mock for parsing the response. 
expected_result = object() _parsed = [] def mock_parse(response): _parsed.append(response) return expected_result with _Monkey(MUT, _parse_commit_response=mock_parse): result = conn.commit(PROJECT, req_pb, b'xact') self.assertTrue(result is expected_result) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.CommitRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.transaction, b'xact') self.assertEqual(list(request.mutations), [mutation]) self.assertEqual(request.mode, rq_class.TRANSACTIONAL) self.assertEqual(_parsed, [rsp_pb]) def test_rollback_ok(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' TRANSACTION = b'xact' rsp_pb = datastore_pb2.RollbackResponse() conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':rollback', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.rollback(PROJECT, TRANSACTION), None) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.RollbackRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(request.transaction, TRANSACTION) def test_allocate_ids_empty(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' rsp_pb = datastore_pb2.AllocateIdsResponse() conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':allocateIds', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.allocate_ids(PROJECT, []), []) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.AllocateIdsRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(list(request.keys), []) def test_allocate_ids_non_empty(self): from gcloud.datastore._generated import datastore_pb2 PROJECT = 'PROJECT' before_key_pbs = [ self._make_key_pb(PROJECT, 
id_=None), self._make_key_pb(PROJECT, id_=None), ] after_key_pbs = [ self._make_key_pb(PROJECT), self._make_key_pb(PROJECT, id_=2345), ] rsp_pb = datastore_pb2.AllocateIdsResponse() rsp_pb.keys.add().CopyFrom(after_key_pbs[0]) rsp_pb.keys.add().CopyFrom(after_key_pbs[1]) conn = self._makeOne() URI = '/'.join([ conn.api_base_url, conn.API_VERSION, 'projects', PROJECT + ':allocateIds', ]) http = conn._http = Http({'status': '200'}, rsp_pb.SerializeToString()) self.assertEqual(conn.allocate_ids(PROJECT, before_key_pbs), after_key_pbs) cw = http._called_with self._verifyProtobufCall(cw, URI, conn) rq_class = datastore_pb2.AllocateIdsRequest request = rq_class() request.ParseFromString(cw['body']) self.assertEqual(len(request.keys), len(before_key_pbs)) for key_before, key_after in zip(before_key_pbs, request.keys): self.assertEqual(key_before, key_after) class Test__parse_commit_response(unittest2.TestCase): def _callFUT(self, commit_response_pb): from gcloud.datastore.connection import _parse_commit_response return _parse_commit_response(commit_response_pb) def test_it(self): from gcloud.datastore._generated import datastore_pb2 from gcloud.datastore._generated import entity_pb2 index_updates = 1337 keys = [ entity_pb2.Key( path=[ entity_pb2.Key.PathElement( kind='Foo', id=1234, ), ], ), entity_pb2.Key( path=[ entity_pb2.Key.PathElement( kind='Bar', name='baz', ), ], ), ] response = datastore_pb2.CommitResponse( mutation_results=[ datastore_pb2.MutationResult(key=key) for key in keys ], index_updates=index_updates, ) result = self._callFUT(response) self.assertEqual(result, (index_updates, keys)) class Http(object): _called_with = None def __init__(self, headers, content): from httplib2 import Response self._response = Response(headers) self._content = content def request(self, **kw): self._called_with = kw return self._response, self._content class _PathElementProto(object): def __init__(self, _id): self.id = _id class _KeyProto(object): def __init__(self, id_): 
self.path = [_PathElementProto(id_)]
apache-2.0
lifei96/Medium-crawler-with-data-analyzer
User_Crawler/medium_users_data_analyzer.py
2
6117
# -*- coding: utf-8 -*-
"""Analyzer for crawled Medium user data.

Reads the crawler output CSVs from ./result and a list of suspended Twitter
usernames, then writes a series of CDF plots and a mean/median bar chart
comparing all Medium users against users whose Twitter accounts were suspended.
"""
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import datetime
import os


def users_data_parser():
    """Build and save all comparison plots into ./result.

    Side effects: creates ./result if missing; reads
    ./suspended_username_list.txt, ./result/users_raw_data.csv,
    ./result/twitter.csv and ./result/reciprocity.csv; writes five PNG files
    into ./result. Assumes all input files exist and are tab-separated —
    TODO confirm against the crawler's output format.
    """
    if not os.path.exists('./result'):
        os.mkdir('./result')
    # Suspended usernames are stored as one space-separated line.
    file_in = open('./suspended_username_list.txt', 'r')
    suspended_username_list = str(file_in.read()).split(' ')
    file_in.close()
    users_data = pd.read_csv('./result/users_raw_data.csv', sep='\t', encoding='utf-8')
    # errors='coerce' turns unparsable dates into NaT instead of raising.
    users_data['last_post_date'] = pd.to_datetime(users_data['last_post_date'], errors='coerce')
    users_data['reg_date'] = pd.to_datetime(users_data['reg_date'], errors='coerce')
    # Restrict the population to accounts registered 2013-01-01 .. 2016-06-30.
    mask = (users_data['reg_date'] >= datetime.datetime(2013, 1, 1)) & (users_data['reg_date'] <= datetime.datetime(2016, 6, 30))
    users_data = users_data.loc[mask]
    # Subset: Medium users whose linked Twitter account was suspended.
    mask = users_data['username'].isin(suspended_username_list)
    suspended_users_data = users_data.loc[mask]
    twitter_data = pd.read_csv('./result/twitter.csv', sep='\t', encoding='utf-8')
    # Following/followers balance ratio; +0.1 avoids division by zero.
    # Each sorted array is plotted against a 0..1 ramp to form an empirical CDF.
    f_f_list = np.sort(((users_data['following_count'] + 0.1) / (users_data['followers_count'] + 0.1)).tolist())
    f_f_list2 = np.sort(((twitter_data['following_count'] + 0.1) / (twitter_data['followers_count'] + 0.1)).tolist())
    t_f_f_list = np.sort(((twitter_data['t_following_count'] + 0.1) / (twitter_data['t_followers_count'] + 0.1)).tolist())
    s_f_f_list = np.sort(((suspended_users_data['following_count'] + 0.1) / (suspended_users_data['followers_count'] + 0.1)).tolist())
    # --- Plot 1: CDF of the balance ratio, log-scaled x axis. ---
    plt.figure(figsize=(15, 10))
    plt.axis([0.01, 1000, 0, 1])
    ax = plt.gca()
    ax.set_autoscale_on(False)
    plt.xlabel('(following+0.1)/(followers+0.1)')
    plt.ylabel('CDF')
    plt.yticks(np.linspace(0, 1, 21))
    plt.grid()
    plt.title('Balance')
    line1, = plt.semilogx(f_f_list, np.linspace(0, 1, f_f_list.size), '-g')
    line2, = plt.semilogx(f_f_list2, np.linspace(0, 1, f_f_list2.size), '-r')
    line3, = plt.semilogx(t_f_f_list, np.linspace(0, 1, t_f_f_list.size), '-b')
    line4, = plt.semilogx(s_f_f_list, np.linspace(0, 1, s_f_f_list.size), '--g')
    plt.legend((line1, line2, line3, line4),
               ("all Medium users",
                "Medium users connected to Twitter",
                "Twitter users",
                "Medium users whose Twitter are suspended"),
               loc=2)
    plt.savefig('./result/CDF_balance.png')
    plt.close()
    # --- Plot 2: CDF of reciprocity (mutual follows / following). ---
    reciprocity_data = pd.read_csv('./result/reciprocity.csv', sep='\t', encoding='utf-8')
    # Tiny epsilon keeps 0/0 defined while leaving the ratio effectively unchanged.
    reciprocity_list = np.sort(((reciprocity_data['reciprocity_count'] + 0.0000000001) / (reciprocity_data['following_count'] + 0.0000000001)).tolist())
    plt.figure(figsize=(10, 10))
    ax = plt.gca()
    ax.set_autoscale_on(False)
    plt.xlabel('friends/following')
    plt.ylabel('CDF')
    plt.yticks(np.linspace(0, 1, 21))
    plt.xticks(np.linspace(0, 1, 21))
    plt.grid()
    plt.title('Reciprocity')
    plt.plot(reciprocity_list, np.linspace(0, 1, reciprocity_list.size), label='Reciprocity')
    plt.savefig('./result/CDF_reciprocity.png')
    plt.close()
    # --- Plot 3: CDF of following counts, all users vs suspended users. ---
    f_f_list = np.sort((users_data['following_count']).tolist())
    s_f_f_list = np.sort((suspended_users_data['following_count']).tolist())
    plt.figure(figsize=(15, 10))
    plt.axis([1, 1000, 0, 1])
    ax = plt.gca()
    ax.set_autoscale_on(False)
    plt.xlabel('following')
    plt.ylabel('CDF')
    plt.grid()
    plt.title('CDF_following')
    line1, = plt.semilogx(f_f_list, np.linspace(0, 1, f_f_list.size), '-g')
    line2, = plt.semilogx(s_f_f_list, np.linspace(0, 1, s_f_f_list.size), '-b')
    plt.legend((line1, line2), ("all Medium users", "Medium users whose Twitter are suspended"), loc=4)
    plt.savefig('./result/CDF_following.png')
    plt.close()
    # --- Plot 4: CDF of follower counts, same comparison. ---
    f_f_list = np.sort((users_data['followers_count']).tolist())
    s_f_f_list = np.sort((suspended_users_data['followers_count']).tolist())
    plt.figure(figsize=(15, 10))
    plt.axis([1, 2000, 0, 1])
    ax = plt.gca()
    ax.set_autoscale_on(False)
    plt.xlabel('followers')
    plt.ylabel('CDF')
    plt.grid()
    plt.title('CDF_followers')
    line1, = plt.semilogx(f_f_list, np.linspace(0, 1, f_f_list.size), '-g')
    line2, = plt.semilogx(s_f_f_list, np.linspace(0, 1, s_f_f_list.size), '-b')
    plt.legend((line1, line2), ("all Medium users", "Medium users whose Twitter are suspended"), loc=4)
    plt.savefig('./result/CDF_followers.png')
    plt.close()
    # --- Plot 5: CDF of post counts, same comparison. ---
    f_f_list = np.sort((users_data['posts_count']).tolist())
    s_f_f_list = np.sort((suspended_users_data['posts_count']).tolist())
    plt.figure(figsize=(15, 10))
    plt.axis([1, 50, 0, 1])
    ax = plt.gca()
    ax.set_autoscale_on(False)
    plt.xlabel('posts')
    plt.ylabel('CDF')
    plt.grid()
    plt.title('CDF_posts')
    line1, = plt.semilogx(f_f_list, np.linspace(0, 1, f_f_list.size), '-g')
    line2, = plt.semilogx(s_f_f_list, np.linspace(0, 1, s_f_f_list.size), '-b')
    plt.legend((line1, line2), ("all Medium users", "Medium users whose Twitter are suspended"), loc=4)
    plt.savefig('./result/CDF_posts.png')
    plt.close()
    # --- Plot 6: grouped bar chart of mean/median statistics. ---
    # Row order matches the xticklabels assigned below.
    mean_median_list = [[users_data['following_count'].mean(), suspended_users_data['following_count'].mean()],
                        [users_data['following_count'].median(), suspended_users_data['following_count'].median()],
                        [users_data['followers_count'].mean(), suspended_users_data['followers_count'].mean()],
                        [users_data['followers_count'].median(), suspended_users_data['followers_count'].median()],
                        [users_data['posts_count'].mean(), suspended_users_data['posts_count'].mean()],
                        [users_data['posts_count'].median(), suspended_users_data['posts_count'].median()]]
    mean_median = pd.DataFrame(mean_median_list, columns=['All users', 'Suspended users'])
    ax = mean_median.plot.bar(figsize=(15, 10), fontsize=16)
    ax.set_xticks(mean_median.index)
    ax.set_xticklabels(['following_mean', 'following_median', 'followers_mean', 'followers_median', 'posts_mean', 'posts_median'], rotation=0)
    plt.savefig('./result/mean_median.png')
    plt.close()


if __name__ == '__main__':
    users_data_parser()
mit
mdakin/engine
build/util/lib/common/unittest_util.py
29
4933
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

"""Utilities for dealing with the python unittest module."""

import fnmatch
import sys
import unittest


class _TextTestResult(unittest._TextTestResult):
  """A test result class that can print formatted text results to a stream.

  Results printed in conformance with gtest output format, like:
  [ RUN        ] autofill.AutofillTest.testAutofillInvalid: "test desc."
  [         OK ] autofill.AutofillTest.testAutofillInvalid
  [ RUN        ] autofill.AutofillTest.testFillProfile: "test desc."
  [         OK ] autofill.AutofillTest.testFillProfile
  [ RUN        ] autofill.AutofillTest.testFillProfileCrazyCharacters: "Test."
  [         OK ] autofill.AutofillTest.testFillProfileCrazyCharacters

  NOTE(review): subclasses the private `unittest._TextTestResult` alias —
  Python 2 only; verify before running under Python 3.
  """

  def __init__(self, stream, descriptions, verbosity):
    unittest._TextTestResult.__init__(self, stream, descriptions, verbosity)
    # Fully-qualified names of tests that errored or failed, for retesting.
    self._fails = set()

  def _GetTestURI(self, test):
    # Fully-qualified test name: module.Class.method.
    return '%s.%s.%s' % (test.__class__.__module__,
                         test.__class__.__name__,
                         test._testMethodName)

  def getDescription(self, test):
    # Name plus the test's docstring summary (may be None).
    return '%s: "%s"' % (self._GetTestURI(test), test.shortDescription())

  def startTest(self, test):
    unittest.TestResult.startTest(self, test)
    self.stream.writeln('[ RUN      ] %s' % self.getDescription(test))

  def addSuccess(self, test):
    unittest.TestResult.addSuccess(self, test)
    self.stream.writeln('[       OK ] %s' % self._GetTestURI(test))

  def addError(self, test, err):
    unittest.TestResult.addError(self, test, err)
    self.stream.writeln('[    ERROR ] %s' % self._GetTestURI(test))
    self._fails.add(self._GetTestURI(test))

  def addFailure(self, test, err):
    unittest.TestResult.addFailure(self, test, err)
    self.stream.writeln('[   FAILED ] %s' % self._GetTestURI(test))
    self._fails.add(self._GetTestURI(test))

  def getRetestFilter(self):
    """Returns a gtest_filter string matching all failed/errored tests."""
    return ':'.join(self._fails)


class TextTestRunner(unittest.TextTestRunner):
  """Test Runner for displaying test results in textual format.

  Results are displayed in conformance with google test output.
  """

  def __init__(self, verbosity=1):
    # Write to stderr so test output is separated from program stdout.
    unittest.TextTestRunner.__init__(self, stream=sys.stderr,
                                     verbosity=verbosity)

  def _makeResult(self):
    return _TextTestResult(self.stream, self.descriptions, self.verbosity)


def GetTestsFromSuite(suite):
  """Returns all the tests from a given test suite."""
  tests = []
  for x in suite:
    # Suites may nest arbitrarily; flatten recursively.
    if isinstance(x, unittest.TestSuite):
      tests += GetTestsFromSuite(x)
    else:
      tests += [x]
  return tests


def GetTestNamesFromSuite(suite):
  """Returns a list of every test name in the given suite."""
  return map(lambda x: GetTestName(x), GetTestsFromSuite(suite))


def GetTestName(test):
  """Gets the test name of the given unittest test.

  Returns:
    Fully-qualified name in the form module.Class.method.
  """
  return '.'.join([test.__class__.__module__,
                   test.__class__.__name__,
                   test._testMethodName])


def FilterTestSuite(suite, gtest_filter):
  """Returns a new filtered tests suite based on the given gtest filter.

  See http://code.google.com/p/googletest/wiki/AdvancedGuide
  for gtest_filter specification.
  """
  return unittest.TestSuite(
      FilterTests(GetTestsFromSuite(suite), gtest_filter))


def FilterTests(all_tests, gtest_filter):
  """Filter a list of tests based on the given gtest filter.

  Args:
    all_tests: List of tests (unittest.TestSuite)
    gtest_filter: Filter to apply.

  Returns:
    Filtered subset of the given list of tests.
  """
  test_names = [GetTestName(test) for test in all_tests]
  filtered_names = FilterTestNames(test_names, gtest_filter)
  return [test for test in all_tests if GetTestName(test) in filtered_names]


def FilterTestNames(all_tests, gtest_filter):
  """Filter a list of test names based on the given gtest filter.

  See http://code.google.com/p/googletest/wiki/AdvancedGuide
  for gtest_filter specification.

  Args:
    all_tests: List of test names.
    gtest_filter: Filter to apply.

  Returns:
    Filtered subset of the given list of test names.
  """
  # gtest filter syntax: positive patterns, optionally followed by
  # '-' and negative patterns; each side is a ':'-separated pattern list.
  pattern_groups = gtest_filter.split('-')
  positive_patterns = ['*']
  if pattern_groups[0]:
    positive_patterns = pattern_groups[0].split(':')
  negative_patterns = None
  if len(pattern_groups) > 1:
    negative_patterns = pattern_groups[1].split(':')

  tests = []
  for test in all_tests:
    # Test name must be matched by one positive pattern.
    for pattern in positive_patterns:
      if fnmatch.fnmatch(test, pattern):
        break
    else:
      # for/else: no positive pattern matched, so skip this test.
      continue
    # Test name must not be matched by any negative patterns.
    for pattern in negative_patterns or []:
      if fnmatch.fnmatch(test, pattern):
        break
    else:
      # for/else: no negative pattern matched, so keep this test.
      tests += [test]
  return tests
bsd-3-clause
ZombieGroup/WormForZhihu
zhihu.py
2
47123
# -*- coding: utf-8 -*- ''' ;$$; ############# #############;#####o ## o######################### ##### $############################### ## ###$ ######! ########################## ## ### $### ################### ###### ### ### ##o####################### ###### ;### #### ##################### ## ### ###### ######&&################ ## ### ###### ## ############ ####### o## ######## ## ################## ##o ### #### #######o####### ## ###### ###########&##### ## #### #############! ### ######### #####& ## o#### ###### ## ####* ## !## ##### ## ##* ####; ## ##### #####o ##### #### ### ### $###o ### ## ####! $### ## ##### ## ## ;## ### ; ##$ ## ####### ## ##### &## ## ### ### ### ### ### ## ## ;## ## ## ### ## ### ### ## #### ## ### ## ##; ## ##$ ##& ## ## ##; ## ## ##; ### ### ##$ ### ### ## ###################### #####&&&&&&&&&&&&### ### $#####$ ############&$o$&################################ # $&########&o ''' # Build-in / Std import os, sys, time, platform, random import re, json, cookielib # requirements import requests, termcolor, html2text try: from bs4 import BeautifulSoup except: import BeautifulSoup # module from auth import islogin from auth import Logging """ Note: 1. 身份验证由 `auth.py` 完成。 2. 身份信息保存在当前目录的 `cookies` 文件中。 3. 
`requests` 对象可以直接使用,身份信息已经自动加载。 By Luozijun (https://github.com/LuoZijun), 09/09 2015 """ requests = requests.Session() requests.cookies = cookielib.LWPCookieJar('cookies') try: requests.cookies.load(ignore_discard=True) except: Logging.error(u"你还没有登录知乎哦 ...") Logging.info(u"执行 `python auth.py` 即可以完成登录。") raise Exception("无权限(403)") if islogin() != True: Logging.error(u"你的身份信息已经失效,请重新生成身份信息( `python auth.py` )。") raise Exception("无权限(403)") reload(sys) sys.setdefaultencoding('utf8') class Question: url = None soup = None def __init__(self, url, title=None): if url[0:len(url) - 8] != "http://www.zhihu.com/question/": raise ValueError("\"" + url + "\"" + " : it isn't a question url.") else: self.url = url if title != None: self.title = title def parser(self): r = requests.get(self.url) self.soup = BeautifulSoup(r.content) def get_question_id(self): return self.url[len(self.url)-7:len(self.url)] def get_title(self): if hasattr(self, "title"): if platform.system() == 'Windows': title = self.title.decode('utf-8').encode('gbk') return title else: return self.title else: if self.soup == None: self.parser() soup = self.soup title = soup.find("h2", class_="zm-item-title").string.encode("utf-8").replace("\n", "") self.title = title if platform.system() == 'Windows': title = title.decode('utf-8').encode('gbk') return title else: return title def get_detail(self): if self.soup == None: self.parser() soup = self.soup detail = soup.find("div", id="zh-question-detail").div.get_text().encode("utf-8") if platform.system() == 'Windows': detail = detail.decode('utf-8').encode('gbk') return detail else: return detail def get_answers_num(self): if self.soup == None: self.parser() soup = self.soup answers_num = 0 if soup.find("h3", id="zh-question-answer-num") != None: answers_num = int(soup.find("h3", id="zh-question-answer-num")["data-num"]) return answers_num def get_followers_num(self): if self.soup == None: self.parser() soup = self.soup followers_num = int(soup.find("div", 
class_="zg-gray-normal").a.strong.string) return followers_num def get_topics(self): if self.soup == None: self.parser() soup = self.soup topic_list = soup.find_all("a", class_="zm-item-tag") topics = [] for i in topic_list: topic = i.contents[0].encode("utf-8").replace("\n", "") if platform.system() == 'Windows': topic = topic.decode('utf-8').encode('gbk') topics.append(topic) return topics def get_all_answers(self): answers_num = self.get_answers_num() if answers_num == 0: print "No answer." return yield else: error_answer_count = 0 my_answer_count = 0 for i in xrange((answers_num - 1) / 50 + 1): if i == 0: for j in xrange(min(answers_num, 50)): if self.soup == None: self.parser() soup = BeautifulSoup(self.soup.encode("utf-8")) is_my_answer = False if soup.find_all("div", class_="zm-item-answer")[j].find("span", class_="count") == None: my_answer_count += 1 is_my_answer = True if soup.find_all("div", class_="zm-item-answer")[j].find("div", class_=" zm-editable-content clearfix") == None: error_answer_count += 1 continue author = None if soup.find_all("h3", class_="zm-item-answer-author-wrap")[j].string == u"匿名用户": author_url = None author = User(author_url) else: author_tag = soup.find_all("h3", class_="zm-item-answer-author-wrap")[j].find_all("a")[1] author_id = author_tag.string.encode("utf-8") author_url = "http://www.zhihu.com" + author_tag["href"] author = User(author_url, author_id) if is_my_answer == True: count = soup.find_all("div", class_="zm-item-answer")[j].find("a", class_="zm-item-vote-count").string else: count = soup.find_all("span", class_="count")[j - my_answer_count].string if count[-1] == "K": upvote = int(count[0:(len(count) - 1)]) * 1000 elif count[-1] == "W": upvote = int(count[0:(len(count) - 1)]) * 10000 else: upvote = int(count) answer_url = "http://www.zhihu.com" + soup.find_all("a", class_="answer-date-link")[j]["href"] answer = soup.find_all("div", class_=" zm-editable-content clearfix")[j - error_answer_count] soup.body.extract() 
soup.head.insert_after(soup.new_tag("body", **{'class': 'zhi'})) soup.body.append(answer) img_list = soup.find_all("img", class_="content_image lazy") for img in img_list: img["src"] = img["data-actualsrc"] img_list = soup.find_all("img", class_="origin_image zh-lightbox-thumb lazy") for img in img_list: img["src"] = img["data-actualsrc"] noscript_list = soup.find_all("noscript") for noscript in noscript_list: noscript.extract() content = soup answer = Answer(answer_url, self, author, upvote, content) yield answer else: post_url = "http://www.zhihu.com/node/QuestionAnswerListV2" _xsrf = self.soup.find("input", attrs={'name': '_xsrf'})["value"] offset = i * 50 params = json.dumps( {"url_token": int(self.url[-8:-1] + self.url[-1]), "pagesize": 50, "offset": offset}) data = { '_xsrf': _xsrf, 'method': "next", 'params': params } header = { 'User-Agent': "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0", 'Host': "www.zhihu.com", 'Referer': self.url } r = requests.post(post_url, data=data, headers=header) answer_list = r.json()["msg"] for j in xrange(min(answers_num - i * 50, 50)): soup = BeautifulSoup(self.soup.encode("utf-8")) answer_soup = BeautifulSoup(answer_list[j]) if answer_soup.find("div", class_=" zm-editable-content clearfix") == None: continue author = None if answer_soup.find("h3", class_="zm-item-answer-author-wrap").string == u"匿名用户": author_url = None author = User(author_url) else: author_tag = answer_soup.find("h3", class_="zm-item-answer-author-wrap").find_all("a")[1] author_id = author_tag.string.encode("utf-8") author_url = "http://www.zhihu.com" + author_tag["href"] author = User(author_url, author_id) if answer_soup.find("span", class_="count") == None: count = answer_soup.find("a", class_="zm-item-vote-count").string else: count = answer_soup.find("span", class_="count").string if count[-1] == "K": upvote = int(count[0:(len(count) - 1)]) * 1000 elif count[-1] == "W": upvote = int(count[0:(len(count) - 1)]) * 10000 
else: upvote = int(count) answer_url = "http://www.zhihu.com" + answer_soup.find("a", class_="answer-date-link")["href"] answer = answer_soup.find("div", class_=" zm-editable-content clearfix") soup.body.extract() soup.head.insert_after(soup.new_tag("body", **{'class': 'zhi'})) soup.body.append(answer) img_list = soup.find_all("img", class_="content_image lazy") for img in img_list: img["src"] = img["data-actualsrc"] img_list = soup.find_all("img", class_="origin_image zh-lightbox-thumb lazy") for img in img_list: img["src"] = img["data-actualsrc"] noscript_list = soup.find_all("noscript") for noscript in noscript_list: noscript.extract() content = soup answer = Answer(answer_url, self, author, upvote, content) yield answer def get_top_i_answers(self, n): # if n > self.get_answers_num(): # n = self.get_answers_num() j = 0 answers = self.get_all_answers() for answer in answers: j = j + 1 if j > n: break yield answer def get_top_answer(self): for answer in self.get_top_i_answers(1): return answer def get_visit_times(self): if self.soup == None: self.parser() soup = self.soup return int(soup.find("meta", itemprop="visitsCount")["content"]) class User: user_url = None # session = None soup = None def __init__(self, user_url, user_id=None): if user_url == None: self.user_id = "匿名用户" elif user_url[0:28] != "http://www.zhihu.com/people/": raise ValueError("\"" + user_url + "\"" + " : it isn't a user url.") else: self.user_url = user_url if user_id != None: self.user_id = user_id def parser(self): r = requests.get(self.user_url) soup = BeautifulSoup(r.content) self.soup = soup def get_user_id(self): if self.user_url == None: # print "I'm anonymous user." 
if platform.system() == 'Windows': return "匿名用户".decode('utf-8').encode('gbk') else: return "匿名用户" else: if hasattr(self, "user_id"): if platform.system() == 'Windows': return self.user_id.decode('utf-8').encode('gbk') else: return self.user_id else: if self.soup == None: self.parser() soup = self.soup user_id = soup.find("div", class_="title-section ellipsis") \ .find("span", class_="name").string.encode("utf-8") self.user_id = user_id if platform.system() == 'Windows': return user_id.decode('utf-8').encode('gbk') else: return user_id def get_followees_num(self): if self.user_url == None: print "I'm anonymous user." return 0 else: if self.soup == None: self.parser() soup = self.soup followees_num = int(soup.find("div", class_="zm-profile-side-following zg-clear") \ .find("a").strong.string) return followees_num def get_followers_num(self): if self.user_url == None: print "I'm anonymous user." return 0 else: if self.soup == None: self.parser() soup = self.soup followers_num = int(soup.find("div", class_="zm-profile-side-following zg-clear") \ .find_all("a")[1].strong.string) return followers_num def get_agree_num(self): if self.user_url == None: print "I'm anonymous user." return 0 else: if self.soup == None: self.parser() soup = self.soup agree_num = int(soup.find("span", class_="zm-profile-header-user-agree").strong.string) return agree_num def get_thanks_num(self): if self.user_url == None: print "I'm anonymous user." return 0 else: if self.soup == None: self.parser() soup = self.soup thanks_num = int(soup.find("span", class_="zm-profile-header-user-thanks").strong.string) return thanks_num def get_asks_num(self): if self.user_url == None: print "I'm anonymous user." return 0 else: if self.soup == None: self.parser() soup = self.soup asks_num = int(soup.find_all("span", class_="num")[0].string) return asks_num def get_answers_num(self): if self.user_url == None: print "I'm anonymous user." 
return 0 else: if self.soup == None: self.parser() soup = self.soup answers_num = int(soup.find_all("span", class_="num")[1].string) return answers_num def get_collections_num(self): if self.user_url == None: print "I'm anonymous user." return 0 else: if self.soup == None: self.parser() soup = self.soup collections_num = int(soup.find_all("span", class_="num")[3].string) return collections_num def get_followees(self): if self.user_url == None: print "I'm anonymous user." return yield else: followees_num = self.get_followees_num() if followees_num == 0: return yield else: followee_url = self.user_url + "/followees" r = requests.get(followee_url) soup = BeautifulSoup(r.content) for i in xrange((followees_num - 1) / 20 + 1): if i == 0: user_url_list = soup.find_all("h2", class_="zm-list-content-title") for j in xrange(min(followees_num, 20)): yield User(user_url_list[j].a["href"], user_url_list[j].a.string.encode("utf-8")) else: post_url = "http://www.zhihu.com/node/ProfileFolloweesListV2" _xsrf = soup.find("input", attrs={'name': '_xsrf'})["value"] offset = i * 20 hash_id = re.findall("hash_id&quot;: &quot;(.*)&quot;},", r.text)[0] params = json.dumps({"offset": offset, "order_by": "created", "hash_id": hash_id}) data = { '_xsrf': _xsrf, 'method': "next", 'params': params } header = { 'User-Agent': "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0", 'Host': "www.zhihu.com", 'Referer': followee_url } r_post = requests.post(post_url, data=data, headers=header) followee_list = r_post.json()["msg"] for j in xrange(min(followees_num - i * 20, 20)): followee_soup = BeautifulSoup(followee_list[j]) user_link = followee_soup.find("h2", class_="zm-list-content-title").a yield User(user_link["href"], user_link.string.encode("utf-8")) def get_followers(self): if self.user_url == None: print "I'm anonymous user." 
return yield else: followers_num = self.get_followers_num() if followers_num == 0: return yield else: follower_url = self.user_url + "/followers" r = requests.get(follower_url) soup = BeautifulSoup(r.content) for i in xrange((followers_num - 1) / 20 + 1): if i == 0: user_url_list = soup.find_all("h2", class_="zm-list-content-title") for j in xrange(min(followers_num, 20)): yield User(user_url_list[j].a["href"], user_url_list[j].a.string.encode("utf-8")) else: post_url = "http://www.zhihu.com/node/ProfileFollowersListV2" _xsrf = soup.find("input", attrs={'name': '_xsrf'})["value"] offset = i * 20 hash_id = re.findall("hash_id&quot;: &quot;(.*)&quot;},", r.text)[0] params = json.dumps({"offset": offset, "order_by": "created", "hash_id": hash_id}) data = { '_xsrf': _xsrf, 'method': "next", 'params': params } header = { 'User-Agent': "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:34.0) Gecko/20100101 Firefox/34.0", 'Host': "www.zhihu.com", 'Referer': follower_url } r_post = requests.post(post_url, data=data, headers=header) follower_list = r_post.json()["msg"] for j in xrange(min(followers_num - i * 20, 20)): follower_soup = BeautifulSoup(follower_list[j]) user_link = follower_soup.find("h2", class_="zm-list-content-title").a yield User(user_link["href"], user_link.string.encode("utf-8")) def get_asks(self): if self.user_url == None: print "I'm anonymous user." return yield else: asks_num = self.get_asks_num() if asks_num == 0: return yield else: for i in xrange((asks_num - 1) / 20 + 1): ask_url = self.user_url + "/asks?page=" + str(i + 1) r = requests.get(ask_url) soup = BeautifulSoup(r.content) for question in soup.find_all("a", class_="question_link"): url = "http://www.zhihu.com" + question["href"] title = question.string.encode("utf-8") yield Question(url, title) def get_answers(self): if self.user_url == None: print "I'm anonymous user." 
return yield else: answers_num = self.get_answers_num() if answers_num == 0: return yield else: for i in xrange((answers_num - 1) / 20 + 1): answer_url = self.user_url + "/answers?page=" + str(i + 1) r = requests.get(answer_url) soup = BeautifulSoup(r.content) for answer in soup.find_all("a", class_="question_link"): question_url = "http://www.zhihu.com" + answer["href"][0:18] question_title = answer.string.encode("utf-8") question = Question(question_url, question_title) yield Answer("http://www.zhihu.com" + answer["href"], question, self) def get_collections(self): if self.user_url == None: print "I'm anonymous user." return yield else: collections_num = self.get_collections_num() if collections_num == 0: return yield else: for i in xrange((collections_num - 1) / 20 + 1): collection_url = self.user_url + "/collections?page=" + str(i + 1) r = requests.get(collection_url) soup = BeautifulSoup(r.content) for collection in soup.find_all("div", class_="zm-profile-section-item zg-clear"): url = "http://www.zhihu.com" + \ collection.find("a", class_="zm-profile-fav-item-title")["href"] name = collection.find("a", class_="zm-profile-fav-item-title").string.encode("utf-8") yield Collection(url, name, self) class Answer: answer_url = None # session = None soup = None def __init__(self, answer_url, question=None, author=None, upvote=None, content=None): self.answer_url = answer_url if question != None: self.question = question if author != None: self.author = author if upvote != None: self.upvote = upvote if content != None: self.content = content def parser(self): r = requests.get(self.answer_url) soup = BeautifulSoup(r.content) self.soup = soup def get_answer_id(self): return self.answer_url[len(self.answer_url)-7:len(self.answer_url)] def get_question(self): if hasattr(self, "question"): return self.question else: if self.soup == None: self.parser() soup = self.soup question_link = soup.find("h2", class_="zm-item-title zm-editable-content").a url = "http://www.zhihu.com" 
+ question_link["href"] title = question_link.string.encode("utf-8") question = Question(url, title) return question def get_author(self): if hasattr(self, "author"): return self.author else: if self.soup == None: self.parser() soup = self.soup if soup.find("h3", class_="zm-item-answer-author-wrap").string == u"匿名用户": author_url = None author = User(author_url) else: author_tag = soup.find("h3", class_="zm-item-answer-author-wrap").find_all("a")[1] author_id = author_tag.string.encode("utf-8") author_url = "http://www.zhihu.com" + author_tag["href"] author = User(author_url, author_id) return author def get_upvote(self): if hasattr(self, "upvote"): return self.upvote else: if self.soup == None: self.parser() soup = self.soup count = soup.find("span", class_="count").string if count[-1] == "K": upvote = int(count[0:(len(count) - 1)]) * 1000 elif count[-1] == "W": upvote = int(count[0:(len(count) - 1)]) * 10000 else: upvote = int(count) return upvote def get_content(self): if hasattr(self, "content"): return self.content else: if self.soup == None: self.parser() soup = BeautifulSoup(self.soup.encode("utf-8")) answer = soup.find("div", class_=" zm-editable-content clearfix") soup.body.extract() soup.head.insert_after(soup.new_tag("body", **{'class': 'zhi'})) soup.body.append(answer) img_list = soup.find_all("img", class_="content_image lazy") for img in img_list: img["src"] = img["data-actualsrc"] img_list = soup.find_all("img", class_="origin_image zh-lightbox-thumb lazy") for img in img_list: img["src"] = img["data-actualsrc"] noscript_list = soup.find_all("noscript") for noscript in noscript_list: noscript.extract() content = soup self.content = content return content def to_txt(self): content = self.get_content() body = content.find("body") br_list = body.find_all("br") for br in br_list: br.insert_after(content.new_string("\n")) li_list = body.find_all("li") for li in li_list: li.insert_before(content.new_string("\n")) if platform.system() == 'Windows': 
anon_user_id = "匿名用户".decode('utf-8').encode('gbk') else: anon_user_id = "匿名用户" if self.get_author().get_user_id() == anon_user_id: if not os.path.isdir(os.path.join(os.path.join(os.getcwd(), "text"))): os.makedirs(os.path.join(os.path.join(os.getcwd(), "text"))) if platform.system() == 'Windows': file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.txt".decode( 'utf-8').encode('gbk') else: file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.txt" print file_name # if platform.system() == 'Windows': # file_name = file_name.decode('utf-8').encode('gbk') # print file_name # else: # print file_name if os.path.exists(os.path.join(os.path.join(os.getcwd(), "text"), file_name)): f = open(os.path.join(os.path.join(os.getcwd(), "text"), file_name), "a") f.write("\n\n") else: f = open(os.path.join(os.path.join(os.getcwd(), "text"), file_name), "a") f.write(self.get_question().get_title() + "\n\n") else: if not os.path.isdir(os.path.join(os.path.join(os.getcwd(), "text"))): os.makedirs(os.path.join(os.path.join(os.getcwd(), "text"))) if platform.system() == 'Windows': file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.txt".decode( 'utf-8').encode('gbk') else: file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.txt" print file_name # if platform.system() == 'Windows': # file_name = file_name.decode('utf-8').encode('gbk') # print file_name # else: # print file_name f = open(os.path.join(os.path.join(os.getcwd(), "text"), file_name), "wt") f.write(self.get_question().get_title() + "\n\n") if platform.system() == 'Windows': f.write("作者: ".decode('utf-8').encode('gbk') + self.get_author().get_user_id() + " 赞同: ".decode( 'utf-8').encode('gbk') + str(self.get_upvote()) + "\n\n") f.write(body.get_text().encode("gbk")) link_str = "原链接: ".decode('utf-8').encode('gbk') f.write("\n" + link_str + 
self.answer_url.decode('utf-8').encode('gbk')) else: f.write("作者: " + self.get_author().get_user_id() + " 赞同: " + str(self.get_upvote()) + "\n\n") f.write(body.get_text().encode("utf-8")) f.write("\n" + "原链接: " + self.answer_url) f.close() # def to_html(self): # content = self.get_content() # if self.get_author().get_user_id() == "匿名用户": # file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.html" # f = open(file_name, "wt") # print file_name # else: # file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.html" # f = open(file_name, "wt") # print file_name # f.write(str(content)) # f.close() def to_md(self): content = self.get_content() if platform.system() == 'Windows': anon_user_id = "匿名用户".decode('utf-8').encode('gbk') else: anon_user_id = "匿名用户" if self.get_author().get_user_id() == anon_user_id: if platform.system() == 'Windows': file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.md".decode( 'utf-8').encode('gbk') else: file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.md" print file_name # if platform.system() == 'Windows': # file_name = file_name.decode('utf-8').encode('gbk') # print file_name # else: # print file_name if not os.path.isdir(os.path.join(os.path.join(os.getcwd(), "markdown"))): os.makedirs(os.path.join(os.path.join(os.getcwd(), "markdown"))) if os.path.exists(os.path.join(os.path.join(os.getcwd(), "markdown"), file_name)): f = open(os.path.join(os.path.join(os.getcwd(), "markdown"), file_name), "a") f.write("\n") else: f = open(os.path.join(os.path.join(os.getcwd(), "markdown"), file_name), "a") f.write("# " + self.get_question().get_title() + "\n") else: if not os.path.isdir(os.path.join(os.path.join(os.getcwd(), "markdown"))): os.makedirs(os.path.join(os.path.join(os.getcwd(), "markdown"))) if platform.system() == 'Windows': file_name = self.get_question().get_title() + "--" + 
self.get_author().get_user_id() + "的回答.md".decode( 'utf-8').encode('gbk') else: file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.md" print file_name # file_name = self.get_question().get_title() + "--" + self.get_author().get_user_id() + "的回答.md" # if platform.system() == 'Windows': # file_name = file_name.decode('utf-8').encode('gbk') # print file_name # else: # print file_name f = open(os.path.join(os.path.join(os.getcwd(), "markdown"), file_name), "wt") f.write("# " + self.get_question().get_title() + "\n") if platform.system() == 'Windows': f.write("## 作者: ".decode('utf-8').encode('gbk') + self.get_author().get_user_id() + " 赞同: ".decode( 'utf-8').encode('gbk') + str(self.get_upvote()) + "\n") else: f.write("## 作者: " + self.get_author().get_user_id() + " 赞同: " + str(self.get_upvote()) + "\n") text = html2text.html2text(content.decode('utf-8')).encode("utf-8") r = re.findall(r'\*\*(.*?)\*\*', text) for i in r: if i != " ": text = text.replace(i, i.strip()) r = re.findall(r'_(.*)_', text) for i in r: if i != " ": text = text.replace(i, i.strip()) r = re.findall(r'!\[\]\((?:.*?)\)', text) for i in r: text = text.replace(i, i + "\n\n") if platform.system() == 'Windows': f.write(text.decode('utf-8').encode('gbk')) link_str = "#### 原链接: ".decode('utf-8').encode('gbk') f.write(link_str + self.answer_url.decode('utf-8').encode('gbk')) else: f.write(text) f.write("#### 原链接: " + self.answer_url) f.close() def get_visit_times(self): if self.soup == None: self.parser() soup = self.soup for tag_p in soup.find_all("p"): if "所属问题被浏览" in tag_p.contents[0].encode('utf-8'): return int(tag_p.contents[1].contents[0]) def get_voters(self): if self.soup == None: self.parser() soup = self.soup data_aid = soup.find("div", class_="zm-item-answer ")["data-aid"] request_url = 'http://www.zhihu.com/node/AnswerFullVoteInfoV2' # if session == None: # create_session() # s = session # r = s.get(request_url, params={"params": "{\"answer_id\":\"%d\"}" 
% int(data_aid)}) r = requests.get(request_url, params={"params": "{\"answer_id\":\"%d\"}" % int(data_aid)}) soup = BeautifulSoup(r.content) voters_info = soup.find_all("span")[1:-1] if len(voters_info) == 0: return yield else: for voter_info in voters_info: if voter_info.string == ( u"匿名用户、" or u"匿名用户"): voter_url = None yield User(voter_url) else: voter_url = "http://www.zhihu.com" + str(voter_info.a["href"]) voter_id = voter_info.a["title"].encode("utf-8") yield User(voter_url, voter_id) class Collection: url = None # session = None soup = None def __init__(self, url, name=None, creator=None): if url[0:len(url) - 8] != "http://www.zhihu.com/collection/": raise ValueError("\"" + url + "\"" + " : it isn't a collection url.") else: self.url = url # print 'collection url',url if name != None: self.name = name if creator != None: self.creator = creator def parser(self): r = requests.get(self.url) soup = BeautifulSoup(r.content) self.soup = soup def get_name(self): if hasattr(self, 'name'): if platform.system() == 'Windows': return self.name.decode('utf-8').encode('gbk') else: return self.name else: if self.soup == None: self.parser() soup = self.soup self.name = soup.find("h2", id="zh-fav-head-title").string.encode("utf-8").strip() if platform.system() == 'Windows': return self.name.decode('utf-8').encode('gbk') return self.name def get_creator(self): if hasattr(self, 'creator'): return self.creator else: if self.soup == None: self.parser() soup = self.soup creator_id = soup.find("h2", class_="zm-list-content-title").a.string.encode("utf-8") creator_url = "http://www.zhihu.com" + soup.find("h2", class_="zm-list-content-title").a["href"] creator = User(creator_url, creator_id) self.creator = creator return creator def get_all_answers(self): if self.soup == None: self.parser() soup = self.soup answer_list = soup.find_all("div", class_="zm-item") if len(answer_list) == 0: print "the collection is empty." 
return yield else: question_url = None question_title = None for answer in answer_list: if not answer.find("p", class_="note"): question_link = answer.find("h2") if question_link != None: question_url = "http://www.zhihu.com" + question_link.a["href"] question_title = question_link.a.string.encode("utf-8") question = Question(question_url, question_title) answer_url = "http://www.zhihu.com" + answer.find("span", class_="answer-date-link-wrap").a["href"] author = None if answer.find("h3", class_="zm-item-answer-author-wrap").string == u"匿名用户": author_url = None author = User(author_url) else: author_tag = answer.find("h3", class_="zm-item-answer-author-wrap").find_all("a")[0] author_id = author_tag.string.encode("utf-8") author_url = "http://www.zhihu.com" + author_tag["href"] author = User(author_url, author_id) yield Answer(answer_url, question, author) i = 2 while True: r = requests.get(self.url + "?page=" + str(i)) answer_soup = BeautifulSoup(r.content) answer_list = answer_soup.find_all("div", class_="zm-item") if len(answer_list) == 0: break else: for answer in answer_list: if not answer.find("p", class_="note"): question_link = answer.find("h2") if question_link != None: question_url = "http://www.zhihu.com" + question_link.a["href"] question_title = question_link.a.string.encode("utf-8") question = Question(question_url, question_title) answer_url = "http://www.zhihu.com" + answer.find("span", class_="answer-date-link-wrap").a[ "href"] author = None if answer.find("h3", class_="zm-item-answer-author-wrap").string == u"匿名用户": # author_id = "匿名用户" author_url = None author = User(author_url) else: author_tag = answer.find("h3", class_="zm-item-answer-author-wrap").find_all("a")[0] author_id = author_tag.string.encode("utf-8") author_url = "http://www.zhihu.com" + author_tag["href"] author = User(author_url, author_id) yield Answer(answer_url, question, author) i = i + 1 def get_top_i_answers(self, n): j = 0 answers = self.get_all_answers() for answer in 
answers: j = j + 1 if j > n: break yield answer class Search: keyword = None search_type = None search_types = ('question', 'people', 'topic') search_url = None soup = None def __init__(self, keyword, search_type=None, search_url=None): self.keyword = keyword if search_type != None and search_type in self.search_types: self.search_type=search_type if search_url != None: self.search_url = search_url else: self.getSearchURL() self.parser() def parser(self): r = requests.get(self.search_url) self.soup = BeautifulSoup(r.content) def get_search_url(self): urlhead='http://www.zhihu.com/search?type=' search_url = urlhead + self.search_type + '&q=' + self.keyword self.search_url = search_url return search_url def get_questions(self): if self.search_type != 'question': return False else: question_list = self.soup.find_all("a", class_="question-link") questions = [] for question in question_list: question_url = "http://www.zhihu.com" + question["href"] questions.append(question_url) return questions def get_peoples(self): if self.search_type != 'people': return False else: people_list = self.soup.find_all("a", class_="name-link") peoples = [] for people in people_list: people_url = "http://www.zhihu.com"+people["href"] peoples.append(people_url) return peoples def get_topics(self): if self.search_type != 'topic': return False else: topic_list = self.soup.find_all("a", class_="name-link")["href"] topics = [] for topic in topic_list: topic_url = "http://www.zhihu.com"+topic["href"] topics.append(topic_url) return topics #to be continue #class Topic: # def __init__(self):
mit
gkno/gkno_launcher
src/networkx/algorithms/richclub.py
47
3516
# -*- coding: utf-8 -*- import networkx as nx __author__ = """\n""".join(['Ben Edwards', 'Aric Hagberg <hagberg@lanl.gov>']) __all__ = ['rich_club_coefficient'] def rich_club_coefficient(G, normalized=True, Q=100): """Return the rich-club coefficient of the graph G. The rich-club coefficient is the ratio, for every degree k, of the number of actual to the number of potential edges for nodes with degree greater than k: .. math:: \\phi(k) = \\frac{2 Ek}{Nk(Nk-1)} where Nk is the number of nodes with degree larger than k, and Ek be the number of edges among those nodes. Parameters ---------- G : NetworkX graph normalized : bool (optional) Normalize using randomized network (see [1]_) Q : float (optional, default=100) If normalized=True build a random network by performing Q*M double-edge swaps, where M is the number of edges in G, to use as a null-model for normalization. Returns ------- rc : dictionary A dictionary, keyed by degree, with rich club coefficient values. Examples -------- >>> G = nx.Graph([(0,1),(0,2),(1,2),(1,3),(1,4),(4,5)]) >>> rc = nx.rich_club_coefficient(G,normalized=False) >>> rc[0] # doctest: +SKIP 0.4 Notes ------ The rich club definition and algorithm are found in [1]_. This algorithm ignores any edge weights and is not defined for directed graphs or graphs with parallel edges or self loops. Estimates for appropriate values of Q are found in [2]_. References ---------- .. [1] Julian J. McAuley, Luciano da Fontoura Costa, and Tibério S. Caetano, "The rich-club phenomenon across complex network hierarchies", Applied Physics Letters Vol 91 Issue 8, August 2007. http://arxiv.org/abs/physics/0701290 .. [2] R. Milo, N. Kashtan, S. Itzkovitz, M. E. J. Newman, U. Alon, "Uniform generation of random graphs with arbitrary degree sequences", 2006. 
http://arxiv.org/abs/cond-mat/0312028 """ if G.is_multigraph() or G.is_directed(): raise Exception('rich_club_coefficient is not implemented for ', 'directed or multiedge graphs.') if len(G.selfloop_edges()) > 0: raise Exception('rich_club_coefficient is not implemented for ', 'graphs with self loops.') rc=_compute_rc(G) if normalized: # make R a copy of G, randomize with Q*|E| double edge swaps # and use rich_club coefficient of R to normalize R = G.copy() E = R.number_of_edges() nx.double_edge_swap(R,Q*E,max_tries=Q*E*10) rcran=_compute_rc(R) for d in rc: # if rcran[d] > 0: rc[d]/=rcran[d] return rc def _compute_rc(G): # compute rich club coefficient for all k degrees in G deghist = nx.degree_histogram(G) total = sum(deghist) # number of nodes with degree > k (omit last entry which is zero) nks = [total-cs for cs in nx.utils.cumulative_sum(deghist) if total-cs > 1] deg=G.degree() edge_degrees=sorted(sorted((deg[u],deg[v])) for u,v in G.edges_iter()) ek=G.number_of_edges() k1,k2=edge_degrees.pop(0) rc={} for d,nk in zip(range(len(nks)),nks): while k1 <= d: if len(edge_degrees)==0: break k1,k2=edge_degrees.pop(0) ek-=1 rc[d] = 2.0*ek/(nk*(nk-1)) return rc
mit
jk1/intellij-community
python/helpers/py3only/docutils/parsers/rst/directives/images.py
44
6933
# $Id: images.py 7753 2014-06-24 14:52:59Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.

"""
Directives for figures and simple images.
"""

__docformat__ = 'reStructuredText'

import sys
import urllib.error
import urllib.parse
import urllib.request

from docutils import nodes
from docutils.nodes import fully_normalize_name, whitespace_normalize_name
from docutils.parsers.rst import Directive
from docutils.parsers.rst import directives, states
from docutils.parsers.rst.roles import set_classes

try: # check for the Python Imaging Library
    import PIL.Image
except ImportError:
    try:  # sometimes PIL modules are put in PYTHONPATH's root
        import Image
        class PIL(object): pass  # dummy wrapper
        PIL.Image = Image
    except ImportError:
        # No PIL available: the Figure directive's ``:figwidth: image``
        # option silently degrades (width is simply not computed).
        PIL = None


class Image(Directive):
    """reStructuredText ``image`` directive.

    Builds a ``nodes.image`` element from the directive's single URI
    argument and its options; optionally wraps it in a ``reference``
    node when a ``:target:`` option is given.
    """

    align_h_values = ('left', 'center', 'right')
    align_v_values = ('top', 'middle', 'bottom')
    align_values = align_v_values + align_h_values

    def align(argument):
        # This is not callable as self.align.  We cannot make it a
        # staticmethod because we're saving an unbound method in
        # option_spec below.
        return directives.choice(argument, Image.align_values)

    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {'alt': directives.unchanged,
                   'height': directives.length_or_unitless,
                   'width': directives.length_or_percentage_or_unitless,
                   'scale': directives.percentage,
                   'align': align,
                   'name': directives.unchanged,
                   'target': directives.unchanged_required,
                   'class': directives.class_option}

    def run(self):
        """Parse options/target and return the list of generated nodes.

        Returns collected system messages followed by either a
        ``reference`` node wrapping the image (when ``:target:`` was
        valid) or the bare ``image`` node.
        """
        if 'align' in self.options:
            if isinstance(self.state, states.SubstitutionDef):
                # Inside a substitution definition only vertical
                # alignment values make sense.
                # Check for align_v_values.
                if self.options['align'] not in self.align_v_values:
                    raise self.error(
                        'Error in "%s" directive: "%s" is not a valid value '
                        'for the "align" option within a substitution '
                        'definition. Valid values for "align" are: "%s".'
                        % (self.name, self.options['align'],
                           '", "'.join(self.align_v_values)))
            elif self.options['align'] not in self.align_h_values:
                raise self.error(
                    'Error in "%s" directive: "%s" is not a valid value for '
                    'the "align" option. Valid values for "align" are: "%s".'
                    % (self.name, self.options['align'],
                       '", "'.join(self.align_h_values)))
        messages = []
        reference = directives.uri(self.arguments[0])
        self.options['uri'] = reference
        reference_node = None
        if 'target' in self.options:
            block = states.escape2null(
                self.options['target']).splitlines()
            block = [line for line in block]
            target_type, data = self.state.parse_target(
                block, self.block_text, self.lineno)
            if target_type == 'refuri':
                reference_node = nodes.reference(refuri=data)
            elif target_type == 'refname':
                reference_node = nodes.reference(
                    refname=fully_normalize_name(data),
                    name=whitespace_normalize_name(data))
                reference_node.indirect_reference_name = data
                self.state.document.note_refname(reference_node)
            else:                           # malformed target
                messages.append(data)       # data is a system message
            del self.options['target']
        set_classes(self.options)
        image_node = nodes.image(self.block_text, **self.options)
        self.add_name(image_node)
        if reference_node:
            # The image becomes the content of the hyperlink reference.
            reference_node += image_node
            return messages + [reference_node]
        else:
            return messages + [image_node]


class Figure(Image):
    """reStructuredText ``figure`` directive.

    A figure is an image with an optional caption (first content
    paragraph) and legend (remaining content), wrapped in a
    ``nodes.figure`` element.  Only horizontal alignment is allowed.
    """

    def align(argument):
        return directives.choice(argument, Figure.align_h_values)

    def figwidth_value(argument):
        # 'image' means: take the width from the image file itself
        # (requires PIL, see run() below).
        if argument.lower() == 'image':
            return 'image'
        else:
            return directives.length_or_percentage_or_unitless(argument, 'px')

    option_spec = Image.option_spec.copy()
    option_spec['figwidth'] = figwidth_value
    option_spec['figclass'] = directives.class_option
    option_spec['align'] = align
    has_content = True

    def run(self):
        """Build the figure node around the image produced by Image.run()."""
        figwidth = self.options.pop('figwidth', None)
        figclasses = self.options.pop('figclass', None)
        align = self.options.pop('align', None)
        (image_node,) = Image.run(self)
        if isinstance(image_node, nodes.system_message):
            # Image.run() failed; propagate its error message unchanged.
            return [image_node]
        figure_node = nodes.figure('', image_node)
        if figwidth == 'image':
            if PIL and self.state.document.settings.file_insertion_enabled:
                imagepath = urllib.request.url2pathname(image_node['uri'])
                try:
                    # NOTE(review): the .encode() is a Python-2-era idiom;
                    # presumably PIL accepts the bytes path — confirm on
                    # the supported PIL/Pillow versions.
                    img = PIL.Image.open(
                            imagepath.encode(sys.getfilesystemencoding()))
                except (IOError, UnicodeEncodeError):
                    pass # TODO: warn?
                else:
                    self.state.document.settings.record_dependencies.add(
                        imagepath.replace('\\', '/'))
                    figure_node['width'] = '%dpx' % img.size[0]
                    del img
        elif figwidth is not None:
            figure_node['width'] = figwidth
        if figclasses:
            figure_node['classes'] += figclasses
        if align:
            figure_node['align'] = align
        if self.content:
            node = nodes.Element()          # anonymous container for parsing
            self.state.nested_parse(self.content, self.content_offset, node)
            first_node = node[0]
            if isinstance(first_node, nodes.paragraph):
                # First paragraph of the content becomes the caption.
                caption = nodes.caption(first_node.rawsource, '',
                                        *first_node.children)
                caption.source = first_node.source
                caption.line = first_node.line
                figure_node += caption
            elif not (isinstance(first_node, nodes.comment)
                      and len(first_node) == 0):
                error = self.state_machine.reporter.error(
                      'Figure caption must be a paragraph or empty comment.',
                      nodes.literal_block(self.block_text, self.block_text),
                      line=self.lineno)
                return [figure_node, error]
            if len(node) > 1:
                # Everything after the caption paragraph is the legend.
                figure_node += nodes.legend('', *node[1:])
        return [figure_node]
apache-2.0
Nick-OpusVL/odoo
addons/account/account_bank_statement.py
38
56599
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import fields, osv
from openerp.tools import float_is_zero
from openerp.tools.translate import _
import openerp.addons.decimal_precision as dp
from openerp.report import report_sxw
from openerp.tools import float_compare, float_round
import time


class account_bank_statement(osv.osv):
    """Bank statement: a dated set of statement lines for one journal.

    Old-API (osv.osv) model.  Statement lines are turned into account
    moves on confirmation (see button_confirm_bank).
    """

    def create(self, cr, uid, vals, context=None):
        # Auto-number the statement from the journal sequence when the
        # name is left at its '/' default, and renumber line sequences.
        if vals.get('name', '/') == '/':
            journal_id = vals.get('journal_id', self._default_journal_id(cr, uid, context=context))
            vals['name'] = self._compute_default_statement_name(cr, uid, journal_id, context=context)
        if 'line_ids' in vals:
            # line_ids entries are (0, 0, values) command triples; the
            # payload dict is at index 2.
            for idx, line in enumerate(vals['line_ids']):
                line[2]['sequence'] = idx + 1
        return super(account_bank_statement, self).create(cr, uid, vals, context=context)

    def write(self, cr, uid, ids, vals, context=None):
        # After a write, force line sequences back to their list order.
        res = super(account_bank_statement, self).write(cr, uid, ids, vals, context=context)
        account_bank_statement_line_obj = self.pool.get('account.bank.statement.line')
        for statement in self.browse(cr, uid, ids, context):
            for idx, line in enumerate(statement.line_ids):
                account_bank_statement_line_obj.write(cr, uid, [line.id], {'sequence': idx + 1}, context=context)
        return res

    def _default_journal_id(self, cr, uid, context=None):
        """Pick the first journal of context['journal_type'] for the
        current user's default company, or False."""
        if context is None:
            context = {}
        journal_pool = self.pool.get('account.journal')
        journal_type = context.get('journal_type', False)
        company_id = self.pool.get('res.company')._company_default_get(cr, uid, 'account.bank.statement',context=context)
        if journal_type:
            ids = journal_pool.search(cr, uid, [('type', '=', journal_type),('company_id','=',company_id)])
            if ids:
                return ids[0]
        return False

    def _end_balance(self, cursor, user, ids, name, attr, context=None):
        # Function field: starting balance plus the sum of line amounts.
        res = {}
        for statement in self.browse(cursor, user, ids, context=context):
            res[statement.id] = statement.balance_start
            for line in statement.line_ids:
                res[statement.id] += line.amount
        return res

    def _get_period(self, cr, uid, context=None):
        # Default period: first one matching today (per account.period.find).
        periods = self.pool.get('account.period').find(cr, uid, context=context)
        if periods:
            return periods[0]
        return False

    def _compute_default_statement_name(self, cr, uid, journal_id, context=None):
        """Next number from the journal's sequence, within the period's
        fiscal year (passed via context['fiscalyear_id'])."""
        context = dict(context or {})
        obj_seq = self.pool.get('ir.sequence')
        period = self.pool.get('account.period').browse(cr, uid, self._get_period(cr, uid, context=context), context=context)
        context['fiscalyear_id'] = period.fiscalyear_id.id
        journal = self.pool.get('account.journal').browse(cr, uid, journal_id, None)
        return obj_seq.next_by_id(cr, uid, journal.sequence_id.id, context=context)

    def _currency(self, cursor, user, ids, name, args, context=None):
        """Function field: the journal currency, falling back to the
        user's company currency; returned as (id, name) pairs."""
        res = {}
        res_currency_obj = self.pool.get('res.currency')
        res_users_obj = self.pool.get('res.users')
        default_currency = res_users_obj.browse(cursor, user,
                user, context=context).company_id.currency_id
        for statement in self.browse(cursor, user, ids, context=context):
            currency = statement.journal_id.currency
            if not currency:
                currency = default_currency
            res[statement.id] = currency.id
        currency_names = {}
        for currency_id, currency_name in res_currency_obj.name_get(cursor,
                user, [x for x in res.values()], context=context):
            currency_names[currency_id] = currency_name
        for statement_id in res.keys():
            currency_id = res[statement_id]
            res[statement_id] = (currency_id, currency_names[currency_id])
        return res

    def _get_statement(self, cr, uid, ids, context=None):
        # store= trigger: statements impacted by changed statement lines.
        result = {}
        for line in self.pool.get('account.bank.statement.line').browse(cr, uid, ids, context=context):
            result[line.statement_id.id] = True
        return result.keys()

    def _all_lines_reconciled(self, cr, uid, ids, name, args, context=None):
        # A line counts as "reconciled" if it has a journal entry or a
        # direct account_id set (handled without reconciliation widget).
        res = {}
        for statement in self.browse(cr, uid, ids, context=context):
            res[statement.id] = all([line.journal_entry_id.id or line.account_id.id for line in statement.line_ids])
        return res

    _order = "date desc, id desc"
    _name = "account.bank.statement"
    _description = "Bank Statement"
    _inherit = ['mail.thread']
    _columns = {
        'name': fields.char(
            'Reference', states={'draft': [('readonly', False)]},
            readonly=True, # readonly for account_cash_statement
            copy=False,
            help='if you give the Name other then /, its created Accounting Entries Move '
                 'will be with same name as statement name. '
                 'This allows the statement entries to have the same references than the '
                 'statement itself'),
        'date': fields.date('Date', required=True, states={'confirm': [('readonly', True)]},
                            select=True, copy=False),
        'journal_id': fields.many2one('account.journal', 'Journal', required=True,
            readonly=True, states={'draft':[('readonly',False)]}),
        'period_id': fields.many2one('account.period', 'Period', required=True,
            states={'confirm':[('readonly', True)]}),
        'balance_start': fields.float('Starting Balance', digits_compute=dp.get_precision('Account'),
            states={'confirm':[('readonly',True)]}),
        'balance_end_real': fields.float('Ending Balance', digits_compute=dp.get_precision('Account'),
            states={'confirm': [('readonly', True)]}, help="Computed using the cash control lines"),
        'balance_end': fields.function(_end_balance,
            store = {
                'account.bank.statement': (lambda self, cr, uid, ids, c={}: ids, ['line_ids','move_line_ids','balance_start'], 10),
                'account.bank.statement.line': (_get_statement, ['amount'], 10),
            },
            string="Computed Balance", help='Balance as calculated based on Opening Balance and transaction lines'),
        'company_id': fields.related('journal_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True),
        'line_ids': fields.one2many('account.bank.statement.line',
                                    'statement_id', 'Statement lines',
                                    states={'confirm':[('readonly', True)]}, copy=True),
        'move_line_ids': fields.one2many('account.move.line', 'statement_id',
                                         'Entry lines', states={'confirm':[('readonly',True)]}),
        # NOTE(review): readonly="1" is a string, not a boolean — kept as-is
        # since the old API accepts any truthy value here.
        'state': fields.selection([('draft', 'New'),
                                   ('open','Open'), # used by cash statements
                                   ('confirm', 'Closed')],
                                   'Status', required=True, readonly="1",
                                   copy=False,
                                   help='When new statement is created the status will be \'Draft\'.\n'
                                        'And after getting confirmation from the bank it will be in \'Confirmed\' status.'),
        'currency': fields.function(_currency, string='Currency',
            type='many2one', relation='res.currency'),
        'account_id': fields.related('journal_id', 'default_debit_account_id', type='many2one', relation='account.account', string='Account used in this journal', readonly=True, help='used in statement reconciliation domain, but shouldn\'t be used elswhere.'),
        'cash_control': fields.related('journal_id', 'cash_control' , type='boolean', relation='account.journal',string='Cash control'),
        'all_lines_reconciled': fields.function(_all_lines_reconciled, string='All lines reconciled', type='boolean'),
    }

    _defaults = {
        'name': '/',
        'date': fields.date.context_today,
        'state': 'draft',
        'journal_id': _default_journal_id,
        'period_id': _get_period,
        'company_id': lambda self,cr,uid,c: self.pool.get('res.company')._company_default_get(cr, uid, 'account.bank.statement',context=c),
    }

    def _check_company_id(self, cr, uid, ids, context=None):
        # Constraint: journal company must match the period's company.
        for statement in self.browse(cr, uid, ids, context=context):
            if statement.company_id.id != statement.period_id.company_id.id:
                return False
        return True

    _constraints = [
        (_check_company_id, 'The journal and period chosen have to belong to the same company.', ['journal_id','period_id']),
    ]

    def onchange_date(self, cr, uid, ids, date, company_id, context=None):
        """
        Find the correct period to use for the given date and company_id, return it and set it in the context
        """
        res = {}
        period_pool = self.pool.get('account.period')

        if context is None:
            context = {}
        ctx = context.copy()
        ctx.update({'company_id': company_id})
        pids = period_pool.find(cr, uid, dt=date, context=ctx)
        if pids:
            res.update({'period_id': pids[0]})
            context = dict(context, period_id=pids[0])

        return {
            'value':res,
            'context':context,
        }

    def button_dummy(self, cr, uid, ids, context=None):
        # No-op write; used by the form view to force a save/recompute.
        return self.write(cr, uid, ids, {}, context=context)

    def _prepare_move(self, cr, uid, st_line, st_line_number, context=None):
        """Prepare the dict of values to create the move from a
           statement line. This method may be overridden to implement custom
           move generation (making sure to call super() to establish
           a clean extension chain).

           :param browse_record st_line: account.bank.statement.line record to
                  create the move from.
           :param char st_line_number: will be used as the name of the generated account move
           :return: dict of value to create() the account.move
        """
        return {
            'journal_id': st_line.statement_id.journal_id.id,
            'period_id': st_line.statement_id.period_id.id,
            'date': st_line.date,
            'name': st_line_number,
            'ref': st_line.ref,
        }

    def _get_counter_part_account(self, cr, uid, st_line, context=None):
        """Retrieve the account to use in the counterpart move.

           :param browse_record st_line: account.bank.statement.line record to create the move from.
           :return: int/long of the account.account to use as counterpart
        """
        if st_line.amount >= 0:
            return st_line.statement_id.journal_id.default_credit_account_id.id
        return st_line.statement_id.journal_id.default_debit_account_id.id

    def _get_counter_part_partner(self, cr, uid, st_line, context=None):
        """Retrieve the partner to use in the counterpart move.

           :param browse_record st_line: account.bank.statement.line record to create the move from.
           :return: int/long of the res.partner to use as counterpart
        """
        return st_line.partner_id and st_line.partner_id.id or False

    def _prepare_bank_move_line(self, cr, uid, st_line, move_id, amount, company_currency_id, context=None):
        """Compute the args to build the dict of values to create the counter part move line from a
           statement line by calling the _prepare_move_line_vals.

           :param browse_record st_line: account.bank.statement.line record to create the move from.
           :param int/long move_id: ID of the account.move to link the move line
           :param float amount: amount of the move line
           :param int/long company_currency_id: ID of currency of the concerned company
           :return: dict of value to create() the bank account.move.line
        """
        account_id = self._get_counter_part_account(cr, uid, st_line, context=context)
        partner_id = self._get_counter_part_partner(cr, uid, st_line, context=context)
        debit = ((amount > 0) and amount) or 0.0
        credit = ((amount < 0) and -amount) or 0.0
        cur_id = False
        amt_cur = False
        # Fill amount_currency only when the statement is not in the
        # company currency, or when the line carries its own currency.
        if st_line.statement_id.currency.id != company_currency_id:
            amt_cur = st_line.amount
            cur_id = st_line.statement_id.currency.id
        elif st_line.currency_id and st_line.amount_currency:
            amt_cur = st_line.amount_currency
            cur_id = st_line.currency_id.id
        return self._prepare_move_line_vals(cr, uid, st_line, move_id, debit, credit,
            amount_currency=amt_cur, currency_id=cur_id, account_id=account_id,
            partner_id=partner_id, context=context)

    def _prepare_move_line_vals(self, cr, uid, st_line, move_id, debit, credit, currency_id=False,
                amount_currency=False, account_id=False, partner_id=False, context=None):
        """Prepare the dict of values to create the move line from a
           statement line.

           :param browse_record st_line: account.bank.statement.line record to
                  create the move from.
           :param int/long move_id: ID of the account.move to link the move line
           :param float debit: debit amount of the move line
           :param float credit: credit amount of the move line
           :param int/long currency_id: ID of currency of the move line to create
           :param float amount_currency: amount of the debit/credit expressed in the currency_id
           :param int/long account_id: ID of the account to use in the move line if different
                  from the statement line account ID
           :param int/long partner_id: ID of the partner to put on the move line
           :return: dict of value to create() the account.move.line
        """
        acc_id = account_id or st_line.account_id.id
        cur_id = currency_id or st_line.statement_id.currency.id
        par_id = partner_id or (((st_line.partner_id) and st_line.partner_id.id) or False)
        return {
            'name': st_line.name,
            'date': st_line.date,
            'ref': st_line.ref,
            'move_id': move_id,
            'partner_id': par_id,
            'account_id': acc_id,
            'credit': credit,
            'debit': debit,
            'statement_id': st_line.statement_id.id,
            'journal_id': st_line.statement_id.journal_id.id,
            'period_id': st_line.statement_id.period_id.id,
            # currency_id is only set when there is an amount_currency.
            'currency_id': amount_currency and cur_id,
            'amount_currency': amount_currency,
        }

    def balance_check(self, cr, uid, st_id, journal_type='bank', context=None):
        """Raise unless computed and real ending balances agree
        (within 0.0001)."""
        st = self.browse(cr, uid, st_id, context=context)
        # NOTE(review): both operands of this `or` are identical — the
        # second test is redundant (kept verbatim from upstream).
        if not ((abs((st.balance_end or 0.0) - st.balance_end_real) < 0.0001) or (abs((st.balance_end or 0.0) - st.balance_end_real) < 0.0001)):
            raise osv.except_osv(_('Error!'),
                    _('The statement balance is incorrect !\nThe expected balance (%.2f) is different than the computed one. (%.2f)') % (st.balance_end_real, st.balance_end))
        return True

    def statement_close(self, cr, uid, ids, journal_type='bank', context=None):
        # Terminal state transition; cash statements override journal_type.
        return self.write(cr, uid, ids, {'state':'confirm'}, context=context)

    def check_status_condition(self, cr, uid, state, journal_type='bank'):
        # Only draft/open statements may be confirmed.
        return state in ('draft','open')

    def button_confirm_bank(self, cr, uid, ids, context=None):
        """Validate each statement: check balances and journal accounts,
        create/collect the journal entries of all lines, post them, and
        move the statement to 'confirm'."""
        if context is None:
            context = {}

        for st in self.browse(cr, uid, ids, context=context):
            j_type = st.journal_id.type
            if not self.check_status_condition(cr, uid, st.state, journal_type=j_type):
                continue

            self.balance_check(cr, uid, st.id, journal_type=j_type, context=context)
            if (not st.journal_id.default_credit_account_id) \
                    or (not st.journal_id.default_debit_account_id):
                raise osv.except_osv(_('Configuration Error!'),
                        _('Please verify that an account is defined in the journal.'))
            for line in st.move_line_ids:
                if line.state != 'valid':
                    raise osv.except_osv(_('Error!'),
                            _('The account entries lines are not in valid state.'))
            move_ids = []
            for st_line in st.line_ids:
                if not st_line.amount:
                    continue
                if st_line.account_id and not st_line.journal_entry_id.id:
                    #make an account move as before
                    vals = {
                        'debit': st_line.amount < 0 and -st_line.amount or 0.0,
                        'credit': st_line.amount > 0 and st_line.amount or 0.0,
                        'account_id': st_line.account_id.id,
                        'name': st_line.name
                    }
                    self.pool.get('account.bank.statement.line').process_reconciliation(cr, uid, st_line.id, [vals], context=context)
                elif not st_line.journal_entry_id.id:
                    raise osv.except_osv(_('Error!'), _('All the account entries lines must be processed in order to close the statement.'))
                move_ids.append(st_line.journal_entry_id.id)
            if move_ids:
                self.pool.get('account.move').post(cr, uid, move_ids, context=context)
            self.message_post(cr, uid, [st.id], body=_('Statement %s confirmed, journal items were created.') % (st.name,), context=context)
        self.link_bank_to_partner(cr, uid, ids, context=context)
        return self.write(cr, uid, ids, {'state': 'confirm', 'closing_date': time.strftime("%Y-%m-%d %H:%M:%S")}, context=context)

    def button_cancel(self, cr, uid, ids, context=None):
        # Cancel all lines (undoing their journal entries), then reset
        # the statements to draft.
        bnk_st_line_ids = []
        for st in self.browse(cr, uid, ids, context=context):
            bnk_st_line_ids += [line.id for line in st.line_ids]
        self.pool.get('account.bank.statement.line').cancel(cr, uid, bnk_st_line_ids, context=context)
        return self.write(cr, uid, ids, {'state': 'draft'}, context=context)

    def _compute_balance_end_real(self, cr, uid, journal_id, context=None):
        """Last non-draft statement's ending balance for the journal,
        when the journal opts into carrying over closing balances."""
        res = False
        if journal_id:
            journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
            if journal.with_last_closing_balance:
                cr.execute('SELECT balance_end_real \
                      FROM account_bank_statement \
                      WHERE journal_id = %s AND NOT state = %s \
                      ORDER BY date DESC,id DESC LIMIT 1', (journal_id, 'draft'))
                res = cr.fetchone()
        return res and res[0] or 0.0

    def onchange_journal_id(self, cr, uid, statement_id, journal_id, context=None):
        # Propagate journal defaults (opening balance, company, currency,
        # cash control flag) onto the form.
        if not journal_id:
            return {}
        balance_start = self._compute_balance_end_real(cr, uid, journal_id, context=context)

        journal = self.pool.get('account.journal').browse(cr, uid, journal_id, context=context)
        currency = journal.currency or journal.company_id.currency_id
        res = {'balance_start': balance_start, 'company_id': journal.company_id.id, 'currency': currency.id}
        if journal.type == 'cash':
            res['cash_control'] = journal.cash_control
        return {'value': res}

    def unlink(self, cr, uid, ids, context=None):
        # Only draft statements may be deleted; lines are unlinked first
        # so their journal-entry check fires before the statement goes.
        statement_line_obj = self.pool['account.bank.statement.line']
        for item in self.browse(cr, uid, ids, context=context):
            if item.state != 'draft':
                raise osv.except_osv(
                    _('Invalid Action!'),
                    _('In order to delete a bank statement, you must first cancel it to delete related journal items.')
                )
            # Explicitly unlink bank statement lines
            # so it will check that the related journal entries have
            # been deleted first
            statement_line_obj.unlink(cr, uid, [line.id for line in item.line_ids], context=context)
        return super(account_bank_statement, self).unlink(cr, uid, ids, context=context)

    def button_journal_entries(self, cr, uid, ids, context=None):
        # Window action showing the journal items of these statements.
        ctx = (context or {}).copy()
        ctx['journal_id'] = self.browse(cr, uid, ids[0], context=context).journal_id.id
        return {
            'name': _('Journal Items'),
            'view_type':'form',
            'view_mode':'tree',
            'res_model':'account.move.line',
            'view_id':False,
            'type':'ir.actions.act_window',
            'domain':[('statement_id','in',ids)],
            'context':ctx,
        }

    def number_of_lines_reconciled(self, cr, uid, ids, context=None):
        # Count of lines that already got a journal entry.
        bsl_obj = self.pool.get('account.bank.statement.line')
        return bsl_obj.search_count(cr, uid, [('statement_id', 'in', ids), ('journal_entry_id', '!=', False)], context=context)

    def link_bank_to_partner(self, cr, uid, ids, context=None):
        # On confirmation: attach each line's bank account to the line's
        # partner when they currently disagree.
        for statement in self.browse(cr, uid, ids, context=context):
            for st_line in statement.line_ids:
                if st_line.bank_account_id and st_line.partner_id and st_line.bank_account_id.partner_id.id != st_line.partner_id.id:
                    # Update the partner informations of the bank account, possibly overriding existing ones
                    bank_obj = self.pool.get('res.partner.bank')
                    bank_vals = bank_obj.onchange_partner_id(cr, uid, [st_line.bank_account_id.id], st_line.partner_id.id, context=context)['value']
                    bank_vals.update({'partner_id': st_line.partner_id.id})
                    bank_obj.write(cr, uid, [st_line.bank_account_id.id], bank_vals, context=context)


class account_bank_statement_line(osv.osv):
    """A single transaction on a bank statement, plus the machinery that
    backs the web reconciliation widget (candidate matching, move
    creation, currency-difference entries)."""

    def create(self, cr, uid, vals, context=None):
        # amount_currency without amount is meaningless; reject it early.
        if vals.get('amount_currency', 0) and not vals.get('amount', 0):
            raise osv.except_osv(_('Error!'), _('If "Amount Currency" is specified, then "Amount" must be as well.'))
        return super(account_bank_statement_line, self).create(cr, uid, vals, context=context)

    def unlink(self, cr, uid, ids, context=None):
        # Refuse deletion while a journal entry still references the line.
        for item in self.browse(cr, uid, ids, context=context):
            if item.journal_entry_id:
                raise osv.except_osv(
                    _('Invalid Action!'),
                    _('In order to delete a bank statement line, you must first cancel it to delete related journal items.')
                )
        return super(account_bank_statement_line, self).unlink(cr, uid, ids, context=context)

    def cancel(self, cr, uid, ids, context=None):
        """Undo the lines' journal entries: break their reconciliations
        (re-partial-reconciling the remaining counterparts), then cancel
        and delete the moves."""
        account_move_obj = self.pool.get('account.move')
        move_ids = []
        for line in self.browse(cr, uid, ids, context=context):
            if line.journal_entry_id:
                move_ids.append(line.journal_entry_id.id)
                for aml in line.journal_entry_id.line_id:
                    if aml.reconcile_id:
                        move_lines = [l.id for l in aml.reconcile_id.line_id]
                        move_lines.remove(aml.id)
                        self.pool.get('account.move.reconcile').unlink(cr, uid, [aml.reconcile_id.id], context=context)
                        if len(move_lines) >= 2:
                            self.pool.get('account.move.line').reconcile_partial(cr, uid, move_lines, 'auto', context=context)
        if move_ids:
            account_move_obj.button_cancel(cr, uid, move_ids, context=context)
            account_move_obj.unlink(cr, uid, move_ids, context)

    def get_data_for_reconciliations(self, cr, uid, ids, excluded_ids=None, search_reconciliation_proposition=True, context=None):
        """ Returns the data required to display a reconciliation, for each statement line id in ids """
        ret = []
        if excluded_ids is None:
            excluded_ids = []

        for st_line in self.browse(cr, uid, ids, context=context):
            reconciliation_data = {}
            if search_reconciliation_proposition:
                reconciliation_proposition = self.get_reconciliation_proposition(cr, uid, st_line, excluded_ids=excluded_ids, context=context)
                # Proposed move lines are excluded for subsequent lines so
                # the same candidate is not offered twice.
                for mv_line in reconciliation_proposition:
                    excluded_ids.append(mv_line['id'])
                reconciliation_data['reconciliation_proposition'] = reconciliation_proposition
            else:
                reconciliation_data['reconciliation_proposition'] = []
            st_line = self.get_statement_line_for_reconciliation(cr, uid, st_line, context=context)
            reconciliation_data['st_line'] = st_line
            ret.append(reconciliation_data)

        return ret

    def get_statement_line_for_reconciliation(self, cr, uid, st_line, context=None):
        """ Returns the data required by the bank statement reconciliation widget to display a statement line """
        if context is None:
            context = {}
        statement_currency = st_line.journal_id.currency or st_line.journal_id.company_id.currency_id
        rml_parser = report_sxw.rml_parse(cr, uid, 'reconciliation_widget_asl', context=context)

        # Amounts are displayed unsigned; formatting goes through the
        # report parser so currency formatting matches reports.
        if st_line.amount_currency and st_line.currency_id:
            amount = st_line.amount_currency
            amount_currency = st_line.amount
            amount_currency_str = amount_currency > 0 and amount_currency or -amount_currency
            amount_currency_str = rml_parser.formatLang(amount_currency_str, currency_obj=statement_currency)
        else:
            amount = st_line.amount
            amount_currency_str = ""
        amount_str = amount > 0 and amount or -amount
        amount_str = rml_parser.formatLang(amount_str, currency_obj=st_line.currency_id or statement_currency)

        data = {
            'id': st_line.id,
            'ref': st_line.ref,
            'note': st_line.note or "",
            'name': st_line.name,
            'date': st_line.date,
            'amount': amount,
            'amount_str': amount_str, # Amount in the statement line currency
            'currency_id': st_line.currency_id.id or statement_currency.id,
            'partner_id': st_line.partner_id.id,
            'statement_id': st_line.statement_id.id,
            'account_code': st_line.journal_id.default_debit_account_id.code,
            'account_name': st_line.journal_id.default_debit_account_id.name,
            'partner_name': st_line.partner_id.name,
            'communication_partner_name': st_line.partner_name,
            'amount_currency_str': amount_currency_str, # Amount in the statement currency
            'has_no_partner': not st_line.partner_id.id,
        }
        if st_line.partner_id.id:
            if amount > 0:
                data['open_balance_account_id'] = st_line.partner_id.property_account_receivable.id
            else:
                data['open_balance_account_id'] = st_line.partner_id.property_account_payable.id

        return data

    def _domain_reconciliation_proposition(self, cr, uid, st_line, excluded_ids=None, context=None):
        # Structured-communication match: move line whose ref equals the
        # statement line name, on a reconcilable account.
        if excluded_ids is None:
            excluded_ids = []
        domain = [('ref', '=', st_line.name),
                  ('reconcile_id', '=', False),
                  ('state', '=', 'valid'),
                  ('account_id.reconcile', '=', True),
                  ('id', 'not in', excluded_ids)]
        return domain

    def get_reconciliation_proposition(self, cr, uid, st_line, excluded_ids=None, context=None):
        """ Returns move lines that constitute the best guess to reconcile a statement line. """
        mv_line_pool = self.pool.get('account.move.line')

        # Look for structured communication
        if st_line.name:
            domain = self._domain_reconciliation_proposition(cr, uid, st_line, excluded_ids=excluded_ids, context=context)
            # limit=2: a unique match is required, so detecting "more
            # than one" is enough.
            match_id = mv_line_pool.search(cr, uid, domain, offset=0, limit=2, context=context)
            if match_id and len(match_id) == 1:
                mv_line_br = mv_line_pool.browse(cr, uid, match_id, context=context)
                target_currency = st_line.currency_id or st_line.journal_id.currency or st_line.journal_id.company_id.currency_id
                mv_line = mv_line_pool.prepare_move_lines_for_reconciliation_widget(cr, uid, mv_line_br, target_currency=target_currency, target_date=st_line.date, context=context)[0]
                mv_line['has_no_partner'] = not bool(st_line.partner_id.id)
                # If the structured communication matches a move line that is associated to a partner, we can safely associate the statement line with the partner
                if (mv_line['partner_id']):
                    self.write(cr, uid, st_line.id, {'partner_id': mv_line['partner_id']}, context=context)
                    mv_line['has_no_partner'] = False
                return [mv_line]

        # How to compare statement line amount and move lines amount
        precision_digits = self.pool.get('decimal.precision').precision_get(cr, uid, 'Account')
        currency_id = st_line.currency_id.id or st_line.journal_id.currency.id
        # NB : amount can't be == 0 ; so float precision is not an issue for amount > 0 or amount < 0
        amount = st_line.amount_currency or st_line.amount
        domain = [('reconcile_partial_id', '=', False)]
        if currency_id:
            domain += [('currency_id', '=', currency_id)]
        sign = 1 # correct the fact that st_line.amount is signed and debit/credit is not
        amount_field = 'debit'
        if currency_id == False:
            if amount < 0:
                amount_field = 'credit'
                sign = -1
        else:
            amount_field = 'amount_currency'

        # Look for a matching amount
        domain_exact_amount = domain + [(amount_field, '=', float_round(sign * amount, precision_digits=precision_digits))]
        # Prefer a candidate that also matches on ref.
        domain_exact_amount_ref = domain_exact_amount + [('ref', '=', st_line.ref)]
        match_id = self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, offset=0, limit=2, additional_domain=domain_exact_amount_ref)
        if not match_id:
            match_id = self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, offset=0, limit=2, additional_domain=domain_exact_amount)
        if match_id and len(match_id) == 1:
            return match_id

        if not st_line.partner_id.id:
            return []

        # Look for a set of move line whose amount is <= to the line's amount
        if amount > 0: # Make sure we can't mix receivable and payable
            domain += [('account_id.type', '=', 'receivable')]
        else:
            domain += [('account_id.type', '=', 'payable')]
        if amount_field == 'amount_currency' and amount < 0:
            domain += [(amount_field, '<', 0), (amount_field, '>', (sign * amount))]
        else:
            domain += [(amount_field, '>', 0), (amount_field, '<', (sign * amount))]
        mv_lines = self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, limit=5, additional_domain=domain, context=context)
        ret = []
        total = 0
        # Accumulate candidates greedily until the total would exceed the
        # statement line amount.
        for line in mv_lines:
            total += abs(line['debit'] - line['credit'])
            if float_compare(total, abs(amount), precision_digits=precision_digits) != 1:
                ret.append(line)
            else:
                break
        return ret

    def get_move_lines_for_reconciliation_by_statement_line_id(self, cr, uid, st_line_id, excluded_ids=None, str=False, offset=0, limit=None, count=False, additional_domain=None, context=None):
        """ Bridge between the web client reconciliation widget and get_move_lines_for_reconciliation (which expects a browse record) """
        if excluded_ids is None:
            excluded_ids = []
        if additional_domain is None:
            additional_domain = []
        st_line = self.browse(cr, uid, st_line_id, context=context)
        return self.get_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids, str, offset, limit, count, additional_domain, context=context)

    def _domain_move_lines_for_reconciliation(self, cr, uid, st_line, excluded_ids=None, str=False, additional_domain=None, context=None):
        # Build the search domain for candidate move lines; `str` is the
        # widget's free-text filter (note: shadows the builtin, kept
        # verbatim for API compatibility).
        if excluded_ids is None:
            excluded_ids = []
        if additional_domain is None:
            additional_domain = []
        # Make domain
        domain = additional_domain + [
            ('reconcile_id', '=', False),
            ('state', '=', 'valid'),
            ('account_id.reconcile', '=', True)
        ]
        if st_line.partner_id.id:
            domain += [('partner_id', '=', st_line.partner_id.id)]
        if excluded_ids:
            domain.append(('id', 'not in', excluded_ids))
        if str:
            domain += [
                '|', ('move_id.name', 'ilike', str),
                '|', ('move_id.ref', 'ilike', str),
                ('date_maturity', 'like', str),
            ]
            # Extra '|' operators are spliced in before the last leaf to
            # extend the OR chain.
            if not st_line.partner_id.id:
                domain.insert(-1, '|', )
                domain.append(('partner_id.name', 'ilike', str))
            if str != '/':
                domain.insert(-1, '|', )
                domain.append(('name', 'ilike', str))
        return domain

    def get_move_lines_for_reconciliation(self, cr, uid, st_line, excluded_ids=None, str=False, offset=0, limit=None, count=False, additional_domain=None, context=None):
        """ Find the move lines that could be used to reconcile a statement line. If count is true, only returns the count.

            :param st_line: the browse record of the statement line
            :param integers list excluded_ids: ids of move lines that should not be fetched
            :param boolean count: just return the number of records
            :param tuples list additional_domain: additional domain restrictions
        """
        mv_line_pool = self.pool.get('account.move.line')
        domain = self._domain_move_lines_for_reconciliation(cr, uid, st_line, excluded_ids=excluded_ids, str=str, additional_domain=additional_domain, context=context)

        # Get move lines ; in case of a partial reconciliation, only keep one line (the first whose amount is greater than
        # the residual amount because it is presumably the invoice, which is the relevant item in this situation)
        filtered_lines = []
        reconcile_partial_ids = []
        actual_offset = offset
        while True:
            line_ids = mv_line_pool.search(cr, uid, domain, offset=actual_offset, limit=limit, order="date_maturity asc, id asc", context=context)
            lines = mv_line_pool.browse(cr, uid, line_ids, context=context)
            make_one_more_loop = False
            for line in lines:
                if line.reconcile_partial_id and \
                        (line.reconcile_partial_id.id in reconcile_partial_ids or \
                        abs(line.debit - line.credit) < abs(line.amount_residual)):
                    #if we filtered a line because it is partially reconciled with an already selected line, we must do one more loop
                    #in order to get the right number of items in the pager
                    make_one_more_loop = True
                    continue
                filtered_lines.append(line)
                if line.reconcile_partial_id:
                    reconcile_partial_ids.append(line.reconcile_partial_id.id)

            if not limit or not make_one_more_loop or len(filtered_lines) >= limit:
                break
            actual_offset = actual_offset + limit
        lines = limit and filtered_lines[:limit] or filtered_lines

        # Either return number of lines
        if count:
            return len(lines)
        # Or return list of dicts representing the formatted move lines
        else:
            target_currency = st_line.currency_id or st_line.journal_id.currency or st_line.journal_id.company_id.currency_id
            mv_lines = mv_line_pool.prepare_move_lines_for_reconciliation_widget(cr, uid, lines, target_currency=target_currency, target_date=st_line.date, context=context)
            has_no_partner = not bool(st_line.partner_id.id)
            for line in mv_lines:
                line['has_no_partner'] = has_no_partner
            return mv_lines

    def get_currency_rate_line(self, cr, uid, st_line, currency_diff, move_id, context=None):
        """Move-line values booking an exchange-rate gain/loss of
        ``currency_diff`` on the company's configured exchange accounts.

        Raises when the relevant gain/loss account is not configured."""
        if currency_diff < 0:
            account_id = st_line.company_id.expense_currency_exchange_account_id.id
            if not account_id:
                raise osv.except_osv(_('Insufficient Configuration!'), _("You should configure the 'Loss Exchange Rate Account' in the accounting settings, to manage automatically the booking of accounting entries related to differences between exchange rates."))
        else:
            account_id = st_line.company_id.income_currency_exchange_account_id.id
            if not account_id:
                raise osv.except_osv(_('Insufficient Configuration!'), _("You should configure the 'Gain Exchange Rate Account' in the accounting settings, to manage automatically the booking of accounting entries related to differences between exchange rates."))
        return {
            'move_id': move_id,
            'name': _('change') + ': ' + (st_line.name or '/'),
            'period_id': st_line.statement_id.period_id.id,
            'journal_id': st_line.journal_id.id,
            'partner_id': st_line.partner_id.id,
            'company_id': st_line.company_id.id,
            'statement_id': st_line.statement_id.id,
            'debit': currency_diff < 0 and -currency_diff or 0,
            'credit': currency_diff > 0 and currency_diff or 0,
            'amount_currency': 0.0,
            'date': st_line.date,
            'account_id': account_id
            }

    def _get_exchange_lines(self, cr, uid, st_line, mv_line, currency_diff, currency_id, move_id, context=None):
        '''
        Prepare the two lines in company currency due to currency rate difference.

        :param line: browse record of the voucher.line for which we want to create currency rate difference accounting
            entries
        :param move_id: Account move wher the move lines will be.
        :param currency_diff: Amount to be posted.
        :param company_currency: id of currency of the company to which the voucher belong
        :param current_currency: id of currency of the voucher
        :return: the account move line and its counterpart to create, depicted as mapping between fieldname and value
        :rtype: tuple of dict
        '''
        if currency_diff > 0:
            exchange_account_id = st_line.company_id.expense_currency_exchange_account_id.id
        else:
            exchange_account_id = st_line.company_id.income_currency_exchange_account_id.id
        # Even if the amount_currency is never filled, we need to pass the foreign currency because otherwise
        # the receivable/payable account may have a secondary currency, which render this field mandatory
        if mv_line.account_id.currency_id:
            account_currency_id = mv_line.account_id.currency_id.id
        else:
            account_currency_id = st_line.company_id.currency_id.id != currency_id and currency_id or False
        move_line = {
            'journal_id': st_line.journal_id.id,
            'period_id': st_line.statement_id.period_id.id,
            'name': _('change') + ': ' + (st_line.name or '/'),
            'account_id': mv_line.account_id.id,
            'move_id': move_id,
            'partner_id': st_line.partner_id.id,
            'currency_id': account_currency_id,
            'amount_currency': 0.0,
            'quantity': 1,
            'credit': currency_diff > 0 and currency_diff or 0.0,
            'debit': currency_diff < 0 and -currency_diff or 0.0,
            'date': st_line.date,
            'counterpart_move_line_id': mv_line.id,
        }
        move_line_counterpart = {
            'journal_id': st_line.journal_id.id,
            'period_id': st_line.statement_id.period_id.id,
            'name': _('change') + ': ' + (st_line.name or '/'),
            'account_id': exchange_account_id,
            'move_id': move_id,
            'amount_currency': 0.0,
            'partner_id': st_line.partner_id.id,
            'currency_id': account_currency_id,
            'quantity': 1,
            'debit': currency_diff > 0 and currency_diff or 0.0,
            'credit': currency_diff < 0 and -currency_diff or 0.0,
            'date': st_line.date,
        }
        return (move_line, move_line_counterpart)

    def process_reconciliations(self, cr, uid, data, context=None):
        # Batch entry point for the widget: data is a list of
        # (statement_line_id, mv_line_dicts) pairs.
        for datum in data:
            self.process_reconciliation(cr, uid, datum[0], datum[1], context=context)

    def process_reconciliation(self, cr, uid, id, mv_line_dicts, context=None):
        """ Creates a move line for each item of mv_line_dicts and for the statement line. Reconcile a new move line with its counterpart_move_line_id if specified. Finally, mark the statement line as reconciled by putting the newly created move id in the column journal_entry_id.

            :param int id: id of the bank statement line
            :param list of dicts mv_line_dicts: move lines to create.
If counterpart_move_line_id is specified, reconcile with it """ if context is None: context = {} st_line = self.browse(cr, uid, id, context=context) company_currency = st_line.journal_id.company_id.currency_id statement_currency = st_line.journal_id.currency or company_currency bs_obj = self.pool.get('account.bank.statement') am_obj = self.pool.get('account.move') aml_obj = self.pool.get('account.move.line') currency_obj = self.pool.get('res.currency') # Checks if st_line.journal_entry_id.id: raise osv.except_osv(_('Error!'), _('The bank statement line was already reconciled.')) for mv_line_dict in mv_line_dicts: for field in ['debit', 'credit', 'amount_currency']: if field not in mv_line_dict: mv_line_dict[field] = 0.0 if mv_line_dict.get('counterpart_move_line_id'): mv_line = aml_obj.browse(cr, uid, mv_line_dict.get('counterpart_move_line_id'), context=context) if mv_line.reconcile_id: raise osv.except_osv(_('Error!'), _('A selected move line was already reconciled.')) # Create the move move_name = (st_line.statement_id.name or st_line.name) + "/" + str(st_line.sequence) move_vals = bs_obj._prepare_move(cr, uid, st_line, move_name, context=context) move_id = am_obj.create(cr, uid, move_vals, context=context) # Create the move line for the statement line if st_line.statement_id.currency.id != company_currency.id: if st_line.currency_id == company_currency: amount = st_line.amount_currency else: ctx = context.copy() ctx['date'] = st_line.date amount = currency_obj.compute(cr, uid, st_line.statement_id.currency.id, company_currency.id, st_line.amount, context=ctx) else: amount = st_line.amount bank_st_move_vals = bs_obj._prepare_bank_move_line(cr, uid, st_line, move_id, amount, company_currency.id, context=context) aml_obj.create(cr, uid, bank_st_move_vals, context=context) # Complete the dicts st_line_currency = st_line.currency_id or statement_currency st_line_currency_rate = st_line.currency_id and (st_line.amount_currency / st_line.amount) or False to_create = 
[] for mv_line_dict in mv_line_dicts: if mv_line_dict.get('is_tax_line'): continue mv_line_dict['ref'] = move_name mv_line_dict['move_id'] = move_id mv_line_dict['period_id'] = st_line.statement_id.period_id.id mv_line_dict['journal_id'] = st_line.journal_id.id mv_line_dict['company_id'] = st_line.company_id.id mv_line_dict['statement_id'] = st_line.statement_id.id if mv_line_dict.get('counterpart_move_line_id'): mv_line = aml_obj.browse(cr, uid, mv_line_dict['counterpart_move_line_id'], context=context) mv_line_dict['partner_id'] = mv_line.partner_id.id or st_line.partner_id.id mv_line_dict['account_id'] = mv_line.account_id.id if st_line_currency.id != company_currency.id: ctx = context.copy() ctx['date'] = st_line.date mv_line_dict['amount_currency'] = mv_line_dict['debit'] - mv_line_dict['credit'] mv_line_dict['currency_id'] = st_line_currency.id if st_line.currency_id and statement_currency.id == company_currency.id and st_line_currency_rate: debit_at_current_rate = self.pool.get('res.currency').round(cr, uid, company_currency, mv_line_dict['debit'] / st_line_currency_rate) credit_at_current_rate = self.pool.get('res.currency').round(cr, uid, company_currency, mv_line_dict['credit'] / st_line_currency_rate) elif st_line.currency_id and st_line_currency_rate: debit_at_current_rate = currency_obj.compute(cr, uid, statement_currency.id, company_currency.id, mv_line_dict['debit'] / st_line_currency_rate, context=ctx) credit_at_current_rate = currency_obj.compute(cr, uid, statement_currency.id, company_currency.id, mv_line_dict['credit'] / st_line_currency_rate, context=ctx) else: debit_at_current_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['debit'], context=ctx) credit_at_current_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['credit'], context=ctx) if mv_line_dict.get('counterpart_move_line_id'): #post an account line that use the same currency rate than the counterpart 
(to balance the account) and post the difference in another line ctx['date'] = mv_line.date if mv_line.currency_id.id == mv_line_dict['currency_id'] \ and float_is_zero(abs(mv_line.amount_currency) - abs(mv_line_dict['amount_currency']), precision_rounding=mv_line.currency_id.rounding): debit_at_old_rate = mv_line.credit credit_at_old_rate = mv_line.debit else: debit_at_old_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['debit'], context=ctx) credit_at_old_rate = currency_obj.compute(cr, uid, st_line_currency.id, company_currency.id, mv_line_dict['credit'], context=ctx) mv_line_dict['credit'] = credit_at_old_rate mv_line_dict['debit'] = debit_at_old_rate if debit_at_old_rate - debit_at_current_rate: currency_diff = debit_at_current_rate - debit_at_old_rate to_create.append(self.get_currency_rate_line(cr, uid, st_line, -currency_diff, move_id, context=context)) if credit_at_old_rate - credit_at_current_rate: currency_diff = credit_at_current_rate - credit_at_old_rate to_create.append(self.get_currency_rate_line(cr, uid, st_line, currency_diff, move_id, context=context)) if mv_line.currency_id and mv_line_dict['currency_id'] == mv_line.currency_id.id: amount_unreconciled = mv_line.amount_residual_currency else: amount_unreconciled = currency_obj.compute(cr, uid, company_currency.id, mv_line_dict['currency_id'] , mv_line.amount_residual, context=ctx) if float_is_zero(mv_line_dict['amount_currency'] + amount_unreconciled, precision_rounding=mv_line.currency_id.rounding): amount = mv_line_dict['debit'] or mv_line_dict['credit'] sign = -1 if mv_line_dict['debit'] else 1 currency_rate_difference = sign * (mv_line.amount_residual - amount) if not company_currency.is_zero(currency_rate_difference): exchange_lines = self._get_exchange_lines(cr, uid, st_line, mv_line, currency_rate_difference, mv_line_dict['currency_id'], move_id, context=context) for exchange_line in exchange_lines: to_create.append(exchange_line) else: 
mv_line_dict['debit'] = debit_at_current_rate mv_line_dict['credit'] = credit_at_current_rate elif statement_currency.id != company_currency.id: #statement is in foreign currency but the transaction is in company currency prorata_factor = (mv_line_dict['debit'] - mv_line_dict['credit']) / st_line.amount_currency mv_line_dict['amount_currency'] = prorata_factor * st_line.amount to_create.append(mv_line_dict) # If the reconciliation is performed in another currency than the company currency, the amounts are converted to get the right debit/credit. # If there is more than 1 debit and 1 credit, this can induce a rounding error, which we put in the foreign exchane gain/loss account. if st_line_currency.id != company_currency.id: diff_amount = bank_st_move_vals['debit'] - bank_st_move_vals['credit'] \ + sum(aml['debit'] for aml in to_create) - sum(aml['credit'] for aml in to_create) if not company_currency.is_zero(diff_amount): diff_aml = self.get_currency_rate_line(cr, uid, st_line, diff_amount, move_id, context=context) diff_aml['name'] = _('Rounding error from currency conversion') to_create.append(diff_aml) # Create move lines move_line_pairs_to_reconcile = [] for mv_line_dict in to_create: counterpart_move_line_id = None # NB : this attribute is irrelevant for aml_obj.create() and needs to be removed from the dict if mv_line_dict.get('counterpart_move_line_id'): counterpart_move_line_id = mv_line_dict['counterpart_move_line_id'] del mv_line_dict['counterpart_move_line_id'] new_aml_id = aml_obj.create(cr, uid, mv_line_dict, context=context) if counterpart_move_line_id != None: move_line_pairs_to_reconcile.append([new_aml_id, counterpart_move_line_id]) # Reconcile for pair in move_line_pairs_to_reconcile: aml_obj.reconcile_partial(cr, uid, pair, context=context) # Mark the statement line as reconciled self.write(cr, uid, id, {'journal_entry_id': move_id}, context=context) # FIXME : if it wasn't for the multicompany security settings in account_security.xml, the method 
would just # return [('journal_entry_id', '=', False)] # Unfortunately, that spawns a "no access rights" error ; it shouldn't. def _needaction_domain_get(self, cr, uid, context=None): user = self.pool.get("res.users").browse(cr, uid, uid) return ['|', ('company_id', '=', False), ('company_id', 'child_of', [user.company_id.id]), ('journal_entry_id', '=', False), ('account_id', '=', False)] _order = "statement_id desc, sequence" _name = "account.bank.statement.line" _description = "Bank Statement Line" _inherit = ['ir.needaction_mixin'] _columns = { 'name': fields.char('Communication', required=True), 'date': fields.date('Date', required=True), 'amount': fields.float('Amount', digits_compute=dp.get_precision('Account')), 'partner_id': fields.many2one('res.partner', 'Partner'), 'bank_account_id': fields.many2one('res.partner.bank','Bank Account'), 'account_id': fields.many2one('account.account', 'Account', help="This technical field can be used at the statement line creation/import time in order to avoid the reconciliation process on it later on. 
The statement line will simply create a counterpart on this account"), 'statement_id': fields.many2one('account.bank.statement', 'Statement', select=True, required=True, ondelete='restrict'), 'journal_id': fields.related('statement_id', 'journal_id', type='many2one', relation='account.journal', string='Journal', store=True, readonly=True), 'partner_name': fields.char('Partner Name', help="This field is used to record the third party name when importing bank statement in electronic format, when the partner doesn't exist yet in the database (or cannot be found)."), 'ref': fields.char('Reference'), 'note': fields.text('Notes'), 'sequence': fields.integer('Sequence', select=True, help="Gives the sequence order when displaying a list of bank statement lines."), 'company_id': fields.related('statement_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, readonly=True), 'journal_entry_id': fields.many2one('account.move', 'Journal Entry', copy=False), 'amount_currency': fields.float('Amount Currency', help="The amount expressed in an optional other currency if it is a multi-currency entry.", digits_compute=dp.get_precision('Account')), 'currency_id': fields.many2one('res.currency', 'Currency', help="The optional other currency if it is a multi-currency entry."), } _defaults = { 'name': lambda self,cr,uid,context={}: self.pool.get('ir.sequence').get(cr, uid, 'account.bank.statement.line'), 'date': lambda self,cr,uid,context={}: context.get('date', fields.date.context_today(self,cr,uid,context=context)), } class account_statement_operation_template(osv.osv): _name = "account.statement.operation.template" _description = "Preset for the lines that can be created in a bank statement reconciliation" _columns = { 'name': fields.char('Button Label', required=True), 'account_id': fields.many2one('account.account', 'Account', ondelete='cascade', domain=[('type', 'not in', ('view', 'closed', 'consolidation'))]), 'label': fields.char('Label'), 
'amount_type': fields.selection([('fixed', 'Fixed'),('percentage_of_total','Percentage of total amount'),('percentage_of_balance', 'Percentage of open balance')], 'Amount type', required=True), 'amount': fields.float('Amount', digits_compute=dp.get_precision('Account'), help="The amount will count as a debit if it is negative, as a credit if it is positive (except if amount type is 'Percentage of open balance').", required=True), 'tax_id': fields.many2one('account.tax', 'Tax', ondelete='restrict', domain=[('type_tax_use', 'in', ['purchase', 'all']), ('parent_id', '=', False)]), 'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', ondelete='set null', domain=[('type','!=','view'), ('state','not in',('close','cancelled'))]), } _defaults = { 'amount_type': 'percentage_of_balance', 'amount': 100.0 } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Martinfx/yodaqa
data/ml/fbpath/evaluate_queries_results.py
1
9997
#!/usr/bin/python -u
#
# Evaluate fbpath-based query performance (on gold standard and as predicted)
#
# Usage: evaluate_queries_results.py traindata.json valdata.json
#
# A model is trained on traindata and then its performance is measured
# on valdata. (FIXME: Duplicate code with fbpath_train_logistic, instead
# of reusing the already-trained model.)
#
# The json data can be generated using:
#
#   mkdir -p data/ml/fbpath/wq-fbpath
#   cd ../dataset-factoid-webquestions
#   for i in trainmodel val devtest; do
#       scripts/fulldata.py $i ../yodaqa/data/ml/fbpath/wq-fbpath/ main/ d-dump/ d-freebase-mids/ d-freebase-brp/
#   done
#
# Example: data/ml/fbpath/evaluate_queries_results.py data/ml/fbpath/wq-fbpath/trainmodel.json data/ml/fbpath/wq-fbpath/val.json
#
# For every question, the script prints its qId, whether all answers were found
# using gold standard fbpaths, whether any answer was found using gold standard
# fbpaths, whether all answers were found using predicted fbpaths and whether
# any answer was found using predicted fbpaths.
#
# At the end of the script, it prints number of questions and percentual
# information about all/any answers obtained from freebase using gold
# standard/predicted fbpaths plus it prints number of questions which could not
# be answered because SPARQLWrapper does not support long queries.

from SPARQLWrapper import SPARQLWrapper, JSON
import json, sys
from fbpathtrain import VectorizedData
import random, time
from sklearn.linear_model import LogisticRegression
from sklearn.multiclass import OneVsRestClassifier
import numpy as np
from urllib2 import HTTPError

# SPARQL endpoint holding the Freebase RDF dump.
URL = 'http://yodaqa.felk.cvut.cz/fuseki-dbp/dbpedia/query'


def check_q(cfier, v, i):
    """Classify question *i* of VectorizedData *v* with classifier *cfier*.

    Returns a 4-tuple: (sorted feature names, predicted fbpath labels,
    top-15 "label: probability" strings, gold-standard fbpath labels).
    """
    probs = cfier.predict_proba(v.X.toarray()[i])[0]
    top_probs = sorted(enumerate(probs), key=lambda k: k[1], reverse=True)
    top_lprobs = ['%s: %.3f' % (v.Ydict.classes_[k[0]], k[1]) for k in top_probs[:15]]
    # NOTE(review): reversed(s) yields an iterator, so this sort key does not
    # actually sort by the reversed string — kept as-is to preserve the
    # original ordering behavior; confirm intent upstream.
    return (sorted(v.Xdict.inverse_transform(v.X[i])[0].keys(), key=lambda s: reversed(s)),
            v.Ydict.inverse_transform(cfier.predict(v.X.toarray()[i]))[0],
            top_lprobs,
            v.Ydict.inverse_transform(np.array([v.Y[i]]))[0])


def generate_query(paths, mid, proba, concepts):
    """Build one SPARQL graph pattern per fbpath in *paths*, rooted at topic
    *mid*.  Length-1 and length-2 paths are direct property walks; length-3
    paths additionally constrain an intermediate (CVT) node through a witness
    relation matched against the question *concepts*.  Returns the list of
    pattern strings (to be UNIONed by the caller)."""
    pathQueries = []
    for path in paths:
        # "/music/artist.genre" notation -> "music.artist.genre" ns: suffix
        path = [p[1:].replace("/", ".") for p in path]
        if len(path) == 1:
            pathQueryStr = "{" \
                " ns:" + mid + " ns:" + path[0] + " ?val .\n" \
                " BIND(\"ns:" + path[0] + "\" AS ?prop)\n" \
                " BIND(" + proba + " AS ?score)\n" \
                " BIND(0 AS ?branched)\n" \
                " BIND(ns:" + mid + " AS ?res)\n" \
                " OPTIONAL {\n" \
                "  ns:" + path[0] + " rdfs:label ?proplabel .\n" \
                "  FILTER(LANGMATCHES(LANG(?proplabel), \"en\"))\n" \
                " }\n" \
                "}"
            pathQueries.append(pathQueryStr)
        elif len(path) == 2:
            pathQueryStr = "{" \
                " ns:" + mid + " ns:" + path[0] + "/ns:" + path[1] + " ?val .\n" \
                " BIND(\"ns:" + path[0] + "/ns:" + path[1] + "\" AS ?prop)\n" \
                " BIND(" + proba + " AS ?score)\n" \
                " BIND(0 AS ?branched)\n" \
                " BIND(ns:" + mid + " AS ?res)\n" \
                " OPTIONAL {\n" \
                "  ns:" + path[0] + " rdfs:label ?pl0 .\n" \
                "  ns:" + path[1] + " rdfs:label ?pl1 .\n" \
                "  FILTER(LANGMATCHES(LANG(?pl0), \"en\"))\n" \
                "  FILTER(LANGMATCHES(LANG(?pl1), \"en\"))\n" \
                "  BIND(CONCAT(?pl0, \": \", ?pl1) AS ?proplabel)\n" \
                " }\n" \
                "}"
            pathQueries.append(pathQueryStr)
        elif len(path) == 3:
            for concept in concepts:
                witnessRel = path[2]
                # Strip characters that would break the quoted SPARQL literal.
                quotedTitle = concept['fullLabel'].replace("\"", "").replace("\\\\", "").replace("\n", " ")
                pathQueryStr = "{" \
                    " ns:" + mid + " ns:" + path[0] + " ?med .\n" \
                    " ?med ns:" + path[1] + " ?val .\n" \
                    " {\n" \
                    "  ?med ns:" + witnessRel + " ?concept .\n" \
                    "  ?concept <http://rdf.freebase.com/key/wikipedia.en_id> \"" + concept['pageID'] + "\" .\n" \
                    " } UNION {\n" \
                    "  {\n" \
                    "   ?med ns:" + witnessRel + " ?wlabel .\n" \
                    "   FILTER(!ISURI(?wlabel))\n" \
                    "  } UNION {\n" \
                    "   ?med ns:" + witnessRel + " ?concept .\n" \
                    "   ?concept rdfs:label ?wlabel .\n" \
                    "  }\n" \
                    "  FILTER(LANGMATCHES(LANG(?wlabel), \"en\"))\n" \
                    "  FILTER(CONTAINS(LCASE(?wlabel), LCASE(\"" + quotedTitle + "\")))\n" \
                    " }\n" \
                    " BIND(\"ns:" + path[0] + "/ns:" + path[1] + "\" AS ?prop)\n" \
                    " BIND(" + proba + " AS ?score)\n" \
                    " BIND(1 AS ?branched)\n" \
                    " BIND(ns:" + mid + " AS ?res)\n" \
                    " OPTIONAL {\n" \
                    "  ns:" + path[0] + " rdfs:label ?pl0 .\n" \
                    "  ns:" + path[1] + " rdfs:label ?pl1 .\n" \
                    "  FILTER(LANGMATCHES(LANG(?pl0), \"en\"))\n" \
                    "  FILTER(LANGMATCHES(LANG(?pl1), \"en\"))\n" \
                    "  BIND(CONCAT(?pl0, \": \", ?pl1) AS ?proplabel)\n" \
                    " }\n" \
                    "}"
                pathQueries.append(pathQueryStr)
    return pathQueries


def generate_results(paths, mids, concepts):
    """Execute the UNIONed fbpath queries for every topic mid in *mids* and
    return the deduplicated answer value strings fetched from the endpoint."""
    prefix = """PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX dc: <http://purl.org/dc/elements/1.1/>
PREFIX skos: <http://www.w3.org/2004/02/skos/core#>
PREFIX ns: <http://rdf.freebase.com/ns/>
SELECT ?property ?value ?prop ?val ?res ?score ?branched ?witnessAF WHERE {"""
    postfix = """BIND( IF(BOUND(?proplabel), ?proplabel, ?prop) AS ?property )
OPTIONAL {
  ?val rdfs:label ?vallabel .
  FILTER( LANGMATCHES(LANG(?vallabel), "en") )
}
BIND( IF(BOUND(?vallabel), ?vallabel, ?val) AS ?value )
FILTER( !ISURI(?value) )
FILTER( LANG(?value) = "" || LANGMATCHES(LANG(?value), "en") )
}LIMIT 400"""
    results = []
    for m in mids:
        tmp = generate_query(paths, m, "1", concepts)
        if len(tmp) == 0:
            # No usable paths -> no point querying any mid.
            return []
        sparql = SPARQLWrapper(URL)
        sparql.setReturnFormat(JSON)
        query = prefix + " UNION ".join(tmp) + postfix
        # print(query)
        sparql.setQuery(query)
        res = sparql.query().convert()
        # print("")
        # print(res)
        results += list(set([r['value']['value'] for r in res['results']['bindings']]))
    return results


def mid_by_pageid(pageID):
    """Resolve an enwiki pageID to a Freebase mid via the endpoint.

    Returns the first matching mid (the "http://rdf.freebase.com/ns/" URI
    prefix of length 27 is stripped), or "" when nothing matches.
    """
    query = '''PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX ns: <http://rdf.freebase.com/ns/>
SELECT * WHERE {
  ?topic <http://rdf.freebase.com/key/wikipedia.en_id> "''' + pageID + '''" .
  ?topic rdfs:label ?label .
  FILTER( LANGMATCHES(LANG(?label), "en") )
}'''
    sparql = SPARQLWrapper(URL)
    sparql.setReturnFormat(JSON)
    sparql.setQuery(query)
    res = sparql.query().convert()
    ret = []
    for r in res['results']['bindings']:
        ret.append(r['topic']['value'][27:])
    if ret == []:
        return ""
    return ret[0]


if __name__ == '__main__':
    with open(sys.argv[1], 'r') as f:
        traindata = VectorizedData(json.load(f))
    print('// traindata: %d questions, %d features, %d fbpaths' % (
        np.size(traindata.X, axis=0), np.size(traindata.X, axis=1), np.size(traindata.Y, axis=1)))
    sys.stdout.flush()

    t_start = time.clock()
    cfier = OneVsRestClassifier(LogisticRegression(penalty='l1'), n_jobs=4)
    cfier.fit(traindata.X, traindata.Y)

    with open(sys.argv[2]) as f:
        full = json.load(f)
    full_data = VectorizedData(full, traindata.Xdict, traindata.Ydict)

    error = 0
    anyCnt = 0
    allCnt = 0
    anyPCnt = 0
    allPCnt = 0
    for i, line in enumerate(full):
        concepts = line['Concept']
        mids = [c["mid"] for c in line['freebaseMids']]
        relpaths = [c[0] for c in line['relPaths']]
        mids_from_pageids = [mid_by_pageid(c['pageID']) for c in line['Concept']]
        # BUGFIX: the original called filter(...) and discarded its return
        # value (filter() builds a new list), so unresolved pageIDs kept an
        # empty-string mid in the list.  Actually drop them here:
        mids_from_pageids = [m for m in mids_from_pageids if m != ""]
        predicted_paths = [lab.split(":")[0].split("|") for lab in check_q(cfier, full_data, i)[2]]
        # print(predicted_paths)
        try:
            results = generate_results(relpaths, mids, concepts)
            predicted_results = generate_results(predicted_paths, mids_from_pageids, concepts)
        except HTTPError:
            # Typically a too-long query rejected by the endpoint; count it
            # and move on to the next question.
            error += 1
            continue
        # print(results)
        allAnswers = True
        allAnswersPredicted = True
        anyAnswers = False
        anyAnswersPredicted = False
        for a in line["answers"]:
            if a in results:
                anyAnswers = True
            else:
                allAnswers = False
            if a in predicted_results:
                anyAnswersPredicted = True
            else:
                allAnswersPredicted = False
        if anyAnswers:
            anyCnt += 1
        if anyAnswersPredicted:
            anyPCnt += 1
        if allAnswersPredicted:
            allPCnt += 1
        if allAnswers:
            allCnt += 1
        print("qID %s, all: %s, all form predicted: %s, any: %s, any form predicted: %s"
              % (line['qId'], allAnswers, allAnswersPredicted, anyAnswers, anyAnswersPredicted))

    print("SUMMARY")
    print("Number of questions: %d, all: %f, all predicted: %f, any: %f, any predicted: %f, http error: %d"
          % (len(full), (1.0 * allCnt) / len(full), (1.0 * allPCnt) / len(full),
             (1.0 * anyCnt) / len(full), (1.0 * anyPCnt) / len(full), error))
apache-2.0
karanisverma/flasktest
lib/flask/wrappers.py
773
6709
# -*- coding: utf-8 -*-
"""
    flask.wrappers
    ~~~~~~~~~~~~~~

    Implements the WSGI wrappers (request and response).

    :copyright: (c) 2011 by Armin Ronacher.
    :license: BSD, see LICENSE for more details.
"""

from werkzeug.wrappers import Request as RequestBase, Response as ResponseBase
from werkzeug.exceptions import BadRequest

from .debughelpers import attach_enctype_error_multidict
from . import json
from .globals import _request_ctx_stack


# Sentinel distinguishing "no cached JSON yet" from a legitimately cached
# value of None on the request object.
_missing = object()


def _get_data(req, cache):
    # Werkzeug >= 0.9 exposes get_data(); older versions only have the
    # .data attribute, so fall back to that when the method is absent.
    getter = getattr(req, 'get_data', None)
    if getter is not None:
        return getter(cache=cache)
    return req.data


class Request(RequestBase):
    """The request object used by default in Flask.  Remembers the
    matched endpoint and view arguments.

    It is what ends up as :class:`~flask.request`.  If you want to replace
    the request object used you can subclass this and set
    :attr:`~flask.Flask.request_class` to your subclass.

    The request object is a :class:`~werkzeug.wrappers.Request` subclass and
    provides all of the attributes Werkzeug defines plus a few Flask
    specific ones.
    """

    #: the internal URL rule that matched the request.  This can be
    #: useful to inspect which methods are allowed for the URL from
    #: a before/after handler (``request.url_rule.methods``) etc.
    #:
    #: .. versionadded:: 0.6
    url_rule = None

    #: a dict of view arguments that matched the request.  If an exception
    #: happened when matching, this will be `None`.
    view_args = None

    #: if matching the URL failed, this is the exception that will be
    #: raised / was raised as part of the request handling.  This is
    #: usually a :exc:`~werkzeug.exceptions.NotFound` exception or
    #: something similar.
    routing_exception = None

    # switched by the request context until 1.0 to opt in deprecated
    # module functionality
    _is_old_module = False

    @property
    def max_content_length(self):
        """Read-only view of the `MAX_CONTENT_LENGTH` config key.

        Returns `None` when there is no active request context.
        """
        ctx = _request_ctx_stack.top
        if ctx is not None:
            return ctx.app.config['MAX_CONTENT_LENGTH']

    @property
    def endpoint(self):
        """The endpoint that matched the request.  This in combination with
        :attr:`view_args` can be used to reconstruct the same or a
        modified URL.  If an exception happened when matching, this will
        be `None`.
        """
        if self.url_rule is not None:
            return self.url_rule.endpoint

    @property
    def module(self):
        """The name of the current module if the request was dispatched
        to an actual module.  This is deprecated functionality, use blueprints
        instead.
        """
        from warnings import warn
        warn(DeprecationWarning('modules were deprecated in favor of '
                                'blueprints.  Use request.blueprint '
                                'instead.'), stacklevel=2)
        if self._is_old_module:
            return self.blueprint

    @property
    def blueprint(self):
        """The name of the current blueprint, derived from the dotted
        endpoint name; `None` for app-level (non-blueprint) endpoints."""
        if self.url_rule and '.' in self.url_rule.endpoint:
            return self.url_rule.endpoint.rsplit('.', 1)[0]

    @property
    def json(self):
        """If the mimetype is `application/json` this will contain the
        parsed JSON data.  Otherwise this will be `None`.

        The :meth:`get_json` method should be used instead.
        """
        # XXX: deprecate property
        return self.get_json()

    def get_json(self, force=False, silent=False, cache=True):
        """Parses the incoming JSON request data and returns it.  If
        parsing fails the :meth:`on_json_loading_failed` method on the
        request object will be invoked.  By default this function will
        only load the json data if the mimetype is ``application/json``
        but this can be overridden by the `force` parameter.

        :param force: if set to `True` the mimetype is ignored.
        :param silent: if set to `True` this method will fail silently
                       and return `None` on parse errors instead of
                       invoking :meth:`on_json_loading_failed`.
        :param cache: if set to `True` the parsed JSON data is remembered
                      on the request.
        """
        # Return the memoized result when a previous call already parsed
        # (and cached) the body.
        rv = getattr(self, '_cached_json', _missing)
        if rv is not _missing:
            return rv

        if self.mimetype != 'application/json' and not force:
            return None

        # We accept a request charset against the specification as
        # certain clients have been using this in the past.  This
        # fits our general approach of being nice in what we accept
        # and strict in what we send out.
        request_charset = self.mimetype_params.get('charset')
        try:
            data = _get_data(self, cache)
            if request_charset is not None:
                rv = json.loads(data, encoding=request_charset)
            else:
                rv = json.loads(data)
        except ValueError as e:
            if silent:
                rv = None
            else:
                rv = self.on_json_loading_failed(e)
        if cache:
            self._cached_json = rv
        return rv

    def on_json_loading_failed(self, e):
        """Called if decoding of the JSON data failed.  The return value of
        this method is used by :meth:`get_json` when an error occurred.  The
        default implementation just raises a :class:`BadRequest` exception.

        .. versionchanged:: 0.10
           Removed buggy previous behavior of generating a random JSON
           response.  If you want that behavior back you can trivially
           add it by subclassing.

        .. versionadded:: 0.8
        """
        raise BadRequest()

    def _load_form_data(self):
        RequestBase._load_form_data(self)

        # in debug mode we're replacing the files multidict with an ad-hoc
        # subclass that raises a different error for key errors.
        ctx = _request_ctx_stack.top
        if ctx is not None and ctx.app.debug and \
           self.mimetype != 'multipart/form-data' and not self.files:
            attach_enctype_error_multidict(self)


class Response(ResponseBase):
    """The response object that is used by default in Flask.  Works like the
    response object from Werkzeug but is set to have an HTML mimetype by
    default.  Quite often you don't have to create this object yourself because
    :meth:`~flask.Flask.make_response` will take care of that for you.

    If you want to replace the response object used you can subclass this and
    set :attr:`~flask.Flask.response_class` to your subclass.
    """
    # Werkzeug defaults to text/plain; Flask responses are HTML by default.
    default_mimetype = 'text/html'
apache-2.0
dmnfarrell/peat
PEATDB/Ekin/Dataset.py
1
9801
#!/usr/bin/env python # # Protein Engineering Analysis Tool DataBase (PEATDB) # Copyright (C) 2010 Damien Farrell & Jens Erik Nielsen # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Contact information: # Email: Jens.Nielsen_at_gmail.com # Normal mail: # Jens Nielsen # SBBS, Conway Institute # University College Dublin # Dublin 4, Ireland # from PEATDB.TableModels import TableModel import numpy as np import time import timeit class EkinDataset(object): """Dataset structure for new ekin datasets, note that x and y lists can have None values, this is to allow values to be removed/changed e.g. 
a table""" names = ['data', 'active', 'errors', 'labels'] def __init__(self, x=[], y=[], xy=None, active=[], xerrs=[], yerrs=[], xlabel='x', ylabel='y', data=None): if data == None: self.dims = 2 if xy != None: self.x,self.y = xy else: self.x=x self.y=y self.data = [self.x,self.y] if len(active)==0: active = [1 for i in self.x] self.active=active if xerrs != None and len(xerrs) == 0: xerrs = [0 for i in self.x] if yerrs != None and len(yerrs) == 0: yerrs = [0 for i in self.y] self.checkLists() #convert to floats if needed self.errors=[xerrs,yerrs] self.labels=[xlabel,ylabel] self.fits={} else: self.__dict__ = data return def checkLists(self): """Check that our lists are floats or None, if empty""" self.x = self.checkList(self.x) self.y = self.checkList(self.y) return def checkList(self, lst): n=[] for i in lst: try: n.append(float(i)) except: n.append(None) return n def add(self, dp=(None,None), a=1, e=(None,None)): """Add a new datapoint, dp is a tuple""" self.x.append(dp[0]) self.y.append(dp[1]) self.active.append(a) for d in range(self.dims): self.errors[d].append(e[d]) return def remove(self, i=None): """Delete a datapoint at index i""" if i > len(self.x): print 'index out of range' return if i==None: self.x.pop() self.y.pop() else: del self.x[i] del self.y[i] del self.active[i] for j in range(self.dims): del self.errors[j][i] return def removeMultiple(self, lst): """Remove points using list of elements""" rng = range(0,self.length()) self.x=[self.x[i] for i in rng if i not in lst] self.y=[self.y[i] for i in rng if i not in lst] self.active=[self.active[i] for i in rng if i not in lst] for d in range(self.dims): self.errors[d] = [self.errors[d][i] for i in rng if i not in lst] return def removeBounded(self, bounds): """Remove points within selected x-y bounds""" if bounds==None or len(bounds)!=4: return x1,y1,x2,y2 = bounds if x1>x2 : temp=x1;x1=x2;x2=temp if y1>y2: temp=y1;y1=y2;y2=temp lst=[] for i in range(0,self.length()): x=self.x[i]; y=self.y[i] if (x>x1 
and x<x2) and (y>y1 and y<y2): lst.append(i) self.removeMultiple(lst) return def addFit(self, fitdata, name='default'): """Add a fit model""" if not hasattr(self, 'fits'): self.fits={} self.fits[name] = fitdata return def getFit(self): """get default fit data""" if self.fits.has_key('default'): return self.fits['default'] else: return None def getx(self): x=[i for i in self.x if i!=None and i!=None] return x def gety(self): y=[i for i in self.y if i!=None and i!=None] return y def getxy(self): """Get x-y lists""" xy=zip(self.x,self.y) x=[i[0] for i in xy if i[0]!=None and i[1]!=None] y=[i[1] for i in xy if i[0]!=None and i[1]!=None] return x,y def getxya(self): xya=zip(self.x,self.y,self.active) x=[i[0] for i in xya if i[0]!=None and i[1]!=None] y=[i[1] for i in xya if i[0]!=None and i[1]!=None] a=[i[2] for i in xya if i[0]!=None and i[1]!=None] return x,y,a def getActive(self): """Get only active points""" x=[];y=[] for i in zip(self.x,self.y,self.active): if i[0]!=None and i[1]!=None and i[2] == 1: x.append(i[0]) y.append(i[1]) return x,y def getAll(self): """Get all data, including error vals""" x,y,a = self.getxya() xerrs = [self.errors[0][i] for i in range(len(self.x)) if self.x[i]!=None and self.y[i]!=None] yerrs = [self.errors[1][i] for i in range(len(self.x)) if self.x[i]!=None and self.y[i]!=None] return x,y,a,xerrs,yerrs def getxySorted(self): """Get sorted version of lists""" from operator import itemgetter x,y=self.getxy() s=sorted(zip(x,y), key=itemgetter(0)) x,y = zip(*s) return x,y def setActive(self, i, a=1): """Set a point as active""" self.active[i] = a return def setActiveBounded(self, bounds=None, status=1): """Set (in)active from bounded points, a tuple x1,y1,x2,y2""" if bounds==None or len(bounds)!=4: return x1,y1,x2,y2 = bounds if x1>x2 : temp=x1;x1=x2;x2=temp if y1>y2: temp=y1;y1=y2;y2=temp for i in range(0,self.length()): x=self.x[i]; y=self.y[i] if (x>x1 and x<x2) and (y>y1 and y<y2): self.active[i]= status return def adjust(self, 
column=0, op='+', val=0): """Perform some arithmetic on pts""" lst=self.data[column] for i in range(0,self.length()): lst[i]=eval(str(lst[i]) + op + str(val)) return def setError(self, col, i, e): if e != None: self.errors[col][i] = e return def getErrors(self): xerrs,yerrs=self.errors if xerrs != None: xerrs=[i[0] for i in zip(xerrs, self.x) if i[0]!=None and i[1]!=None] if yerrs != None: yerrs=[i[0] for i in zip(yerrs, self.y) if i[0]!=None and i[1]!=None] return xerrs,yerrs def xval(self, i): """Get x value at index i""" return self.x[i] def yxal(self, i): """Get y value at index i""" return self.y[i] def xerr(self, i): """Get x error at index i""" return self.errors[0][i] def yerr(self, i): """Get y error at index i""" return self.errors[1][i] def xrange(self): x = self.x return max(x)-min(x) def yrange(self): y = self.y return max(y)-min(y) def length(self, getall=False): """Get no. of datapoints""" return len(self.x) def minX(self): """Get average of x datapoints""" return min(self.getx()) def maxX(self): """Get average of x datapoints""" return max(self.getx()) def minY(self): """Get average of y datapoints""" return min(self.gety()) def maxY(self): """Get average of y datapoints""" return max(self.gety()) def avgX(self): """Get average of x datapoints""" return np.mean(self.getx()) def avgY(self): """Get average of y datapoints""" return np.mean(self.gety()) def getData(self): return self.__dict__ def printAll(self): for i in range(self.x): print self.x[i], self.y[i] return def printXY(self): """print paired X-Y points""" print zip(self.x, self.y) def prettyPrint(self): """prints the entire dict""" import pprint pp = pprint.PrettyPrinter(indent=4) x=pp.pformat(self.__dict__) print x return def __repr__(self): return 'dataset with %s points' %self.length() def len(self): return len(self.x) def simpleTest(): """Do basic tests""" m = EkinDataset() for x in range(1,11): y=x/3.14 m.add((x,y), a=1, e=(x/10.0,y*0.02)) print m.xrange(), m.yrange() print m.minX(), 
m.maxX() print m.minY(), m.maxY() print m.avgX(), m.avgY() print m.xerr(0), m.yerr(0) x=m.getData() m.setActive(2, 0) m.setError(1, 1,0.5) m.prettyPrint() return def main(): simpleTest() if __name__ == '__main__': main()
mit
rickyHong/Tensorflow_modi
tensorflow/python/kernel_tests/bcast_ops_test.py
5
2202
"""Tests for tensorflow.kernels.bcast_ops.""" import tensorflow.python.platform import tensorflow as tf from tensorflow.python.ops.gen_array_ops import _broadcast_gradient_args class BcastOpsTest(tf.test.TestCase): def _GetGradientArgs(self, xs, ys): with self.test_session() as sess: return sess.run(_broadcast_gradient_args(xs, ys)) def testBasic(self): r0, r1 = self._GetGradientArgs([2, 3, 5], [1]) self.assertAllEqual(r0, []) self.assertAllEqual(r1, [0, 1, 2]) r0, r1 = self._GetGradientArgs([1], [2, 3, 5]) self.assertAllEqual(r0, [0, 1, 2]) self.assertAllEqual(r1, []) r0, r1 = self._GetGradientArgs([2, 3, 5], [5]) self.assertAllEqual(r0, []) self.assertAllEqual(r1, [0, 1]) r0, r1 = self._GetGradientArgs([5], [2, 3, 5]) self.assertAllEqual(r0, [0, 1]) self.assertAllEqual(r1, []) r0, r1 = self._GetGradientArgs([2, 3, 5], [3, 5]) self.assertAllEqual(r0, []) self.assertAllEqual(r1, [0]) r0, r1 = self._GetGradientArgs([3, 5], [2, 3, 5]) self.assertAllEqual(r0, [0]) self.assertAllEqual(r1, []) r0, r1 = self._GetGradientArgs([2, 3, 5], [3, 1]) self.assertAllEqual(r0, []) self.assertAllEqual(r1, [0, 2]) r0, r1 = self._GetGradientArgs([3, 1], [2, 3, 5]) self.assertAllEqual(r0, [0, 2]) self.assertAllEqual(r1, []) r0, r1 = self._GetGradientArgs([2, 1, 5], [3, 1]) self.assertAllEqual(r0, [1]) self.assertAllEqual(r1, [0, 2]) r0, r1 = self._GetGradientArgs([3, 1], [2, 1, 5]) self.assertAllEqual(r0, [0, 2]) self.assertAllEqual(r1, [1]) def testZeroDims(self): r0, r1 = self._GetGradientArgs([2, 0, 3, 0, 5], [3, 0, 5]) self.assertAllEqual(r0, []) self.assertAllEqual(r1, [0, 1]) r0, r1 = self._GetGradientArgs([3, 0, 5], [2, 0, 3, 0, 5]) self.assertAllEqual(r0, [0, 1]) self.assertAllEqual(r1, []) r0, r1 = self._GetGradientArgs([2, 0, 3, 0, 5], [3, 1, 5]) self.assertAllEqual(r0, []) self.assertAllEqual(r1, [0, 1, 3]) r0, r1 = self._GetGradientArgs([3, 1, 5], [2, 0, 3, 0, 5]) self.assertAllEqual(r0, [0, 1, 3]) self.assertAllEqual(r1, []) if __name__ == "__main__": tf.test.main()
apache-2.0
athompso/ansible
contrib/inventory/vagrant.py
55
3958
#!/usr/bin/env python """ Vagrant external inventory script. Automatically finds the IP of the booted vagrant vm(s), and returns it under the host group 'vagrant' Example Vagrant configuration using this script: config.vm.provision :ansible do |ansible| ansible.playbook = "./provision/your_playbook.yml" ansible.inventory_file = "./provision/inventory/vagrant.py" ansible.verbose = true end """ # Copyright (C) 2013 Mark Mandel <mark@compoundtheory.com> # 2015 Igor Khomyakov <homyakov@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # Thanks to the spacewalk.py inventory script for giving me the basic structure # of this. 
# import sys import os.path import subprocess import re from paramiko import SSHConfig from cStringIO import StringIO from optparse import OptionParser from collections import defaultdict try: import json except: import simplejson as json _group = 'vagrant' # a default group _ssh_to_ansible = [('user', 'ansible_ssh_user'), ('hostname', 'ansible_ssh_host'), ('identityfile', 'ansible_ssh_private_key_file'), ('port', 'ansible_ssh_port')] # Options # ------------------------------ parser = OptionParser(usage="%prog [options] --list | --host <machine>") parser.add_option('--list', default=False, dest="list", action="store_true", help="Produce a JSON consumable grouping of Vagrant servers for Ansible") parser.add_option('--host', default=None, dest="host", help="Generate additional host specific details for given host for Ansible") (options, args) = parser.parse_args() # # helper functions # # get all the ssh configs for all boxes in an array of dictionaries. def get_ssh_config(): return {k: get_a_ssh_config(k) for k in list_running_boxes()} # list all the running boxes def list_running_boxes(): output = subprocess.check_output(["vagrant", "status"]).split('\n') boxes = [] for line in output: matcher = re.search("([^\s]+)[\s]+running \(.+", line) if matcher: boxes.append(matcher.group(1)) return boxes # get the ssh config for a single box def get_a_ssh_config(box_name): """Gives back a map of all the machine's ssh configurations""" output = subprocess.check_output(["vagrant", "ssh-config", box_name]) config = SSHConfig() config.parse(StringIO(output)) host_config = config.lookup(box_name) # man 5 ssh_config: # > It is possible to have multiple identity files ... # > all these identities will be tried in sequence. 
for id in host_config['identityfile']: if os.path.isfile(id): host_config['identityfile'] = id return {v: host_config[k] for k, v in _ssh_to_ansible} # List out servers that vagrant has running # ------------------------------ if options.list: ssh_config = get_ssh_config() meta = defaultdict(dict) for host in ssh_config: meta['hostvars'][host] = ssh_config[host] print json.dumps({_group: list(ssh_config.keys()), '_meta': meta}) sys.exit(0) # Get out the host details # ------------------------------ elif options.host: print json.dumps(get_a_ssh_config(options.host)) sys.exit(0) # Print out help # ------------------------------ else: parser.print_help() sys.exit(0)
gpl-3.0
RonnyPfannschmidt/pip
src/pip/_vendor/chardet/latin1prober.py
290
5370
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#   Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

from .charsetprober import CharSetProber
from .enums import ProbingState

# Number of frequency categories used by Latin1ClassModel (values 0-3).
FREQ_CAT_NUM = 4

UDF = 0  # undefined
OTH = 1  # other
ASC = 2  # ascii capital letter
ASS = 3  # ascii small letter
ACV = 4  # accent capital vowel
ACO = 5  # accent capital other
ASV = 6  # accent small vowel
ASO = 7  # accent small other
CLASS_NUM = 8  # total classes

# Maps each of the 256 possible byte values to one of the character
# classes defined above.
Latin1_CharToClass = (
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 00 - 07
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 08 - 0F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 10 - 17
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 18 - 1F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 20 - 27
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 28 - 2F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 30 - 37
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 38 - 3F
    OTH, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 40 - 47
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 48 - 4F
    ASC, ASC, ASC, ASC, ASC, ASC, ASC, ASC,   # 50 - 57
    ASC, ASC, ASC, OTH, OTH, OTH, OTH, OTH,   # 58 - 5F
    OTH, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 60 - 67
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 68 - 6F
    ASS, ASS, ASS, ASS, ASS, ASS, ASS, ASS,   # 70 - 77
    ASS, ASS, ASS, OTH, OTH, OTH, OTH, OTH,   # 78 - 7F
    OTH, UDF, OTH, ASO, OTH, OTH, OTH, OTH,   # 80 - 87
    OTH, OTH, ACO, OTH, ACO, UDF, ACO, UDF,   # 88 - 8F
    UDF, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # 90 - 97
    OTH, OTH, ASO, OTH, ASO, UDF, ASO, ACO,   # 98 - 9F
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A0 - A7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # A8 - AF
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B0 - B7
    OTH, OTH, OTH, OTH, OTH, OTH, OTH, OTH,   # B8 - BF
    ACV, ACV, ACV, ACV, ACV, ACV, ACO, ACO,   # C0 - C7
    ACV, ACV, ACV, ACV, ACV, ACV, ACV, ACV,   # C8 - CF
    ACO, ACO, ACV, ACV, ACV, ACV, ACV, OTH,   # D0 - D7
    ACV, ACV, ACV, ACV, ACV, ACO, ACO, ACO,   # D8 - DF
    ASV, ASV, ASV, ASV, ASV, ASV, ASO, ASO,   # E0 - E7
    ASV, ASV, ASV, ASV, ASV, ASV, ASV, ASV,   # E8 - EF
    ASO, ASO, ASV, ASV, ASV, ASV, ASV, OTH,   # F0 - F7
    ASV, ASV, ASV, ASV, ASV, ASO, ASO, ASO,   # F8 - FF
)

# Likelihood of each (previous class, current class) pair.
# 0 : illegal
# 1 : very unlikely
# 2 : normal
# 3 : very likely
Latin1ClassModel = (
# UDF OTH ASC ASS ACV ACO ASV ASO
    0,  0,  0,  0,  0,  0,  0,  0,  # UDF
    0,  3,  3,  3,  3,  3,  3,  3,  # OTH
    0,  3,  3,  3,  3,  3,  3,  3,  # ASC
    0,  3,  3,  3,  1,  1,  3,  3,  # ASS
    0,  3,  3,  3,  1,  2,  1,  2,  # ACV
    0,  3,  3,  3,  3,  3,  3,  3,  # ACO
    0,  3,  1,  3,  1,  1,  1,  3,  # ASV
    0,  3,  1,  3,  1,  1,  3,  3,  # ASO
)


class Latin1Prober(CharSetProber):
    """Heuristic prober for ISO-8859-1 (Latin-1) encoded text.

    Scores adjacent character-class pairs against ``Latin1ClassModel``
    and derives a confidence from the accumulated frequency-category
    counts.
    """

    def __init__(self):
        super(Latin1Prober, self).__init__()
        # Character class of the previously seen byte (one of the
        # UDF/OTH/... constants above).
        self._last_char_class = None
        # Count of observed pairs per frequency category (length
        # FREQ_CAT_NUM); populated by reset().
        self._freq_counter = None
        self.reset()

    def reset(self):
        """Restore the prober to its pristine state."""
        self._last_char_class = OTH
        self._freq_counter = [0] * FREQ_CAT_NUM
        CharSetProber.reset(self)

    @property
    def charset_name(self):
        return "ISO-8859-1"

    @property
    def language(self):
        # Latin-1 covers many languages, so no single one is reported.
        return ""

    def feed(self, byte_str):
        """Consume ``byte_str`` and update the pair-frequency counters.

        Returns the prober's current ProbingState.
        """
        byte_str = self.filter_with_english_letters(byte_str)
        for c in byte_str:
            char_class = Latin1_CharToClass[c]
            freq = Latin1ClassModel[(self._last_char_class * CLASS_NUM)
                                    + char_class]
            if freq == 0:
                # An "illegal" pair: this input cannot be Latin-1.
                self._state = ProbingState.NOT_ME
                break
            self._freq_counter[freq] += 1
            self._last_char_class = char_class

        return self.state

    def get_confidence(self):
        """Return a confidence estimate that the input is Latin-1."""
        if self.state == ProbingState.NOT_ME:
            return 0.01

        total = sum(self._freq_counter)
        if total < 0.01:
            confidence = 0.0
        else:
            # "Very likely" pairs raise confidence; "very unlikely" pairs
            # are penalized heavily (x20).
            confidence = ((self._freq_counter[3] - self._freq_counter[1] * 20.0)
                          / total)
        if confidence < 0.0:
            confidence = 0.0
        # lower the confidence of latin1 so that other more accurate
        # detector can take priority.
        confidence = confidence * 0.73
        return confidence
mit
mitch-b/web
pelicanconf.py
1
2084
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
"""Pelican configuration for mitchbarry.com."""
from __future__ import unicode_literals

# Author / site identity.
AUTHOR = u'Mitchell Barry'
AUTHOR_EMAIL = u'mitch.barry@gmail.com'
ABOUT_AUTHOR = u'Software Developer, Person'
SITENAME = u'Mitchell Barry'
SITEURL = 'http://localhost:8000'

PATH = 'content'

# Active theme: a local fork of the Pure theme (removed the dead
# 'notmyidea' assignment that was immediately overwritten).
THEME = 'themes/pure'

TIMEZONE = 'America/Chicago'

DEFAULT_LANG = u'en'

DEFAULT_PAGINATION = 10

# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None

# Plugins
PLUGIN_PATHS = ['plugins']
# NOTE(review): 'gravitar' looks like a misspelling of 'gravatar' -- confirm
# against the actual directory name under ./plugins before renaming.
PLUGINS = ['gravitar', 'sitemap', 'tipue_search', ]

# Social widget
SOCIAL = (('google-plus', 'https://google.com/+MitchellBarry'),
          ('github', 'https://github.com/mitch-b'),
          ('twitter', 'https://twitter.com/mitchbarry'),)

FOOTER_LINKS = (('Source', 'https://github.com/mitch-b/web'),
                ('RSS', 'http://mitchbarry.com/feeds/all.atom.xml'),
                ('Pelican', 'http://blog.getpelican.com/'),
                ('PurePelican (fork)', 'https://github.com/mitch-b/pelican-purecss'),)

DISPLAY_PAGES_ON_MENU = False
MENUITEMS = [
    ('Archive', 'archives.html'),
    ('About', 'about/'),
    ('Contact', 'contact/'),
    ('Projects', 'category/projects.html'),
]

# URL layout: slug-based "pretty" URLs for articles and pages.
ARTICLE_URL = '{slug}/'
ARTICLE_SAVE_AS = '{slug}/index.html'
PAGE_URL = '{slug}/'
PAGE_SAVE_AS = '{slug}/index.html'

# Uncomment following line if you want document-relative URLs when developing
# RELATIVE_URLS = True

# These folders will copy from content to output directly
STATIC_PATHS = ['images', 'assets']

# Dropped the redundant double parentheses -- this was always a plain tuple.
DIRECT_TEMPLATES = ('index', 'tags', 'categories', 'archives', 'search')

# Pure theme settings
COVER_IMG_URL = '/images/zealandia.jpg'
PROFILE_IMAGE_URL = '/images/glass.jpg'
TAGLINE = 'Software Developer, Person'
DISQUS_SITENAME = ''
GOOGLE_ANALYTICS = ''

SITEMAP = {
    'format': 'xml',
    'priorities': {
        'articles': 0.5,
        'indexes': 0.5,
        'pages': 0.5
    },
    'changefreqs': {
        'articles': 'monthly',
        'indexes': 'daily',
        'pages': 'monthly'
    }
}
mit
biddisco/VTK
ThirdParty/Twisted/twisted/names/test/test_names.py
4
29371
# -*- test-case-name: twisted.names.test.test_names -*- # Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Test cases for twisted.names. """ import socket, operator, copy from StringIO import StringIO from twisted.trial import unittest from twisted.internet import reactor, defer, error from twisted.internet.task import Clock from twisted.internet.defer import succeed from twisted.names import client, server, common, authority, dns from twisted.python import failure from twisted.names.dns import Message from twisted.names.client import Resolver from twisted.names.secondary import ( SecondaryAuthorityService, SecondaryAuthority) from twisted.python.compat import reduce from twisted.test.proto_helpers import StringTransport, MemoryReactor def justPayload(results): return [r.payload for r in results[0]] class NoFileAuthority(authority.FileAuthority): def __init__(self, soa, records): # Yes, skip FileAuthority common.ResolverBase.__init__(self) self.soa, self.records = soa, records soa_record = dns.Record_SOA( mname = 'test-domain.com', rname = 'root.test-domain.com', serial = 100, refresh = 1234, minimum = 7654, expire = 19283784, retry = 15, ttl=1 ) reverse_soa = dns.Record_SOA( mname = '93.84.28.in-addr.arpa', rname = '93.84.28.in-addr.arpa', serial = 120, refresh = 54321, minimum = 382, expire = 11193983, retry = 30, ttl=3 ) my_soa = dns.Record_SOA( mname = 'my-domain.com', rname = 'postmaster.test-domain.com', serial = 130, refresh = 12345, minimum = 1, expire = 999999, retry = 100, ) test_domain_com = NoFileAuthority( soa = ('test-domain.com', soa_record), records = { 'test-domain.com': [ soa_record, dns.Record_A('127.0.0.1'), dns.Record_NS('39.28.189.39'), dns.Record_SPF('v=spf1 mx/30 mx:example.org/30 -all'), dns.Record_SPF('v=spf1 +mx a:\0colo', '.example.com/28 -all not valid'), dns.Record_MX(10, 'host.test-domain.com'), dns.Record_HINFO(os='Linux', cpu='A Fast One, Dontcha know'), dns.Record_CNAME('canonical.name.com'), 
dns.Record_MB('mailbox.test-domain.com'), dns.Record_MG('mail.group.someplace'), dns.Record_TXT('A First piece of Text', 'a SecoNd piece'), dns.Record_A6(0, 'ABCD::4321', ''), dns.Record_A6(12, '0:0069::0', 'some.network.tld'), dns.Record_A6(8, '0:5634:1294:AFCB:56AC:48EF:34C3:01FF', 'tra.la.la.net'), dns.Record_TXT('Some more text, haha! Yes. \0 Still here?'), dns.Record_MR('mail.redirect.or.whatever'), dns.Record_MINFO(rmailbx='r mail box', emailbx='e mail box'), dns.Record_AFSDB(subtype=1, hostname='afsdb.test-domain.com'), dns.Record_RP(mbox='whatever.i.dunno', txt='some.more.text'), dns.Record_WKS('12.54.78.12', socket.IPPROTO_TCP, '\x12\x01\x16\xfe\xc1\x00\x01'), dns.Record_NAPTR(100, 10, "u", "sip+E2U", "!^.*$!sip:information@domain.tld!"), dns.Record_AAAA('AF43:5634:1294:AFCB:56AC:48EF:34C3:01FF')], 'http.tcp.test-domain.com': [ dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool') ], 'host.test-domain.com': [ dns.Record_A('123.242.1.5'), dns.Record_A('0.255.0.255'), ], 'host-two.test-domain.com': [ # # Python bug # dns.Record_A('255.255.255.255'), # dns.Record_A('255.255.255.254'), dns.Record_A('0.0.0.0') ], 'cname.test-domain.com': [ dns.Record_CNAME('test-domain.com') ], 'anothertest-domain.com': [ dns.Record_A('1.2.3.4')], } ) reverse_domain = NoFileAuthority( soa = ('93.84.28.in-addr.arpa', reverse_soa), records = { '123.93.84.28.in-addr.arpa': [ dns.Record_PTR('test.host-reverse.lookup.com'), reverse_soa ] } ) my_domain_com = NoFileAuthority( soa = ('my-domain.com', my_soa), records = { 'my-domain.com': [ my_soa, dns.Record_A('1.2.3.4', ttl='1S'), dns.Record_NS('ns1.domain', ttl='2M'), dns.Record_NS('ns2.domain', ttl='3H'), dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool', ttl='4D') ] } ) class ServerDNSTestCase(unittest.TestCase): """ Test cases for DNS server and client. 
""" def setUp(self): self.factory = server.DNSServerFactory([ test_domain_com, reverse_domain, my_domain_com ], verbose=2) p = dns.DNSDatagramProtocol(self.factory) while 1: listenerTCP = reactor.listenTCP(0, self.factory, interface="127.0.0.1") # It's simpler to do the stop listening with addCleanup, # even though we might not end up using this TCP port in # the test (if the listenUDP below fails). Cleaning up # this TCP port sooner than "cleanup time" would mean # adding more code to keep track of the Deferred returned # by stopListening. self.addCleanup(listenerTCP.stopListening) port = listenerTCP.getHost().port try: listenerUDP = reactor.listenUDP(port, p, interface="127.0.0.1") except error.CannotListenError: pass else: self.addCleanup(listenerUDP.stopListening) break self.listenerTCP = listenerTCP self.listenerUDP = listenerUDP self.resolver = client.Resolver(servers=[('127.0.0.1', port)]) def tearDown(self): """ Clean up any server connections associated with the L{DNSServerFactory} created in L{setUp} """ # It'd be great if DNSServerFactory had a method that # encapsulated this task. At least the necessary data is # available, though. 
for conn in self.factory.connections[:]: conn.transport.loseConnection() def namesTest(self, d, r): self.response = None def setDone(response): self.response = response def checkResults(ignored): if isinstance(self.response, failure.Failure): raise self.response results = justPayload(self.response) assert len(results) == len(r), "%s != %s" % (map(str, results), map(str, r)) for rec in results: assert rec in r, "%s not in %s" % (rec, map(str, r)) d.addBoth(setDone) d.addCallback(checkResults) return d def testAddressRecord1(self): """Test simple DNS 'A' record queries""" return self.namesTest( self.resolver.lookupAddress('test-domain.com'), [dns.Record_A('127.0.0.1', ttl=19283784)] ) def testAddressRecord2(self): """Test DNS 'A' record queries with multiple answers""" return self.namesTest( self.resolver.lookupAddress('host.test-domain.com'), [dns.Record_A('123.242.1.5', ttl=19283784), dns.Record_A('0.255.0.255', ttl=19283784)] ) def testAddressRecord3(self): """Test DNS 'A' record queries with edge cases""" return self.namesTest( self.resolver.lookupAddress('host-two.test-domain.com'), [dns.Record_A('255.255.255.254', ttl=19283784), dns.Record_A('0.0.0.0', ttl=19283784)] ) def testAuthority(self): """Test DNS 'SOA' record queries""" return self.namesTest( self.resolver.lookupAuthority('test-domain.com'), [soa_record] ) def testMailExchangeRecord(self): """Test DNS 'MX' record queries""" return self.namesTest( self.resolver.lookupMailExchange('test-domain.com'), [dns.Record_MX(10, 'host.test-domain.com', ttl=19283784)] ) def testNameserver(self): """Test DNS 'NS' record queries""" return self.namesTest( self.resolver.lookupNameservers('test-domain.com'), [dns.Record_NS('39.28.189.39', ttl=19283784)] ) def testHINFO(self): """Test DNS 'HINFO' record queries""" return self.namesTest( self.resolver.lookupHostInfo('test-domain.com'), [dns.Record_HINFO(os='Linux', cpu='A Fast One, Dontcha know', ttl=19283784)] ) def testPTR(self): """Test DNS 'PTR' record queries""" 
return self.namesTest( self.resolver.lookupPointer('123.93.84.28.in-addr.arpa'), [dns.Record_PTR('test.host-reverse.lookup.com', ttl=11193983)] ) def testCNAME(self): """Test DNS 'CNAME' record queries""" return self.namesTest( self.resolver.lookupCanonicalName('test-domain.com'), [dns.Record_CNAME('canonical.name.com', ttl=19283784)] ) def testCNAMEAdditional(self): """Test additional processing for CNAME records""" return self.namesTest( self.resolver.lookupAddress('cname.test-domain.com'), [dns.Record_CNAME('test-domain.com', ttl=19283784), dns.Record_A('127.0.0.1', ttl=19283784)] ) def testMB(self): """Test DNS 'MB' record queries""" return self.namesTest( self.resolver.lookupMailBox('test-domain.com'), [dns.Record_MB('mailbox.test-domain.com', ttl=19283784)] ) def testMG(self): """Test DNS 'MG' record queries""" return self.namesTest( self.resolver.lookupMailGroup('test-domain.com'), [dns.Record_MG('mail.group.someplace', ttl=19283784)] ) def testMR(self): """Test DNS 'MR' record queries""" return self.namesTest( self.resolver.lookupMailRename('test-domain.com'), [dns.Record_MR('mail.redirect.or.whatever', ttl=19283784)] ) def testMINFO(self): """Test DNS 'MINFO' record queries""" return self.namesTest( self.resolver.lookupMailboxInfo('test-domain.com'), [dns.Record_MINFO(rmailbx='r mail box', emailbx='e mail box', ttl=19283784)] ) def testSRV(self): """Test DNS 'SRV' record queries""" return self.namesTest( self.resolver.lookupService('http.tcp.test-domain.com'), [dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool', ttl=19283784)] ) def testAFSDB(self): """Test DNS 'AFSDB' record queries""" return self.namesTest( self.resolver.lookupAFSDatabase('test-domain.com'), [dns.Record_AFSDB(subtype=1, hostname='afsdb.test-domain.com', ttl=19283784)] ) def testRP(self): """Test DNS 'RP' record queries""" return self.namesTest( self.resolver.lookupResponsibility('test-domain.com'), [dns.Record_RP(mbox='whatever.i.dunno', txt='some.more.text', ttl=19283784)] ) def 
testTXT(self): """Test DNS 'TXT' record queries""" return self.namesTest( self.resolver.lookupText('test-domain.com'), [dns.Record_TXT('A First piece of Text', 'a SecoNd piece', ttl=19283784), dns.Record_TXT('Some more text, haha! Yes. \0 Still here?', ttl=19283784)] ) def test_spf(self): """ L{DNSServerFactory} can serve I{SPF} resource records. """ return self.namesTest( self.resolver.lookupSenderPolicy('test-domain.com'), [dns.Record_SPF('v=spf1 mx/30 mx:example.org/30 -all', ttl=19283784), dns.Record_SPF('v=spf1 +mx a:\0colo', '.example.com/28 -all not valid', ttl=19283784)] ) def testWKS(self): """Test DNS 'WKS' record queries""" return self.namesTest( self.resolver.lookupWellKnownServices('test-domain.com'), [dns.Record_WKS('12.54.78.12', socket.IPPROTO_TCP, '\x12\x01\x16\xfe\xc1\x00\x01', ttl=19283784)] ) def testSomeRecordsWithTTLs(self): result_soa = copy.copy(my_soa) result_soa.ttl = my_soa.expire return self.namesTest( self.resolver.lookupAllRecords('my-domain.com'), [result_soa, dns.Record_A('1.2.3.4', ttl='1S'), dns.Record_NS('ns1.domain', ttl='2M'), dns.Record_NS('ns2.domain', ttl='3H'), dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool', ttl='4D')] ) def testAAAA(self): """Test DNS 'AAAA' record queries (IPv6)""" return self.namesTest( self.resolver.lookupIPV6Address('test-domain.com'), [dns.Record_AAAA('AF43:5634:1294:AFCB:56AC:48EF:34C3:01FF', ttl=19283784)] ) def testA6(self): """Test DNS 'A6' record queries (IPv6)""" return self.namesTest( self.resolver.lookupAddress6('test-domain.com'), [dns.Record_A6(0, 'ABCD::4321', '', ttl=19283784), dns.Record_A6(12, '0:0069::0', 'some.network.tld', ttl=19283784), dns.Record_A6(8, '0:5634:1294:AFCB:56AC:48EF:34C3:01FF', 'tra.la.la.net', ttl=19283784)] ) def test_zoneTransfer(self): """ Test DNS 'AXFR' queries (Zone transfer) """ default_ttl = soa_record.expire results = [copy.copy(r) for r in reduce(operator.add, test_domain_com.records.values())] for r in results: if r.ttl is None: r.ttl = 
default_ttl return self.namesTest( self.resolver.lookupZone('test-domain.com').addCallback(lambda r: (r[0][:-1],)), results ) def testSimilarZonesDontInterfere(self): """Tests that unrelated zones don't mess with each other.""" return self.namesTest( self.resolver.lookupAddress("anothertest-domain.com"), [dns.Record_A('1.2.3.4', ttl=19283784)] ) def test_NAPTR(self): """ Test DNS 'NAPTR' record queries. """ return self.namesTest( self.resolver.lookupNamingAuthorityPointer('test-domain.com'), [dns.Record_NAPTR(100, 10, "u", "sip+E2U", "!^.*$!sip:information@domain.tld!", ttl=19283784)]) class DNSServerFactoryTests(unittest.TestCase): """ Tests for L{server.DNSServerFactory}. """ def _messageReceivedTest(self, methodName, message): """ Assert that the named method is called with the given message when it is passed to L{DNSServerFactory.messageReceived}. """ # Make it appear to have some queries so that # DNSServerFactory.allowQuery allows it. message.queries = [None] receivedMessages = [] def fakeHandler(message, protocol, address): receivedMessages.append((message, protocol, address)) class FakeProtocol(object): def writeMessage(self, message): pass protocol = FakeProtocol() factory = server.DNSServerFactory(None) setattr(factory, methodName, fakeHandler) factory.messageReceived(message, protocol) self.assertEqual(receivedMessages, [(message, protocol, None)]) def test_notifyMessageReceived(self): """ L{DNSServerFactory.messageReceived} passes messages with an opcode of C{OP_NOTIFY} on to L{DNSServerFactory.handleNotify}. """ # RFC 1996, section 4.5 opCode = 4 self._messageReceivedTest('handleNotify', Message(opCode=opCode)) def test_updateMessageReceived(self): """ L{DNSServerFactory.messageReceived} passes messages with an opcode of C{OP_UPDATE} on to L{DNSServerFactory.handleOther}. This may change if the implementation ever covers update messages. 
""" # RFC 2136, section 1.3 opCode = 5 self._messageReceivedTest('handleOther', Message(opCode=opCode)) def test_connectionTracking(self): """ The C{connectionMade} and C{connectionLost} methods of L{DNSServerFactory} cooperate to keep track of all L{DNSProtocol} objects created by a factory which are connected. """ protoA, protoB = object(), object() factory = server.DNSServerFactory() factory.connectionMade(protoA) self.assertEqual(factory.connections, [protoA]) factory.connectionMade(protoB) self.assertEqual(factory.connections, [protoA, protoB]) factory.connectionLost(protoA) self.assertEqual(factory.connections, [protoB]) factory.connectionLost(protoB) self.assertEqual(factory.connections, []) class HelperTestCase(unittest.TestCase): def testSerialGenerator(self): f = self.mktemp() a = authority.getSerial(f) for i in range(20): b = authority.getSerial(f) self.failUnless(a < b) a = b class AXFRTest(unittest.TestCase): def setUp(self): self.results = None self.d = defer.Deferred() self.d.addCallback(self._gotResults) self.controller = client.AXFRController('fooby.com', self.d) self.soa = dns.RRHeader(name='fooby.com', type=dns.SOA, cls=dns.IN, ttl=86400, auth=False, payload=dns.Record_SOA(mname='fooby.com', rname='hooj.fooby.com', serial=100, refresh=200, retry=300, expire=400, minimum=500, ttl=600)) self.records = [ self.soa, dns.RRHeader(name='fooby.com', type=dns.NS, cls=dns.IN, ttl=700, auth=False, payload=dns.Record_NS(name='ns.twistedmatrix.com', ttl=700)), dns.RRHeader(name='fooby.com', type=dns.MX, cls=dns.IN, ttl=700, auth=False, payload=dns.Record_MX(preference=10, exchange='mail.mv3d.com', ttl=700)), dns.RRHeader(name='fooby.com', type=dns.A, cls=dns.IN, ttl=700, auth=False, payload=dns.Record_A(address='64.123.27.105', ttl=700)), self.soa ] def _makeMessage(self): # hooray they all have the same message format return dns.Message(id=999, answer=1, opCode=0, recDes=0, recAv=1, auth=1, rCode=0, trunc=0, maxSize=0) def testBindAndTNamesStyle(self): # 
Bind style = One big single message m = self._makeMessage() m.queries = [dns.Query('fooby.com', dns.AXFR, dns.IN)] m.answers = self.records self.controller.messageReceived(m, None) self.assertEqual(self.results, self.records) def _gotResults(self, result): self.results = result def testDJBStyle(self): # DJB style = message per record records = self.records[:] while records: m = self._makeMessage() m.queries = [] # DJB *doesn't* specify any queries.. hmm.. m.answers = [records.pop(0)] self.controller.messageReceived(m, None) self.assertEqual(self.results, self.records) class ResolvConfHandling(unittest.TestCase): def testMissing(self): resolvConf = self.mktemp() r = client.Resolver(resolv=resolvConf) self.assertEqual(r.dynServers, [('127.0.0.1', 53)]) r._parseCall.cancel() def testEmpty(self): resolvConf = self.mktemp() fObj = file(resolvConf, 'w') fObj.close() r = client.Resolver(resolv=resolvConf) self.assertEqual(r.dynServers, [('127.0.0.1', 53)]) r._parseCall.cancel() class AuthorityTests(unittest.TestCase): """ Tests for the basic response record selection code in L{FileAuthority} (independent of its fileness). """ def test_recordMissing(self): """ If a L{FileAuthority} has a zone which includes an I{NS} record for a particular name and that authority is asked for another record for the same name which does not exist, the I{NS} record is not included in the authority section of the response. """ authority = NoFileAuthority( soa=(str(soa_record.mname), soa_record), records={ str(soa_record.mname): [ soa_record, dns.Record_NS('1.2.3.4'), ]}) d = authority.lookupAddress(str(soa_record.mname)) result = [] d.addCallback(result.append) answer, authority, additional = result[0] self.assertEqual(answer, []) self.assertEqual( authority, [ dns.RRHeader( str(soa_record.mname), soa_record.TYPE, ttl=soa_record.expire, payload=soa_record, auth=True)]) self.assertEqual(additional, []) def _referralTest(self, method): """ Create an authority and make a request against it. 
Then verify that the result is a referral, including no records in the answers or additional sections, but with an I{NS} record in the authority section. """ subdomain = 'example.' + str(soa_record.mname) nameserver = dns.Record_NS('1.2.3.4') authority = NoFileAuthority( soa=(str(soa_record.mname), soa_record), records={ subdomain: [ nameserver, ]}) d = getattr(authority, method)(subdomain) result = [] d.addCallback(result.append) answer, authority, additional = result[0] self.assertEqual(answer, []) self.assertEqual( authority, [dns.RRHeader( subdomain, dns.NS, ttl=soa_record.expire, payload=nameserver, auth=False)]) self.assertEqual(additional, []) def test_referral(self): """ When an I{NS} record is found for a child zone, it is included in the authority section of the response. It is marked as non-authoritative if the authority is not also authoritative for the child zone (RFC 2181, section 6.1). """ self._referralTest('lookupAddress') def test_allRecordsReferral(self): """ A referral is also generated for a request of type C{ALL_RECORDS}. """ self._referralTest('lookupAllRecords') class NoInitialResponseTestCase(unittest.TestCase): def test_no_answer(self): """ If a request returns a L{dns.NS} response, but we can't connect to the given server, the request fails with the error returned at connection. """ def query(self, *args): # Pop from the message list, so that it blows up if more queries # are run than expected. return succeed(messages.pop(0)) def queryProtocol(self, *args, **kwargs): return defer.fail(socket.gaierror("Couldn't connect")) resolver = Resolver(servers=[('0.0.0.0', 0)]) resolver._query = query messages = [] # Let's patch dns.DNSDatagramProtocol.query, as there is no easy way to # customize it. 
self.patch(dns.DNSDatagramProtocol, "query", queryProtocol) records = [ dns.RRHeader(name='fooba.com', type=dns.NS, cls=dns.IN, ttl=700, auth=False, payload=dns.Record_NS(name='ns.twistedmatrix.com', ttl=700))] m = dns.Message(id=999, answer=1, opCode=0, recDes=0, recAv=1, auth=1, rCode=0, trunc=0, maxSize=0) m.answers = records messages.append(m) return self.assertFailure( resolver.getHostByName("fooby.com"), socket.gaierror) class SecondaryAuthorityServiceTests(unittest.TestCase): """ Tests for L{SecondaryAuthorityService}, a service which keeps one or more authorities up to date by doing zone transfers from a master. """ def test_constructAuthorityFromHost(self): """ L{SecondaryAuthorityService} can be constructed with a C{str} giving a master server address and several domains, causing the creation of a secondary authority for each domain and that master server address and the default DNS port. """ primary = '192.168.1.2' service = SecondaryAuthorityService( primary, ['example.com', 'example.org']) self.assertEqual(service.primary, primary) self.assertEqual(service._port, 53) self.assertEqual(service.domains[0].primary, primary) self.assertEqual(service.domains[0]._port, 53) self.assertEqual(service.domains[0].domain, 'example.com') self.assertEqual(service.domains[1].primary, primary) self.assertEqual(service.domains[1]._port, 53) self.assertEqual(service.domains[1].domain, 'example.org') def test_constructAuthorityFromHostAndPort(self): """ L{SecondaryAuthorityService.fromServerAddressAndDomains} constructs a new L{SecondaryAuthorityService} from a C{str} giving a master server address and DNS port and several domains, causing the creation of a secondary authority for each domain and that master server address and the given DNS port. 
""" primary = '192.168.1.3' port = 5335 service = SecondaryAuthorityService.fromServerAddressAndDomains( (primary, port), ['example.net', 'example.edu']) self.assertEqual(service.primary, primary) self.assertEqual(service._port, 5335) self.assertEqual(service.domains[0].primary, primary) self.assertEqual(service.domains[0]._port, port) self.assertEqual(service.domains[0].domain, 'example.net') self.assertEqual(service.domains[1].primary, primary) self.assertEqual(service.domains[1]._port, port) self.assertEqual(service.domains[1].domain, 'example.edu') class SecondaryAuthorityTests(unittest.TestCase): """ L{twisted.names.secondary.SecondaryAuthority} correctly constructs objects with a specified IP address and optionally specified DNS port. """ def test_defaultPort(self): """ When constructed using L{SecondaryAuthority.__init__}, the default port of 53 is used. """ secondary = SecondaryAuthority('192.168.1.1', 'inside.com') self.assertEqual(secondary.primary, '192.168.1.1') self.assertEqual(secondary._port, 53) self.assertEqual(secondary.domain, 'inside.com') def test_explicitPort(self): """ When constructed using L{SecondaryAuthority.fromServerAddressAndDomain}, the specified port is used. """ secondary = SecondaryAuthority.fromServerAddressAndDomain( ('192.168.1.1', 5353), 'inside.com') self.assertEqual(secondary.primary, '192.168.1.1') self.assertEqual(secondary._port, 5353) self.assertEqual(secondary.domain, 'inside.com') def test_transfer(self): """ An attempt is made to transfer the zone for the domain the L{SecondaryAuthority} was constructed with from the server address it was constructed with when L{SecondaryAuthority.transfer} is called. 
""" class ClockMemoryReactor(Clock, MemoryReactor): def __init__(self): Clock.__init__(self) MemoryReactor.__init__(self) secondary = SecondaryAuthority.fromServerAddressAndDomain( ('192.168.1.2', 1234), 'example.com') secondary._reactor = reactor = ClockMemoryReactor() secondary.transfer() # Verify a connection attempt to the server address above host, port, factory, timeout, bindAddress = reactor.tcpClients.pop(0) self.assertEqual(host, '192.168.1.2') self.assertEqual(port, 1234) # See if a zone transfer query is issued. proto = factory.buildProtocol((host, port)) transport = StringTransport() proto.makeConnection(transport) msg = Message() # DNSProtocol.writeMessage length encodes the message by prepending a # 2 byte message length to the buffered value. msg.decode(StringIO(transport.value()[2:])) self.assertEqual( [dns.Query('example.com', dns.AXFR, dns.IN)], msg.queries)
bsd-3-clause
nvoron23/arangodb
3rdParty/V8-4.3.61/build/gyp/test/additional-targets/gyptest-additional.py
139
1530
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Verifies simple actions when using an explicit build target of 'all'. """ import TestGyp test = TestGyp.TestGyp() test.run_gyp('all.gyp', chdir='src') test.relocate('src', 'relocate/src') # Build all. test.build('all.gyp', chdir='relocate/src') if test.format=='xcode': chdir = 'relocate/src/dir1' else: chdir = 'relocate/src' # Output is as expected. file_content = 'Hello from emit.py\n' test.built_file_must_match('out2.txt', file_content, chdir=chdir) test.built_file_must_not_exist('out.txt', chdir='relocate/src') test.built_file_must_not_exist('foolib1', type=test.SHARED_LIB, chdir=chdir) # TODO(mmoss) Make consistent with msvs, with 'dir1' before 'out/Default'? if test.format in ('make', 'ninja', 'android', 'cmake'): chdir='relocate/src' else: chdir='relocate/src/dir1' # Build the action explicitly. test.build('actions.gyp', 'action1_target', chdir=chdir) # Check that things got run. file_content = 'Hello from emit.py\n' test.built_file_must_exist('out.txt', chdir=chdir) # Build the shared library explicitly. test.build('actions.gyp', 'foolib1', chdir=chdir) test.built_file_must_exist('foolib1', type=test.SHARED_LIB, chdir=chdir, subdir='dir1') test.pass_test()
apache-2.0
NMGRL/pychron
alembic_dvc/env.py
3
2274
"""Alembic migration environment for the pychron DVC database.

Imported by the ``alembic`` command: it wires the alembic ``context`` to
the project's SQLAlchemy metadata and then runs migrations either
"offline" (emit SQL to stdout) or "online" (against a live engine),
depending on how alembic was invoked.
"""
from __future__ import with_statement
from logging.config import fileConfig

from alembic import context
from sqlalchemy import engine_from_config, pool

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
import sys
# root = os.path.join('.','pychron')
# print root, os.path.abspath(root)
# print os.path.exists(root)
# root = os.path.join(os.path.expanduser('~'), 'Programming/github/pychron_dev')
# Make the project package importable when alembic is run from the repo root.
sys.path.append('.')
# sys.exit()
from pychron.dvc.dvc_orm import Base

# Metadata that 'alembic revision --autogenerate' diffs against the DB.
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well.  By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.
    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(url=url)

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.
    """
    # NullPool: one short-lived connection; no reason to pool for a
    # one-shot migration run.
    engine = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix='sqlalchemy.',
        poolclass=pool.NullPool)

    connection = engine.connect()
    context.configure(
        connection=connection,
        target_metadata=target_metadata
    )

    try:
        with context.begin_transaction():
            context.run_migrations()
    finally:
        # Always release the connection, even if a migration fails.
        connection.close()

if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
apache-2.0
syscoin/syscoin
test/functional/feature_notifications.py
1
9586
#!/usr/bin/env python3
# Copyright (c) 2014-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -alertnotify, -blocknotify and -walletnotify options."""
import os

from test_framework.address import ADDRESS_BCRT1_UNSPENDABLE
from test_framework.descriptors import descsum_create
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import (
    assert_equal,
)

# Linux allow all characters other than \x00
# Windows disallow control characters (0-31) and /\?%:|"<>
FILE_CHAR_START = 32 if os.name == 'nt' else 1
FILE_CHAR_END = 128
FILE_CHARS_DISALLOWED = '/\\?%*:|"<>' if os.name == 'nt' else '/'
# Placeholder block hash used in notifications for not-yet-confirmed txs.
UNCONFIRMED_HASH_STRING = 'unconfirmed'

def notify_outputname(walletname, txid):
    # The wallet name below is deliberately full of filename-hostile
    # characters, so on Windows only the txid is usable as a file name.
    return txid if os.name == 'nt' else '{}_{}'.format(walletname, txid)


class NotificationsTest(SyscoinTestFramework):
    """Functional test: each -*notify option must shell out once per event,
    and the notification payload (%s/%h/%b/%w substitutions) must match the
    block/transaction that triggered it."""

    def set_test_params(self):
        self.num_nodes = 2
        self.setup_clean_chain = True

    def setup_network(self):
        """Create the notification drop directories and configure each node
        so that its -*notify command writes a file into them."""
        # Wallet name exercises every filename-legal character on this OS.
        self.wallet = ''.join(chr(i) for i in range(FILE_CHAR_START, FILE_CHAR_END) if chr(i) not in FILE_CHARS_DISALLOWED)
        self.alertnotify_dir = os.path.join(self.options.tmpdir, "alertnotify")
        self.blocknotify_dir = os.path.join(self.options.tmpdir, "blocknotify")
        self.walletnotify_dir = os.path.join(self.options.tmpdir, "walletnotify")
        os.mkdir(self.alertnotify_dir)
        os.mkdir(self.blocknotify_dir)
        os.mkdir(self.walletnotify_dir)

        # -alertnotify and -blocknotify on node0, walletnotify on node1
        self.extra_args = [[
            "-alertnotify=echo > {}".format(os.path.join(self.alertnotify_dir, '%s')),
            "-blocknotify=echo > {}".format(os.path.join(self.blocknotify_dir, '%s')),
        ], [
            "-rescan",
            "-walletnotify=echo %h_%b > {}".format(os.path.join(self.walletnotify_dir, notify_outputname('%w', '%s'))),
        ]]
        self.wallet_names = [self.default_wallet_name, self.wallet]
        super().setup_network()

    def run_test(self):
        if self.is_wallet_compiled():
            # Setup the descriptors to be imported to the wallet
            seed = "cTdGmKFWpbvpKQ7ejrdzqYT2hhjyb3GPHnLAK7wdi5Em67YLwSm9"
            xpriv = "tprv8ZgxMBicQKsPfHCsTwkiM1KT56RXbGGTqvc2hgqzycpwbHqqpcajQeMRZoBD35kW4RtyCemu6j34Ku5DEspmgjKdt2qe4SvRch5Kk8B8A2v"
            desc_imports = [{
                "desc": descsum_create("wpkh(" + xpriv + "/0/*)"),
                "timestamp": 0,
                "active": True,
                "keypool": True,
            },{
                "desc": descsum_create("wpkh(" + xpriv + "/1/*)"),
                "timestamp": 0,
                "active": True,
                "keypool": True,
                "internal": True,
            }]
            # Make the wallets and import the descriptors
            # Ensures that node 0 and node 1 share the same wallet for the conflicting transaction tests below.
            for i, name in enumerate(self.wallet_names):
                self.nodes[i].createwallet(wallet_name=name, descriptors=self.options.descriptors, blank=True, load_on_startup=True)
                if self.options.descriptors:
                    self.nodes[i].importdescriptors(desc_imports)
                else:
                    self.nodes[i].sethdseed(True, seed)

        self.log.info("test -blocknotify")
        block_count = 10
        blocks = self.nodes[1].generatetoaddress(block_count, self.nodes[1].getnewaddress() if self.is_wallet_compiled() else ADDRESS_BCRT1_UNSPENDABLE)

        # wait at most 10 seconds for expected number of files before reading the content
        self.wait_until(lambda: len(os.listdir(self.blocknotify_dir)) == block_count, timeout=10)

        # directory content should equal the generated blocks hashes
        assert_equal(sorted(blocks), sorted(os.listdir(self.blocknotify_dir)))

        if self.is_wallet_compiled():
            self.log.info("test -walletnotify")
            # wait at most 10 seconds for expected number of files before reading the content
            self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)

            # directory content should equal the generated transaction hashes
            tx_details = list(map(lambda t: (t['txid'], t['blockheight'], t['blockhash']), self.nodes[1].listtransactions("*", block_count)))
            self.stop_node(1)
            self.expect_wallet_notify(tx_details)

            self.log.info("test -walletnotify after rescan")
            # restart node to rescan to force wallet notifications
            self.start_node(1)
            self.connect_nodes(0, 1)

            self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) == block_count, timeout=10)

            # directory content should equal the generated transaction hashes
            tx_details = list(map(lambda t: (t['txid'], t['blockheight'], t['blockhash']), self.nodes[1].listtransactions("*", block_count)))
            self.expect_wallet_notify(tx_details)

            # Conflicting transactions tests.
            # Generate spends from node 0, and check notifications
            # triggered by node 1
            self.log.info("test -walletnotify with conflicting transactions")
            self.nodes[0].rescanblockchain()
            self.nodes[0].generatetoaddress(100, ADDRESS_BCRT1_UNSPENDABLE)
            self.sync_blocks()

            # Generate transaction on node 0, sync mempools, and check for
            # notification on node 1.
            tx1 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
            assert_equal(tx1 in self.nodes[0].getrawmempool(), True)
            self.sync_mempools()
            self.expect_wallet_notify([(tx1, -1, UNCONFIRMED_HASH_STRING)])

            # Generate bump transaction, sync mempools, and check for bump1
            # notification. In the future, per
            # https://github.com/bitcoin/bitcoin/pull/9371, it might be better
            # to have notifications for both tx1 and bump1.
            bump1 = self.nodes[0].bumpfee(tx1)["txid"]
            assert_equal(bump1 in self.nodes[0].getrawmempool(), True)
            self.sync_mempools()
            self.expect_wallet_notify([(bump1, -1, UNCONFIRMED_HASH_STRING)])

            # Add bump1 transaction to new block, checking for a notification
            # and the correct number of confirmations.
            blockhash1 = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
            blockheight1 = self.nodes[0].getblockcount()
            self.sync_blocks()
            self.expect_wallet_notify([(bump1, blockheight1, blockhash1)])
            assert_equal(self.nodes[1].gettransaction(bump1)["confirmations"], 1)

            # Generate a second transaction to be bumped.
            tx2 = self.nodes[0].sendtoaddress(address=ADDRESS_BCRT1_UNSPENDABLE, amount=1, replaceable=True)
            assert_equal(tx2 in self.nodes[0].getrawmempool(), True)
            self.sync_mempools()
            self.expect_wallet_notify([(tx2, -1, UNCONFIRMED_HASH_STRING)])

            # Bump tx2 as bump2 and generate a block on node 0 while
            # disconnected, then reconnect and check for notifications on node 1
            # about newly confirmed bump2 and newly conflicted tx2.
            self.disconnect_nodes(0, 1)
            bump2 = self.nodes[0].bumpfee(tx2)["txid"]
            blockhash2 = self.nodes[0].generatetoaddress(1, ADDRESS_BCRT1_UNSPENDABLE)[0]
            blockheight2 = self.nodes[0].getblockcount()
            assert_equal(self.nodes[0].gettransaction(bump2)["confirmations"], 1)
            assert_equal(tx2 in self.nodes[1].getrawmempool(), True)
            self.connect_nodes(0, 1)
            self.sync_blocks()
            self.expect_wallet_notify([(bump2, blockheight2, blockhash2), (tx2, -1, UNCONFIRMED_HASH_STRING)])
            assert_equal(self.nodes[1].gettransaction(bump2)["confirmations"], 1)

        # TODO: add test for `-alertnotify` large fork notifications

    def expect_wallet_notify(self, tx_details):
        """Assert that exactly the expected walletnotify files exist, that
        each file's content is '<blockheight>_<blockhash>', then clear the
        directory for the next expectation.

        tx_details: list of (txid, blockheight, blockhash) tuples; height -1
        with UNCONFIRMED_HASH_STRING marks a mempool-only transaction.
        """
        self.wait_until(lambda: len(os.listdir(self.walletnotify_dir)) >= len(tx_details), timeout=10)

        # Should have no more and no less files than expected
        assert_equal(sorted(notify_outputname(self.wallet, tx_id) for tx_id, _, _ in tx_details), sorted(os.listdir(self.walletnotify_dir)))

        # Should now verify contents of each file
        for tx_id, blockheight, blockhash in tx_details:
            fname = os.path.join(self.walletnotify_dir, notify_outputname(self.wallet, tx_id))
            # Wait for the cached writes to hit storage
            self.wait_until(lambda: os.path.getsize(fname) > 0, timeout=10)
            with open(fname, 'rt', encoding='utf-8') as f:
                text = f.read()
                # Universal newline ensures '\n' on 'nt'
                assert_equal(text[-1], '\n')
                text = text[:-1]
                if os.name == 'nt':
                    # On Windows, echo as above will append a whitespace
                    assert_equal(text[-1], ' ')
                    text = text[:-1]
                expected = str(blockheight) + '_' + blockhash
                assert_equal(text, expected)

        # Start fresh so the next expect_wallet_notify() only sees new files.
        for tx_file in os.listdir(self.walletnotify_dir):
            os.remove(os.path.join(self.walletnotify_dir, tx_file))


if __name__ == '__main__':
    NotificationsTest().main()
mit
stonebig/flexx
examples/ui/chatroom.py
20
2212
""" Simple chat web app in less than 80 lines. """ from flexx import app, ui, react nsamples = 16 @react.input def message_relay(msg): """ One global signal to relay messages to all participants. """ return msg + '<br />' class MessageBox(ui.Label): CSS = """ .flx-messagebox { overflow-y:scroll; background: #e8e8e8; border: 1px solid #444; margin: 3px; } """ @app.serve class ChatRoom(ui.Widget): """ Despite the name, this represents one connection to the chat room. """ def init(self): with ui.HBox(): ui.Widget(flex=1) with ui.VBox(): self.name = ui.LineEdit(placeholder_text='your name') self.people = ui.Label(flex=1, size=(250, 0)) with ui.VBox(): self.messages = MessageBox(flex=1) with ui.HBox(): self.message = ui.LineEdit(flex=1, placeholder_text='enter message') self.ok = ui.Button(text='Send') ui.Widget(flex=1) self._update_participants() def _update_participants(self): proxies = app.manager.get_connections(self.__class__.__name__) names = [p.app.name.text() for p in proxies] text = '<br />%i persons in this chat:<br /><br />' % len(names) text += '<br />'.join([name or 'anonymous' for name in sorted(names)]) self.people.text(text) app.call_later(3, self._update_participants) @react.connect('ok.mouse_down', 'message.submit') def _send_message(self, down, submit): text = self.message.text() if text: name = self.name.text() or 'anonymous' message_relay('<i>%s</i>: %s' % (name, text)) self.message.text('') @react.connect('message_relay') def new_text(self, text): return text # proxy to pass total_text to JS class JS: @react.connect('new_text') def _update_total_text(self, text): self.messages.text(self.messages.text() + text) if __name__ == '__main__': #m = app.launch(ChatRoom) # for use during development app.start()
bsd-2-clause
josh-willis/pycbc
pycbc/fft/parser_support.py
14
4879
# Copyright (C) 2012 Josh Willis, Andrew Miller
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# =============================================================================
#
#                                   Preamble
#
# =============================================================================
#
"""
This package provides a front-end to various fast Fourier transform
implementations within PyCBC.
"""

from .backend_support import get_backend_modules, get_backend_names
from .backend_support import set_backend, get_backend

# Next we add all of the machinery to set backends and their options
# from the command line.
def insert_fft_option_group(parser):
    """ Adds the options used to choose an FFT backend. This should be used
    if your program supports the ability to select the FFT backend; otherwise
    you may simply call the fft and ifft functions and rely on default
    choices.  This function will also attempt to add any options exported
    by available backends through a function called insert_fft_options.
    These submodule functions should take the fft_group object as argument.

    Parameters
    ----------
    parser : object
        OptionParser instance
    """
    fft_group = parser.add_argument_group("Options for selecting the"
                                          " FFT backend and controlling its performance"
                                          " in this program.")
    # We have one argument to specify the backends.  This becomes the default list used
    # if none is specified for a particular call of fft() of ifft().  Note that this
    # argument expects a *list* of inputs, as indicated by the nargs='*'.
    fft_group.add_argument("--fft-backends",
                           help="Preference list of the FFT backends. "
                                "Choices are: \n" + str(get_backend_names()),
                           nargs='*', default=[])

    # Each backend module may expose its own extra CLI options; backends
    # without an insert_fft_options hook are simply skipped.
    for backend in get_backend_modules():
        try:
            backend.insert_fft_options(fft_group)
        except AttributeError:
            pass

def verify_fft_options(opt, parser):
    """Parses the FFT options and verifies that they are reasonable.

    Parameters
    ----------
    opt : object
        Result of parsing the CLI with OptionParser, or any object with the
        required attributes.
    parser : object
        OptionParser instance.
    """
    # Reject any requested backend that is not actually available;
    # parser.error() exits the program with a usage message.
    if len(opt.fft_backends) > 0:
        _all_backends = get_backend_names()
        for backend in opt.fft_backends:
            if backend not in _all_backends:
                parser.error("Backend {0} is not available".format(backend))

    # Delegate further validation to backends that provide a hook.
    for backend in get_backend_modules():
        try:
            backend.verify_fft_options(opt, parser)
        except AttributeError:
            pass

# The following function is the only one that is designed
# only to work with the active scheme.  We'd like to fix that,
# eventually, but it's non-trivial because of how poorly MKL
# and FFTW cooperate.
def from_cli(opt):
    """Parses the command line options and sets the FFT backend
    for each (available) scheme. Aside from setting the default
    backed for this context, this function will also call (if
    it exists) the from_cli function of the specified backends in
    the *current* scheme; typically one would only call this function
    once inside of a scheme context manager, but if it is desired
    to perform FFTs both inside and outside of a context, then
    this function would need to be called again.

    Parameters
    ----------
    opt: object
        Result of parsing the CLI with OptionParser, or any object with
        the required attributes.

    Returns
    -------
    None
        The backend preference list is applied via set_backend() as a
        side effect, and the active backend's own from_cli hook (if any)
        is invoked.
    """
    set_backend(opt.fft_backends)

    # Eventually, we need to be able to parse command lines
    # from more than just the current scheme's preference.  But
    # the big problem is that calling from_cli for more than one
    # backend could cause interference; apparently, FFTW and MKL
    # don't play nice unless FFTW has been compiled and linked
    # with icc (and possibly numpy, scipy, and/or Python as well?)
    backend = get_backend()
    try:
        backend.from_cli(opt)
    except AttributeError:
        pass
gpl-3.0
jlegendary/youtube-dl
youtube_dl/extractor/minhateca.py
127
2519
# coding: utf-8 from __future__ import unicode_literals from .common import InfoExtractor from ..compat import ( compat_urllib_parse, compat_urllib_request, ) from ..utils import ( int_or_none, parse_duration, parse_filesize, ) class MinhatecaIE(InfoExtractor): _VALID_URL = r'https?://minhateca\.com\.br/[^?#]+,(?P<id>[0-9]+)\.' _TEST = { 'url': 'http://minhateca.com.br/pereba/misc/youtube-dl+test+video,125848331.mp4(video)', 'info_dict': { 'id': '125848331', 'ext': 'mp4', 'title': 'youtube-dl test video', 'thumbnail': 're:^https?://.*\.jpg$', 'filesize_approx': 1530000, 'duration': 9, 'view_count': int, } } def _real_extract(self, url): video_id = self._match_id(url) webpage = self._download_webpage(url, video_id) token = self._html_search_regex( r'<input name="__RequestVerificationToken".*?value="([^"]+)"', webpage, 'request token') token_data = [ ('fileId', video_id), ('__RequestVerificationToken', token), ] req = compat_urllib_request.Request( 'http://minhateca.com.br/action/License/Download', data=compat_urllib_parse.urlencode(token_data)) req.add_header('Content-Type', 'application/x-www-form-urlencoded') data = self._download_json( req, video_id, note='Downloading metadata') video_url = data['redirectUrl'] title_str = self._html_search_regex( r'<h1.*?>(.*?)</h1>', webpage, 'title') title, _, ext = title_str.rpartition('.') filesize_approx = parse_filesize(self._html_search_regex( r'<p class="fileSize">(.*?)</p>', webpage, 'file size approximation', fatal=False)) duration = parse_duration(self._html_search_regex( r'(?s)<p class="fileLeng[ht][th]">.*?class="bold">(.*?)<', webpage, 'duration', fatal=False)) view_count = int_or_none(self._html_search_regex( r'<p class="downloadsCounter">([0-9]+)</p>', webpage, 'view count', fatal=False)) return { 'id': video_id, 'url': video_url, 'title': title, 'ext': ext, 'filesize_approx': filesize_approx, 'duration': duration, 'view_count': view_count, 'thumbnail': self._og_search_thumbnail(webpage), }
unlicense
cbrewster/servo
tests/heartbeats/characterize.py
56
10667
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. import sys import os from os import path import time import datetime import argparse import platform import subprocess TOP_DIR = path.join("..", "..") GUARD_TIME = 10 HEARTBEAT_DEFAULT_WINDOW_SIZE = 20 # Use a larger window sizes to reduce or prevent writing log files until benchmark completion # (profiler name, window size) # These categories need to be kept aligned with ProfilerCategory in components/profile_traits/time.rs HEARTBEAT_PROFILER_CATEGORIES = [ ("Compositing", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutPerform", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutStyleRecalc", HEARTBEAT_DEFAULT_WINDOW_SIZE), # ("LayoutTextShaping", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutRestyleDamagePropagation", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutNonIncrementalReset", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutSelectorMatch", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutTreeBuilder", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutDamagePropagate", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutGeneratedContent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutDisplayListSorting", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutFloatPlacementSpeculation", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutMain", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutStoreOverflow", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutParallelWarmup", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("LayoutDispListBuild", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("NetHTTPRequestResponse", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("PaintingPerTile", 50), ("PaintingPrepBuff", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("Painting", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ImageDecoding", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ImageSaving", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptAttachLayout", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptConstellationMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE), 
("ScriptDevtoolsMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptDocumentEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptDomEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptEvaluate", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptFileRead", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptImageCacheMsg", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptInputEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptNetworkEvent", 200), ("ScriptParseHTML", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptPlannedNavigation", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptResize", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptSetScrollState", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptSetViewport", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptTimerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptStylesheetLoad", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptUpdateReplacedElement", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptWebSocketEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptWorkerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptServiceWorkerEvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptParseXML", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptEnterFullscreen", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptExitFullscreen", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ScriptWebVREvent", HEARTBEAT_DEFAULT_WINDOW_SIZE), ("ApplicationHeartbeat", 100), ] ENERGY_READER_BIN = "energymon-file-provider" ENERGY_READER_TEMP_OUTPUT = "energymon.txt" SUMMARY_OUTPUT = "summary.txt" def get_command(build_target, layout_thread_count, renderer, page, profile): """Get the command to execute. """ return path.join(TOP_DIR, "target", build_target, "servo") + \ " -p %d -o output.png -y %d %s -Z profile-script-events '%s'" % \ (profile, layout_thread_count, renderer, page) def set_app_environment(log_dir): """Set environment variables to enable heartbeats. 
""" prefix = "heartbeat-" for (profiler, window) in HEARTBEAT_PROFILER_CATEGORIES: os.environ["SERVO_HEARTBEAT_ENABLE_" + profiler] = "" os.environ["SERVO_HEARTBEAT_LOG_" + profiler] = path.join(log_dir, prefix + profiler + ".log") os.environ["SERVO_HEARTBEAT_WINDOW_" + profiler] = str(window) def start_energy_reader(): """Energy reader writes to a file that we will poll. """ os.system(ENERGY_READER_BIN + " " + ENERGY_READER_TEMP_OUTPUT + "&") def stop_energy_reader(): """Stop the energy reader and remove its temp file. """ os.system("pkill -x " + ENERGY_READER_BIN) os.remove(ENERGY_READER_TEMP_OUTPUT) def read_energy(): """Poll the energy reader's temp file. """ data = 0 with open(ENERGY_READER_TEMP_OUTPUT, "r") as em: data = int(em.read().replace('\n', '')) return data def git_rev_hash(): """Get the git revision hash. """ return subprocess.check_output(['git', 'rev-parse', 'HEAD']).rstrip() def git_rev_hash_short(): """Get the git revision short hash. """ return subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).rstrip() def execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count): """Run a single execution. 
""" log_dir = path.join(base_dir, "logs_l" + str(layout_thread_count), "trial_" + str(trial)) if os.path.exists(log_dir): print "Log directory already exists: " + log_dir sys.exit(1) os.makedirs(log_dir) set_app_environment(log_dir) cmd = get_command(build_target, layout_thread_count, renderer, page, profile) # Execute start_energy_reader() print 'sleep ' + str(GUARD_TIME) time.sleep(GUARD_TIME) time_start = time.time() energy_start = read_energy() print cmd os.system(cmd) energy_end = read_energy() time_end = time.time() stop_energy_reader() print 'sleep ' + str(GUARD_TIME) time.sleep(GUARD_TIME) uj = energy_end - energy_start latency = time_end - time_start watts = uj / 1000000.0 / latency # Write a file that describes this execution with open(path.join(log_dir, SUMMARY_OUTPUT), "w") as f: f.write("Datetime (UTC): " + datetime.datetime.utcnow().isoformat()) f.write("\nPlatform: " + platform.platform()) f.write("\nGit hash: " + git_rev_hash()) f.write("\nGit short hash: " + git_rev_hash_short()) f.write("\nRelease: " + build_target) f.write("\nLayout threads: " + str(layout_thread_count)) f.write("\nTrial: " + str(trial)) f.write("\nCommand: " + cmd) f.write("\nTime (sec): " + str(latency)) f.write("\nEnergy (uJ): " + str(uj)) f.write("\nPower (W): " + str(watts)) def characterize(build_target, base_dir, (min_layout_threads, max_layout_threads), renderer, page, profile, trials): """Run all configurations and capture results. """ for layout_thread_count in xrange(min_layout_threads, max_layout_threads + 1): for trial in xrange(1, trials + 1): execute(base_dir, build_target, renderer, page, profile, trial, layout_thread_count) def main(): """For this script to be useful, the following conditions are needed: - HEARTBEAT_PROFILER_CATEGORIES should be aligned with the profiler categories in the source code. - The "energymon" project needs to be installed to the system (libraries and the "energymon" binary). 
- The "default" energymon library will be used - make sure you choose one that is useful for your system setup when installing energymon. - Build servo in release mode with the "energy-profiling" feature enabled (this links with the energymon lib). """ # Default max number of layout threads max_layout_threads = 1 # Default benchmark benchmark = path.join(TOP_DIR, "tests", "html", "perf-rainbow.html") # Default renderer renderer = "" # Default output directory output_dir = "heartbeat_logs" # Default build target build_target = "release" # Default profile interval profile = 60 # Default single argument single = False # Default number of trials trials = 1 # Parsing the input of the script parser = argparse.ArgumentParser(description="Characterize Servo timing and energy behavior") parser.add_argument("-b", "--benchmark", default=benchmark, help="Gets the benchmark, for example \"-b http://www.example.com\"") parser.add_argument("-d", "--debug", action='store_true', help="Use debug build instead of release build") parser.add_argument("-w", "--webrender", action='store_true', help="Use webrender backend") parser.add_argument("-l", "--max_layout_threads", help="Specify the maximum number of threads for layout, for example \"-l 5\"") parser.add_argument("-o", "--output", help="Specify the log output directory, for example \"-o heartbeat_logs\"") parser.add_argument("-p", "--profile", default=60, help="Profiler output interval, for example \"-p 60\"") parser.add_argument("-s", "--single", action='store_true', help="Just run a single trial of the config provided, for example \"-s\"") parser.add_argument("-t", "--trials", default=1, type=int, help="Number of trials to run for each configuration, for example \"-t 1\"") args = parser.parse_args() if args.benchmark: benchmark = args.benchmark if args.debug: build_target = "debug" if args.webrender: renderer = "-w" if args.max_layout_threads: max_layout_threads = int(args.max_layout_threads) if args.output: output_dir = 
args.output if args.profile: profile = args.profile if args.single: single = True if args.trials: trials = args.trials if os.path.exists(output_dir): print "Output directory already exists: " + output_dir sys.exit(1) os.makedirs(output_dir) if single: execute(output_dir, build_target, renderer, benchmark, profile, trials, max_layout_threads) else: characterize(build_target, output_dir, (1, max_layout_threads), renderer, benchmark, profile, trials) if __name__ == "__main__": main()
mpl-2.0
yeyuguo/pyodbc
tests3/dbapi20.py
44
31431
#!/usr/bin/env python ''' Python DB API 2.0 driver compliance unit test suite. This software is Public Domain and may be used without restrictions. "Now we have booze and barflies entering the discussion, plus rumours of DBAs on drugs... and I won't tell you what flashes through my mind each time I read the subject line with 'Anal Compliance' in it. All around this is turning out to be a thoroughly unwholesome unit test." -- Ian Bicking ''' __rcs_id__ = '$Id: dbapi20.py,v 1.10 2003/10/09 03:14:14 zenzen Exp $' __version__ = '$Revision: 1.10 $'[11:-2] __author__ = 'Stuart Bishop <zen@shangri-la.dropbear.id.au>' import unittest import time # $Log: dbapi20.py,v $ # Revision 1.10 2003/10/09 03:14:14 zenzen # Add test for DB API 2.0 optional extension, where database exceptions # are exposed as attributes on the Connection object. # # Revision 1.9 2003/08/13 01:16:36 zenzen # Minor tweak from Stefan Fleiter # # Revision 1.8 2003/04/10 00:13:25 zenzen # Changes, as per suggestions by M.-A. Lemburg # - Add a table prefix, to ensure namespace collisions can always be avoided # # Revision 1.7 2003/02/26 23:33:37 zenzen # Break out DDL into helper functions, as per request by David Rushby # # Revision 1.6 2003/02/21 03:04:33 zenzen # Stuff from Henrik Ekelund: # added test_None # added test_nextset & hooks # # Revision 1.5 2003/02/17 22:08:43 zenzen # Implement suggestions and code from Henrik Eklund - test that cursor.arraysize # defaults to 1 & generic cursor.callproc test added # # Revision 1.4 2003/02/15 00:16:33 zenzen # Changes, as per suggestions and bug reports by M.-A. Lemburg, # Matthew T. 
Kromer, Federico Di Gregorio and Daniel Dittmar # - Class renamed # - Now a subclass of TestCase, to avoid requiring the driver stub # to use multiple inheritance # - Reversed the polarity of buggy test in test_description # - Test exception heirarchy correctly # - self.populate is now self._populate(), so if a driver stub # overrides self.ddl1 this change propogates # - VARCHAR columns now have a width, which will hopefully make the # DDL even more portible (this will be reversed if it causes more problems) # - cursor.rowcount being checked after various execute and fetchXXX methods # - Check for fetchall and fetchmany returning empty lists after results # are exhausted (already checking for empty lists if select retrieved # nothing # - Fix bugs in test_setoutputsize_basic and test_setinputsizes # class DatabaseAPI20Test(unittest.TestCase): ''' Test a database self.driver for DB API 2.0 compatibility. This implementation tests Gadfly, but the TestCase is structured so that other self.drivers can subclass this test case to ensure compiliance with the DB-API. It is expected that this TestCase may be expanded in the future if ambiguities or edge conditions are discovered. The 'Optional Extensions' are not yet being tested. self.drivers should subclass this test, overriding setUp, tearDown, self.driver, connect_args and connect_kw_args. Class specification should be as follows: import dbapi20 class mytest(dbapi20.DatabaseAPI20Test): [...] Don't 'import DatabaseAPI20Test from dbapi20', or you will confuse the unit tester - just 'import dbapi20'. ''' # The self.driver module. 
This should be the module where the 'connect' # method is to be found driver = None connect_args = () # List of arguments to pass to connect connect_kw_args = {} # Keyword arguments for connect table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix xddl1 = 'drop table %sbooze' % table_prefix xddl2 = 'drop table %sbarflys' % table_prefix lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase # Some drivers may need to override these helpers, for example adding # a 'commit' after the execute. def executeDDL1(self,cursor): cursor.execute(self.ddl1) def executeDDL2(self,cursor): cursor.execute(self.ddl2) def setUp(self): ''' self.drivers should override this method to perform required setup if any is necessary, such as creating the database. ''' pass def tearDown(self): ''' self.drivers should override this method to perform required cleanup if any is necessary, such as deleting the test database. The default drops the tables that may be created. ''' con = self._connect() try: cur = con.cursor() for i, ddl in enumerate((self.xddl1,self.xddl2)): try: cur.execute(ddl) con.commit() except self.driver.Error: # Assume table didn't exist. Other tests will check if # execute is busted. 
pass finally: con.close() def _connect(self): try: return self.driver.connect( *self.connect_args,**self.connect_kw_args ) except AttributeError: self.fail("No connect method found in self.driver module") def test_connect(self): con = self._connect() con.close() def test_apilevel(self): try: # Must exist apilevel = self.driver.apilevel # Must equal 2.0 self.assertEqual(apilevel,'2.0') except AttributeError: self.fail("Driver doesn't define apilevel") def test_threadsafety(self): try: # Must exist threadsafety = self.driver.threadsafety # Must be a valid value self.failUnless(threadsafety in (0,1,2,3)) except AttributeError: self.fail("Driver doesn't define threadsafety") def test_paramstyle(self): try: # Must exist paramstyle = self.driver.paramstyle # Must be a valid value self.failUnless(paramstyle in ( 'qmark','numeric','named','format','pyformat' )) except AttributeError: self.fail("Driver doesn't define paramstyle") def test_Exceptions(self): # Make sure required exceptions exist, and are in the # defined heirarchy. 
self.failUnless(issubclass(self.driver.Warning,StandardError)) self.failUnless(issubclass(self.driver.Error,StandardError)) self.failUnless( issubclass(self.driver.InterfaceError,self.driver.Error) ) self.failUnless( issubclass(self.driver.DatabaseError,self.driver.Error) ) self.failUnless( issubclass(self.driver.OperationalError,self.driver.Error) ) self.failUnless( issubclass(self.driver.IntegrityError,self.driver.Error) ) self.failUnless( issubclass(self.driver.InternalError,self.driver.Error) ) self.failUnless( issubclass(self.driver.ProgrammingError,self.driver.Error) ) self.failUnless( issubclass(self.driver.NotSupportedError,self.driver.Error) ) def test_ExceptionsAsConnectionAttributes(self): # OPTIONAL EXTENSION # Test for the optional DB API 2.0 extension, where the exceptions # are exposed as attributes on the Connection object # I figure this optional extension will be implemented by any # driver author who is using this test suite, so it is enabled # by default. con = self._connect() drv = self.driver self.failUnless(con.Warning is drv.Warning) self.failUnless(con.Error is drv.Error) self.failUnless(con.InterfaceError is drv.InterfaceError) self.failUnless(con.DatabaseError is drv.DatabaseError) self.failUnless(con.OperationalError is drv.OperationalError) self.failUnless(con.IntegrityError is drv.IntegrityError) self.failUnless(con.InternalError is drv.InternalError) self.failUnless(con.ProgrammingError is drv.ProgrammingError) self.failUnless(con.NotSupportedError is drv.NotSupportedError) def test_commit(self): con = self._connect() try: # Commit must work, even if it doesn't do anything con.commit() finally: con.close() def test_rollback(self): con = self._connect() # If rollback is defined, it should either work or throw # the documented exception if hasattr(con,'rollback'): try: con.rollback() except self.driver.NotSupportedError: pass def test_cursor(self): con = self._connect() try: cur = con.cursor() finally: con.close() def 
test_cursor_isolation(self): con = self._connect() try: # Make sure cursors created from the same connection have # the documented transaction isolation level cur1 = con.cursor() cur2 = con.cursor() self.executeDDL1(cur1) cur1.execute("insert into %sbooze values ('Victoria Bitter')" % ( self.table_prefix )) cur2.execute("select name from %sbooze" % self.table_prefix) booze = cur2.fetchall() self.assertEqual(len(booze),1) self.assertEqual(len(booze[0]),1) self.assertEqual(booze[0][0],'Victoria Bitter') finally: con.close() def test_description(self): con = self._connect() try: cur = con.cursor() self.executeDDL1(cur) self.assertEqual(cur.description,None, 'cursor.description should be none after executing a ' 'statement that can return no rows (such as DDL)' ) cur.execute('select name from %sbooze' % self.table_prefix) self.assertEqual(len(cur.description),1, 'cursor.description describes too many columns' ) self.assertEqual(len(cur.description[0]),7, 'cursor.description[x] tuples must have 7 elements' ) self.assertEqual(cur.description[0][0].lower(),'name', 'cursor.description[x][0] must return column name' ) self.assertEqual(cur.description[0][1],self.driver.STRING, 'cursor.description[x][1] must return column type. Got %r' % cur.description[0][1] ) # Make sure self.description gets reset self.executeDDL2(cur) self.assertEqual(cur.description,None, 'cursor.description not being set to None when executing ' 'no-result statements (eg. 
DDL)' ) finally: con.close() def test_rowcount(self): con = self._connect() try: cur = con.cursor() self.executeDDL1(cur) self.assertEqual(cur.rowcount,-1, 'cursor.rowcount should be -1 after executing no-result ' 'statements' ) cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( self.table_prefix )) self.failUnless(cur.rowcount in (-1,1), 'cursor.rowcount should == number or rows inserted, or ' 'set to -1 after executing an insert statement' ) cur.execute("select name from %sbooze" % self.table_prefix) self.failUnless(cur.rowcount in (-1,1), 'cursor.rowcount should == number of rows returned, or ' 'set to -1 after executing a select statement' ) self.executeDDL2(cur) self.assertEqual(cur.rowcount,-1, 'cursor.rowcount not being reset to -1 after executing ' 'no-result statements' ) finally: con.close() lower_func = 'lower' def test_callproc(self): con = self._connect() try: cur = con.cursor() if self.lower_func and hasattr(cur,'callproc'): r = cur.callproc(self.lower_func,('FOO',)) self.assertEqual(len(r),1) self.assertEqual(r[0],'FOO') r = cur.fetchall() self.assertEqual(len(r),1,'callproc produced no result set') self.assertEqual(len(r[0]),1, 'callproc produced invalid result set' ) self.assertEqual(r[0][0],'foo', 'callproc produced invalid results' ) finally: con.close() def test_close(self): con = self._connect() try: cur = con.cursor() finally: con.close() # cursor.execute should raise an Error if called after connection # closed self.assertRaises(self.driver.Error,self.executeDDL1,cur) # connection.commit should raise an Error if called after connection' # closed.' 
self.assertRaises(self.driver.Error,con.commit) # connection.close should raise an Error if called more than once self.assertRaises(self.driver.Error,con.close) def test_execute(self): con = self._connect() try: cur = con.cursor() self._paraminsert(cur) finally: con.close() def _paraminsert(self,cur): self.executeDDL1(cur) cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( self.table_prefix )) self.failUnless(cur.rowcount in (-1,1)) if self.driver.paramstyle == 'qmark': cur.execute( 'insert into %sbooze values (?)' % self.table_prefix, ("Cooper's",) ) elif self.driver.paramstyle == 'numeric': cur.execute( 'insert into %sbooze values (:1)' % self.table_prefix, ("Cooper's",) ) elif self.driver.paramstyle == 'named': cur.execute( 'insert into %sbooze values (:beer)' % self.table_prefix, {'beer':"Cooper's"} ) elif self.driver.paramstyle == 'format': cur.execute( 'insert into %sbooze values (%%s)' % self.table_prefix, ("Cooper's",) ) elif self.driver.paramstyle == 'pyformat': cur.execute( 'insert into %sbooze values (%%(beer)s)' % self.table_prefix, {'beer':"Cooper's"} ) else: self.fail('Invalid paramstyle') self.failUnless(cur.rowcount in (-1,1)) cur.execute('select name from %sbooze' % self.table_prefix) res = cur.fetchall() self.assertEqual(len(res),2,'cursor.fetchall returned too few rows') beers = [res[0][0],res[1][0]] beers.sort() self.assertEqual(beers[0],"Cooper's", 'cursor.fetchall retrieved incorrect data, or data inserted ' 'incorrectly' ) self.assertEqual(beers[1],"Victoria Bitter", 'cursor.fetchall retrieved incorrect data, or data inserted ' 'incorrectly' ) def test_executemany(self): con = self._connect() try: cur = con.cursor() self.executeDDL1(cur) largs = [ ("Cooper's",) , ("Boag's",) ] margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ] if self.driver.paramstyle == 'qmark': cur.executemany( 'insert into %sbooze values (?)' % self.table_prefix, largs ) elif self.driver.paramstyle == 'numeric': cur.executemany( 'insert into %sbooze values 
(:1)' % self.table_prefix, largs ) elif self.driver.paramstyle == 'named': cur.executemany( 'insert into %sbooze values (:beer)' % self.table_prefix, margs ) elif self.driver.paramstyle == 'format': cur.executemany( 'insert into %sbooze values (%%s)' % self.table_prefix, largs ) elif self.driver.paramstyle == 'pyformat': cur.executemany( 'insert into %sbooze values (%%(beer)s)' % ( self.table_prefix ), margs ) else: self.fail('Unknown paramstyle') self.failUnless(cur.rowcount in (-1,2), 'insert using cursor.executemany set cursor.rowcount to ' 'incorrect value %r' % cur.rowcount ) cur.execute('select name from %sbooze' % self.table_prefix) res = cur.fetchall() self.assertEqual(len(res),2, 'cursor.fetchall retrieved incorrect number of rows' ) beers = [res[0][0],res[1][0]] beers.sort() self.assertEqual(beers[0],"Boag's",'incorrect data retrieved') self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved') finally: con.close() def test_fetchone(self): con = self._connect() try: cur = con.cursor() # cursor.fetchone should raise an Error if called before # executing a select-type query self.assertRaises(self.driver.Error,cur.fetchone) # cursor.fetchone should raise an Error if called after # executing a query that cannnot return rows self.executeDDL1(cur) self.assertRaises(self.driver.Error,cur.fetchone) cur.execute('select name from %sbooze' % self.table_prefix) self.assertEqual(cur.fetchone(),None, 'cursor.fetchone should return None if a query retrieves ' 'no rows' ) self.failUnless(cur.rowcount in (-1,0)) # cursor.fetchone should raise an Error if called after # executing a query that cannnot return rows cur.execute("insert into %sbooze values ('Victoria Bitter')" % ( self.table_prefix )) self.assertRaises(self.driver.Error,cur.fetchone) cur.execute('select name from %sbooze' % self.table_prefix) r = cur.fetchone() self.assertEqual(len(r),1, 'cursor.fetchone should have retrieved a single row' ) self.assertEqual(r[0],'Victoria Bitter', 'cursor.fetchone 
retrieved incorrect data' ) self.assertEqual(cur.fetchone(),None, 'cursor.fetchone should return None if no more rows available' ) self.failUnless(cur.rowcount in (-1,1)) finally: con.close() samples = [ 'Carlton Cold', 'Carlton Draft', 'Mountain Goat', 'Redback', 'Victoria Bitter', 'XXXX' ] def _populate(self): ''' Return a list of sql commands to setup the DB for the fetch tests. ''' populate = [ "insert into %sbooze values ('%s')" % (self.table_prefix,s) for s in self.samples ] return populate def test_fetchmany(self): con = self._connect() try: cur = con.cursor() # cursor.fetchmany should raise an Error if called without #issuing a query self.assertRaises(self.driver.Error,cur.fetchmany,4) self.executeDDL1(cur) for sql in self._populate(): cur.execute(sql) cur.execute('select name from %sbooze' % self.table_prefix) r = cur.fetchmany() self.assertEqual(len(r),1, 'cursor.fetchmany retrieved incorrect number of rows, ' 'default of arraysize is one.' ) cur.arraysize=10 r = cur.fetchmany(3) # Should get 3 rows self.assertEqual(len(r),3, 'cursor.fetchmany retrieved incorrect number of rows' ) r = cur.fetchmany(4) # Should get 2 more self.assertEqual(len(r),2, 'cursor.fetchmany retrieved incorrect number of rows' ) r = cur.fetchmany(4) # Should be an empty sequence self.assertEqual(len(r),0, 'cursor.fetchmany should return an empty sequence after ' 'results are exhausted' ) self.failUnless(cur.rowcount in (-1,6)) # Same as above, using cursor.arraysize cur.arraysize=4 cur.execute('select name from %sbooze' % self.table_prefix) r = cur.fetchmany() # Should get 4 rows self.assertEqual(len(r),4, 'cursor.arraysize not being honoured by fetchmany' ) r = cur.fetchmany() # Should get 2 more self.assertEqual(len(r),2) r = cur.fetchmany() # Should be an empty sequence self.assertEqual(len(r),0) self.failUnless(cur.rowcount in (-1,6)) cur.arraysize=6 cur.execute('select name from %sbooze' % self.table_prefix) rows = cur.fetchmany() # Should get all rows 
self.failUnless(cur.rowcount in (-1,6)) self.assertEqual(len(rows),6) self.assertEqual(len(rows),6) rows = [r[0] for r in rows] rows.sort() # Make sure we get the right data back out for i in range(0,6): self.assertEqual(rows[i],self.samples[i], 'incorrect data retrieved by cursor.fetchmany' ) rows = cur.fetchmany() # Should return an empty list self.assertEqual(len(rows),0, 'cursor.fetchmany should return an empty sequence if ' 'called after the whole result set has been fetched' ) self.failUnless(cur.rowcount in (-1,6)) self.executeDDL2(cur) cur.execute('select name from %sbarflys' % self.table_prefix) r = cur.fetchmany() # Should get empty sequence self.assertEqual(len(r),0, 'cursor.fetchmany should return an empty sequence if ' 'query retrieved no rows' ) self.failUnless(cur.rowcount in (-1,0)) finally: con.close() def test_fetchall(self): con = self._connect() try: cur = con.cursor() # cursor.fetchall should raise an Error if called # without executing a query that may return rows (such # as a select) self.assertRaises(self.driver.Error, cur.fetchall) self.executeDDL1(cur) for sql in self._populate(): cur.execute(sql) # cursor.fetchall should raise an Error if called # after executing a a statement that cannot return rows self.assertRaises(self.driver.Error,cur.fetchall) cur.execute('select name from %sbooze' % self.table_prefix) rows = cur.fetchall() self.failUnless(cur.rowcount in (-1,len(self.samples))) self.assertEqual(len(rows),len(self.samples), 'cursor.fetchall did not retrieve all rows' ) rows = [r[0] for r in rows] rows.sort() for i in range(0,len(self.samples)): self.assertEqual(rows[i],self.samples[i], 'cursor.fetchall retrieved incorrect rows' ) rows = cur.fetchall() self.assertEqual( len(rows),0, 'cursor.fetchall should return an empty list if called ' 'after the whole result set has been fetched' ) self.failUnless(cur.rowcount in (-1,len(self.samples))) self.executeDDL2(cur) cur.execute('select name from %sbarflys' % self.table_prefix) rows = 
cur.fetchall() self.failUnless(cur.rowcount in (-1,0)) self.assertEqual(len(rows),0, 'cursor.fetchall should return an empty list if ' 'a select query returns no rows' ) finally: con.close() def test_mixedfetch(self): con = self._connect() try: cur = con.cursor() self.executeDDL1(cur) for sql in self._populate(): cur.execute(sql) cur.execute('select name from %sbooze' % self.table_prefix) rows1 = cur.fetchone() rows23 = cur.fetchmany(2) rows4 = cur.fetchone() rows56 = cur.fetchall() self.failUnless(cur.rowcount in (-1,6)) self.assertEqual(len(rows23),2, 'fetchmany returned incorrect number of rows' ) self.assertEqual(len(rows56),2, 'fetchall returned incorrect number of rows' ) rows = [rows1[0]] rows.extend([rows23[0][0],rows23[1][0]]) rows.append(rows4[0]) rows.extend([rows56[0][0],rows56[1][0]]) rows.sort() for i in range(0,len(self.samples)): self.assertEqual(rows[i],self.samples[i], 'incorrect data retrieved or inserted' ) finally: con.close() def help_nextset_setUp(self,cur): ''' Should create a procedure called deleteme that returns two result sets, first the number of rows in booze then "name from booze" ''' raise NotImplementedError,'Helper not implemented' #sql=""" # create procedure deleteme as # begin # select count(*) from booze # select name from booze # end #""" #cur.execute(sql) def help_nextset_tearDown(self,cur): 'If cleaning up is needed after nextSetTest' raise NotImplementedError,'Helper not implemented' #cur.execute("drop procedure deleteme") def test_nextset(self): con = self._connect() try: cur = con.cursor() if not hasattr(cur,'nextset'): return try: self.executeDDL1(cur) sql=self._populate() for sql in self._populate(): cur.execute(sql) self.help_nextset_setUp(cur) cur.callproc('deleteme') numberofrows=cur.fetchone() assert numberofrows[0]== len(self.samples) assert cur.nextset() names=cur.fetchall() assert len(names) == len(self.samples) s=cur.nextset() assert s == None,'No more return sets, should return None' finally: 
self.help_nextset_tearDown(cur) finally: con.close() def test_nextset(self): raise NotImplementedError,'Drivers need to override this test' def test_arraysize(self): # Not much here - rest of the tests for this are in test_fetchmany con = self._connect() try: cur = con.cursor() self.failUnless(hasattr(cur,'arraysize'), 'cursor.arraysize must be defined' ) finally: con.close() def test_setinputsizes(self): con = self._connect() try: cur = con.cursor() cur.setinputsizes( (25,) ) self._paraminsert(cur) # Make sure cursor still works finally: con.close() def test_setoutputsize_basic(self): # Basic test is to make sure setoutputsize doesn't blow up con = self._connect() try: cur = con.cursor() cur.setoutputsize(1000) cur.setoutputsize(2000,0) self._paraminsert(cur) # Make sure the cursor still works finally: con.close() def test_setoutputsize(self): # Real test for setoutputsize is driver dependant raise NotImplementedError,'Driver need to override this test' def test_None(self): con = self._connect() try: cur = con.cursor() self.executeDDL1(cur) cur.execute('insert into %sbooze values (NULL)' % self.table_prefix) cur.execute('select name from %sbooze' % self.table_prefix) r = cur.fetchall() self.assertEqual(len(r),1) self.assertEqual(len(r[0]),1) self.assertEqual(r[0][0],None,'NULL value not returned as None') finally: con.close() def test_Date(self): d1 = self.driver.Date(2002,12,25) d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0))) # Can we assume this? API doesn't specify, but it seems implied # self.assertEqual(str(d1),str(d2)) def test_Time(self): t1 = self.driver.Time(13,45,30) t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0))) # Can we assume this? API doesn't specify, but it seems implied # self.assertEqual(str(t1),str(t2)) def test_Timestamp(self): t1 = self.driver.Timestamp(2002,12,25,13,45,30) t2 = self.driver.TimestampFromTicks( time.mktime((2002,12,25,13,45,30,0,0,0)) ) # Can we assume this? 
API doesn't specify, but it seems implied # self.assertEqual(str(t1),str(t2)) def test_Binary(self): b = self.driver.Binary('Something') b = self.driver.Binary('') def test_STRING(self): self.failUnless(hasattr(self.driver,'STRING'), 'module.STRING must be defined' ) def test_BINARY(self): self.failUnless(hasattr(self.driver,'BINARY'), 'module.BINARY must be defined.' ) def test_NUMBER(self): self.failUnless(hasattr(self.driver,'NUMBER'), 'module.NUMBER must be defined.' ) def test_DATETIME(self): self.failUnless(hasattr(self.driver,'DATETIME'), 'module.DATETIME must be defined.' ) def test_ROWID(self): self.failUnless(hasattr(self.driver,'ROWID'), 'module.ROWID must be defined.' )
mit
seanwestfall/django
tests/m2m_signals/tests.py
271
15982
""" Testing signals emitted on changing m2m relations. """ from django.db import models from django.test import TestCase from .models import Car, Part, Person, SportsCar class ManyToManySignalsTest(TestCase): def m2m_changed_signal_receiver(self, signal, sender, **kwargs): message = { 'instance': kwargs['instance'], 'action': kwargs['action'], 'reverse': kwargs['reverse'], 'model': kwargs['model'], } if kwargs['pk_set']: message['objects'] = list( kwargs['model'].objects.filter(pk__in=kwargs['pk_set']) ) self.m2m_changed_messages.append(message) def setUp(self): self.m2m_changed_messages = [] self.vw = Car.objects.create(name='VW') self.bmw = Car.objects.create(name='BMW') self.toyota = Car.objects.create(name='Toyota') self.wheelset = Part.objects.create(name='Wheelset') self.doors = Part.objects.create(name='Doors') self.engine = Part.objects.create(name='Engine') self.airbag = Part.objects.create(name='Airbag') self.sunroof = Part.objects.create(name='Sunroof') self.alice = Person.objects.create(name='Alice') self.bob = Person.objects.create(name='Bob') self.chuck = Person.objects.create(name='Chuck') self.daisy = Person.objects.create(name='Daisy') def tearDown(self): # disconnect all signal handlers models.signals.m2m_changed.disconnect( self.m2m_changed_signal_receiver, Car.default_parts.through ) models.signals.m2m_changed.disconnect( self.m2m_changed_signal_receiver, Car.optional_parts.through ) models.signals.m2m_changed.disconnect( self.m2m_changed_signal_receiver, Person.fans.through ) models.signals.m2m_changed.disconnect( self.m2m_changed_signal_receiver, Person.friends.through ) def _initialize_signal_car(self, add_default_parts_before_set_signal=False): """ Install a listener on the two m2m relations. 
""" models.signals.m2m_changed.connect( self.m2m_changed_signal_receiver, Car.optional_parts.through ) if add_default_parts_before_set_signal: # adding a default part to our car - no signal listener installed self.vw.default_parts.add(self.sunroof) models.signals.m2m_changed.connect( self.m2m_changed_signal_receiver, Car.default_parts.through ) def test_m2m_relations_add_remove_clear(self): expected_messages = [] self._initialize_signal_car(add_default_parts_before_set_signal=True) self.vw.default_parts.add(self.wheelset, self.doors, self.engine) expected_messages.append({ 'instance': self.vw, 'action': 'pre_add', 'reverse': False, 'model': Part, 'objects': [self.doors, self.engine, self.wheelset], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_add', 'reverse': False, 'model': Part, 'objects': [self.doors, self.engine, self.wheelset], }) self.assertEqual(self.m2m_changed_messages, expected_messages) # give the BMW and Toyota some doors as well self.doors.car_set.add(self.bmw, self.toyota) expected_messages.append({ 'instance': self.doors, 'action': 'pre_add', 'reverse': True, 'model': Car, 'objects': [self.bmw, self.toyota], }) expected_messages.append({ 'instance': self.doors, 'action': 'post_add', 'reverse': True, 'model': Car, 'objects': [self.bmw, self.toyota], }) self.assertEqual(self.m2m_changed_messages, expected_messages) def test_m2m_relations_signals_remove_relation(self): self._initialize_signal_car() # remove the engine from the self.vw and the airbag (which is not set # but is returned) self.vw.default_parts.remove(self.engine, self.airbag) self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.vw, 'action': 'pre_remove', 'reverse': False, 'model': Part, 'objects': [self.airbag, self.engine], }, { 'instance': self.vw, 'action': 'post_remove', 'reverse': False, 'model': Part, 'objects': [self.airbag, self.engine], } ]) def test_m2m_relations_signals_give_the_self_vw_some_optional_parts(self): expected_messages = [] 
self._initialize_signal_car() # give the self.vw some optional parts (second relation to same model) self.vw.optional_parts.add(self.airbag, self.sunroof) expected_messages.append({ 'instance': self.vw, 'action': 'pre_add', 'reverse': False, 'model': Part, 'objects': [self.airbag, self.sunroof], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_add', 'reverse': False, 'model': Part, 'objects': [self.airbag, self.sunroof], }) self.assertEqual(self.m2m_changed_messages, expected_messages) # add airbag to all the cars (even though the self.vw already has one) self.airbag.cars_optional.add(self.vw, self.bmw, self.toyota) expected_messages.append({ 'instance': self.airbag, 'action': 'pre_add', 'reverse': True, 'model': Car, 'objects': [self.bmw, self.toyota], }) expected_messages.append({ 'instance': self.airbag, 'action': 'post_add', 'reverse': True, 'model': Car, 'objects': [self.bmw, self.toyota], }) self.assertEqual(self.m2m_changed_messages, expected_messages) def test_m2m_relations_signals_reverse_relation_with_custom_related_name(self): self._initialize_signal_car() # remove airbag from the self.vw (reverse relation with custom # related_name) self.airbag.cars_optional.remove(self.vw) self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.airbag, 'action': 'pre_remove', 'reverse': True, 'model': Car, 'objects': [self.vw], }, { 'instance': self.airbag, 'action': 'post_remove', 'reverse': True, 'model': Car, 'objects': [self.vw], } ]) def test_m2m_relations_signals_clear_all_parts_of_the_self_vw(self): self._initialize_signal_car() # clear all parts of the self.vw self.vw.default_parts.clear() self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.vw, 'action': 'pre_clear', 'reverse': False, 'model': Part, }, { 'instance': self.vw, 'action': 'post_clear', 'reverse': False, 'model': Part, } ]) def test_m2m_relations_signals_all_the_doors_off_of_cars(self): self._initialize_signal_car() # take all the doors off of cars 
self.doors.car_set.clear() self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.doors, 'action': 'pre_clear', 'reverse': True, 'model': Car, }, { 'instance': self.doors, 'action': 'post_clear', 'reverse': True, 'model': Car, } ]) def test_m2m_relations_signals_reverse_relation(self): self._initialize_signal_car() # take all the airbags off of cars (clear reverse relation with custom # related_name) self.airbag.cars_optional.clear() self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.airbag, 'action': 'pre_clear', 'reverse': True, 'model': Car, }, { 'instance': self.airbag, 'action': 'post_clear', 'reverse': True, 'model': Car, } ]) def test_m2m_relations_signals_alternative_ways(self): expected_messages = [] self._initialize_signal_car() # alternative ways of setting relation: self.vw.default_parts.create(name='Windows') p6 = Part.objects.get(name='Windows') expected_messages.append({ 'instance': self.vw, 'action': 'pre_add', 'reverse': False, 'model': Part, 'objects': [p6], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_add', 'reverse': False, 'model': Part, 'objects': [p6], }) self.assertEqual(self.m2m_changed_messages, expected_messages) # direct assignment clears the set first, then adds self.vw.default_parts = [self.wheelset, self.doors, self.engine] expected_messages.append({ 'instance': self.vw, 'action': 'pre_remove', 'reverse': False, 'model': Part, 'objects': [p6], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_remove', 'reverse': False, 'model': Part, 'objects': [p6], }) expected_messages.append({ 'instance': self.vw, 'action': 'pre_add', 'reverse': False, 'model': Part, 'objects': [self.doors, self.engine, self.wheelset], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_add', 'reverse': False, 'model': Part, 'objects': [self.doors, self.engine, self.wheelset], }) self.assertEqual(self.m2m_changed_messages, expected_messages) def 
test_m2m_relations_signals_clearing_removing(self): expected_messages = [] self._initialize_signal_car(add_default_parts_before_set_signal=True) # set by clearing. self.vw.default_parts.set([self.wheelset, self.doors, self.engine], clear=True) expected_messages.append({ 'instance': self.vw, 'action': 'pre_clear', 'reverse': False, 'model': Part, }) expected_messages.append({ 'instance': self.vw, 'action': 'post_clear', 'reverse': False, 'model': Part, }) expected_messages.append({ 'instance': self.vw, 'action': 'pre_add', 'reverse': False, 'model': Part, 'objects': [self.doors, self.engine, self.wheelset], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_add', 'reverse': False, 'model': Part, 'objects': [self.doors, self.engine, self.wheelset], }) self.assertEqual(self.m2m_changed_messages, expected_messages) # set by only removing what's necessary. self.vw.default_parts.set([self.wheelset, self.doors], clear=False) expected_messages.append({ 'instance': self.vw, 'action': 'pre_remove', 'reverse': False, 'model': Part, 'objects': [self.engine], }) expected_messages.append({ 'instance': self.vw, 'action': 'post_remove', 'reverse': False, 'model': Part, 'objects': [self.engine], }) self.assertEqual(self.m2m_changed_messages, expected_messages) def test_m2m_relations_signals_when_inheritance(self): expected_messages = [] self._initialize_signal_car(add_default_parts_before_set_signal=True) # Check that signals still work when model inheritance is involved c4 = SportsCar.objects.create(name='Bugatti', price='1000000') c4b = Car.objects.get(name='Bugatti') c4.default_parts = [self.doors] expected_messages.append({ 'instance': c4, 'action': 'pre_add', 'reverse': False, 'model': Part, 'objects': [self.doors], }) expected_messages.append({ 'instance': c4, 'action': 'post_add', 'reverse': False, 'model': Part, 'objects': [self.doors], }) self.assertEqual(self.m2m_changed_messages, expected_messages) self.engine.car_set.add(c4) expected_messages.append({ 
'instance': self.engine, 'action': 'pre_add', 'reverse': True, 'model': Car, 'objects': [c4b], }) expected_messages.append({ 'instance': self.engine, 'action': 'post_add', 'reverse': True, 'model': Car, 'objects': [c4b], }) self.assertEqual(self.m2m_changed_messages, expected_messages) def _initialize_signal_person(self): # Install a listener on the two m2m relations. models.signals.m2m_changed.connect( self.m2m_changed_signal_receiver, Person.fans.through ) models.signals.m2m_changed.connect( self.m2m_changed_signal_receiver, Person.friends.through ) def test_m2m_relations_with_self_add_friends(self): self._initialize_signal_person() self.alice.friends = [self.bob, self.chuck] self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.alice, 'action': 'pre_add', 'reverse': False, 'model': Person, 'objects': [self.bob, self.chuck], }, { 'instance': self.alice, 'action': 'post_add', 'reverse': False, 'model': Person, 'objects': [self.bob, self.chuck], } ]) def test_m2m_relations_with_self_add_fan(self): self._initialize_signal_person() self.alice.fans = [self.daisy] self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.alice, 'action': 'pre_add', 'reverse': False, 'model': Person, 'objects': [self.daisy], }, { 'instance': self.alice, 'action': 'post_add', 'reverse': False, 'model': Person, 'objects': [self.daisy], } ]) def test_m2m_relations_with_self_add_idols(self): self._initialize_signal_person() self.chuck.idols = [self.alice, self.bob] self.assertEqual(self.m2m_changed_messages, [ { 'instance': self.chuck, 'action': 'pre_add', 'reverse': True, 'model': Person, 'objects': [self.alice, self.bob], }, { 'instance': self.chuck, 'action': 'post_add', 'reverse': True, 'model': Person, 'objects': [self.alice, self.bob], } ])
bsd-3-clause
filias/django
tests/invalid_models_tests/test_relative_fields.py
15
59901
# -*- encoding: utf-8 -*- from __future__ import unicode_literals import warnings from django.core.checks import Error, Warning as DjangoWarning from django.db import models from django.db.models.fields.related import ForeignObject from django.test import ignore_warnings from django.test.testcases import SimpleTestCase, skipIfDBFeature from django.test.utils import isolate_apps, override_settings from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning from django.utils.version import get_docs_version @isolate_apps('invalid_models_tests') class RelativeFieldTests(SimpleTestCase): def test_valid_foreign_key_without_accessor(self): class Target(models.Model): # There would be a clash if Model.field installed an accessor. model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, related_name='+') field = Model._meta.get_field('field') errors = field.check() self.assertEqual(errors, []) @ignore_warnings(category=RemovedInDjango20Warning) def test_valid_foreign_key_without_on_delete(self): class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, related_name='+') def test_foreign_key_without_on_delete_warning(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, related_name='+') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), 'on_delete will be a required arg for ForeignKey in Django ' '2.0. Set it to models.CASCADE on models and in existing ' 'migrations if you want to maintain the current default ' 'behavior. 
See https://docs.djangoproject.com/en/%s/ref/models/fields/' '#django.db.models.ForeignKey.on_delete' % get_docs_version(), ) def test_foreign_key_to_field_as_arg(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, 'id') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), "The signature for ForeignKey will change in Django 2.0. " "Pass to_field='id' as a kwarg instead of as an arg." ) def test_one_to_one_field_without_on_delete_warning(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.OneToOneField(Target, related_name='+') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), 'on_delete will be a required arg for OneToOneField in Django ' '2.0. Set it to models.CASCADE on models and in existing ' 'migrations if you want to maintain the current default ' 'behavior. See https://docs.djangoproject.com/en/%s/ref/models/fields/' '#django.db.models.ForeignKey.on_delete' % get_docs_version(), ) def test_one_to_one_field_to_field_as_arg(self): with warnings.catch_warnings(record=True) as warns: warnings.simplefilter('always') # prevent warnings from appearing as errors class Target(models.Model): model = models.IntegerField() class Model(models.Model): field = models.OneToOneField(Target, 'id') self.assertEqual(len(warns), 1) self.assertEqual( str(warns[0].message), "The signature for OneToOneField will change in Django 2.0. " "Pass to_field='id' as a kwarg instead of as an arg." 
) def test_foreign_key_to_missing_model(self): # Model names are resolved when a model is being created, so we cannot # test relative fields in isolation and we need to attach them to a # model. class Model(models.Model): foreign_key = models.ForeignKey('Rel1', models.CASCADE) field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( "Field defines a relation with model 'Rel1', " "which is either not installed, or is abstract.", obj=field, id='fields.E300', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') def test_foreign_key_to_isolate_apps_model(self): """ #25723 - Referenced model registration lookup should be run against the field's model registry. """ class OtherModel(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey('OtherModel', models.CASCADE) field = Model._meta.get_field('foreign_key') self.assertEqual(field.check(from_model=Model), []) def test_many_to_many_to_missing_model(self): class Model(models.Model): m2m = models.ManyToManyField("Rel2") field = Model._meta.get_field('m2m') errors = field.check(from_model=Model) expected = [ Error( "Field defines a relation with model 'Rel2', " "which is either not installed, or is abstract.", obj=field, id='fields.E300', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') def test_many_to_many_to_isolate_apps_model(self): """ #25723 - Referenced model registration lookup should be run against the field's model registry. 
""" class OtherModel(models.Model): pass class Model(models.Model): m2m = models.ManyToManyField('OtherModel') field = Model._meta.get_field('m2m') self.assertEqual(field.check(from_model=Model), []) def test_many_to_many_with_useless_options(self): class Model(models.Model): name = models.CharField(max_length=20) class ModelM2M(models.Model): m2m = models.ManyToManyField(Model, null=True, validators=['']) errors = ModelM2M.check() field = ModelM2M._meta.get_field('m2m') expected = [ DjangoWarning( 'null has no effect on ManyToManyField.', obj=field, id='fields.W340', ) ] expected.append( DjangoWarning( 'ManyToManyField does not support validators.', obj=field, id='fields.W341', ) ) self.assertEqual(errors, expected) def test_ambiguous_relationship_model(self): class Person(models.Model): pass class Group(models.Model): field = models.ManyToManyField('Person', through="AmbiguousRelationship", related_name='tertiary') class AmbiguousRelationship(models.Model): # Too much foreign keys to Person. first_person = models.ForeignKey(Person, models.CASCADE, related_name="first") second_person = models.ForeignKey(Person, models.CASCADE, related_name="second") second_model = models.ForeignKey(Group, models.CASCADE) field = Group._meta.get_field('field') errors = field.check(from_model=Group) expected = [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.field', but it has more than one " "foreign key to 'Person', which is ambiguous. You must specify " "which foreign key Django should use via the through_fields " "keyword argument.", hint=( 'If you want to create a recursive relationship, use ' 'ForeignKey("self", symmetrical=False, through="AmbiguousRelationship").' 
), obj=field, id='fields.E335', ), ] self.assertEqual(errors, expected) def test_relationship_model_with_foreign_key_to_wrong_model(self): class WrongModel(models.Model): pass class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through="InvalidRelationship") class InvalidRelationship(models.Model): person = models.ForeignKey(Person, models.CASCADE) wrong_foreign_key = models.ForeignKey(WrongModel, models.CASCADE) # The last foreign key should point to Group model. field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.members', but it does not " "have a foreign key to 'Group' or 'Person'.", obj=InvalidRelationship, id='fields.E336', ), ] self.assertEqual(errors, expected) def test_relationship_model_missing_foreign_key(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through="InvalidRelationship") class InvalidRelationship(models.Model): group = models.ForeignKey(Group, models.CASCADE) # No foreign key to Person field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Group.members', but it does not have " "a foreign key to 'Group' or 'Person'.", obj=InvalidRelationship, id='fields.E336', ), ] self.assertEqual(errors, expected) def test_missing_relationship_model(self): class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through="MissingM2MModel") field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( "Field specifies a many-to-many relation through model " "'MissingM2MModel', which has not been installed.", obj=field, id='fields.E331', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') def 
test_many_to_many_through_isolate_apps_model(self): """ #25723 - Through model registration lookup should be run against the field's model registry. """ class GroupMember(models.Model): person = models.ForeignKey('Person', models.CASCADE) group = models.ForeignKey('Group', models.CASCADE) class Person(models.Model): pass class Group(models.Model): members = models.ManyToManyField('Person', through='GroupMember') field = Group._meta.get_field('members') self.assertEqual(field.check(from_model=Group), []) def test_symmetrical_self_referential_field(self): class Person(models.Model): # Implicit symmetrical=False. friends = models.ManyToManyField('self', through="Relationship") class Relationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( 'Many-to-many fields with intermediate tables must not be symmetrical.', obj=field, id='fields.E332', ), ] self.assertEqual(errors, expected) def test_too_many_foreign_keys_in_self_referential_model(self): class Person(models.Model): friends = models.ManyToManyField('self', through="InvalidRelationship", symmetrical=False) class InvalidRelationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set_2") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set_2") third = models.ForeignKey(Person, models.CASCADE, related_name="too_many_by_far") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( "The model is used as an intermediate model by " "'invalid_models_tests.Person.friends', but it has more than two " "foreign keys to 'Person', which is ambiguous. 
You must specify " "which two foreign keys Django should use via the through_fields " "keyword argument.", hint='Use through_fields to specify which two foreign keys Django should use.', obj=InvalidRelationship, id='fields.E333', ), ] self.assertEqual(errors, expected) def test_symmetric_self_reference_with_intermediate_table(self): class Person(models.Model): # Explicit symmetrical=True. friends = models.ManyToManyField('self', through="Relationship", symmetrical=True) class Relationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( 'Many-to-many fields with intermediate tables must not be symmetrical.', obj=field, id='fields.E332', ), ] self.assertEqual(errors, expected) def test_symmetric_self_reference_with_intermediate_table_and_through_fields(self): """ Using through_fields in a m2m with an intermediate model shouldn't mask its incompatibility with symmetry. """ class Person(models.Model): # Explicit symmetrical=True. 
friends = models.ManyToManyField( 'self', symmetrical=True, through="Relationship", through_fields=('first', 'second'), ) class Relationship(models.Model): first = models.ForeignKey(Person, models.CASCADE, related_name="rel_from_set") second = models.ForeignKey(Person, models.CASCADE, related_name="rel_to_set") referee = models.ForeignKey(Person, models.CASCADE, related_name="referred") field = Person._meta.get_field('friends') errors = field.check(from_model=Person) expected = [ Error( 'Many-to-many fields with intermediate tables must not be symmetrical.', obj=field, id='fields.E332', ), ] self.assertEqual(errors, expected) def test_foreign_key_to_abstract_model(self): class AbstractModel(models.Model): class Meta: abstract = True class Model(models.Model): rel_string_foreign_key = models.ForeignKey('AbstractModel', models.CASCADE) rel_class_foreign_key = models.ForeignKey(AbstractModel, models.CASCADE) fields = [ Model._meta.get_field('rel_string_foreign_key'), Model._meta.get_field('rel_class_foreign_key'), ] expected_error = Error( "Field defines a relation with model 'AbstractModel', " "which is either not installed, or is abstract.", id='fields.E300', ) for field in fields: expected_error.obj = field errors = field.check() self.assertEqual(errors, [expected_error]) def test_m2m_to_abstract_model(self): class AbstractModel(models.Model): class Meta: abstract = True class Model(models.Model): rel_string_m2m = models.ManyToManyField('AbstractModel') rel_class_m2m = models.ManyToManyField(AbstractModel) fields = [ Model._meta.get_field('rel_string_m2m'), Model._meta.get_field('rel_class_m2m'), ] expected_error = Error( "Field defines a relation with model 'AbstractModel', " "which is either not installed, or is abstract.", id='fields.E300', ) for field in fields: expected_error.obj = field errors = field.check(from_model=Model) self.assertEqual(errors, [expected_error]) def test_unique_m2m(self): class Person(models.Model): name = models.CharField(max_length=5) 
class Group(models.Model): members = models.ManyToManyField('Person', unique=True) field = Group._meta.get_field('members') errors = field.check(from_model=Group) expected = [ Error( 'ManyToManyFields cannot be unique.', obj=field, id='fields.E330', ), ] self.assertEqual(errors, expected) def test_foreign_key_to_non_unique_field(self): class Target(models.Model): bad = models.IntegerField() # No unique=True class Model(models.Model): foreign_key = models.ForeignKey('Target', models.CASCADE, to_field='bad') field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( "'Target.bad' must set unique=True because it is referenced by a foreign key.", obj=field, id='fields.E311', ), ] self.assertEqual(errors, expected) def test_foreign_key_to_non_unique_field_under_explicit_model(self): class Target(models.Model): bad = models.IntegerField() class Model(models.Model): field = models.ForeignKey(Target, models.CASCADE, to_field='bad') field = Model._meta.get_field('field') errors = field.check() expected = [ Error( "'Target.bad' must set unique=True because it is referenced by a foreign key.", obj=field, id='fields.E311', ), ] self.assertEqual(errors, expected) def test_foreign_object_to_non_unique_fields(self): class Person(models.Model): # Note that both fields are not unique. country_id = models.IntegerField() city_id = models.IntegerField() class MMembership(models.Model): person_country_id = models.IntegerField() person_city_id = models.IntegerField() person = models.ForeignObject( Person, on_delete=models.CASCADE, from_fields=['person_country_id', 'person_city_id'], to_fields=['country_id', 'city_id'], ) field = MMembership._meta.get_field('person') errors = field.check() expected = [ Error( "No subset of the fields 'country_id', 'city_id' on model 'Person' is unique.", hint=( "Add unique=True on any of those fields or add at least " "a subset of them to a unique_together constraint." 
), obj=field, id='fields.E310', ) ] self.assertEqual(errors, expected) def test_on_delete_set_null_on_non_nullable_field(self): class Person(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey('Person', models.SET_NULL) field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( 'Field specifies on_delete=SET_NULL, but cannot be null.', hint='Set null=True argument on the field, or change the on_delete rule.', obj=field, id='fields.E320', ), ] self.assertEqual(errors, expected) def test_on_delete_set_default_without_default_value(self): class Person(models.Model): pass class Model(models.Model): foreign_key = models.ForeignKey('Person', models.SET_DEFAULT) field = Model._meta.get_field('foreign_key') errors = field.check() expected = [ Error( 'Field specifies on_delete=SET_DEFAULT, but has no default value.', hint='Set a default value, or change the on_delete rule.', obj=field, id='fields.E321', ), ] self.assertEqual(errors, expected) @skipIfDBFeature('interprets_empty_strings_as_nulls') def test_nullable_primary_key(self): class Model(models.Model): field = models.IntegerField(primary_key=True, null=True) field = Model._meta.get_field('field') errors = field.check() expected = [ Error( 'Primary keys must not have null=True.', hint='Set null=False on the field, or remove primary_key=True argument.', obj=field, id='fields.E007', ), ] self.assertEqual(errors, expected) def test_not_swapped_model(self): class SwappableModel(models.Model): # A model that can be, but isn't swapped out. References to this # model should *not* raise any validation error. 
class Meta: swappable = 'TEST_SWAPPABLE_MODEL' class Model(models.Model): explicit_fk = models.ForeignKey( SwappableModel, models.CASCADE, related_name='explicit_fk', ) implicit_fk = models.ForeignKey( 'invalid_models_tests.SwappableModel', models.CASCADE, related_name='implicit_fk', ) explicit_m2m = models.ManyToManyField(SwappableModel, related_name='explicit_m2m') implicit_m2m = models.ManyToManyField( 'invalid_models_tests.SwappableModel', related_name='implicit_m2m', ) explicit_fk = Model._meta.get_field('explicit_fk') self.assertEqual(explicit_fk.check(), []) implicit_fk = Model._meta.get_field('implicit_fk') self.assertEqual(implicit_fk.check(), []) explicit_m2m = Model._meta.get_field('explicit_m2m') self.assertEqual(explicit_m2m.check(from_model=Model), []) implicit_m2m = Model._meta.get_field('implicit_m2m') self.assertEqual(implicit_m2m.check(from_model=Model), []) @override_settings(TEST_SWAPPED_MODEL='invalid_models_tests.Replacement') def test_referencing_to_swapped_model(self): class Replacement(models.Model): pass class SwappedModel(models.Model): class Meta: swappable = 'TEST_SWAPPED_MODEL' class Model(models.Model): explicit_fk = models.ForeignKey( SwappedModel, models.CASCADE, related_name='explicit_fk', ) implicit_fk = models.ForeignKey( 'invalid_models_tests.SwappedModel', models.CASCADE, related_name='implicit_fk', ) explicit_m2m = models.ManyToManyField(SwappedModel, related_name='explicit_m2m') implicit_m2m = models.ManyToManyField( 'invalid_models_tests.SwappedModel', related_name='implicit_m2m', ) fields = [ Model._meta.get_field('explicit_fk'), Model._meta.get_field('implicit_fk'), Model._meta.get_field('explicit_m2m'), Model._meta.get_field('implicit_m2m'), ] expected_error = Error( ("Field defines a relation with the model " "'invalid_models_tests.SwappedModel', which has been swapped out."), hint="Update the relation to point at 'settings.TEST_SWAPPED_MODEL'.", id='fields.E301', ) for field in fields: expected_error.obj = field errors 
= field.check(from_model=Model) self.assertEqual(errors, [expected_error]) def test_related_field_has_invalid_related_name(self): digit = 0 illegal_non_alphanumeric = '!' whitespace = '\t' invalid_related_names = [ '%s_begins_with_digit' % digit, '%s_begins_with_illegal_non_alphanumeric' % illegal_non_alphanumeric, '%s_begins_with_whitespace' % whitespace, 'contains_%s_illegal_non_alphanumeric' % illegal_non_alphanumeric, 'contains_%s_whitespace' % whitespace, 'ends_with_with_illegal_non_alphanumeric_%s' % illegal_non_alphanumeric, 'ends_with_whitespace_%s' % whitespace, 'with', # a Python keyword 'related_name\n', '', ] # Python 2 crashes on non-ASCII strings. if six.PY3: invalid_related_names.append(',') class Parent(models.Model): pass for invalid_related_name in invalid_related_names: Child = type(str('Child_%s') % str(invalid_related_name), (models.Model,), { 'parent': models.ForeignKey('Parent', models.CASCADE, related_name=invalid_related_name), '__module__': Parent.__module__, }) field = Child._meta.get_field('parent') errors = Child.check() expected = [ Error( "The name '%s' is invalid related_name for field Child_%s.parent" % (invalid_related_name, invalid_related_name), hint="Related name must be a valid Python identifier or end with a '+'", obj=field, id='fields.E306', ), ] self.assertEqual(errors, expected) def test_related_field_has_valid_related_name(self): lowercase = 'a' uppercase = 'A' digit = 0 related_names = [ '%s_starts_with_lowercase' % lowercase, '%s_tarts_with_uppercase' % uppercase, '_starts_with_underscore', 'contains_%s_digit' % digit, 'ends_with_plus+', '_', '_+', '+', ] # Python 2 crashes on non-ASCII strings. 
if six.PY3: related_names.extend(['試', '試驗+']) class Parent(models.Model): pass for related_name in related_names: Child = type(str('Child_%s') % str(related_name), (models.Model,), { 'parent': models.ForeignKey('Parent', models.CASCADE, related_name=related_name), '__module__': Parent.__module__, }) errors = Child.check() self.assertFalse(errors) @isolate_apps('invalid_models_tests') class AccessorClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_accessor_clash( target=models.IntegerField(), relative=models.ForeignKey('Target', models.CASCADE)) def test_fk_to_fk(self): self._test_accessor_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey('Target', models.CASCADE)) def test_fk_to_m2m(self): self._test_accessor_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey('Target', models.CASCADE)) def test_m2m_to_integer(self): self._test_accessor_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target')) def test_m2m_to_fk(self): self._test_accessor_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target')) def test_m2m_to_m2m(self): self._test_accessor_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField('Target')) def _test_accessor_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): model_set = target class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.rel' clashes with field name 'Target.model_set'.", hint=("Rename field 'Target.model_set', or add/change " "a related_name argument to the definition " "for field 'Model.rel'."), obj=Model._meta.get_field('rel'), id='fields.E302', ), ] self.assertEqual(errors, expected) def test_clash_between_accessors(self): class Target(models.Model): pass class Model(models.Model): foreign = models.ForeignKey(Target, models.CASCADE) m2m = 
models.ManyToManyField(Target) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.foreign' clashes with reverse accessor for 'Model.m2m'.", hint=( "Add or change a related_name argument to the definition " "for 'Model.foreign' or 'Model.m2m'." ), obj=Model._meta.get_field('foreign'), id='fields.E304', ), Error( "Reverse accessor for 'Model.m2m' clashes with reverse accessor for 'Model.foreign'.", hint=( "Add or change a related_name argument to the definition " "for 'Model.m2m' or 'Model.foreign'." ), obj=Model._meta.get_field('m2m'), id='fields.E304', ), ] self.assertEqual(errors, expected) def test_m2m_to_m2m_with_inheritance(self): """ Ref #22047. """ class Target(models.Model): pass class Model(models.Model): children = models.ManyToManyField('Child', related_name="m2m_clash", related_query_name="no_clash") class Parent(models.Model): m2m_clash = models.ManyToManyField('Target') class Child(Parent): pass errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.children' clashes with field name 'Child.m2m_clash'.", hint=( "Rename field 'Child.m2m_clash', or add/change a related_name " "argument to the definition for field 'Model.children'." 
), obj=Model._meta.get_field('children'), id='fields.E302', ) ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ReverseQueryNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_reverse_query_name_clash( target=models.IntegerField(), relative=models.ForeignKey('Target', models.CASCADE)) def test_fk_to_fk(self): self._test_reverse_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey('Target', models.CASCADE)) def test_fk_to_m2m(self): self._test_reverse_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey('Target', models.CASCADE)) def test_m2m_to_integer(self): self._test_reverse_query_name_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target')) def test_m2m_to_fk(self): self._test_reverse_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target')) def test_m2m_to_m2m(self): self._test_reverse_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField('Target')) def _test_reverse_query_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): model = target class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse query name for 'Model.rel' clashes with field name 'Target.model'.", hint=( "Rename field 'Target.model', or add/change a related_name " "argument to the definition for field 'Model.rel'." 
), obj=Model._meta.get_field('rel'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ExplicitRelatedNameClashTests(SimpleTestCase): def test_fk_to_integer(self): self._test_explicit_related_name_clash( target=models.IntegerField(), relative=models.ForeignKey('Target', models.CASCADE, related_name='clash')) def test_fk_to_fk(self): self._test_explicit_related_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey('Target', models.CASCADE, related_name='clash')) def test_fk_to_m2m(self): self._test_explicit_related_name_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey('Target', models.CASCADE, related_name='clash')) def test_m2m_to_integer(self): self._test_explicit_related_name_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target', related_name='clash')) def test_m2m_to_fk(self): self._test_explicit_related_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target', related_name='clash')) def test_m2m_to_m2m(self): self._test_explicit_related_name_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField('Target', related_name='clash')) def _test_explicit_related_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): clash = target class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.rel' clashes with field name 'Target.clash'.", hint=( "Rename field 'Target.clash', or add/change a related_name " "argument to the definition for field 'Model.rel'." ), obj=Model._meta.get_field('rel'), id='fields.E302', ), Error( "Reverse query name for 'Model.rel' clashes with field name 'Target.clash'.", hint=( "Rename field 'Target.clash', or add/change a related_name " "argument to the definition for field 'Model.rel'." 
), obj=Model._meta.get_field('rel'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ExplicitRelatedQueryNameClashTests(SimpleTestCase): def test_fk_to_integer(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.IntegerField(), relative=models.ForeignKey( 'Target', models.CASCADE, related_name=related_name, related_query_name='clash', ) ) def test_hidden_fk_to_integer(self, related_name=None): self.test_fk_to_integer(related_name='+') def test_fk_to_fk(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ForeignKey( 'Target', models.CASCADE, related_name=related_name, related_query_name='clash', ) ) def test_hidden_fk_to_fk(self): self.test_fk_to_fk(related_name='+') def test_fk_to_m2m(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ForeignKey( 'Target', models.CASCADE, related_name=related_name, related_query_name='clash', ) ) def test_hidden_fk_to_m2m(self): self.test_fk_to_m2m(related_name='+') def test_m2m_to_integer(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.IntegerField(), relative=models.ManyToManyField('Target', related_name=related_name, related_query_name='clash')) def test_hidden_m2m_to_integer(self): self.test_m2m_to_integer(related_name='+') def test_m2m_to_fk(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ForeignKey('Another', models.CASCADE), relative=models.ManyToManyField('Target', related_name=related_name, related_query_name='clash')) def test_hidden_m2m_to_fk(self): self.test_m2m_to_fk(related_name='+') def test_m2m_to_m2m(self, related_name=None): self._test_explicit_related_query_name_clash( target=models.ManyToManyField('Another'), relative=models.ManyToManyField( 'Target', related_name=related_name, 
related_query_name='clash', ) ) def test_hidden_m2m_to_m2m(self): self.test_m2m_to_m2m(related_name='+') def _test_explicit_related_query_name_clash(self, target, relative): class Another(models.Model): pass class Target(models.Model): clash = target class Model(models.Model): rel = relative errors = Model.check() expected = [ Error( "Reverse query name for 'Model.rel' clashes with field name 'Target.clash'.", hint=( "Rename field 'Target.clash', or add/change a related_name " "argument to the definition for field 'Model.rel'." ), obj=Model._meta.get_field('rel'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class SelfReferentialM2MClashTests(SimpleTestCase): def test_clash_between_accessors(self): class Model(models.Model): first_m2m = models.ManyToManyField('self', symmetrical=False) second_m2m = models.ManyToManyField('self', symmetrical=False) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.first_m2m' clashes with reverse accessor for 'Model.second_m2m'.", hint=( "Add or change a related_name argument to the definition " "for 'Model.first_m2m' or 'Model.second_m2m'." ), obj=Model._meta.get_field('first_m2m'), id='fields.E304', ), Error( "Reverse accessor for 'Model.second_m2m' clashes with reverse accessor for 'Model.first_m2m'.", hint=( "Add or change a related_name argument to the definition " "for 'Model.second_m2m' or 'Model.first_m2m'." ), obj=Model._meta.get_field('second_m2m'), id='fields.E304', ), ] self.assertEqual(errors, expected) def test_accessor_clash(self): class Model(models.Model): model_set = models.ManyToManyField("self", symmetrical=False) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.model_set' clashes with field name 'Model.model_set'.", hint=( "Rename field 'Model.model_set', or add/change a related_name " "argument to the definition for field 'Model.model_set'." 
), obj=Model._meta.get_field('model_set'), id='fields.E302', ), ] self.assertEqual(errors, expected) def test_reverse_query_name_clash(self): class Model(models.Model): model = models.ManyToManyField("self", symmetrical=False) errors = Model.check() expected = [ Error( "Reverse query name for 'Model.model' clashes with field name 'Model.model'.", hint=( "Rename field 'Model.model', or add/change a related_name " "argument to the definition for field 'Model.model'." ), obj=Model._meta.get_field('model'), id='fields.E303', ), ] self.assertEqual(errors, expected) def test_clash_under_explicit_related_name(self): class Model(models.Model): clash = models.IntegerField() m2m = models.ManyToManyField("self", symmetrical=False, related_name='clash') errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.m2m' clashes with field name 'Model.clash'.", hint=( "Rename field 'Model.clash', or add/change a related_name " "argument to the definition for field 'Model.m2m'." ), obj=Model._meta.get_field('m2m'), id='fields.E302', ), Error( "Reverse query name for 'Model.m2m' clashes with field name 'Model.clash'.", hint=( "Rename field 'Model.clash', or add/change a related_name " "argument to the definition for field 'Model.m2m'." 
), obj=Model._meta.get_field('m2m'), id='fields.E303', ), ] self.assertEqual(errors, expected) def test_valid_model(self): class Model(models.Model): first = models.ManyToManyField("self", symmetrical=False, related_name='first_accessor') second = models.ManyToManyField("self", symmetrical=False, related_name='second_accessor') errors = Model.check() self.assertEqual(errors, []) @isolate_apps('invalid_models_tests') class SelfReferentialFKClashTests(SimpleTestCase): def test_accessor_clash(self): class Model(models.Model): model_set = models.ForeignKey("Model", models.CASCADE) errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.model_set' clashes with field name 'Model.model_set'.", hint=( "Rename field 'Model.model_set', or add/change " "a related_name argument to the definition " "for field 'Model.model_set'." ), obj=Model._meta.get_field('model_set'), id='fields.E302', ), ] self.assertEqual(errors, expected) def test_reverse_query_name_clash(self): class Model(models.Model): model = models.ForeignKey("Model", models.CASCADE) errors = Model.check() expected = [ Error( "Reverse query name for 'Model.model' clashes with field name 'Model.model'.", hint=( "Rename field 'Model.model', or add/change a related_name " "argument to the definition for field 'Model.model'." ), obj=Model._meta.get_field('model'), id='fields.E303', ), ] self.assertEqual(errors, expected) def test_clash_under_explicit_related_name(self): class Model(models.Model): clash = models.CharField(max_length=10) foreign = models.ForeignKey("Model", models.CASCADE, related_name='clash') errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.foreign' clashes with field name 'Model.clash'.", hint=( "Rename field 'Model.clash', or add/change a related_name " "argument to the definition for field 'Model.foreign'." 
), obj=Model._meta.get_field('foreign'), id='fields.E302', ), Error( "Reverse query name for 'Model.foreign' clashes with field name 'Model.clash'.", hint=( "Rename field 'Model.clash', or add/change a related_name " "argument to the definition for field 'Model.foreign'." ), obj=Model._meta.get_field('foreign'), id='fields.E303', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class ComplexClashTests(SimpleTestCase): # New tests should not be included here, because this is a single, # self-contained sanity check, not a test of everything. def test_complex_clash(self): class Target(models.Model): tgt_safe = models.CharField(max_length=10) clash = models.CharField(max_length=10) model = models.CharField(max_length=10) clash1_set = models.CharField(max_length=10) class Model(models.Model): src_safe = models.CharField(max_length=10) foreign_1 = models.ForeignKey(Target, models.CASCADE, related_name='id') foreign_2 = models.ForeignKey(Target, models.CASCADE, related_name='src_safe') m2m_1 = models.ManyToManyField(Target, related_name='id') m2m_2 = models.ManyToManyField(Target, related_name='src_safe') errors = Model.check() expected = [ Error( "Reverse accessor for 'Model.foreign_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.foreign_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E302', ), Error( "Reverse query name for 'Model.foreign_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.foreign_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E303', ), Error( "Reverse accessor for 'Model.foreign_1' clashes with reverse accessor for 'Model.m2m_1'.", hint=("Add or change a related_name argument to " "the definition for 'Model.foreign_1' or 'Model.m2m_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E304', ), Error( 
"Reverse query name for 'Model.foreign_1' clashes with reverse query name for 'Model.m2m_1'.", hint=("Add or change a related_name argument to " "the definition for 'Model.foreign_1' or 'Model.m2m_1'."), obj=Model._meta.get_field('foreign_1'), id='fields.E305', ), Error( "Reverse accessor for 'Model.foreign_2' clashes with reverse accessor for 'Model.m2m_2'.", hint=("Add or change a related_name argument " "to the definition for 'Model.foreign_2' or 'Model.m2m_2'."), obj=Model._meta.get_field('foreign_2'), id='fields.E304', ), Error( "Reverse query name for 'Model.foreign_2' clashes with reverse query name for 'Model.m2m_2'.", hint=("Add or change a related_name argument to " "the definition for 'Model.foreign_2' or 'Model.m2m_2'."), obj=Model._meta.get_field('foreign_2'), id='fields.E305', ), Error( "Reverse accessor for 'Model.m2m_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.m2m_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E302', ), Error( "Reverse query name for 'Model.m2m_1' clashes with field name 'Target.id'.", hint=("Rename field 'Target.id', or add/change a related_name " "argument to the definition for field 'Model.m2m_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E303', ), Error( "Reverse accessor for 'Model.m2m_1' clashes with reverse accessor for 'Model.foreign_1'.", hint=("Add or change a related_name argument to the definition " "for 'Model.m2m_1' or 'Model.foreign_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E304', ), Error( "Reverse query name for 'Model.m2m_1' clashes with reverse query name for 'Model.foreign_1'.", hint=("Add or change a related_name argument to " "the definition for 'Model.m2m_1' or 'Model.foreign_1'."), obj=Model._meta.get_field('m2m_1'), id='fields.E305', ), Error( "Reverse accessor for 'Model.m2m_2' clashes with reverse accessor for 'Model.foreign_2'.", hint=("Add or change a related_name argument 
to the definition " "for 'Model.m2m_2' or 'Model.foreign_2'."), obj=Model._meta.get_field('m2m_2'), id='fields.E304', ), Error( "Reverse query name for 'Model.m2m_2' clashes with reverse query name for 'Model.foreign_2'.", hint=("Add or change a related_name argument to the definition " "for 'Model.m2m_2' or 'Model.foreign_2'."), obj=Model._meta.get_field('m2m_2'), id='fields.E305', ), ] self.assertEqual(errors, expected) @isolate_apps('invalid_models_tests') class M2mThroughFieldsTests(SimpleTestCase): def test_m2m_field_argument_validation(self): """ Tests that ManyToManyField accepts the ``through_fields`` kwarg only if an intermediary table is specified. """ class Fan(models.Model): pass with self.assertRaisesMessage(ValueError, 'Cannot specify through_fields without a through model'): models.ManyToManyField(Fan, through_fields=('f1', 'f2')) def test_invalid_order(self): """ Tests that mixing up the order of link fields to ManyToManyField.through_fields triggers validation errors. """ class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=('invitee', 'event')) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name='+') field = Event._meta.get_field('invitees') errors = field.check(from_model=Event) expected = [ Error( "'Invitation.invitee' is not a foreign key to 'Event'.", hint="Did you mean one of the following foreign keys to 'Event': event?", obj=field, id='fields.E339', ), Error( "'Invitation.event' is not a foreign key to 'Fan'.", hint="Did you mean one of the following foreign keys to 'Fan': invitee, inviter?", obj=field, id='fields.E339', ), ] self.assertEqual(expected, errors) def test_invalid_field(self): """ Tests that providing invalid field names to ManyToManyField.through_fields triggers validation errors. 
""" class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField( Fan, through='Invitation', through_fields=('invalid_field_1', 'invalid_field_2'), ) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name='+') field = Event._meta.get_field('invitees') errors = field.check(from_model=Event) expected = [ Error( "The intermediary model 'invalid_models_tests.Invitation' has no field 'invalid_field_1'.", hint="Did you mean one of the following foreign keys to 'Event': event?", obj=field, id='fields.E338', ), Error( "The intermediary model 'invalid_models_tests.Invitation' has no field 'invalid_field_2'.", hint="Did you mean one of the following foreign keys to 'Fan': invitee, inviter?", obj=field, id='fields.E338', ), ] self.assertEqual(expected, errors) def test_explicit_field_names(self): """ Tests that if ``through_fields`` kwarg is given, it must specify both link fields of the intermediary table. 
""" class Fan(models.Model): pass class Event(models.Model): invitees = models.ManyToManyField(Fan, through='Invitation', through_fields=(None, 'invitee')) class Invitation(models.Model): event = models.ForeignKey(Event, models.CASCADE) invitee = models.ForeignKey(Fan, models.CASCADE) inviter = models.ForeignKey(Fan, models.CASCADE, related_name='+') field = Event._meta.get_field('invitees') errors = field.check(from_model=Event) expected = [ Error( "Field specifies 'through_fields' but does not provide the names " "of the two link fields that should be used for the relation " "through model 'invalid_models_tests.Invitation'.", hint="Make sure you specify 'through_fields' as through_fields=('field1', 'field2')", obj=field, id='fields.E337')] self.assertEqual(expected, errors) def test_superset_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() class Meta: unique_together = (('a', 'b', 'c'),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=('a', 'b'), to_fields=('a', 'b'), related_name='children', ) field = Child._meta.get_field('parent') errors = field.check(from_model=Child) expected = [ Error( "No subset of the fields 'a', 'b' on model 'Parent' is unique.", hint=( "Add unique=True on any of those fields or add at least " "a subset of them to a unique_together constraint." 
), obj=field, id='fields.E310', ), ] self.assertEqual(expected, errors) def test_intersection_foreign_object(self): class Parent(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() c = models.PositiveIntegerField() d = models.PositiveIntegerField() class Meta: unique_together = (('a', 'b', 'c'),) class Child(models.Model): a = models.PositiveIntegerField() b = models.PositiveIntegerField() d = models.PositiveIntegerField() value = models.CharField(max_length=255) parent = ForeignObject( Parent, on_delete=models.SET_NULL, from_fields=('a', 'b', 'd'), to_fields=('a', 'b', 'd'), related_name='children', ) field = Child._meta.get_field('parent') errors = field.check(from_model=Child) expected = [ Error( "No subset of the fields 'a', 'b', 'd' on model 'Parent' is unique.", hint=( "Add unique=True on any of those fields or add at least " "a subset of them to a unique_together constraint." ), obj=field, id='fields.E310', ), ] self.assertEqual(expected, errors)
bsd-3-clause
BubuLK/sfepy
sfepy/version.py
3
1750
# SfePy version
__version__ = '2021.2'

# "Minimal" supported versions of required/optional dependencies; these are
# informative defaults consulted at configure/install time.
NUMPY_MIN_VERSION = '1.3'
SCIPY_MIN_VERSION = '0.7'
MATPLOTLIB_MIN_VERSION = '0.99.0'
PYPARSING_MIN_VERSION = '1.5.0'
PYTABLES_MIN_VERSION = '2.1.2'
MAYAVI_MIN_VERSION = '3.3.0'
SYMPY_MIN_VERSION = '0.7.3'
IGAKIT_MIN_VERSION = '0.1'
PETSC4PY_MIN_VERSION = '3.4'
SLEPC4PY_MIN_VERSION = '3.4'
MPI4PY_MIN_VERSION = '1.3.1'
PYMETIS_MIN_VERSION = '2014.1'
SCIKIT_UMFPACK_MIN_VERSION = '0.1'
MESHIO_MIN_VERSION = '4.0.0'
PSUTIL_MIN_VERSION = '1.0.0'
PYVISTA_MIN_VERSION = '0.23.0'
OPT_EINSUM_MIN_VERSION = '3.0.0'
JAX_MIN_VERSION = '0.2.0'
DASK_MIN_VERSION = '2.0.0'
CYTHON_MIN_VERSION = '0.14.1'

def get_basic_info(version=__version__):
    """
    Return SfePy installation directory information.

    Parameters
    ----------
    version : str
        The base version string. The current git commit hash is appended
        when running from a non-release (git) source tree.

    Returns
    -------
    version : str
        The (possibly git-hash-decorated) version string.
    top_dir : str
        The top level SfePy directory.
    in_source_tree : bool
        True when running from a source directory rather than an installed
        package.

    Raises
    ------
    RuntimeError
        If the top level directory cannot be determined.
    """
    import os.path as op
    from sfepy import Config

    # If installed, up_dir is '.', otherwise (in a (git) source directory)
    # '..' - detected by looking for README.rst next to the candidate dir.
    for up_dir in ['..', '.']:
        top_dir = op.normpath(op.realpath(op.join(op.dirname(__file__),
                                                  up_dir)))
        aux = op.join(top_dir, 'README.rst')
        if op.isfile(aux):
            break
    else:
        raise RuntimeError('cannot determine SfePy top level directory!')

    config = Config()
    if not config.is_release():
        # Append the current git commit hash to the version. Use a context
        # manager so the file handle is closed even if the read raises (the
        # original used a bare open()/close() pair without try/finally).
        master = op.join(top_dir, '.git/refs/heads/master')
        if op.isfile(master):
            with open(master, 'r') as fd:
                version += '+git.%s' % fd.readline().strip()

    in_source_tree = up_dir == '..'

    return version, top_dir, in_source_tree

__version__, top_dir, in_source_tree = get_basic_info(__version__)
bsd-3-clause
RafaelTorrealba/odoo
addons/event/__openerp__.py
261
2296
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

# Addon manifest for the Events Organisation module; this dict literal is
# evaluated directly by the OpenERP module loader (standard manifest schema).
{
    'name': 'Events Organisation',
    'version': '0.1',
    'website' : 'https://www.odoo.com/page/events',
    'category': 'Tools',
    'summary': 'Trainings, Conferences, Meetings, Exhibitions, Registrations',
    'description': """
Organization and management of Events.
======================================

The event module allows you to efficiently organise events and all related tasks: planification, registration tracking, attendances, etc.

Key Features
------------
* Manage your Events and Registrations
* Use emails to automatically confirm and send acknowledgements for any event registration
""",
    'author': 'OpenERP SA',
    # Modules that must be installed before this one.
    'depends': ['base_setup', 'board', 'email_template', 'marketing'],
    # Data files loaded on install/update, in declaration order (security
    # rules first, then views, data and e-mail templates).
    'data': [
        'security/event_security.xml',
        'security/ir.model.access.csv',
        'wizard/event_confirm_view.xml',
        'event_view.xml',
        'event_data.xml',
        'report/report_event_registration_view.xml',
        'res_partner_view.xml',
        'email_template.xml',
        'views/event.xml',
    ],
    # Demonstration data, only loaded into demo databases.
    'demo': [
        'event_demo.xml',
    ],
    # Legacy YAML test scenarios.
    'test': [
        'test/ui/event_users.yml',
        'test/process/event_draft2done.yml'
    ],
    'installable': True,
    'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
cabaag/serp
serp/app/tmp.py
1
1102
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'form.ui' # # Created: Wed Feb 18 22:38:35 2015 # by: PyQt5 UI code generator 5.3.1 # # WARNING! All changes made in this file will be lost! from PyQt5 import QtCore, QtGui, QtWidgets class Ui_Form(object): def setupUi(self, Form): Form.setObjectName("Form") Form.resize(640, 480) Form.setStyleSheet("background-color: rgb(0, 255, 127);") self.frame = QtWidgets.QFrame(Form) self.frame.setGeometry(QtCore.QRect(0, 0, 641, 481)) self.frame.setFrameShape(QtWidgets.QFrame.StyledPanel) self.frame.setFrameShadow(QtWidgets.QFrame.Raised) self.frame.setObjectName("frame") self.dial = QtWidgets.QDial(self.frame) self.dial.setGeometry(QtCore.QRect(20, 0, 50, 64)) self.dial.setObjectName("dial") self.retranslateUi(Form) QtCore.QMetaObject.connectSlotsByName(Form) def retranslateUi(self, Form): _translate = QtCore.QCoreApplication.translate Form.setWindowTitle(_translate("Form", "Form"))
apache-2.0
thjashin/tensorflow
tensorflow/python/training/adadelta.py
16
4468
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Adadelta for TensorFlow."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.framework import ops
from tensorflow.python.ops import math_ops
from tensorflow.python.training import optimizer
from tensorflow.python.training import training_ops


class AdadeltaOptimizer(optimizer.Optimizer):
  """Optimizer that implements the Adadelta algorithm.

  See [M. D. Zeiler](http://arxiv.org/abs/1212.5701)
  ([pdf](http://arxiv.org/pdf/1212.5701v1.pdf))
  """

  def __init__(self, learning_rate=0.001, rho=0.95, epsilon=1e-8,
               use_locking=False, name="Adadelta"):
    """Construct a new Adadelta optimizer.

    Args:
      learning_rate: A `Tensor` or a floating point value. The learning rate.
      rho: A `Tensor` or a floating point value. The decay rate.
      epsilon: A `Tensor` or a floating point value. A constant epsilon used
               to better condition the grad update.
      use_locking: If `True` use locks for update operations.
      name: Optional name prefix for the operations created when applying
        gradients.  Defaults to "Adadelta".
    """
    super(AdadeltaOptimizer, self).__init__(use_locking, name)
    self._lr = learning_rate
    self._rho = rho
    self._epsilon = epsilon

    # Tensor versions of the constructor arguments, created in _prepare().
    self._lr_t = None
    self._rho_t = None
    self._epsilon_t = None

  def _create_slots(self, var_list):
    # Per-variable state: "accum" accumulates squared gradients,
    # "accum_update" accumulates squared updates, both starting at zero.
    for v in var_list:
      self._zeros_slot(v, "accum", self._name)
      self._zeros_slot(v, "accum_update", self._name)

  def _prepare(self):
    # Convert the scalar hyperparameters to tensors once per apply call.
    self._lr_t = ops.convert_to_tensor(self._lr, name="lr")
    self._rho_t = ops.convert_to_tensor(self._rho, name="rho")
    self._epsilon_t = ops.convert_to_tensor(self._epsilon, name="epsilon")

  def _apply_dense(self, grad, var):
    # Dense update for ref variables: delegate to the fused kernel, casting
    # hyperparameters to the variable's base dtype.
    accum = self.get_slot(var, "accum")
    accum_update = self.get_slot(var, "accum_update")
    return training_ops.apply_adadelta(
        var,
        accum,
        accum_update,
        math_ops.cast(self._lr_t, var.dtype.base_dtype),
        math_ops.cast(self._rho_t, var.dtype.base_dtype),
        math_ops.cast(self._epsilon_t, var.dtype.base_dtype),
        grad,
        use_locking=self._use_locking)

  def _resource_apply_dense(self, grad, var):
    # Dense update for resource variables (passes handles, not refs). Note:
    # hyperparameters are cast to grad's dtype here, unlike _apply_dense.
    accum = self.get_slot(var, "accum")
    accum_update = self.get_slot(var, "accum_update")
    return training_ops.resource_apply_adadelta(
        var.handle,
        accum.handle,
        accum_update.handle,
        math_ops.cast(self._lr_t, grad.dtype.base_dtype),
        math_ops.cast(self._rho_t, grad.dtype.base_dtype),
        math_ops.cast(self._epsilon_t, grad.dtype.base_dtype),
        grad,
        use_locking=self._use_locking)

  def _apply_sparse(self, grad, var):
    # Sparse (IndexedSlices) update for ref variables.
    accum = self.get_slot(var, "accum")
    accum_update = self.get_slot(var, "accum_update")
    return training_ops.sparse_apply_adadelta(
        var,
        accum,
        accum_update,
        math_ops.cast(self._lr_t, var.dtype.base_dtype),
        math_ops.cast(self._rho_t, var.dtype.base_dtype),
        math_ops.cast(self._epsilon_t, var.dtype.base_dtype),
        grad.values,
        grad.indices,
        use_locking=self._use_locking)

  def _resource_apply_sparse(self, grad, var, indices):
    # Sparse update for resource variables; `grad` here is the values tensor
    # with `indices` supplied separately.
    accum = self.get_slot(var, "accum")
    accum_update = self.get_slot(var, "accum_update")
    return training_ops.resource_sparse_apply_adadelta(
        var.handle,
        accum.handle,
        accum_update.handle,
        math_ops.cast(self._lr_t, grad.dtype),
        math_ops.cast(self._rho_t, grad.dtype),
        math_ops.cast(self._epsilon_t, grad.dtype),
        grad,
        indices,
        use_locking=self._use_locking)
apache-2.0
jpinedaf/pyspeckit
docs/example_hcop.py
7
1377
import pyspeckit # load a FITS-compliant spectrum spec = pyspeckit.Spectrum('10074-190_HCOp.fits') # The units are originally frequency (check this by printing spec.xarr.units). # I want to know the velocity. Convert! # Note that this only works because the reference frequency is set in the header spec.xarr.convert_to_unit('km/s') # plot it spec.plotter() # Subtract a baseline spec.baseline() # Fit a gaussian. We know it will be an emission line, so we force a positive guess spec.specfit(negamp=False) # Note that the errors on the fits are larger than the fitted parameters. # That's because this spectrum did not have an error assigned to it. # Let's use the residuals: spec.specfit.plotresiduals() # Now, refit with error determined from the residuals: # (we pass in guesses to save time / make sure nothing changes) spec.specfit(guesses=spec.specfit.modelpars) # Save the figures to put on the web.... spec.plotter.figure.savefig("simple_fit_example_HCOp.png") spec.specfit.residualaxis.figure.savefig("simple_fit_example_HCOp_residuals.png") # Also, let's crop out stuff we don't want... spec.crop(-100,100) # replot after cropping (crop doesn't auto-refresh) spec.plotter() # replot the fit without re-fitting spec.specfit.plot_fit() # show the annotations again spec.specfit.annotate() spec.plotter.figure.savefig("simple_fit_example_HCOp_cropped.png")
mit
ampax/edx-platform
common/test/acceptance/pages/lms/bookmarks.py
28
2384
"""
Courseware Bookmarks
"""
from bok_choy.promise import EmptyPromise
from .course_page import CoursePage
from ..common.paging import PaginatedUIMixin


class BookmarksPage(CoursePage, PaginatedUIMixin):
    """
    Page object for the Courseware Bookmarks page.
    """
    url = None
    url_path = "courseware/"

    # CSS selectors for the bookmarks button, the result items, and the
    # breadcrumb trail shown on each bookmarked item.
    BOOKMARKS_BUTTON_SELECTOR = '.bookmarks-list-button'
    BOOKMARKED_ITEMS_SELECTOR = '.bookmarks-results-list .bookmarks-results-list-item'
    BOOKMARKED_BREADCRUMBS = BOOKMARKED_ITEMS_SELECTOR + ' .list-item-breadcrumbtrail'

    def is_browser_on_page(self):
        """ Verify that we are on the correct page (bookmarks button visible). """
        return self.q(css=self.BOOKMARKS_BUTTON_SELECTOR).visible

    def bookmarks_button_visible(self):
        """ Check if the bookmarks button is visible. """
        return self.q(css=self.BOOKMARKS_BUTTON_SELECTOR).visible

    def click_bookmarks_button(self, wait_for_results=True):
        """ Click on the Bookmarks button, optionally waiting for results. """
        self.q(css=self.BOOKMARKS_BUTTON_SELECTOR).first.click()
        if wait_for_results:
            EmptyPromise(self.results_present, "Bookmarks results present").fulfill()

    def results_present(self):
        """ Check if bookmarks results are present. """
        return self.q(css='#my-bookmarks').present

    def results_header_text(self):
        """ Returns the bookmarks results header text. """
        return self.q(css='.bookmarks-results-header').text[0]

    def empty_header_text(self):
        """ Returns the bookmarks empty header text. """
        return self.q(css='.bookmarks-empty-header').text[0]

    def empty_list_text(self):
        """ Returns the bookmarks empty list text. """
        return self.q(css='.bookmarks-empty-detail-title').text[0]

    def count(self):
        """ Returns the total number of bookmarks in the list. """
        return len(self.q(css=self.BOOKMARKED_ITEMS_SELECTOR).results)

    def breadcrumbs(self):
        """
        Return a list of breadcrumbs for all bookmarks.

        Each entry is the breadcrumb text split on '-' with newlines removed.
        """
        breadcrumbs = self.q(css=self.BOOKMARKED_BREADCRUMBS).text
        return [breadcrumb.replace('\n', '').split('-') for breadcrumb in breadcrumbs]

    def click_bookmarked_block(self, index):
        """
        Click on the bookmarked block at index `index`.

        Arguments:
            index (int): bookmark index in the list
        """
        self.q(css=self.BOOKMARKED_ITEMS_SELECTOR).nth(index).click()
agpl-3.0
googleapis/googleapis-gen
google/cloud/talent/v4beta1/talent-v4beta1-py/google/cloud/talent_v4beta1/services/completion/transports/__init__.py
2
1158
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type

from .base import CompletionTransport
from .grpc import CompletionGrpcTransport
from .grpc_asyncio import CompletionGrpcAsyncIOTransport

# Registry of available transports, keyed by transport name. Built from an
# ordered sequence of pairs so insertion order ('grpc' first) is explicit.
_transport_registry = OrderedDict(
    (
        ('grpc', CompletionGrpcTransport),
        ('grpc_asyncio', CompletionGrpcAsyncIOTransport),
    )
)  # type: Dict[str, Type[CompletionTransport]]

__all__ = (
    'CompletionTransport',
    'CompletionGrpcTransport',
    'CompletionGrpcAsyncIOTransport',
)
apache-2.0
sdgathman/cjdns
node_build/dependencies/libuv/build/gyp/test/rename/gyptest-filecase.py
320
1110
#!/usr/bin/env python # Copyright (c) 2012 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Checks that files whose file case changes get rebuilt correctly. """ import os import TestGyp test = TestGyp.TestGyp() CHDIR = 'filecase' test.run_gyp('test.gyp', chdir=CHDIR) test.build('test.gyp', test.ALL, chdir=CHDIR) os.rename('filecase/file.c', 'filecase/fIlE.c') test.write('filecase/test.gyp', test.read('filecase/test.gyp').replace('file.c', 'fIlE.c')) test.run_gyp('test.gyp', chdir=CHDIR) test.build('test.gyp', test.ALL, chdir=CHDIR) # Check that having files that differ just in their case still work on # case-sensitive file systems. test.write('filecase/FiLe.c', 'int f(); int main() { return f(); }') test.write('filecase/fIlE.c', 'int f() { return 42; }') is_case_sensitive = test.read('filecase/FiLe.c') != test.read('filecase/fIlE.c') if is_case_sensitive: test.run_gyp('test-casesensitive.gyp', chdir=CHDIR) test.build('test-casesensitive.gyp', test.ALL, chdir=CHDIR) test.pass_test()
gpl-3.0
ApsOps/zulip
zproject/settings.py
73
33382
# Django settings for zulip project. ######################################################################## # Here's how settings for the Zulip project work: # # * settings.py contains non-site-specific and settings configuration # for the Zulip Django app. # * settings.py imports local_settings.py, and any site-specific configuration # belongs there. The template for local_settings.py is local_settings_template.py ######################################################################## import os import platform import time import sys import ConfigParser from zerver.lib.db import TimeTrackingConnection ######################################################################## # INITIAL SETTINGS ######################################################################## config_file = ConfigParser.RawConfigParser() config_file.read("/etc/zulip/zulip.conf") # Whether this instance of Zulip is running in a production environment. PRODUCTION = config_file.has_option('machine', 'deploy_type') DEVELOPMENT = not PRODUCTION secrets_file = ConfigParser.RawConfigParser() if PRODUCTION: secrets_file.read("/etc/zulip/zulip-secrets.conf") else: secrets_file.read("zproject/dev-secrets.conf") def get_secret(key): if secrets_file.has_option('secrets', key): return secrets_file.get('secrets', key) return None # Make this unique, and don't share it with anybody. SECRET_KEY = get_secret("secret_key") # A shared secret, used to authenticate different parts of the app to each other. SHARED_SECRET = get_secret("shared_secret") # We use this salt to hash a user's email into a filename for their user-uploaded # avatar. If this salt is discovered, attackers will only be able to determine # that the owner of an email account has uploaded an avatar to Zulip, which isn't # the end of the world. Don't use the salt where there is more security exposure. 
AVATAR_SALT = get_secret("avatar_salt") # SERVER_GENERATION is used to track whether the server has been # restarted for triggering browser clients to reload. SERVER_GENERATION = int(time.time()) if not 'DEBUG' in globals(): # Uncomment end of next line to test JS/CSS minification. DEBUG = DEVELOPMENT # and platform.node() != 'your-machine' TEMPLATE_DEBUG = DEBUG if DEBUG: INTERNAL_IPS = ('127.0.0.1',) # Detect whether we're running as a queue worker; this impacts the logging configuration. if len(sys.argv) > 2 and sys.argv[0].endswith('manage.py') and sys.argv[1] == 'process_queue': IS_WORKER = True else: IS_WORKER = False # This is overridden in test_settings.py for the test suites TEST_SUITE = False # The new user tutorial is enabled by default, but disabled for client tests. TUTORIAL_ENABLED = True # Import variables like secrets from the local_settings file # Import local_settings after determining the deployment/machine type if PRODUCTION: from local_settings import * else: # For the Dev VM environment, we use the same settings as the # sample local_settings.py file, with a few exceptions. 
from local_settings_template import * EXTERNAL_HOST = 'localhost:9991' ALLOWED_HOSTS = ['localhost'] AUTHENTICATION_BACKENDS = ('zproject.backends.DevAuthBackend',) # Add some of the below if you're testing other backends # AUTHENTICATION_BACKENDS = ('zproject.backends.EmailAuthBackend', # 'zproject.backends.GoogleMobileOauth2Backend',) EXTERNAL_URI_SCHEME = "http://" EMAIL_GATEWAY_PATTERN = "%s@" + EXTERNAL_HOST ADMIN_DOMAIN = "zulip.com" NOTIFICATION_BOT = "notification-bot@zulip.com" ERROR_BOT = "error-bot@zulip.com" NEW_USER_BOT = "new-user-bot@zulip.com" EMAIL_GATEWAY_BOT = "emailgateway@zulip.com" ######################################################################## # DEFAULT VALUES FOR SETTINGS ######################################################################## # For any settings that are not defined in local_settings.py, # we want to initialize them to sane default DEFAULT_SETTINGS = {'TWITTER_CONSUMER_KEY': '', 'TWITTER_CONSUMER_SECRET': '', 'TWITTER_ACCESS_TOKEN_KEY': '', 'TWITTER_ACCESS_TOKEN_SECRET': '', 'EMAIL_GATEWAY_PATTERN': '', 'EMAIL_GATEWAY_EXAMPLE': '', 'EMAIL_GATEWAY_BOT': None, 'EMAIL_GATEWAY_LOGIN': None, 'EMAIL_GATEWAY_PASSWORD': None, 'EMAIL_GATEWAY_IMAP_SERVER': None, 'EMAIL_GATEWAY_IMAP_PORT': None, 'EMAIL_GATEWAY_IMAP_FOLDER': None, 'MANDRILL_API_KEY': '', 'S3_KEY': '', 'S3_SECRET_KEY': '', 'S3_BUCKET': '', 'S3_AVATAR_BUCKET': '', 'LOCAL_UPLOADS_DIR': None, 'DROPBOX_APP_KEY': '', 'ERROR_REPORTING': True, 'JWT_AUTH_KEYS': {}, 'NAME_CHANGES_DISABLED': False, 'DEPLOYMENT_ROLE_NAME': "", # The following bots only exist in non-VOYAGER installs 'ERROR_BOT': None, 'NEW_USER_BOT': None, 'NAGIOS_STAGING_SEND_BOT': None, 'NAGIOS_STAGING_RECEIVE_BOT': None, 'APNS_CERT_FILE': None, 'ANDROID_GCM_API_KEY': None, 'INITIAL_PASSWORD_SALT': None, 'FEEDBACK_BOT': 'feedback@zulip.com', 'FEEDBACK_BOT_NAME': 'Zulip Feedback Bot', 'API_SUPER_USERS': set(), 'ADMINS': '', 'INLINE_IMAGE_PREVIEW': True, 'CAMO_URI': '', 'ENABLE_FEEDBACK': PRODUCTION, 
'FEEDBACK_EMAIL': None, 'ENABLE_GRAVATAR': True, 'DEFAULT_AVATAR_URI': '/static/images/default-avatar.png', 'AUTH_LDAP_SERVER_URI': "", 'EXTERNAL_URI_SCHEME': "https://", 'ZULIP_COM': False, 'ZULIP_COM_STAGING': False, 'STATSD_HOST': '', 'REMOTE_POSTGRES_HOST': '', 'GOOGLE_CLIENT_ID': '', 'DBX_APNS_CERT_FILE': None, } for setting_name, setting_val in DEFAULT_SETTINGS.iteritems(): if not setting_name in vars(): vars()[setting_name] = setting_val # These are the settings that we will check that the user has filled in for # production deployments before starting the app. It consists of a series # of pairs of (setting name, default value that it must be changed from) REQUIRED_SETTINGS = [("EXTERNAL_HOST", "zulip.example.com"), ("ZULIP_ADMINISTRATOR", "zulip-admin@example.com"), ("ADMIN_DOMAIN", "example.com"), # SECRET_KEY doesn't really need to be here, in # that we set it automatically, but just in # case, it seems worth having in this list ("SECRET_KEY", ""), ("AUTHENTICATION_BACKENDS", ()), ("NOREPLY_EMAIL_ADDRESS", "noreply@example.com"), ("DEFAULT_FROM_EMAIL", "Zulip <zulip@example.com>"), ("ALLOWED_HOSTS", "*"), ] if ADMINS == "": ADMINS = (("Zulip Administrator", ZULIP_ADMINISTRATOR),) MANAGERS = ADMINS # Voyager is a production zulip server that is not zulip.com or # staging.zulip.com VOYAGER is the standalone all-on-one-server # production deployment model for based on the original Zulip # ENTERPRISE implementation. We expect most users of the open source # project will be using VOYAGER=True in production. VOYAGER = PRODUCTION and not ZULIP_COM ######################################################################## # STANDARD DJANGO SETTINGS ######################################################################## # Local time zone for this installation. Choices can be found here: # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # although not all choices may be available on all operating systems. 
# In a Windows environment this must be set to your system time zone. TIME_ZONE = 'America/New_York' # Language code for this installation. All choices can be found here: # http://www.i18nguy.com/unicode/language-identifiers.html LANGUAGE_CODE = 'en-us' # The ID, as an integer, of the current site in the django_site database table. # This is used so that application data can hook into specific site(s) and a # single database can manage content for multiple sites. # # We set this site's domain to 'zulip.com' in populate_db. SITE_ID = 1 # If you set this to False, Django will make some optimizations so as not # to load the internationalization machinery. USE_I18N = True # If you set this to False, Django will not format dates, numbers and # calendars according to the current locale. USE_L10N = True # If you set this to False, Django will not use timezone-aware datetimes. USE_TZ = True DEPLOY_ROOT = os.path.join(os.path.realpath(os.path.dirname(__file__)), '..') TEMPLATE_DIRS = ( os.path.join(DEPLOY_ROOT, 'templates'), ) # Make redirects work properly behind a reverse proxy USE_X_FORWARDED_HOST = True # List of callables that know how to import templates from various sources. TEMPLATE_LOADERS = ( 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ) if PRODUCTION: # Template caching is a significant performance win in production. TEMPLATE_LOADERS = ( ('django.template.loaders.cached.Loader', TEMPLATE_LOADERS), ) MIDDLEWARE_CLASSES = ( # Our logging middleware should be the first middleware item. 
'zerver.middleware.TagRequests', 'zerver.middleware.LogRequests', 'zerver.middleware.JsonErrorHandler', 'zerver.middleware.RateLimitMiddleware', 'zerver.middleware.FlushDisplayRecipientCache', 'django.middleware.common.CommonMiddleware', 'zerver.middleware.SessionHostDomainMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', ) ANONYMOUS_USER_ID = None AUTH_USER_MODEL = "zerver.UserProfile" TEST_RUNNER = 'zerver.lib.test_runner.Runner' ROOT_URLCONF = 'zproject.urls' # Python dotted path to the WSGI application used by Django's runserver. WSGI_APPLICATION = 'zproject.wsgi.application' INSTALLED_APPS = [ 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.sites', 'django.contrib.staticfiles', 'confirmation', 'guardian', 'pipeline', 'zerver', ] if not VOYAGER: INSTALLED_APPS += [ 'analytics', 'zilencer', ] # Base URL of the Tornado server # We set it to None when running backend tests or populate_db. # We override the port number when running frontend tests. 
TORNADO_SERVER = 'http://localhost:9993' RUNNING_INSIDE_TORNADO = False ######################################################################## # DATABASE CONFIGURATION ######################################################################## DATABASES = {"default": { 'ENGINE': 'django.db.backends.postgresql_psycopg2', 'NAME': 'zulip', 'USER': 'zulip', 'PASSWORD': '', # Authentication done via certificates 'HOST': '', # Host = '' => connect through a local socket 'SCHEMA': 'zulip', 'CONN_MAX_AGE': 600, 'OPTIONS': { 'connection_factory': TimeTrackingConnection }, }, } if DEVELOPMENT: LOCAL_DATABASE_PASSWORD = get_secret("local_database_password") DATABASES["default"].update({ 'PASSWORD': LOCAL_DATABASE_PASSWORD, 'HOST': 'localhost' }) elif REMOTE_POSTGRES_HOST != '': DATABASES['default'].update({ 'HOST': REMOTE_POSTGRES_HOST, }) DATABASES['default']['OPTIONS']['sslmode'] = 'verify-full' ######################################################################## # RABBITMQ CONFIGURATION ######################################################################## USING_RABBITMQ = True RABBITMQ_USERNAME = 'zulip' RABBITMQ_PASSWORD = get_secret("rabbitmq_password") ######################################################################## # CACHING CONFIGURATION ######################################################################## SESSION_ENGINE = "django.contrib.sessions.backends.cached_db" CACHES = { 'default': { 'BACKEND': 'django.core.cache.backends.memcached.PyLibMCCache', 'LOCATION': '127.0.0.1:11211', 'TIMEOUT': 3600 }, 'database': { 'BACKEND': 'django.core.cache.backends.db.DatabaseCache', 'LOCATION': 'third_party_api_results', # Basically never timeout. 
Setting to 0 isn't guaranteed # to work, see https://code.djangoproject.com/ticket/9595 'TIMEOUT': 2000000000, 'OPTIONS': { 'MAX_ENTRIES': 100000000, 'CULL_FREQUENCY': 10, } }, } ######################################################################## # REDIS-BASED RATE LIMITING CONFIGURATION ######################################################################## RATE_LIMITING = True REDIS_HOST = '127.0.0.1' REDIS_PORT = 6379 RATE_LIMITING_RULES = [ (60, 100), # 100 requests max every minute ] ######################################################################## # SECURITY SETTINGS ######################################################################## # Tell the browser to never send our cookies without encryption, e.g. # when executing the initial http -> https redirect. # # Turn it off for local testing because we don't have SSL. if PRODUCTION: SESSION_COOKIE_SECURE = True CSRF_COOKIE_SECURE = True try: # For get_updates hostname sharding. domain = config_file.get('django', 'cookie_domain') SESSION_COOKIE_DOMAIN = '.' + domain CSRF_COOKIE_DOMAIN = '.' + domain except ConfigParser.Error: # Failing here is OK pass # Prevent Javascript from reading the CSRF token from cookies. Our code gets # the token from the DOM, which means malicious code could too. But hiding the # cookie will slow down some attackers. CSRF_COOKIE_PATH = '/;HttpOnly' CSRF_FAILURE_VIEW = 'zerver.middleware.csrf_failure' if DEVELOPMENT: # Use fast password hashing for creating testing users when not # PRODUCTION. Saves a bunch of time. 
PASSWORD_HASHERS = ( 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.PBKDF2PasswordHasher' ) # Also we auto-generate passwords for the default users which you # can query using ./manage.py print_initial_password INITIAL_PASSWORD_SALT = get_secret("initial_password_salt") ######################################################################## # API/BOT SETTINGS ######################################################################## if "EXTERNAL_API_PATH" not in vars(): EXTERNAL_API_PATH = EXTERNAL_HOST + "/api" EXTERNAL_API_URI = EXTERNAL_URI_SCHEME + EXTERNAL_API_PATH S3_KEY = get_secret("s3_key") S3_SECRET_KEY = get_secret("s3_secret_key") # GCM tokens are IP-whitelisted; if we deploy to additional # servers you will need to explicitly add their IPs here: # https://cloud.google.com/console/project/apps~zulip-android/apiui/credential ANDROID_GCM_API_KEY = get_secret("android_gcm_api_key") GOOGLE_OAUTH2_CLIENT_SECRET = get_secret('google_oauth2_client_secret') DROPBOX_APP_KEY = get_secret("dropbox_app_key") MAILCHIMP_API_KEY = get_secret("mailchimp_api_key") # This comes from our mandrill accounts page MANDRILL_API_KEY = get_secret("mandrill_api_key") # Twitter API credentials # Secrecy not required because its only used for R/O requests. # Please don't make us go over our rate limit. TWITTER_CONSUMER_KEY = get_secret("twitter_consumer_key") TWITTER_CONSUMER_SECRET = get_secret("twitter_consumer_secret") TWITTER_ACCESS_TOKEN_KEY = get_secret("twitter_access_token_key") TWITTER_ACCESS_TOKEN_SECRET = get_secret("twitter_access_token_secret") # These are the bots that Zulip sends automated messages as. 
INTERNAL_BOTS = [ {'var_name': 'NOTIFICATION_BOT', 'email_template': 'notification-bot@%s', 'name': 'Notification Bot'}, {'var_name': 'EMAIL_GATEWAY_BOT', 'email_template': 'emailgateway@%s', 'name': 'Email Gateway'}, {'var_name': 'NAGIOS_SEND_BOT', 'email_template': 'nagios-send-bot@%s', 'name': 'Nagios Send Bot'}, {'var_name': 'NAGIOS_RECEIVE_BOT', 'email_template': 'nagios-receive-bot@%s', 'name': 'Nagios Receive Bot'}, {'var_name': 'WELCOME_BOT', 'email_template': 'welcome-bot@%s', 'name': 'Welcome Bot'} ] INTERNAL_BOT_DOMAIN = "zulip.com" # Set the realm-specific bot names for bot in INTERNAL_BOTS: if not bot['var_name'] in vars(): bot_email = bot['email_template'] % (INTERNAL_BOT_DOMAIN,) vars()[bot['var_name'] ] = bot_email if EMAIL_GATEWAY_BOT not in API_SUPER_USERS: API_SUPER_USERS.add(EMAIL_GATEWAY_BOT) if EMAIL_GATEWAY_PATTERN != "": EMAIL_GATEWAY_EXAMPLE = EMAIL_GATEWAY_PATTERN % ("support+abcdefg",) DEPLOYMENT_ROLE_KEY = get_secret("deployment_role_key") if PRODUCTION: FEEDBACK_TARGET="https://zulip.com/api" else: FEEDBACK_TARGET="http://localhost:9991/api" ######################################################################## # STATSD CONFIGURATION ######################################################################## # Statsd is not super well supported; if you want to use it you'll need # to set STATSD_HOST and STATSD_PREFIX. 
if STATSD_HOST != '': INSTALLED_APPS += ['django_statsd'] STATSD_PORT = 8125 STATSD_CLIENT = 'django_statsd.clients.normal' ######################################################################## # CAMO HTTPS CACHE CONFIGURATION ######################################################################## if CAMO_URI != '': # This needs to be synced with the Camo installation CAMO_KEY = get_secret("camo_key") ######################################################################## # STATIC CONTENT AND MINIFICATION SETTINGS ######################################################################## STATIC_URL = '/static/' # ZulipStorage is a modified version of PipelineCachedStorage, # and, like that class, it inserts a file hash into filenames # to prevent the browser from using stale files from cache. # # Unlike PipelineStorage, it requires the files to exist in # STATIC_ROOT even for dev servers. So we only use # ZulipStorage when not DEBUG. # This is the default behavior from Pipeline, but we set it # here so that urls.py can read it. PIPELINE = not DEBUG if DEBUG: STATICFILES_STORAGE = 'pipeline.storage.PipelineStorage' STATICFILES_FINDERS = ( 'django.contrib.staticfiles.finders.AppDirectoriesFinder', ) if PIPELINE: STATIC_ROOT = 'prod-static/serve' else: STATIC_ROOT = 'static/' else: STATICFILES_STORAGE = 'zerver.storage.ZulipStorage' STATICFILES_FINDERS = ( 'zerver.finders.ZulipFinder', ) if PRODUCTION: STATIC_ROOT = '/home/zulip/prod-static' else: STATIC_ROOT = 'prod-static/serve' # We want all temporary uploaded files to be stored on disk. FILE_UPLOAD_MAX_MEMORY_SIZE = 0 STATICFILES_DIRS = ['static/'] STATIC_HEADER_FILE = 'zerver/static_header.txt' # To use minified files in dev, set PIPELINE = True. For the full # cache-busting behavior, you must also set DEBUG = False. # # You will need to run update-prod-static after changing # static files. 
PIPELINE_CSS = { 'activity': { 'source_filenames': ('styles/activity.css',), 'output_filename': 'min/activity.css' }, 'portico': { 'source_filenames': ( 'third/zocial/zocial.css', 'styles/portico.css', 'styles/pygments.css', 'styles/thirdparty-fonts.css', 'styles/fonts.css', ), 'output_filename': 'min/portico.css' }, # Two versions of the app CSS exist because of QTBUG-3467 'app-fontcompat': { 'source_filenames': ( 'third/bootstrap-notify/css/bootstrap-notify.css', 'third/spectrum/spectrum.css', 'styles/zulip.css', 'styles/pygments.css', 'styles/thirdparty-fonts.css', # We don't want fonts.css on QtWebKit, so its omitted here ), 'output_filename': 'min/app-fontcompat.css' }, 'app': { 'source_filenames': ( 'third/bootstrap-notify/css/bootstrap-notify.css', 'third/spectrum/spectrum.css', 'third/jquery-perfect-scrollbar/css/perfect-scrollbar.css', 'styles/zulip.css', 'styles/pygments.css', 'styles/thirdparty-fonts.css', 'styles/fonts.css', ), 'output_filename': 'min/app.css' }, 'common': { 'source_filenames': ( 'third/bootstrap/css/bootstrap.css', 'third/bootstrap/css/bootstrap-btn.css', 'third/bootstrap/css/bootstrap-responsive.css', ), 'output_filename': 'min/common.css' }, } JS_SPECS = { 'common': { 'source_filenames': ( 'third/jquery/jquery-1.7.2.js', 'third/underscore/underscore.js', 'js/blueslip.js', 'third/bootstrap/js/bootstrap.js', 'js/common.js', ), 'output_filename': 'min/common.js' }, 'signup': { 'source_filenames': ( 'js/signup.js', 'third/jquery-validate/jquery.validate.js', ), 'output_filename': 'min/signup.js' }, 'initial_invite': { 'source_filenames': ( 'third/jquery-validate/jquery.validate.js', 'js/initial_invite.js', ), 'output_filename': 'min/initial_invite.js' }, 'api': { 'source_filenames': ('js/api.js',), 'output_filename': 'min/api.js' }, 'app_debug': { 'source_filenames': ('js/debug.js',), 'output_filename': 'min/app_debug.js' }, 'app': { 'source_filenames': [ 'third/bootstrap-notify/js/bootstrap-notify.js', 
'third/html5-formdata/formdata.js', 'third/jquery-validate/jquery.validate.js', 'third/jquery-form/jquery.form.js', 'third/jquery-filedrop/jquery.filedrop.js', 'third/jquery-caret/jquery.caret.1.02.js', 'third/xdate/xdate.dev.js', 'third/spin/spin.js', 'third/jquery-mousewheel/jquery.mousewheel.js', 'third/jquery-throttle-debounce/jquery.ba-throttle-debounce.js', 'third/jquery-idle/jquery.idle.js', 'third/jquery-autosize/jquery.autosize.js', 'third/jquery-perfect-scrollbar/js/perfect-scrollbar.js', 'third/lazyload/lazyload.js', 'third/spectrum/spectrum.js', 'third/winchan/winchan.js', 'third/sockjs/sockjs-0.3.4.js', 'third/handlebars/handlebars.runtime.js', 'third/marked/lib/marked.js', 'templates/compiled.js', 'js/feature_flags.js', 'js/loading.js', 'js/util.js', 'js/dict.js', 'js/localstorage.js', 'js/channel.js', 'js/setup.js', 'js/muting.js', 'js/muting_ui.js', 'js/viewport.js', 'js/rows.js', 'js/unread.js', 'js/stream_list.js', 'js/filter.js', 'js/narrow.js', 'js/reload.js', 'js/compose_fade.js', 'js/fenced_code.js', 'js/echo.js', 'js/socket.js', 'js/compose.js', 'js/stream_color.js', 'js/admin.js', 'js/stream_data.js', 'js/subs.js', 'js/message_edit.js', 'js/condense.js', 'js/resize.js', 'js/floating_recipient_bar.js', 'js/ui.js', 'js/click_handlers.js', 'js/scroll_bar.js', 'js/gear_menu.js', 'js/copy_and_paste.js', 'js/popovers.js', 'js/typeahead_helper.js', 'js/search_suggestion.js', 'js/search.js', 'js/composebox_typeahead.js', 'js/navigate.js', 'js/hotkey.js', 'js/favicon.js', 'js/notifications.js', 'js/hashchange.js', 'js/invite.js', 'js/message_list_view.js', 'js/message_list.js', 'js/message_flags.js', 'js/alert_words.js', 'js/alert_words_ui.js', 'js/people.js', 'js/message_store.js', 'js/server_events.js', 'js/zulip.js', 'js/activity.js', 'js/colorspace.js', 'js/timerender.js', 'js/tutorial.js', 'js/templates.js', 'js/avatar.js', 'js/settings.js', 'js/tab_bar.js', 'js/emoji.js', 'js/referral.js', 'js/custom_markdown.js', 'js/bot_data.js', ], 
'output_filename': 'min/app.js' }, 'activity': { 'source_filenames': ( 'third/sorttable/sorttable.js', ), 'output_filename': 'min/activity.js' }, # We also want to minify sockjs separately for the sockjs iframe transport 'sockjs': { 'source_filenames': ('third/sockjs/sockjs-0.3.4.js',), 'output_filename': 'min/sockjs-0.3.4.min.js' }, } app_srcs = JS_SPECS['app']['source_filenames'] PIPELINE_JS = {} # Now handled in tools/minify-js PIPELINE_JS_COMPRESSOR = None PIPELINE_CSS_COMPRESSOR = 'pipeline.compressors.yui.YUICompressor' PIPELINE_YUI_BINARY = '/usr/bin/env yui-compressor' ######################################################################## # LOGGING SETTINGS ######################################################################## ZULIP_PATHS = [ ("SERVER_LOG_PATH", "/var/log/zulip/server.log"), ("ERROR_FILE_LOG_PATH", "/var/log/zulip/errors.log"), ("MANAGEMENT_LOG_PATH", "/var/log/zulip/manage.log"), ("WORKER_LOG_PATH", "/var/log/zulip/workers.log"), ("PERSISTENT_QUEUE_FILENAME", "/home/zulip/tornado/event_queues.pickle"), ("JSON_PERSISTENT_QUEUE_FILENAME", "/home/zulip/tornado/event_queues.json"), ("EMAIL_MIRROR_LOG_PATH", "/var/log/zulip/email-mirror.log"), ("EMAIL_DELIVERER_LOG_PATH", "/var/log/zulip/email-deliverer.log"), ("LDAP_SYNC_LOG_PATH", "/var/log/zulip/sync_ldap_user_data.log"), ("QUEUE_ERROR_DIR", "/var/log/zulip/queue_error"), ("STATS_DIR", "/home/zulip/stats"), ("DIGEST_LOG_PATH", "/var/log/zulip/digest.log"), ] # The Event log basically logs most significant database changes, # which can be useful for debugging. 
if VOYAGER: EVENT_LOG_DIR = None else: ZULIP_PATHS.append(("EVENT_LOG_DIR", "/home/zulip/logs/event_log")) for (var, path) in ZULIP_PATHS: if DEVELOPMENT: # if DEVELOPMENT, store these files in the Zulip checkout path = os.path.basename(path) vars()[var] = path ZULIP_WORKER_TEST_FILE = '/tmp/zulip-worker-test-file' if IS_WORKER: FILE_LOG_PATH = WORKER_LOG_PATH else: FILE_LOG_PATH = SERVER_LOG_PATH LOGGING = { 'version': 1, 'disable_existing_loggers': True, 'formatters': { 'default': { 'format': '%(asctime)s %(levelname)-8s %(message)s' } }, 'filters': { 'ZulipLimiter': { '()': 'zerver.lib.logging_util.ZulipLimiter', }, 'EmailLimiter': { '()': 'zerver.lib.logging_util.EmailLimiter', }, 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse', }, 'nop': { '()': 'zerver.lib.logging_util.ReturnTrue', }, 'require_really_deployed': { '()': 'zerver.lib.logging_util.RequireReallyDeployed', }, }, 'handlers': { 'zulip_admins': { 'level': 'ERROR', 'class': 'zerver.handlers.AdminZulipHandler', # For testing the handler delete the next line 'filters': ['ZulipLimiter', 'require_debug_false', 'require_really_deployed'], 'formatter': 'default' }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'default' }, 'file': { 'level': 'DEBUG', 'class': 'logging.handlers.TimedRotatingFileHandler', 'formatter': 'default', 'filename': FILE_LOG_PATH, 'when': 'D', 'interval': 7, 'backupCount': 100000000, }, 'errors_file': { 'level': 'WARNING', 'class': 'logging.handlers.TimedRotatingFileHandler', 'formatter': 'default', 'filename': ERROR_FILE_LOG_PATH, 'when': 'D', 'interval': 7, 'backupCount': 100000000, }, }, 'loggers': { '': { 'handlers': ['console', 'file', 'errors_file'], 'level': 'INFO', 'propagate': False, }, 'django': { 'handlers': (['zulip_admins'] if ERROR_REPORTING else []) + ['console', 'file', 'errors_file'], 'level': 'INFO', 'propagate': False, }, 'zulip.requests': { 'handlers': ['console', 'file', 'errors_file'], 'level': 'INFO', 
'propagate': False, }, 'zulip.management': { 'handlers': ['file', 'errors_file'], 'level': 'INFO', 'propagate': False, }, ## Uncomment the following to get all database queries logged to the console # 'django.db': { # 'handlers': ['console'], # 'level': 'DEBUG', # 'propagate': False, # }, } } TEMPLATE_CONTEXT_PROCESSORS = ( 'zerver.context_processors.add_settings', 'zerver.context_processors.add_metrics', ) ACCOUNT_ACTIVATION_DAYS=7 LOGIN_REDIRECT_URL='/' # Client-side polling timeout for get_events, in milliseconds. # We configure this here so that the client test suite can override it. # We already kill the connection server-side with heartbeat events, # but it's good to have a safety. This value should be greater than # (HEARTBEAT_MIN_FREQ_SECS + 10) POLL_TIMEOUT = 90 * 1000 # iOS App IDs ZULIP_IOS_APP_ID = 'com.zulip.Zulip' DBX_IOS_APP_ID = 'com.dropbox.Zulip' ######################################################################## # SSO AND LDAP SETTINGS ######################################################################## USING_APACHE_SSO = ('zproject.backends.ZulipRemoteUserBackend' in AUTHENTICATION_BACKENDS) if (len(AUTHENTICATION_BACKENDS) == 1 and AUTHENTICATION_BACKENDS[0] == "zproject.backends.ZulipRemoteUserBackend"): HOME_NOT_LOGGED_IN = "/accounts/login/sso" ONLY_SSO = True else: HOME_NOT_LOGGED_IN = '/login' ONLY_SSO = False AUTHENTICATION_BACKENDS += ('guardian.backends.ObjectPermissionBackend',) AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipDummyBackend',) POPULATE_PROFILE_VIA_LDAP = bool(AUTH_LDAP_SERVER_URI) if POPULATE_PROFILE_VIA_LDAP and \ not 'zproject.backends.ZulipLDAPAuthBackend' in AUTHENTICATION_BACKENDS: AUTHENTICATION_BACKENDS += ('zproject.backends.ZulipLDAPUserPopulator',) else: POPULATE_PROFILE_VIA_LDAP = 'zproject.backends.ZulipLDAPAuthBackend' in AUTHENTICATION_BACKENDS or POPULATE_PROFILE_VIA_LDAP ######################################################################## # EMAIL SETTINGS 
######################################################################## # If an email host is not specified, fail silently and gracefully if not EMAIL_HOST and PRODUCTION: EMAIL_BACKEND = 'django.core.mail.backends.dummy.EmailBackend' elif DEVELOPMENT: # In the dev environment, emails are printed to the run-dev.py console. EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend' else: EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend' EMAIL_HOST_PASSWORD = get_secret('email_password') ######################################################################## # MISC SETTINGS ######################################################################## if PRODUCTION: # Filter out user data DEFAULT_EXCEPTION_REPORTER_FILTER = 'zerver.filters.ZulipExceptionReporterFilter' # This is a debugging option only PROFILE_ALL_REQUESTS = False CROSS_REALM_BOT_EMAILS = set(('feedback@zulip.com', 'notification-bot@zulip.com'))
apache-2.0
bojanbog/orbital-academy
python_version/simulation.py
1
1942
from body import Body


class Simulation(object):
    """Holds the orbiting bodies plus the clock/visualisation state of one run."""

    MOUNTAIN_HEIGHT = 1E06  # 1000 km

    def __init__(self, num_random_objs):
        self.bodies = []
        self.selected_body = None
        if num_random_objs > 0:
            # Two fixed circular equatorial orbits first, random orbits for the rest.
            self.selected_body = 0
            self.bodies.append(Body.generate_circular_equatorial_orbit(6.0E5, (0.0, 1.0, 1.0, 1.0)))
            self.bodies.append(Body.generate_circular_equatorial_orbit(1.2E6))
            for _ in xrange(num_random_objs - 2):
                self.bodies.append(Body.generate_random_orbit())
        self.pos_viz_mode = Body.POSITION_VISUALISATIONS['symbol']
        self.orbit_viz_mode = Body.ORBIT_VISUALISATIONS['all']
        self.set_defaults()

    def set_defaults(self):
        """Reset clock, state machine and drawing flags to their initial values."""
        self.state = 'pre-run'
        self.time = 0.0
        self.time_step = 10.0
        self.time_barrier = 1.0E15
        self.draw_atmosphere = True
        self.draw_mountain = False
        self.planet_transparent = True

    def current_body(self):
        """Return the currently selected Body, or None when nothing is selected."""
        if self.selected_body is None:
            return None
        return self.bodies[self.selected_body]

    def start(self):
        # Only a fresh or paused simulation may (re)start.
        if self.state in ('pre-run', 'paused'):
            self.state = 'running'

    def pause(self):
        if self.state == 'running':
            self.state = 'paused'

    def step_time(self):
        """Advance the clock by one step; any non-running state finishes the run."""
        if self.state == 'finished':
            return
        if self.state == 'running' and self.time < self.time_barrier:
            self.time += self.time_step
        else:
            self.state = 'finished'

    def forward_time(self, t):
        """Jump the clock straight to `t` and recompute every body's state vectors."""
        self.time = t
        for body in self.bodies:
            body.calc_state_vectors(t)

# iss.a = 415000 + 42000 + 2 * 6.371E6
# iss.T = 5575.12
# iss.e = 0.0003706
# iss.i = math.radians(51.647)
# iss.o = math.radians(284.7313)
# iss.w = math.radians(60.0528)
mit
Neuvoo/legacy-portage
pym/portage/elog/messages.py
2
3849
# elog/messages.py - elog core functions
# Copyright 2006-2009 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2

import portage
portage.proxy.lazyimport.lazyimport(globals(),
	'portage.output:colorize',
	'portage.util:writemsg',
)

from portage.const import EBUILD_PHASES
from portage.localization import _
from portage import os
from portage import _encodings
from portage import _unicode_encode
from portage import _unicode_decode

import codecs
import sys

def collect_ebuild_messages(path):
	""" Collect elog messages generated by the bash logging function stored
	at 'path'.

	Returns a dict mapping ebuild phase name -> list of
	(message type, [message lines]) entries.  Log files whose name is not
	a known ebuild phase are reported and skipped.  All processed log
	files are unlinked afterwards so messages are not collected twice.
	"""
	mylogfiles = None
	try:
		mylogfiles = os.listdir(path)
	except OSError:
		pass
	# shortcut for packages without any messages
	if not mylogfiles:
		return {}
	# exploit listdir() file order so we process log entries in chronological order
	mylogfiles.reverse()
	logentries = {}
	for msgfunction in mylogfiles:
		filename = os.path.join(path, msgfunction)
		if msgfunction not in EBUILD_PHASES:
			writemsg(_("!!! can't process invalid log file: %s\n") % filename,
				noiselevel=-1)
			continue
		logentries.setdefault(msgfunction, [])
		lastmsgtype = None
		msgcontent = []
		# NOTE: the original iterated the codecs.open() handle without ever
		# closing it; close explicitly so we don't rely on refcounting.
		logfile = codecs.open(_unicode_encode(filename,
			encoding=_encodings['fs'], errors='strict'),
			mode='r', encoding=_encodings['repo.content'], errors='replace')
		try:
			# Each line is "TYPE message"; consecutive lines with the same
			# type are merged into a single (type, [lines]) entry.
			for l in logfile:
				if not l:
					continue
				try:
					msgtype, msg = l.split(" ", 1)
				except ValueError:
					writemsg(_("!!! malformed entry in "
						"log file: '%s'\n") % filename, noiselevel=-1)
					continue

				if lastmsgtype is None:
					lastmsgtype = msgtype

				if msgtype == lastmsgtype:
					msgcontent.append(msg)
				else:
					if msgcontent:
						logentries[msgfunction].append((lastmsgtype, msgcontent))
					msgcontent = [msg]
					lastmsgtype = msgtype
		finally:
			logfile.close()
		if msgcontent:
			logentries[msgfunction].append((lastmsgtype, msgcontent))

	# clean logfiles to avoid repetitions
	for f in mylogfiles:
		try:
			os.unlink(os.path.join(path, f))
		except OSError:
			pass
	return logentries

_msgbuffer = {}
def _elog_base(level, msg, phase="other", key=None, color=None, out=None):
	""" Backend for the other messaging functions, should not be called
	directly.  Writes the colorized message to `out` (stdout by default)
	and records (level, msg) in the module-level buffer keyed by
	(key, phase) for later retrieval via collect_messages().
	"""
	global _msgbuffer

	if out is None:
		out = sys.stdout

	if color is None:
		color = "GOOD"

	msg = _unicode_decode(msg,
		encoding=_encodings['content'], errors='replace')

	formatted_msg = colorize(color, " * ") + msg + "\n"

	# avoid potential UnicodeEncodeError
	if out in (sys.stdout, sys.stderr):
		formatted_msg = _unicode_encode(formatted_msg,
			encoding=_encodings['stdio'], errors='backslashreplace')
		if sys.hexversion >= 0x3000000:
			out = out.buffer

	out.write(formatted_msg)

	if key not in _msgbuffer:
		_msgbuffer[key] = {}
	if phase not in _msgbuffer[key]:
		_msgbuffer[key][phase] = []
	_msgbuffer[key][phase].append((level, msg))

def collect_messages():
	"""Return the accumulated message buffer and reset it."""
	global _msgbuffer

	rValue = _msgbuffer
	_reset_buffer()
	return rValue

def _reset_buffer():
	""" Reset the internal message buffer when it has been processed,
	    should not be called directly.
	"""
	global _msgbuffer

	_msgbuffer = {}

# creating and exporting the actual messaging functions
_functions = {"einfo": ("INFO", "GOOD"),
	"elog": ("LOG", "GOOD"),
	"ewarn": ("WARN", "WARN"),
	"eqawarn": ("QA", "WARN"),
	"eerror": ("ERROR", "BAD"),
}

def _make_msgfunction(level, color):
	def _elog(msg, phase="other", key=None, out=None):
		""" Display and log a message assigned to the given key/cpv
		    (or unassigned if no key is given).
		"""
		_elog_base(level, msg, phase=phase, key=key, color=color, out=out)
	return _elog

# sys is already imported at the top of the module; the redundant second
# `import sys` that used to sit here has been dropped.
for f in _functions:
	setattr(sys.modules[__name__], f,
		_make_msgfunction(_functions[f][0], _functions[f][1]))
del f, _functions
gpl-2.0
wzbozon/scikit-learn
examples/ensemble/plot_voting_decision_regions.py
230
2386
""" ================================================== Plot the decision boundaries of a VotingClassifier ================================================== Plot the decision boundaries of a `VotingClassifier` for two features of the Iris dataset. Plot the class probabilities of the first sample in a toy dataset predicted by three different classifiers and averaged by the `VotingClassifier`. First, three examplary classifiers are initialized (`DecisionTreeClassifier`, `KNeighborsClassifier`, and `SVC`) and used to initialize a soft-voting `VotingClassifier` with weights `[2, 1, 2]`, which means that the predicted probabilities of the `DecisionTreeClassifier` and `SVC` count 5 times as much as the weights of the `KNeighborsClassifier` classifier when the averaged probability is calculated. """ print(__doc__) from itertools import product import numpy as np import matplotlib.pyplot as plt from sklearn import datasets from sklearn.tree import DecisionTreeClassifier from sklearn.neighbors import KNeighborsClassifier from sklearn.svm import SVC from sklearn.ensemble import VotingClassifier # Loading some example data iris = datasets.load_iris() X = iris.data[:, [0, 2]] y = iris.target # Training classifiers clf1 = DecisionTreeClassifier(max_depth=4) clf2 = KNeighborsClassifier(n_neighbors=7) clf3 = SVC(kernel='rbf', probability=True) eclf = VotingClassifier(estimators=[('dt', clf1), ('knn', clf2), ('svc', clf3)], voting='soft', weights=[2, 1, 2]) clf1.fit(X, y) clf2.fit(X, y) clf3.fit(X, y) eclf.fit(X, y) # Plotting decision regions x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1 y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1 xx, yy = np.meshgrid(np.arange(x_min, x_max, 0.1), np.arange(y_min, y_max, 0.1)) f, axarr = plt.subplots(2, 2, sharex='col', sharey='row', figsize=(10, 8)) for idx, clf, tt in zip(product([0, 1], [0, 1]), [clf1, clf2, clf3, eclf], ['Decision Tree (depth=4)', 'KNN (k=7)', 'Kernel SVM', 'Soft Voting']): Z = clf.predict(np.c_[xx.ravel(), 
yy.ravel()]) Z = Z.reshape(xx.shape) axarr[idx[0], idx[1]].contourf(xx, yy, Z, alpha=0.4) axarr[idx[0], idx[1]].scatter(X[:, 0], X[:, 1], c=y, alpha=0.8) axarr[idx[0], idx[1]].set_title(tt) plt.show()
bsd-3-clause
Yubico/yubiauth
yubiauth/core/__init__.py
2
1495
# # Copyright (c) 2013 Yubico AB # All rights reserved. # # Redistribution and use in source and binary forms, with or # without modification, are permitted provided that the following # conditions are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided # with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # __all__ = [ 'model', 'controller', 'rest', 'YubiAuth' ] from yubiauth.core.controller import YubiAuth
bsd-2-clause
hasibi/EntityLinkingRetrieval-ELR
nordlys/retrieval/scorer.py
1
13963
""" Various retrieval models for scoring a individual document for a given query. @author: Faegheh Hasibi (faegheh.hasibi@idi.ntnu.no) @author: Krisztian Balog (krisztian.balog@uis.no) """ from __future__ import division import math from lucene_tools import Lucene class Scorer(object): """Base scorer class.""" SCORER_DEBUG = 0 def __init__(self, lucene, query, params): self.lucene = lucene self.query = query self.params = params self.lucene.open_searcher() """ @todo consider the field for analysis """ # NOTE: The analyser might return terms that are not in the collection. # These terms are filtered out later in the score_doc functions. self.query_terms = lucene.analyze_query(self.query) if query is not None else None @staticmethod def get_scorer(model, lucene, query, params): """ Returns Scorer object (Scorer factory). :param model: accepted values: lucene, lm or mlm :param lucene: Lucene object :param query: raw query (to be analyzed) :param params: dict with models parameters """ if model == "lm": print "\tLM scoring ... " return ScorerLM(lucene, query, params) elif model == "mlm": print "\tMLM scoring ..." return ScorerMLM(lucene, query, params) elif model == "prms": print "\tPRMS scoring ..." 
return ScorerPRMS(lucene, query, params) else: raise Exception("Unknown model '" + model + "'") class ScorerLM(Scorer): def __init__(self, lucene, query, params): super(ScorerLM, self).__init__(lucene, query, params) self.smoothing_method = params.get('smoothing_method', "jm").lower() if (self.smoothing_method != "jm") and (self.smoothing_method != "dirichlet"): raise Exception(self.params['smoothing_method'] + " smoothing method is not supported!") self.tf = {} @staticmethod def get_jm_prob(tf_t_d, len_d, tf_t_C, len_C, lambd): """ Computes JM-smoothed probability p(t|theta_d) = [(1-lambda) tf(t, d)/|d|] + [lambda tf(t, C)/|C|] :param tf_t_d: tf(t,d) :param len_d: |d| :param tf_t_C: tf(t,C) :param len_C: |C| = \sum_{d \in C} |d| :param lambd: \lambda :return: """ p_t_d = tf_t_d / len_d if len_d > 0 else 0 p_t_C = tf_t_C / len_C if len_C > 0 else 0 return (1 - lambd) * p_t_d + lambd * p_t_C @staticmethod def get_dirichlet_prob(tf_t_d, len_d, tf_t_C, len_C, mu): """ Computes Dirichlet-smoothed probability P(t|theta_d) = [tf(t, d) + mu P(t|C)] / [|d| + mu] :param tf_t_d: tf(t,d) :param len_d: |d| :param tf_t_C: tf(t,C) :param len_C: |C| = \sum_{d \in C} |d| :param mu: \mu :return: """ if mu == 0: # i.e. field does not have any content in the collection return 0 else: p_t_C = tf_t_C / len_C if len_C > 0 else 0 return (tf_t_d + mu * p_t_C) / (len_d + mu) def get_tf(self, lucene_doc_id, field): if lucene_doc_id not in self.tf: self.tf[lucene_doc_id] = {} if field not in self.tf[lucene_doc_id]: self.tf[lucene_doc_id][field] = self.lucene.get_doc_termfreqs(lucene_doc_id, field) return self.tf[lucene_doc_id][field] def get_term_prob(self, lucene_doc_id, field, t, tf_t_d_f=None, tf_t_C_f=None): """ Returns probability of a given term for the given field. :param lucene_doc_id: internal Lucene document ID :param field: entity field name, e.g. 
<dbo:abstract> :param t: term :return: P(t|d_f) """ # Gets term freqs for field of document tf = {} if lucene_doc_id is not None: tf = self.get_tf(lucene_doc_id, field) len_d_f = sum(tf.values()) len_C_f = self.lucene.get_coll_length(field) tf_t_d_f = tf.get(t, 0) if tf_t_d_f is None else tf_t_d_f tf_t_C_f = self.lucene.get_coll_termfreq(t, field) if tf_t_C_f is None else tf_t_C_f if self.SCORER_DEBUG: print "\t\tt=" + t + ", f=" + field print "\t\t\tDoc: tf(t,f)=" + str(tf_t_d_f) + "\t|f|=" + str(len_d_f) print "\t\t\tColl: tf(t,f)=" + str(tf_t_C_f) + "\t|f|=" + str(len_C_f) # JM smoothing: p(t|theta_d_f) = [(1-lambda) tf(t, d_f)/|d_f|] + [lambda tf(t, C_f)/|C_f|] if self.smoothing_method == "jm": lambd = self.params.get('smoothing_param', 0.1) p_t_d_f = self.get_jm_prob(tf_t_d_f, len_d_f, tf_t_C_f, len_C_f, lambd) if self.SCORER_DEBUG: print "\t\t\tJM smoothing:" print "\t\t\tDoc: p(t|theta_d_f)=", p_t_d_f # Dirichlet smoothing elif self.smoothing_method == "dirichlet": mu = self.params.get('smoothing_param', self.lucene.get_avg_len(field)) p_t_d_f = self.get_dirichlet_prob(tf_t_d_f, len_d_f, tf_t_C_f, len_C_f, mu) if self.SCORER_DEBUG: print "\t\t\tDirichlet smoothing:" print "\t\t\tmu:", mu print "\t\t\tDoc: p(t|theta_d_f)=", p_t_d_f return p_t_d_f def get_term_probs(self, lucene_doc_id, field): """ Returns probability of all query terms for the given field. :param lucene_doc_id: internal Lucene document ID :param field: entity field name, e.g. <dbo:abstract> :return: dictionary of terms with their probabilities """ p_t_theta_d_f = {} for t in set(self.query_terms): p_t_theta_d_f[t] = self.get_term_prob(lucene_doc_id, field, t) return p_t_theta_d_f def score_doc(self, doc_id, lucene_doc_id=None): """ Scores the given document using LM. 
:param doc_id: document id :param lucene_doc_id: internal Lucene document ID :return float, LM score of document and query """ if self.SCORER_DEBUG: print "Scoring doc ID=" + doc_id if lucene_doc_id is None: lucene_doc_id = self.lucene.get_lucene_document_id(doc_id) field = self.params.get('field', Lucene.FIELDNAME_CONTENTS) p_t_theta_d = self.get_term_probs(lucene_doc_id, field) if sum(p_t_theta_d.values()) == 0: # none of query terms are in the field collection if self.SCORER_DEBUG: print "\t\tP(q|" + field + ") = None" return None # p(q|theta_d) = prod(p(t|theta_d)) ; we return log(p(q|theta_d)) p_q_theta_d = 0 for t in self.query_terms: # Skips the term if it is not in the field collection if p_t_theta_d[t] == 0: continue if self.SCORER_DEBUG: print "\t\tP(" + t + "|" + field + ") = " + str(p_t_theta_d[t]) p_q_theta_d += math.log(p_t_theta_d[t]) if self.SCORER_DEBUG: print "\tP(d|q)=" + str(p_q_theta_d) return p_q_theta_d class ScorerMLM(ScorerLM): def __init__(self, lucene, query, params): super(ScorerMLM, self).__init__(lucene, query, params) def get_mlm_term_prob(self, lucene_doc_id, weights, t): """ Returns MLM probability for the given term and field-weights. :param lucene_doc_id: internal Lucene document ID :param weights: dictionary, {field: weights, ...} :param t: term :return: P(t|theta_d) """ # p(t|theta_d) = sum(mu_f * p(t|theta_d_f)) p_t_theta_d = 0 for f, mu_f in weights.iteritems(): p_t_theta_d_f = self.get_term_prob(lucene_doc_id, f, t) p_t_theta_d += mu_f * p_t_theta_d_f if self.SCORER_DEBUG: print "\t\tP(t|theta_d)=" + str(p_t_theta_d) return p_t_theta_d def get_mlm_term_probs(self, lucene_doc_id, weights): """ Returns probability of all query terms for the given field weights. 
:param lucene_doc_id: internal Lucene document ID :param weights: dictionary, {field: weights, ...} :return: dictionary of terms with their probabilities """ p_t_theta_d = {} for t in set(self.query_terms): if self.SCORER_DEBUG: print "\tt=" + t p_t_theta_d[t] = self.get_mlm_term_prob(lucene_doc_id, weights, t) return p_t_theta_d def score_doc(self, doc_id, lucene_doc_id=None): """ Scores the given document using MLM model. :param doc_id: document id :param lucene_doc_id: internal Lucene document ID :return float, MLM score of document and query """ if self.SCORER_DEBUG: print "Scoring doc ID=" + doc_id if lucene_doc_id is None: lucene_doc_id = self.lucene.get_lucene_document_id(doc_id) weights = self.params['field_weights'] p_t_theta_d = self.get_mlm_term_probs(lucene_doc_id, weights) # none of query terms are in the field collection if sum(p_t_theta_d.values()) == 0: if self.SCORER_DEBUG: print "\t\tP_mlm(q|theta_d) = None" return None # p(q|theta_d) = prod(p(t|theta_d)) ; we return log(p(q|theta_d)) p_q_theta_d = 0 for t in self.query_terms: if p_t_theta_d[t] == 0: continue if self.SCORER_DEBUG: print "\t\tP_mlm(" + t + "|theta_d) = " + str(p_t_theta_d[t]) p_q_theta_d += math.log(p_t_theta_d[t]) return p_q_theta_d class ScorerPRMS(ScorerLM): def __init__(self, lucene, query, params): super(ScorerPRMS, self).__init__(lucene, query, params) self.fields = self.params['fields'] self.total_field_freq = None self.mapping_probs = None def score_doc(self, doc_id, lucene_doc_id=None): """ Scores the given document using PRMS model. 
:param doc_id: document id :param lucene_doc_id: internal Lucene document ID :return float, PRMS score of document and query """ if self.SCORER_DEBUG: print "Scoring doc ID=" + doc_id if lucene_doc_id is None: lucene_doc_id = self.lucene.get_lucene_document_id(doc_id) # gets mapping probs: p(f|t) p_f_t = self.get_mapping_probs() # gets term probs: p(t|theta_d_f) p_t_theta_d_f = {} for field in self.fields: p_t_theta_d_f[field] = self.get_term_probs(lucene_doc_id, field) # none of query terms are in the field collection if sum([sum(p_t_theta_d_f[field].values()) for field in p_t_theta_d_f]) == 0: return None # p(q|theta_d) = prod(p(t|theta_d)) ; we return log(p(q|theta_d)) p_q_theta_d = 0 for t in self.query_terms: if self.SCORER_DEBUG: print "\tt=" + t # p(t|theta_d) = sum(p(f|t) * p(t|theta_d_f)) p_t_theta_d = 0 for f in self.fields: if f in p_f_t[t]: p_t_theta_d += p_f_t[t][f] * p_t_theta_d_f[f][t] if self.SCORER_DEBUG: print "\t\t\tf=" + f + ", p(t|f)=" + str(p_f_t[t][f]) + " P(t|theta_d,f)=" + str(p_t_theta_d_f[f][t]) if p_t_theta_d == 0: continue p_q_theta_d += math.log(p_t_theta_d) if self.SCORER_DEBUG: print "\t\tP(t|theta_d)=" + str(p_t_theta_d) return p_q_theta_d def get_mapping_probs(self): """Gets (cached) mapping probabilities for all query terms.""" if self.mapping_probs is None: self.mapping_probs = {} for t in set(self.query_terms): self.mapping_probs[t] = self.get_mapping_prob(t) return self.mapping_probs def get_mapping_prob(self, t, coll_termfreq_fields=None): """ Computes PRMS field mapping probability. 
p(f|t) = P(t|f)P(f) / sum_f'(P(t|C_{f'_c})P(f')) :param t: str :param coll_termfreq_fields: {field: freq, ...} :return Dictionary {field: prms_prob, ...} """ if coll_termfreq_fields is None: coll_termfreq_fields = {} for f in self.fields: coll_termfreq_fields[f] = self.lucene.get_coll_termfreq(t, f) # calculates numerators for all fields: P(t|f)P(f) numerators = {} for f in self.fields: p_t_f = coll_termfreq_fields[f] / self.lucene.get_coll_length(f) p_f = self.lucene.get_doc_count(f) / self.get_total_field_freq() p_f_t = p_t_f * p_f if p_f_t > 0: numerators[f] = p_f_t if self.SCORER_DEBUG: print "\tf= " + f, "t= " + t + " P(t|f)=" + str(p_t_f) + " P(f)=" + str(p_f) # calculates denominator: sum_f'(P(t|C_{f'_c})P(f')) denominator = sum(numerators.values()) mapping_probs = {} if denominator > 0: # if the term is present in the collection for f in numerators: mapping_probs[f] = numerators[f] / denominator if self.SCORER_DEBUG: print "\t\tf= " + f + " t= " + t + " p(f|t)= " + str(numerators[f]) + "/" + str(sum(numerators.values())) + \ " = " + str(mapping_probs[f]) return mapping_probs def get_total_field_freq(self): """Returns total occurrences of all fields""" if self.total_field_freq is None: total_field_freq = 0 for f in self.fields: total_field_freq += self.lucene.get_doc_count(f) self.total_field_freq = total_field_freq return self.total_field_freq
mit
jsgf/xen
tools/python/xen/xend/XendProtocol.py
49
7170
#============================================================================ # This library is free software; you can redistribute it and/or # modify it under the terms of version 2.1 of the GNU Lesser General Public # License as published by the Free Software Foundation. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA #============================================================================ # Copyright (C) 2004, 2005 Mike Wray <mike.wray@hp.com> # Copyright (C) 2005 XenSource Ltd. #============================================================================ import socket import httplib import time import types from encode import * from xen.xend import sxp from xen.xend import XendOptions DEBUG = 0 HTTP_OK = 200 HTTP_CREATED = 201 HTTP_ACCEPTED = 202 HTTP_NO_CONTENT = 204 xoptions = XendOptions.instance() class XendError(RuntimeError): """Error class for 'expected errors' when talking to xend. """ pass class XendRequest: """A request to xend. """ def __init__(self, url, method, args): """Create a request. Sets up the headers, argument data, and the url. @param url: the url to request @param method: request method, GET or POST @param args: dict containing request args, if any """ if url.proto != 'http': raise ValueError('Invalid protocol: ' + url.proto) (hdr, data) = encode_data(args) if args and method == 'GET': url.query = data data = None if method == "POST" and url.path.endswith('/'): url.path = url.path[:-1] self.headers = hdr self.data = data self.url = url self.method = method class XendClientProtocol: """Abstract class for xend clients. 
""" def xendRequest(self, url, method, args=None): """Make a request to xend. Implement in a subclass. @param url: xend request url @param method: http method: POST or GET @param args: request arguments (dict) """ raise NotImplementedError() def xendGet(self, url, args=None): """Make a xend request using HTTP GET. Requests using GET are usually 'safe' and may be repeated without nasty side-effects. @param url: xend request url @param data: request arguments (dict) """ return self.xendRequest(url, "GET", args) def xendPost(self, url, args): """Make a xend request using HTTP POST. Requests using POST potentially cause side-effects, and should not be repeated unless you really want to repeat the side effect. @param url: xend request url @param args: request arguments (dict) """ return self.xendRequest(url, "POST", args) def handleStatus(self, _, status, message): """Handle the status returned from the request. """ status = int(status) if status in [ HTTP_NO_CONTENT ]: return None if status not in [ HTTP_OK, HTTP_CREATED, HTTP_ACCEPTED ]: return self.handleException(XendError(message)) return 'ok' def handleResponse(self, data): """Handle the data returned in response to the request. """ if data is None: return None typ = self.getHeader('Content-Type') if typ != sxp.mime_type: return data try: pin = sxp.Parser() pin.input(data); pin.input_eof() val = pin.get_val() except sxp.ParseError, err: return self.handleException(err) if isinstance(val, types.ListType) and sxp.name(val) == 'xend.err': err = XendError(val[1]) return self.handleException(err) return val def handleException(self, err): """Handle an exception during the request. May be overridden in a subclass. """ raise err def getHeader(self, key): """Get a header from the response. Case is ignored in the key. @param key: header key @return: header """ raise NotImplementedError() class HttpXendClientProtocol(XendClientProtocol): """A synchronous xend client. 
This will make a request, wait for the reply and return the result. """ resp = None request = None def makeConnection(self, url): return httplib.HTTPConnection(url.location()) def makeRequest(self, url, method, args): return XendRequest(url, method, args) def xendRequest(self, url, method, args=None): """Make a request to xend. @param url: xend request url @param method: http method: POST or GET @param args: request arguments (dict) """ retries = 0 while retries < 2: self.request = self.makeRequest(url, method, args) conn = self.makeConnection(url) try: if DEBUG: conn.set_debuglevel(1) conn.request(method, url.fullpath(), self.request.data, self.request.headers) try: resp = conn.getresponse() self.resp = resp val = self.handleStatus(resp.version, resp.status, resp.reason) if val is None: data = None else: data = resp.read() val = self.handleResponse(data) return val except httplib.BadStatusLine: retries += 1 time.sleep(5) finally: conn.close() raise XendError("Received invalid response from Xend, twice.") def getHeader(self, key): return self.resp.getheader(key) class UnixConnection(httplib.HTTPConnection): """Subclass of Python library HTTPConnection that uses a unix-domain socket. """ def __init__(self, path): httplib.HTTPConnection.__init__(self, 'localhost') self.path = path def connect(self): sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) sock.connect(self.path) self.sock = sock class UnixXendClientProtocol(HttpXendClientProtocol): """A synchronous xend client using a unix-domain socket. """ def __init__(self, path=None): if path is None: path = xoptions.get_xend_unix_path() self.path = path def makeConnection(self, _): return UnixConnection(self.path)
gpl-2.0
ms-iot/python
cpython/Lib/_collections_abc.py
3
25028
# Copyright 2007 Google, Inc. All Rights Reserved. # Licensed to PSF under a Contributor Agreement. """Abstract Base Classes (ABCs) for collections, according to PEP 3119. Unit tests are in test_collections. """ from abc import ABCMeta, abstractmethod import sys __all__ = ["Awaitable", "Coroutine", "AsyncIterable", "AsyncIterator", "Hashable", "Iterable", "Iterator", "Generator", "Sized", "Container", "Callable", "Set", "MutableSet", "Mapping", "MutableMapping", "MappingView", "KeysView", "ItemsView", "ValuesView", "Sequence", "MutableSequence", "ByteString", ] # This module has been renamed from collections.abc to _collections_abc to # speed up interpreter startup. Some of the types such as MutableMapping are # required early but collections module imports a lot of other modules. # See issue #19218 __name__ = "collections.abc" # Private list of types that we want to register with the various ABCs # so that they will pass tests like: # it = iter(somebytearray) # assert isinstance(it, Iterable) # Note: in other implementations, these types many not be distinct # and they make have their own implementation specific types that # are not included on this list. bytes_iterator = type(iter(b'')) bytearray_iterator = type(iter(bytearray())) #callable_iterator = ??? 
dict_keyiterator = type(iter({}.keys())) dict_valueiterator = type(iter({}.values())) dict_itemiterator = type(iter({}.items())) list_iterator = type(iter([])) list_reverseiterator = type(iter(reversed([]))) range_iterator = type(iter(range(0))) set_iterator = type(iter(set())) str_iterator = type(iter("")) tuple_iterator = type(iter(())) zip_iterator = type(iter(zip())) ## views ## dict_keys = type({}.keys()) dict_values = type({}.values()) dict_items = type({}.items()) ## misc ## mappingproxy = type(type.__dict__) generator = type((lambda: (yield))()) ### ONE-TRICK PONIES ### class Hashable(metaclass=ABCMeta): __slots__ = () @abstractmethod def __hash__(self): return 0 @classmethod def __subclasshook__(cls, C): if cls is Hashable: for B in C.__mro__: if "__hash__" in B.__dict__: if B.__dict__["__hash__"]: return True break return NotImplemented class _AwaitableMeta(ABCMeta): def __instancecheck__(cls, instance): # 0x80 = CO_COROUTINE # 0x100 = CO_ITERABLE_COROUTINE # We don't want to import 'inspect' module, as # a dependency for 'collections.abc'. CO_COROUTINES = 0x80 | 0x100 if (isinstance(instance, generator) and instance.gi_code.co_flags & CO_COROUTINES): return True return super().__instancecheck__(instance) class Awaitable(metaclass=_AwaitableMeta): __slots__ = () @abstractmethod def __await__(self): yield @classmethod def __subclasshook__(cls, C): if cls is Awaitable: for B in C.__mro__: if "__await__" in B.__dict__: if B.__dict__["__await__"]: return True break return NotImplemented class Coroutine(Awaitable): __slots__ = () @abstractmethod def send(self, value): """Send a value into the coroutine. Return next yielded value or raise StopIteration. """ raise StopIteration @abstractmethod def throw(self, typ, val=None, tb=None): """Raise an exception in the coroutine. Return next yielded value or raise StopIteration. 
""" if val is None: if tb is None: raise typ val = typ() if tb is not None: val = val.with_traceback(tb) raise val def close(self): """Raise GeneratorExit inside coroutine. """ try: self.throw(GeneratorExit) except (GeneratorExit, StopIteration): pass else: raise RuntimeError("coroutine ignored GeneratorExit") @classmethod def __subclasshook__(cls, C): if cls is Coroutine: mro = C.__mro__ for method in ('__await__', 'send', 'throw', 'close'): for base in mro: if method in base.__dict__: break else: return NotImplemented return True return NotImplemented class AsyncIterable(metaclass=ABCMeta): __slots__ = () @abstractmethod async def __aiter__(self): return AsyncIterator() @classmethod def __subclasshook__(cls, C): if cls is AsyncIterable: if any("__aiter__" in B.__dict__ for B in C.__mro__): return True return NotImplemented class AsyncIterator(AsyncIterable): __slots__ = () @abstractmethod async def __anext__(self): """Return the next item or raise StopAsyncIteration when exhausted.""" raise StopAsyncIteration async def __aiter__(self): return self @classmethod def __subclasshook__(cls, C): if cls is AsyncIterator: if (any("__anext__" in B.__dict__ for B in C.__mro__) and any("__aiter__" in B.__dict__ for B in C.__mro__)): return True return NotImplemented class Iterable(metaclass=ABCMeta): __slots__ = () @abstractmethod def __iter__(self): while False: yield None @classmethod def __subclasshook__(cls, C): if cls is Iterable: if any("__iter__" in B.__dict__ for B in C.__mro__): return True return NotImplemented class Iterator(Iterable): __slots__ = () @abstractmethod def __next__(self): 'Return the next item from the iterator. 
When exhausted, raise StopIteration' raise StopIteration def __iter__(self): return self @classmethod def __subclasshook__(cls, C): if cls is Iterator: if (any("__next__" in B.__dict__ for B in C.__mro__) and any("__iter__" in B.__dict__ for B in C.__mro__)): return True return NotImplemented Iterator.register(bytes_iterator) Iterator.register(bytearray_iterator) #Iterator.register(callable_iterator) Iterator.register(dict_keyiterator) Iterator.register(dict_valueiterator) Iterator.register(dict_itemiterator) Iterator.register(list_iterator) Iterator.register(list_reverseiterator) Iterator.register(range_iterator) Iterator.register(set_iterator) Iterator.register(str_iterator) Iterator.register(tuple_iterator) Iterator.register(zip_iterator) class Generator(Iterator): __slots__ = () def __next__(self): """Return the next item from the generator. When exhausted, raise StopIteration. """ return self.send(None) @abstractmethod def send(self, value): """Send a value into the generator. Return next yielded value or raise StopIteration. """ raise StopIteration @abstractmethod def throw(self, typ, val=None, tb=None): """Raise an exception in the generator. Return next yielded value or raise StopIteration. """ if val is None: if tb is None: raise typ val = typ() if tb is not None: val = val.with_traceback(tb) raise val def close(self): """Raise GeneratorExit inside generator. 
""" try: self.throw(GeneratorExit) except (GeneratorExit, StopIteration): pass else: raise RuntimeError("generator ignored GeneratorExit") @classmethod def __subclasshook__(cls, C): if cls is Generator: mro = C.__mro__ for method in ('__iter__', '__next__', 'send', 'throw', 'close'): for base in mro: if method in base.__dict__: break else: return NotImplemented return True return NotImplemented Generator.register(generator) class Sized(metaclass=ABCMeta): __slots__ = () @abstractmethod def __len__(self): return 0 @classmethod def __subclasshook__(cls, C): if cls is Sized: if any("__len__" in B.__dict__ for B in C.__mro__): return True return NotImplemented class Container(metaclass=ABCMeta): __slots__ = () @abstractmethod def __contains__(self, x): return False @classmethod def __subclasshook__(cls, C): if cls is Container: if any("__contains__" in B.__dict__ for B in C.__mro__): return True return NotImplemented class Callable(metaclass=ABCMeta): __slots__ = () @abstractmethod def __call__(self, *args, **kwds): return False @classmethod def __subclasshook__(cls, C): if cls is Callable: if any("__call__" in B.__dict__ for B in C.__mro__): return True return NotImplemented ### SETS ### class Set(Sized, Iterable, Container): """A set is a finite, iterable container. This class provides concrete generic implementations of all methods except for __contains__, __iter__ and __len__. To override the comparisons (presumably for speed, as the semantics are fixed), redefine __le__ and __ge__, then the other operations will automatically follow suit. 
""" __slots__ = () def __le__(self, other): if not isinstance(other, Set): return NotImplemented if len(self) > len(other): return False for elem in self: if elem not in other: return False return True def __lt__(self, other): if not isinstance(other, Set): return NotImplemented return len(self) < len(other) and self.__le__(other) def __gt__(self, other): if not isinstance(other, Set): return NotImplemented return len(self) > len(other) and self.__ge__(other) def __ge__(self, other): if not isinstance(other, Set): return NotImplemented if len(self) < len(other): return False for elem in other: if elem not in self: return False return True def __eq__(self, other): if not isinstance(other, Set): return NotImplemented return len(self) == len(other) and self.__le__(other) @classmethod def _from_iterable(cls, it): '''Construct an instance of the class from any iterable input. Must override this method if the class constructor signature does not accept an iterable for an input. ''' return cls(it) def __and__(self, other): if not isinstance(other, Iterable): return NotImplemented return self._from_iterable(value for value in other if value in self) __rand__ = __and__ def isdisjoint(self, other): 'Return True if two sets have a null intersection.' 
for value in other: if value in self: return False return True def __or__(self, other): if not isinstance(other, Iterable): return NotImplemented chain = (e for s in (self, other) for e in s) return self._from_iterable(chain) __ror__ = __or__ def __sub__(self, other): if not isinstance(other, Set): if not isinstance(other, Iterable): return NotImplemented other = self._from_iterable(other) return self._from_iterable(value for value in self if value not in other) def __rsub__(self, other): if not isinstance(other, Set): if not isinstance(other, Iterable): return NotImplemented other = self._from_iterable(other) return self._from_iterable(value for value in other if value not in self) def __xor__(self, other): if not isinstance(other, Set): if not isinstance(other, Iterable): return NotImplemented other = self._from_iterable(other) return (self - other) | (other - self) __rxor__ = __xor__ def _hash(self): """Compute the hash value of a set. Note that we don't define __hash__: not all sets are hashable. But if you define a hashable set type, its __hash__ should call this function. This must be compatible __eq__. All sets ought to compare equal if they contain the same elements, regardless of how they are implemented, and regardless of the order of the elements; so there's not much freedom for __eq__ or __hash__. We match the algorithm used by the built-in frozenset type. """ MAX = sys.maxsize MASK = 2 * MAX + 1 n = len(self) h = 1927868237 * (n + 1) h &= MASK for x in self: hx = hash(x) h ^= (hx ^ (hx << 16) ^ 89869747) * 3644798167 h &= MASK h = h * 69069 + 907133923 h &= MASK if h > MAX: h -= MASK + 1 if h == -1: h = 590923713 return h Set.register(frozenset) class MutableSet(Set): """A mutable set is a finite, iterable container. This class provides concrete generic implementations of all methods except for __contains__, __iter__, __len__, add(), and discard(). 
To override the comparisons (presumably for speed, as the semantics are fixed), all you have to do is redefine __le__ and then the other operations will automatically follow suit. """ __slots__ = () @abstractmethod def add(self, value): """Add an element.""" raise NotImplementedError @abstractmethod def discard(self, value): """Remove an element. Do not raise an exception if absent.""" raise NotImplementedError def remove(self, value): """Remove an element. If not a member, raise a KeyError.""" if value not in self: raise KeyError(value) self.discard(value) def pop(self): """Return the popped value. Raise KeyError if empty.""" it = iter(self) try: value = next(it) except StopIteration: raise KeyError self.discard(value) return value def clear(self): """This is slow (creates N new iterators!) but effective.""" try: while True: self.pop() except KeyError: pass def __ior__(self, it): for value in it: self.add(value) return self def __iand__(self, it): for value in (self - it): self.discard(value) return self def __ixor__(self, it): if it is self: self.clear() else: if not isinstance(it, Set): it = self._from_iterable(it) for value in it: if value in self: self.discard(value) else: self.add(value) return self def __isub__(self, it): if it is self: self.clear() else: for value in it: self.discard(value) return self MutableSet.register(set) ### MAPPINGS ### class Mapping(Sized, Iterable, Container): __slots__ = () """A Mapping is a generic container for associating key/value pairs. This class provides concrete generic implementations of all methods except for __getitem__, __iter__, and __len__. """ @abstractmethod def __getitem__(self, key): raise KeyError def get(self, key, default=None): 'D.get(k[,d]) -> D[k] if k in D, else d. d defaults to None.' 
try: return self[key] except KeyError: return default def __contains__(self, key): try: self[key] except KeyError: return False else: return True def keys(self): "D.keys() -> a set-like object providing a view on D's keys" return KeysView(self) def items(self): "D.items() -> a set-like object providing a view on D's items" return ItemsView(self) def values(self): "D.values() -> an object providing a view on D's values" return ValuesView(self) def __eq__(self, other): if not isinstance(other, Mapping): return NotImplemented return dict(self.items()) == dict(other.items()) Mapping.register(mappingproxy) class MappingView(Sized): __slots__ = '_mapping', def __init__(self, mapping): self._mapping = mapping def __len__(self): return len(self._mapping) def __repr__(self): return '{0.__class__.__name__}({0._mapping!r})'.format(self) class KeysView(MappingView, Set): __slots__ = () @classmethod def _from_iterable(self, it): return set(it) def __contains__(self, key): return key in self._mapping def __iter__(self): yield from self._mapping KeysView.register(dict_keys) class ItemsView(MappingView, Set): __slots__ = () @classmethod def _from_iterable(self, it): return set(it) def __contains__(self, item): key, value = item try: v = self._mapping[key] except KeyError: return False else: return v == value def __iter__(self): for key in self._mapping: yield (key, self._mapping[key]) ItemsView.register(dict_items) class ValuesView(MappingView): __slots__ = () def __contains__(self, value): for key in self._mapping: if value == self._mapping[key]: return True return False def __iter__(self): for key in self._mapping: yield self._mapping[key] ValuesView.register(dict_values) class MutableMapping(Mapping): __slots__ = () """A MutableMapping is a generic container for associating key/value pairs. This class provides concrete generic implementations of all methods except for __getitem__, __setitem__, __delitem__, __iter__, and __len__. 
""" @abstractmethod def __setitem__(self, key, value): raise KeyError @abstractmethod def __delitem__(self, key): raise KeyError __marker = object() def pop(self, key, default=__marker): '''D.pop(k[,d]) -> v, remove specified key and return the corresponding value. If key is not found, d is returned if given, otherwise KeyError is raised. ''' try: value = self[key] except KeyError: if default is self.__marker: raise return default else: del self[key] return value def popitem(self): '''D.popitem() -> (k, v), remove and return some (key, value) pair as a 2-tuple; but raise KeyError if D is empty. ''' try: key = next(iter(self)) except StopIteration: raise KeyError value = self[key] del self[key] return key, value def clear(self): 'D.clear() -> None. Remove all items from D.' try: while True: self.popitem() except KeyError: pass def update(*args, **kwds): ''' D.update([E, ]**F) -> None. Update D from mapping/iterable E and F. If E present and has a .keys() method, does: for k in E: D[k] = E[k] If E present and lacks .keys() method, does: for (k, v) in E: D[k] = v In either case, this is followed by: for k, v in F.items(): D[k] = v ''' if not args: raise TypeError("descriptor 'update' of 'MutableMapping' object " "needs an argument") self, *args = args if len(args) > 1: raise TypeError('update expected at most 1 arguments, got %d' % len(args)) if args: other = args[0] if isinstance(other, Mapping): for key in other: self[key] = other[key] elif hasattr(other, "keys"): for key in other.keys(): self[key] = other[key] else: for key, value in other: self[key] = value for key, value in kwds.items(): self[key] = value def setdefault(self, key, default=None): 'D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D' try: return self[key] except KeyError: self[key] = default return default MutableMapping.register(dict) ### SEQUENCES ### class Sequence(Sized, Iterable, Container): """All the operations on a read-only sequence. 
Concrete subclasses must override __new__ or __init__, __getitem__, and __len__. """ __slots__ = () @abstractmethod def __getitem__(self, index): raise IndexError def __iter__(self): i = 0 try: while True: v = self[i] yield v i += 1 except IndexError: return def __contains__(self, value): for v in self: if v == value: return True return False def __reversed__(self): for i in reversed(range(len(self))): yield self[i] def index(self, value, start=0, stop=None): '''S.index(value, [start, [stop]]) -> integer -- return first index of value. Raises ValueError if the value is not present. ''' if start is not None and start < 0: start = max(len(self) + start, 0) if stop is not None and stop < 0: stop += len(self) i = start while stop is None or i < stop: try: if self[i] == value: return i except IndexError: break i += 1 raise ValueError def count(self, value): 'S.count(value) -> integer -- return number of occurrences of value' return sum(1 for v in self if v == value) Sequence.register(tuple) Sequence.register(str) Sequence.register(range) Sequence.register(memoryview) class ByteString(Sequence): """This unifies bytes and bytearray. XXX Should add all their methods. """ __slots__ = () ByteString.register(bytes) ByteString.register(bytearray) class MutableSequence(Sequence): __slots__ = () """All the operations on a read-write sequence. Concrete subclasses must provide __new__ or __init__, __getitem__, __setitem__, __delitem__, __len__, and insert(). 
""" @abstractmethod def __setitem__(self, index, value): raise IndexError @abstractmethod def __delitem__(self, index): raise IndexError @abstractmethod def insert(self, index, value): 'S.insert(index, value) -- insert value before index' raise IndexError def append(self, value): 'S.append(value) -- append value to the end of the sequence' self.insert(len(self), value) def clear(self): 'S.clear() -> None -- remove all items from S' try: while True: self.pop() except IndexError: pass def reverse(self): 'S.reverse() -- reverse *IN PLACE*' n = len(self) for i in range(n//2): self[i], self[n-i-1] = self[n-i-1], self[i] def extend(self, values): 'S.extend(iterable) -- extend sequence by appending elements from the iterable' for v in values: self.append(v) def pop(self, index=-1): '''S.pop([index]) -> item -- remove and return item at index (default last). Raise IndexError if list is empty or index is out of range. ''' v = self[index] del self[index] return v def remove(self, value): '''S.remove(value) -- remove first occurrence of value. Raise ValueError if the value is not present. ''' del self[self.index(value)] def __iadd__(self, values): self.extend(values) return self MutableSequence.register(list) MutableSequence.register(bytearray) # Multiply inheriting, see ByteString
bsd-3-clause
ns950/calibre
src/calibre/web/__init__.py
15
1789
__license__ = 'GPL v3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>'

# NOTE: Python 2 module (urlparse / urllib2 imports).


class Recipe(object):
    # Placeholder base class; real recipe machinery lives elsewhere in calibre.
    pass


def get_download_filename_from_response(response):
    """Extract a filename for a downloaded resource from *response*.

    Prefers the Content-Disposition header's filename; falls back to the
    last path component of the response URL. Returns '' only if both are
    empty. Any parsing error is printed and swallowed (best-effort).
    """
    from urlparse import urlparse
    from urllib2 import unquote as urllib2_unquote
    filename = last_part_name = ''
    try:
        purl = urlparse(response.geturl())
        # Fallback: last component of the URL path.
        last_part_name = purl.path.split('/')[-1]
        disposition = response.info().get('Content-disposition', '')
        for p in disposition.split(';'):
            if 'filename' in p:
                # NOTE(review): this tests the WHOLE header for '*=' rather
                # than the current parameter `p`; the effect is that an
                # RFC 5987 `filename*=` parameter anywhere in the header
                # wins over a plain `filename=` — presumably intentional,
                # but confirm before changing.
                if '*=' in disposition:
                    parts = disposition.split('*=')[-1]
                    # Take everything after the charset'lang' prefix
                    # (e.g. UTF-8''name.ext).
                    filename = parts.split('\'')[-1]
                else:
                    filename = disposition.split('=')[-1]
                    # Strip one layer of surrounding quotes, if present.
                    # NOTE(review): an empty value would raise IndexError
                    # here; it is caught by the except below and the URL
                    # fallback is used instead.
                    if filename[0] in ('\'', '"'):
                        filename = filename[1:]
                    if filename[-1] in ('\'', '"'):
                        filename = filename[:-1]
                    filename = urllib2_unquote(filename)
                break
    except Exception:
        import traceback
        traceback.print_exc()
    return filename or last_part_name


def get_download_filename(url, cookie_file=None):
    '''
    Get a local filename for a URL using the content disposition header
    Returns empty string if an error occurs.
    '''
    from calibre import browser
    from contextlib import closing
    filename = ''
    br = browser()
    if cookie_file:
        # Reuse an existing session (e.g. a logged-in store) via the
        # Mozilla-format cookie jar on disk.
        from mechanize import MozillaCookieJar
        cj = MozillaCookieJar()
        cj.load(cookie_file)
        br.set_cookiejar(cj)
    try:
        # closing() guarantees the response is released even on error.
        with closing(br.open(url)) as r:
            filename = get_download_filename_from_response(r)
    except:
        import traceback
        traceback.print_exc()
    return filename
gpl-3.0
sameeptandon/sail-car-log
lidar/FrameFinder.py
1
6213
#!/usr/bin/python
# -*- coding: utf-8 -*-

# Builds a mapping from camera frame numbers to the nearest lidar (.ldr)
# and, optionally, radar (.rdr) capture files, matched by timestamp.
# NOTE: Python 2 script (print statements, `long`, `except X, e`).

import bisect
import os
import sys

# Radar export is optional: it needs ROS's rosbag package. If the import
# fails we degrade gracefully to lidar-only mapping.
EXPORT_RDR = True
try:
    import rosbag
except ImportError, e:
    print 'Failed to import rosbag, not exporting radar'
    EXPORT_RDR = False

sys.path.append('../process')
from GPSReader import GPSReader


class FrameFinder:
    """ Creates a mapping from cloud to frames. """

    # NOTE(review): these are CLASS attributes, shared across instances;
    # __init__ mutates frame_to_cloud_map in place, so two FrameFinder
    # instances would share one map — confirm single-instance usage.
    frame_to_cloud_map = {}
    map_file_path = ""
    map_file_name = ""

    def __init__(self, gps_file, frames_folder, radar_bag_file,
                 write_to_file):
        # Normalize the folder name and derive the base name used for the
        # output .map file and the _rdr output directory.
        if frames_folder[-1] == '/':
            frames_folder = frames_folder[0:-1]
        basename = frames_folder.replace('_frames', '')
        self.map_file_name = basename + ".map"
        reader = GPSReader(gps_file)
        # Camera time should already be sorted because frame
        # always increases
        camera_times = [utc_from_gps(data['week'], data['seconds'])
                        for data in reader.getData()]
        # Lidar capture times are encoded in the .ldr file names
        # (microsecond timestamps).
        ldr_times = \
            [long(os.path.splitext(os.path.basename(ldr_file))[0])
             for ldr_file in os.listdir(frames_folder)
             if ldr_file.endswith('.ldr')]
        # Need to sort the ldr_times because listdir returns undefined ordering
        ldr_times.sort()
        if EXPORT_RDR:
            rdr_times = unpack_bag(basename, radar_bag_file)
        for frame_number, camera_time in enumerate(camera_times):
            # Find the closest time in ldr times: bisect gives the insertion
            # point, so index-1 is the latest capture at or before the frame.
            nearest_index_ldr = bisect.bisect(ldr_times, camera_time)
            nearest_index_rdr = -1
            if EXPORT_RDR:
                nearest_index_rdr = bisect.bisect(rdr_times, camera_time)
            # Only map frames that have a preceding capture in every stream.
            if nearest_index_ldr >= 1 and (not EXPORT_RDR or
                                           nearest_index_rdr >= 1):
                lidr_file = str(ldr_times[nearest_index_ldr - 1]) + '.ldr'
                if EXPORT_RDR:
                    radar_seq = str(rdr_times[nearest_index_rdr - 1]) + '.rdr'
                # Frames are indexed by 1, not 0
                real_frame = frame_number + 1
                if EXPORT_RDR:
                    self.frame_to_cloud_map[real_frame] = (lidr_file,
                                                           radar_seq)
                else:
                    self.frame_to_cloud_map[real_frame] = lidr_file
                #print real_frame, (lidr_file, radar_seq)
        if write_to_file:
            self.__write_frame_map()

    def get_map(self):
        """ Returns a mapping from camera frame to ldr file """
        return self.frame_to_cloud_map

    def __write_frame_map(self):
        """ Writes the camera frame to ldr file mapping to a file """
        out_file = open(self.map_file_name, 'w')
        for frame, data in self.get_map().iteritems():
            # NOTE(review): this first assignment is dead — it is always
            # overwritten by one of the two branches below.
            line = str(frame) + ' ' + str(data[0])
            if EXPORT_RDR:
                # data is a (ldr_file, rdr_file) tuple.
                line = str(frame) + ' ' + str(data[0]) + ' ' + str(data[1])
            else:
                # data is the bare ldr filename string.
                line = str(frame) + ' ' + data
            line += '\n'
            out_file.write(line)
        out_file.close()


def utc_from_gps(gps_week, seconds, leap_seconds=16):
    """ Converts from gps week time to UTC time. UTC time starts from
    JAN 1, 1970 and GPS time starts from JAN 6, 1980.
    http://leapsecond.com/java/gpsclock.htm

    Returns microseconds since the UNIX epoch as a long.
    """
    secs_in_week = 604800
    # Offset between the GPS and UNIX epochs, in seconds.
    secs_gps_to_utc = 315964800
    return long((gps_week * secs_in_week + seconds + secs_gps_to_utc -
                 leap_seconds) * 1000000)


def unpack_bag(basename, radar_bag_file):
    """ Unpacks the bag and writes individual segments to files. The
    ouput folder is the basename + _rdr. Each file name is the time of
    the starting segment

    Returns the sorted list of segment start times (microseconds).
    """
    radar_bag = rosbag.Bag(radar_bag_file)
    times = []
    cur_file = None
    rdr_dir = basename + '_rdr/'
    if not os.path.exists(rdr_dir):
        os.mkdir(rdr_dir)
    for topic, msg, t in radar_bag.read_messages(topics=['/object_list',
                                                         '/target_status']):
        # obj_id 61 marks the start of a new radar scan segment: close the
        # previous segment file and open a new one named after its time.
        # The 66000 us adjustment presumably aligns radar and camera
        # clocks — TODO confirm against the capture pipeline.
        if msg.obj_id == 61:
            if cur_file != None:
                cur_file.close()
            time = msg.header.stamp.to_nsec()/1000 - 66000
            times.append(time)
            cur_file = open(rdr_dir + str(time) + '.rdr', 'w')
        if cur_file != None:
            # obj_id 0 and 62 are skipped (presumably padding/terminator
            # records — verify against the radar message spec).
            if msg.obj_id == 0 or msg.obj_id == 62:
                continue
            line = None
            if topic == '/object_list':
                # 'isMeasurd' matches the (misspelled) field name in the
                # ROS message definition.
                if msg.isMeasurd == True:
                    fmt = 'O {id} {dist} {lat_dist} {rel_spd} {dyn_prop} {rcs} {w} {l}'
                    line = fmt.format(
                        id = msg.obj_id,
                        dist = msg.dist,
                        lat_dist = msg.lat_dist,
                        rel_spd = msg.relative_spd,
                        dyn_prop = msg.dyn_prop,
                        rcs = msg.rcs,
                        w = msg.width,
                        l = msg.length)
            else:
                # /target_status messages; only emit tracked targets.
                if msg.status > 0:
                    fmt = 'T {id} {dist} {lat_dist} {rel_spd} {dyn_prop} {traj} {w} {l} {obst_probab} {exist_probab} {rel_acc} {type} {lost_reason}'
                    line = fmt.format(
                        id = msg.obj_id,
                        dist = msg.dist,
                        lat_dist = msg.lat_dist,
                        rel_spd = msg.relative_spd,
                        dyn_prop = msg.dyn_prop,
                        traj = msg.traj,
                        w = msg.width,
                        l = msg.length,
                        obst_probab = msg.obst_probab,
                        exist_probab = msg.exist_probab,
                        rel_acc = msg.relative_acc,
                        type = msg.type,
                        lost_reason = msg.lost_reason
                        )
            if line != None:
                cur_file.write(line + '\n')
    times.sort()
    return times


def main():
    """ Prints out times """
    if len(sys.argv) != 4:
        print """
    Usage:
        ./FrameFinder.py <gps_output_file> <ldr_folder_directory> <radar_bag_file>
    """
        sys.exit()
    gps_file = sys.argv[1]
    frames_folder = sys.argv[2]
    radar_folder = sys.argv[3]
    FrameFinder(gps_file, frames_folder, radar_folder, write_to_file=True)

if __name__ == '__main__':
    main()
bsd-2-clause
quru/rvsr
networkx/generators/stochastic.py
15
1333
"""Stochastic graph."""
#    Copyright (C) 2010 by
#    Aric Hagberg <hagberg@lanl.gov>
#    Dan Schult <dschult@colgate.edu>
#    Pieter Swart <swart@lanl.gov>
#    All rights reserved.
#    BSD license.
import networkx as nx

__author__ = "Aric Hagberg <hagberg@lanl.gov>"
__all__ = ['stochastic_graph']


def stochastic_graph(G, copy=True, weight='weight'):
    """Return a right-stochastic representation of G.

    A right-stochastic graph is a weighted graph in which all of
    the node (out) neighbors edge weights sum to 1.

    Parameters
    -----------
    G : graph
      A NetworkX graph, must have valid edge weights

    copy : boolean, optional
      If True make a copy of the graph, otherwise modify original graph

    weight : key (optional)
      Edge data key used for weight.  If None all weights are set to 1.
    """
    # Exact type comparison (not isinstance) deliberately rejects only the
    # two multigraph classes themselves.
    if type(G) in (nx.MultiGraph, nx.MultiDiGraph):
        raise Exception("stochastic_graph not implemented for multigraphs")
    if not G.is_directed():
        raise Exception("stochastic_graph not defined for undirected graphs")

    # Either normalize a fresh DiGraph built from G, or the caller's own
    # graph in place.
    W = nx.DiGraph(G) if copy else G

    # Weighted out-degree of each node is the normalization constant for
    # all of that node's outgoing edges.
    out_weight = W.out_degree(weight=weight)
    for source, _, attrs in W.edges(data=True):
        # Edges missing the weight key are treated as weight 1.0.
        attrs[weight] = attrs.get(weight, 1.0) / out_weight[source]
    return W
mpl-2.0
elkingtonmcb/linux
arch/ia64/scripts/unwcheck.py
13143
1714
#!/usr/bin/python # # Usage: unwcheck.py FILE # # This script checks the unwind info of each function in file FILE # and verifies that the sum of the region-lengths matches the total # length of the function. # # Based on a shell/awk script originally written by Harish Patil, # which was converted to Perl by Matthew Chapman, which was converted # to Python by David Mosberger. # import os import re import sys if len(sys.argv) != 2: print "Usage: %s FILE" % sys.argv[0] sys.exit(2) readelf = os.getenv("READELF", "readelf") start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]") rlen_pattern = re.compile(".*rlen=([0-9]+)") def check_func (func, slots, rlen_sum): if slots != rlen_sum: global num_errors num_errors += 1 if not func: func = "[%#x-%#x]" % (start, end) print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum) return num_funcs = 0 num_errors = 0 func = False slots = 0 rlen_sum = 0 for line in os.popen("%s -u %s" % (readelf, sys.argv[1])): m = start_pattern.match(line) if m: check_func(func, slots, rlen_sum) func = m.group(1) start = long(m.group(2), 16) end = long(m.group(3), 16) slots = 3 * (end - start) / 16 rlen_sum = 0L num_funcs += 1 else: m = rlen_pattern.match(line) if m: rlen_sum += long(m.group(1)) check_func(func, slots, rlen_sum) if num_errors == 0: print "No errors detected in %u functions." % num_funcs else: if num_errors > 1: err="errors" else: err="error" print "%u %s detected in %u functions." % (num_errors, err, num_funcs) sys.exit(1)
gpl-2.0
tectronics/mythbox
resources/lib/IMDbPY/setup.py
5
8321
#!/usr/bin/env python

# IMDbPY distutils/setuptools build script.
# NOTE: Python 2 script using the (long-deprecated) setuptools Feature
# mechanism to make optional pieces selectable with --without-* flags.

import os
import sys
import ez_setup
ez_setup.use_setuptools()
import setuptools

# version of the software; in the code repository this represents
# the _next_ release.  setuptools will automatically add 'dev-rREVISION'.
version = '4.7'

home_page = 'http://imdbpy.sf.net/'

long_desc = """IMDbPY is a Python package useful to retrieve and
manage the data of the IMDb movie database about movies, people,
characters and companies.

Platform-independent and written in pure Python (and few C lines),
it can retrieve data from both the IMDb's web server and a local copy
of the whole database.

IMDbPY package can be very easily used by programmers and developers
to provide access to the IMDb's data to their programs.

Some simple example scripts - useful for the end users - are included
in this package; other IMDbPY-based programs are available at the
home page: %s
""" % home_page

dwnl_url = 'http://imdbpy.sf.net/?page=download'

classifiers = """\
Development Status :: 5 - Production/Stable
Environment :: Console
Environment :: Web Environment
Environment :: Handhelds/PDA's
Intended Audience :: Developers
Intended Audience :: End Users/Desktop
License :: OSI Approved :: GNU General Public License (GPL)
Natural Language :: English
Natural Language :: Italian
Natural Language :: Turkish
Programming Language :: Python
Programming Language :: C
Operating System :: OS Independent
Topic :: Database :: Front-Ends
Topic :: Internet :: WWW/HTTP :: Dynamic Content :: CGI Tools/Libraries
Topic :: Software Development :: Libraries :: Python Modules
"""

keywords = ['imdb', 'movie', 'people', 'database', 'cinema', 'film',
            'person', 'cast', 'actor', 'actress', 'director', 'sql',
            'character', 'company', 'package', 'plain text data files',
            'keywords', 'top250', 'bottom100', 'xml']

# Optional C helper module for the 'sql' data access system.
cutils = setuptools.Extension('imdb.parser.sql.cutils',
                              ['imdb/parser/sql/cutils.c'])

scripts = ['./bin/get_first_movie.py', './bin/get_movie.py',
           './bin/search_movie.py', './bin/get_first_person.py',
           './bin/get_person.py', './bin/search_person.py',
           './bin/get_character.py', './bin/get_first_character.py',
           './bin/get_company.py', './bin/search_character.py',
           './bin/search_company.py', './bin/get_first_company.py',
           './bin/get_keyword.py', './bin/search_keyword.py',
           './bin/get_top_bottom_movies.py']

# XXX: I'm not sure that 'etc' is a good idea.  Making it an absolute
#      path seems a recipe for a disaster (with bdist_egg, at least).
data_files = [('doc', setuptools.findall('docs')),
              ('etc', ['docs/imdbpy.cfg'])]

# Defining these 'features', it's possible to run commands like:
#   python ./setup.py --without-sql bdist
# having (in this example) imdb.parser.sql removed.
featCutils = setuptools.dist.Feature('compile the C module', standard=True,
                                     ext_modules=[cutils])

featLxml = setuptools.dist.Feature('add lxml dependency', standard=True,
                                   install_requires=['lxml'])

# XXX: it seems there's no way to specify that we need EITHER
#      SQLObject OR SQLAlchemy.
featSQLObject = setuptools.dist.Feature('add SQLObject dependency',
                                        standard=True,
                                        install_requires=['SQLObject'],
                                        require_features='sql')

featSQLAlchemy = setuptools.dist.Feature('add SQLAlchemy dependency',
                                         standard=True,
                                         install_requires=['SQLAlchemy',
                                                           'sqlalchemy-migrate'],
                                         require_features='sql')

sqlScripts = ['./bin/imdbpy2sql.py']

# standard=False so that it's not installed if both --without-sqlobject
# and --without-sqlalchemy are specified.
featSQL = setuptools.dist.Feature('access to SQL databases',
                                  standard=False,
                                  remove='imdb.parser.sql',
                                  scripts=sqlScripts)

features = {
    'cutils': featCutils,
    'sql': featSQL,
    'lxml': featLxml,
    'sqlobject': featSQLObject,
    'sqlalchemy': featSQLAlchemy
}

params = {
    # Meta-information.
    'name': 'IMDbPY',
    'version': version,
    'description': 'Python package to access the IMDb\'s database',
    'long_description': long_desc,
    'author': 'Davide Alberani',
    'author_email': 'da@erlug.linux.it',
    'contact': 'IMDbPY-devel mailing list',
    'contact_email': 'imdbpy-devel@lists.sourceforge.net',
    'maintainer': 'Davide Alberani',
    'maintainer_email': 'da@erlug.linux.it',
    'license': 'GPL',
    'platforms': 'any',
    'keywords': keywords,
    'classifiers': filter(None, classifiers.split("\n")),
    'zip_safe': False, # XXX: I guess, at least...

    # Download URLs.
    'url': home_page,
    'download_url': dwnl_url,

    # Scripts.
    'scripts': scripts,

    # Documentation files.
    'data_files': data_files,

    # C extensions.
    #'ext_modules': [cutils],

    # Requirements.  XXX: maybe we can use extras_require?
    #'install_requires': install_requires,
    #'extras_require': extras_require,
    'features': features,

    # Packages.
    'packages': setuptools.find_packages()
}

# Friendly message printed when setup() bails out (missing dependency or
# C compiler); the original exception is re-raised after it.
ERR_MSG = """
====================================================================
  ERROR
  =====

  Aaargh!  An error!  An error!
  Curse my metal body, I wasn't fast enough.  It's all my fault!

  Anyway, if you were trying to build a package or install IMDbPY to your
  system, looks like we're unable to fetch or install some dependencies,
  or to compile the C module.

  The best solution is to resolve these dependencies (maybe you're
  not connected to Internet?) and/or install a C compiler.

  You may, however, go on without some optional pieces of IMDbPY;
  try re-running this script with the corresponding optional argument:

      --without-lxml        exclude lxml (speeds up 'http')
      --without-cutils      don't compile the C module (speeds up 'sql')
      --without-sqlobject   exclude SQLObject  (you need at least one of)
      --without-sqlalchemy  exclude SQLAlchemy (SQLObject or SQLAlchemy,)
                                               (if you want to access a )
                                               (local SQL database      )
      --without-sql         no access to SQL databases (implied if both
                            --without-sqlobject and --without-sqlalchemy
                            are used)

  Example:
      python ./setup.py --without-lxml --without-sql install

  The caught exception, is re-raise below:
"""

REBUILDMO_DIR = os.path.join('imdb', 'locale')
REBUILDMO_NAME = 'rebuildmo'

def runRebuildmo():
    """Call the function to rebuild the locales.

    Imports imdb/locale/rebuildmo.py by path and runs it from inside that
    directory; sys.path and the cwd are restored afterwards. Returns the
    list of languages whose .mo files were (re)built, [] on failure.
    """
    cwd = os.getcwd()
    import sys
    path = list(sys.path)
    languages = []
    try:
        import imp
        scriptPath = os.path.dirname(__file__)
        modulePath = os.path.join(cwd, scriptPath, REBUILDMO_DIR)
        sys.path += [modulePath, '.', cwd]
        modInfo = imp.find_module(REBUILDMO_NAME, [modulePath, '.', cwd])
        rebuildmo = imp.load_module('rebuildmo', *modInfo)
        # rebuildmo writes its output relative to its own directory.
        os.chdir(modulePath)
        languages = rebuildmo.rebuildmo()
        print 'Created locale for: %s.' % ' '.join(languages)
    except Exception, e:
        # Locale rebuild is best-effort; a failure must not abort setup.
        print 'ERROR: unable to rebuild .mo files; caught exception %s' % e
    sys.path = path
    os.chdir(cwd)
    return languages


def hasCommand():
    """Return true if at least one command is found on the command line."""
    args = sys.argv[1:]
    if '--help' in args: return False
    if '-h' in args: return False
    for arg in args:
        # A non-option argument is a distutils command (install, bdist, ...).
        if arg and not arg.startswith('-'):
            return True
    return False


try:
    # Only rebuild locales when a real command is being run (not --help).
    if hasCommand():
        languages = runRebuildmo()
    else:
        languages = []
    if languages:
        data_files.append(('imdb/locale', ['imdb/locale/imdbpy.pot']))
    for lang in languages:
        files_found = setuptools.findall('imdb/locale/%s' % lang)
        if not files_found:
            continue
        base_dir = os.path.dirname(files_found[0])
        data_files.append(('imdb/locale',
                           ['imdb/locale/imdbpy-%s.po' % lang]))
        if not base_dir:
            continue
        data_files.append((base_dir, files_found))
    setuptools.setup(**params)
except SystemExit:
    print ERR_MSG
    raise
gpl-2.0
magvugr/AT
EntVirtual/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/euctwfreq.py
3133
34872
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # EUCTW frequency table # Converted from big5 work # by Taiwan's Mandarin Promotion Council # <http:#www.edu.tw:81/mandr/> # 128 --> 0.42261 # 256 --> 0.57851 # 512 --> 0.74851 # 1024 --> 0.89384 # 2048 --> 0.97583 # # Idea Distribution Ratio = 0.74851/(1-0.74851) =2.98 # Random Distribution Ration = 512/(5401-512)=0.105 # # Typical Distribution Ratio about 25% of Ideal one, still much higher than RDR EUCTW_TYPICAL_DISTRIBUTION_RATIO = 0.75 # Char to FreqOrder table , EUCTW_TABLE_SIZE = 8102 EUCTWCharToFreqOrder = ( 1,1800,1506, 255,1431, 198, 9, 82, 6,7310, 177, 202,3615,1256,2808, 110, # 2742 3735, 33,3241, 261, 76, 44,2113, 16,2931,2184,1176, 659,3868, 26,3404,2643, # 2758 1198,3869,3313,4060, 410,2211, 302, 590, 361,1963, 8, 204, 58,4296,7311,1931, # 2774 63,7312,7313, 317,1614, 75, 222, 
159,4061,2412,1480,7314,3500,3068, 224,2809, # 2790 3616, 3, 10,3870,1471, 29,2774,1135,2852,1939, 873, 130,3242,1123, 312,7315, # 2806 4297,2051, 507, 252, 682,7316, 142,1914, 124, 206,2932, 34,3501,3173, 64, 604, # 2822 7317,2494,1976,1977, 155,1990, 645, 641,1606,7318,3405, 337, 72, 406,7319, 80, # 2838 630, 238,3174,1509, 263, 939,1092,2644, 756,1440,1094,3406, 449, 69,2969, 591, # 2854 179,2095, 471, 115,2034,1843, 60, 50,2970, 134, 806,1868, 734,2035,3407, 180, # 2870 995,1607, 156, 537,2893, 688,7320, 319,1305, 779,2144, 514,2374, 298,4298, 359, # 2886 2495, 90,2707,1338, 663, 11, 906,1099,2545, 20,2436, 182, 532,1716,7321, 732, # 2902 1376,4062,1311,1420,3175, 25,2312,1056, 113, 399, 382,1949, 242,3408,2467, 529, # 2918 3243, 475,1447,3617,7322, 117, 21, 656, 810,1297,2295,2329,3502,7323, 126,4063, # 2934 706, 456, 150, 613,4299, 71,1118,2036,4064, 145,3069, 85, 835, 486,2114,1246, # 2950 1426, 428, 727,1285,1015, 800, 106, 623, 303,1281,7324,2127,2354, 347,3736, 221, # 2966 3503,3110,7325,1955,1153,4065, 83, 296,1199,3070, 192, 624, 93,7326, 822,1897, # 2982 2810,3111, 795,2064, 991,1554,1542,1592, 27, 43,2853, 859, 139,1456, 860,4300, # 2998 437, 712,3871, 164,2392,3112, 695, 211,3017,2096, 195,3872,1608,3504,3505,3618, # 3014 3873, 234, 811,2971,2097,3874,2229,1441,3506,1615,2375, 668,2076,1638, 305, 228, # 3030 1664,4301, 467, 415,7327, 262,2098,1593, 239, 108, 300, 200,1033, 512,1247,2077, # 3046 7328,7329,2173,3176,3619,2673, 593, 845,1062,3244, 88,1723,2037,3875,1950, 212, # 3062 266, 152, 149, 468,1898,4066,4302, 77, 187,7330,3018, 37, 5,2972,7331,3876, # 3078 7332,7333, 39,2517,4303,2894,3177,2078, 55, 148, 74,4304, 545, 483,1474,1029, # 3094 1665, 217,1869,1531,3113,1104,2645,4067, 24, 172,3507, 900,3877,3508,3509,4305, # 3110 32,1408,2811,1312, 329, 487,2355,2247,2708, 784,2674, 4,3019,3314,1427,1788, # 3126 188, 109, 499,7334,3620,1717,1789, 888,1217,3020,4306,7335,3510,7336,3315,1520, # 3142 3621,3878, 196,1034, 775,7337,7338, 929,1815, 249, 
439, 38,7339,1063,7340, 794, # 3158 3879,1435,2296, 46, 178,3245,2065,7341,2376,7342, 214,1709,4307, 804, 35, 707, # 3174 324,3622,1601,2546, 140, 459,4068,7343,7344,1365, 839, 272, 978,2257,2572,3409, # 3190 2128,1363,3623,1423, 697, 100,3071, 48, 70,1231, 495,3114,2193,7345,1294,7346, # 3206 2079, 462, 586,1042,3246, 853, 256, 988, 185,2377,3410,1698, 434,1084,7347,3411, # 3222 314,2615,2775,4308,2330,2331, 569,2280, 637,1816,2518, 757,1162,1878,1616,3412, # 3238 287,1577,2115, 768,4309,1671,2854,3511,2519,1321,3737, 909,2413,7348,4069, 933, # 3254 3738,7349,2052,2356,1222,4310, 765,2414,1322, 786,4311,7350,1919,1462,1677,2895, # 3270 1699,7351,4312,1424,2437,3115,3624,2590,3316,1774,1940,3413,3880,4070, 309,1369, # 3286 1130,2812, 364,2230,1653,1299,3881,3512,3882,3883,2646, 525,1085,3021, 902,2000, # 3302 1475, 964,4313, 421,1844,1415,1057,2281, 940,1364,3116, 376,4314,4315,1381, 7, # 3318 2520, 983,2378, 336,1710,2675,1845, 321,3414, 559,1131,3022,2742,1808,1132,1313, # 3334 265,1481,1857,7352, 352,1203,2813,3247, 167,1089, 420,2814, 776, 792,1724,3513, # 3350 4071,2438,3248,7353,4072,7354, 446, 229, 333,2743, 901,3739,1200,1557,4316,2647, # 3366 1920, 395,2744,2676,3740,4073,1835, 125, 916,3178,2616,4317,7355,7356,3741,7357, # 3382 7358,7359,4318,3117,3625,1133,2547,1757,3415,1510,2313,1409,3514,7360,2145, 438, # 3398 2591,2896,2379,3317,1068, 958,3023, 461, 311,2855,2677,4074,1915,3179,4075,1978, # 3414 383, 750,2745,2617,4076, 274, 539, 385,1278,1442,7361,1154,1964, 384, 561, 210, # 3430 98,1295,2548,3515,7362,1711,2415,1482,3416,3884,2897,1257, 129,7363,3742, 642, # 3446 523,2776,2777,2648,7364, 141,2231,1333, 68, 176, 441, 876, 907,4077, 603,2592, # 3462 710, 171,3417, 404, 549, 18,3118,2393,1410,3626,1666,7365,3516,4319,2898,4320, # 3478 7366,2973, 368,7367, 146, 366, 99, 871,3627,1543, 748, 807,1586,1185, 22,2258, # 3494 379,3743,3180,7368,3181, 505,1941,2618,1991,1382,2314,7369, 380,2357, 218, 702, # 3510 
1817,1248,3418,3024,3517,3318,3249,7370,2974,3628, 930,3250,3744,7371, 59,7372, # 3526 585, 601,4078, 497,3419,1112,1314,4321,1801,7373,1223,1472,2174,7374, 749,1836, # 3542 690,1899,3745,1772,3885,1476, 429,1043,1790,2232,2116, 917,4079, 447,1086,1629, # 3558 7375, 556,7376,7377,2020,1654, 844,1090, 105, 550, 966,1758,2815,1008,1782, 686, # 3574 1095,7378,2282, 793,1602,7379,3518,2593,4322,4080,2933,2297,4323,3746, 980,2496, # 3590 544, 353, 527,4324, 908,2678,2899,7380, 381,2619,1942,1348,7381,1341,1252, 560, # 3606 3072,7382,3420,2856,7383,2053, 973, 886,2080, 143,4325,7384,7385, 157,3886, 496, # 3622 4081, 57, 840, 540,2038,4326,4327,3421,2117,1445, 970,2259,1748,1965,2081,4082, # 3638 3119,1234,1775,3251,2816,3629, 773,1206,2129,1066,2039,1326,3887,1738,1725,4083, # 3654 279,3120, 51,1544,2594, 423,1578,2130,2066, 173,4328,1879,7386,7387,1583, 264, # 3670 610,3630,4329,2439, 280, 154,7388,7389,7390,1739, 338,1282,3073, 693,2857,1411, # 3686 1074,3747,2440,7391,4330,7392,7393,1240, 952,2394,7394,2900,1538,2679, 685,1483, # 3702 4084,2468,1436, 953,4085,2054,4331, 671,2395, 79,4086,2441,3252, 608, 567,2680, # 3718 3422,4087,4088,1691, 393,1261,1791,2396,7395,4332,7396,7397,7398,7399,1383,1672, # 3734 3748,3182,1464, 522,1119, 661,1150, 216, 675,4333,3888,1432,3519, 609,4334,2681, # 3750 2397,7400,7401,7402,4089,3025, 0,7403,2469, 315, 231,2442, 301,3319,4335,2380, # 3766 7404, 233,4090,3631,1818,4336,4337,7405, 96,1776,1315,2082,7406, 257,7407,1809, # 3782 3632,2709,1139,1819,4091,2021,1124,2163,2778,1777,2649,7408,3074, 363,1655,3183, # 3798 7409,2975,7410,7411,7412,3889,1567,3890, 718, 103,3184, 849,1443, 341,3320,2934, # 3814 1484,7413,1712, 127, 67, 339,4092,2398, 679,1412, 821,7414,7415, 834, 738, 351, # 3830 2976,2146, 846, 235,1497,1880, 418,1992,3749,2710, 186,1100,2147,2746,3520,1545, # 3846 1355,2935,2858,1377, 583,3891,4093,2573,2977,7416,1298,3633,1078,2549,3634,2358, # 3862 78,3750,3751, 267,1289,2099,2001,1594,4094, 348, 
369,1274,2194,2175,1837,4338, # 3878 1820,2817,3635,2747,2283,2002,4339,2936,2748, 144,3321, 882,4340,3892,2749,3423, # 3894 4341,2901,7417,4095,1726, 320,7418,3893,3026, 788,2978,7419,2818,1773,1327,2859, # 3910 3894,2819,7420,1306,4342,2003,1700,3752,3521,2359,2650, 787,2022, 506, 824,3636, # 3926 534, 323,4343,1044,3322,2023,1900, 946,3424,7421,1778,1500,1678,7422,1881,4344, # 3942 165, 243,4345,3637,2521, 123, 683,4096, 764,4346, 36,3895,1792, 589,2902, 816, # 3958 626,1667,3027,2233,1639,1555,1622,3753,3896,7423,3897,2860,1370,1228,1932, 891, # 3974 2083,2903, 304,4097,7424, 292,2979,2711,3522, 691,2100,4098,1115,4347, 118, 662, # 3990 7425, 611,1156, 854,2381,1316,2861, 2, 386, 515,2904,7426,7427,3253, 868,2234, # 4006 1486, 855,2651, 785,2212,3028,7428,1040,3185,3523,7429,3121, 448,7430,1525,7431, # 4022 2164,4348,7432,3754,7433,4099,2820,3524,3122, 503, 818,3898,3123,1568, 814, 676, # 4038 1444, 306,1749,7434,3755,1416,1030, 197,1428, 805,2821,1501,4349,7435,7436,7437, # 4054 1993,7438,4350,7439,7440,2195, 13,2779,3638,2980,3124,1229,1916,7441,3756,2131, # 4070 7442,4100,4351,2399,3525,7443,2213,1511,1727,1120,7444,7445, 646,3757,2443, 307, # 4086 7446,7447,1595,3186,7448,7449,7450,3639,1113,1356,3899,1465,2522,2523,7451, 519, # 4102 7452, 128,2132, 92,2284,1979,7453,3900,1512, 342,3125,2196,7454,2780,2214,1980, # 4118 3323,7455, 290,1656,1317, 789, 827,2360,7456,3758,4352, 562, 581,3901,7457, 401, # 4134 4353,2248, 94,4354,1399,2781,7458,1463,2024,4355,3187,1943,7459, 828,1105,4101, # 4150 1262,1394,7460,4102, 605,4356,7461,1783,2862,7462,2822, 819,2101, 578,2197,2937, # 4166 7463,1502, 436,3254,4103,3255,2823,3902,2905,3425,3426,7464,2712,2315,7465,7466, # 4182 2332,2067, 23,4357, 193, 826,3759,2102, 699,1630,4104,3075, 390,1793,1064,3526, # 4198 7467,1579,3076,3077,1400,7468,4105,1838,1640,2863,7469,4358,4359, 137,4106, 598, # 4214 3078,1966, 780, 104, 974,2938,7470, 278, 899, 253, 402, 572, 504, 493,1339,7471, # 4230 
3903,1275,4360,2574,2550,7472,3640,3029,3079,2249, 565,1334,2713, 863, 41,7473, # 4246 7474,4361,7475,1657,2333, 19, 463,2750,4107, 606,7476,2981,3256,1087,2084,1323, # 4262 2652,2982,7477,1631,1623,1750,4108,2682,7478,2864, 791,2714,2653,2334, 232,2416, # 4278 7479,2983,1498,7480,2654,2620, 755,1366,3641,3257,3126,2025,1609, 119,1917,3427, # 4294 862,1026,4109,7481,3904,3760,4362,3905,4363,2260,1951,2470,7482,1125, 817,4110, # 4310 4111,3906,1513,1766,2040,1487,4112,3030,3258,2824,3761,3127,7483,7484,1507,7485, # 4326 2683, 733, 40,1632,1106,2865, 345,4113, 841,2524, 230,4364,2984,1846,3259,3428, # 4342 7486,1263, 986,3429,7487, 735, 879, 254,1137, 857, 622,1300,1180,1388,1562,3907, # 4358 3908,2939, 967,2751,2655,1349, 592,2133,1692,3324,2985,1994,4114,1679,3909,1901, # 4374 2185,7488, 739,3642,2715,1296,1290,7489,4115,2198,2199,1921,1563,2595,2551,1870, # 4390 2752,2986,7490, 435,7491, 343,1108, 596, 17,1751,4365,2235,3430,3643,7492,4366, # 4406 294,3527,2940,1693, 477, 979, 281,2041,3528, 643,2042,3644,2621,2782,2261,1031, # 4422 2335,2134,2298,3529,4367, 367,1249,2552,7493,3530,7494,4368,1283,3325,2004, 240, # 4438 1762,3326,4369,4370, 836,1069,3128, 474,7495,2148,2525, 268,3531,7496,3188,1521, # 4454 1284,7497,1658,1546,4116,7498,3532,3533,7499,4117,3327,2684,1685,4118, 961,1673, # 4470 2622, 190,2005,2200,3762,4371,4372,7500, 570,2497,3645,1490,7501,4373,2623,3260, # 4486 1956,4374, 584,1514, 396,1045,1944,7502,4375,1967,2444,7503,7504,4376,3910, 619, # 4502 7505,3129,3261, 215,2006,2783,2553,3189,4377,3190,4378, 763,4119,3763,4379,7506, # 4518 7507,1957,1767,2941,3328,3646,1174, 452,1477,4380,3329,3130,7508,2825,1253,2382, # 4534 2186,1091,2285,4120, 492,7509, 638,1169,1824,2135,1752,3911, 648, 926,1021,1324, # 4550 4381, 520,4382, 997, 847,1007, 892,4383,3764,2262,1871,3647,7510,2400,1784,4384, # 4566 1952,2942,3080,3191,1728,4121,2043,3648,4385,2007,1701,3131,1551, 30,2263,4122, # 4582 7511,2026,4386,3534,7512, 501,7513,4123, 
594,3431,2165,1821,3535,3432,3536,3192, # 4598 829,2826,4124,7514,1680,3132,1225,4125,7515,3262,4387,4126,3133,2336,7516,4388, # 4614 4127,7517,3912,3913,7518,1847,2383,2596,3330,7519,4389, 374,3914, 652,4128,4129, # 4630 375,1140, 798,7520,7521,7522,2361,4390,2264, 546,1659, 138,3031,2445,4391,7523, # 4646 2250, 612,1848, 910, 796,3765,1740,1371, 825,3766,3767,7524,2906,2554,7525, 692, # 4662 444,3032,2624, 801,4392,4130,7526,1491, 244,1053,3033,4131,4132, 340,7527,3915, # 4678 1041,2987, 293,1168, 87,1357,7528,1539, 959,7529,2236, 721, 694,4133,3768, 219, # 4694 1478, 644,1417,3331,2656,1413,1401,1335,1389,3916,7530,7531,2988,2362,3134,1825, # 4710 730,1515, 184,2827, 66,4393,7532,1660,2943, 246,3332, 378,1457, 226,3433, 975, # 4726 3917,2944,1264,3537, 674, 696,7533, 163,7534,1141,2417,2166, 713,3538,3333,4394, # 4742 3918,7535,7536,1186, 15,7537,1079,1070,7538,1522,3193,3539, 276,1050,2716, 758, # 4758 1126, 653,2945,3263,7539,2337, 889,3540,3919,3081,2989, 903,1250,4395,3920,3434, # 4774 3541,1342,1681,1718, 766,3264, 286, 89,2946,3649,7540,1713,7541,2597,3334,2990, # 4790 7542,2947,2215,3194,2866,7543,4396,2498,2526, 181, 387,1075,3921, 731,2187,3335, # 4806 7544,3265, 310, 313,3435,2299, 770,4134, 54,3034, 189,4397,3082,3769,3922,7545, # 4822 1230,1617,1849, 355,3542,4135,4398,3336, 111,4136,3650,1350,3135,3436,3035,4137, # 4838 2149,3266,3543,7546,2784,3923,3924,2991, 722,2008,7547,1071, 247,1207,2338,2471, # 4854 1378,4399,2009, 864,1437,1214,4400, 373,3770,1142,2216, 667,4401, 442,2753,2555, # 4870 3771,3925,1968,4138,3267,1839, 837, 170,1107, 934,1336,1882,7548,7549,2118,4139, # 4886 2828, 743,1569,7550,4402,4140, 582,2384,1418,3437,7551,1802,7552, 357,1395,1729, # 4902 3651,3268,2418,1564,2237,7553,3083,3772,1633,4403,1114,2085,4141,1532,7554, 482, # 4918 2446,4404,7555,7556,1492, 833,1466,7557,2717,3544,1641,2829,7558,1526,1272,3652, # 4934 4142,1686,1794, 416,2556,1902,1953,1803,7559,3773,2785,3774,1159,2316,7560,2867, # 4950 
4405,1610,1584,3036,2419,2754, 443,3269,1163,3136,7561,7562,3926,7563,4143,2499, # 4966 3037,4406,3927,3137,2103,1647,3545,2010,1872,4144,7564,4145, 431,3438,7565, 250, # 4982 97, 81,4146,7566,1648,1850,1558, 160, 848,7567, 866, 740,1694,7568,2201,2830, # 4998 3195,4147,4407,3653,1687, 950,2472, 426, 469,3196,3654,3655,3928,7569,7570,1188, # 5014 424,1995, 861,3546,4148,3775,2202,2685, 168,1235,3547,4149,7571,2086,1674,4408, # 5030 3337,3270, 220,2557,1009,7572,3776, 670,2992, 332,1208, 717,7573,7574,3548,2447, # 5046 3929,3338,7575, 513,7576,1209,2868,3339,3138,4409,1080,7577,7578,7579,7580,2527, # 5062 3656,3549, 815,1587,3930,3931,7581,3550,3439,3777,1254,4410,1328,3038,1390,3932, # 5078 1741,3933,3778,3934,7582, 236,3779,2448,3271,7583,7584,3657,3780,1273,3781,4411, # 5094 7585, 308,7586,4412, 245,4413,1851,2473,1307,2575, 430, 715,2136,2449,7587, 270, # 5110 199,2869,3935,7588,3551,2718,1753, 761,1754, 725,1661,1840,4414,3440,3658,7589, # 5126 7590, 587, 14,3272, 227,2598, 326, 480,2265, 943,2755,3552, 291, 650,1883,7591, # 5142 1702,1226, 102,1547, 62,3441, 904,4415,3442,1164,4150,7592,7593,1224,1548,2756, # 5158 391, 498,1493,7594,1386,1419,7595,2055,1177,4416, 813, 880,1081,2363, 566,1145, # 5174 4417,2286,1001,1035,2558,2599,2238, 394,1286,7596,7597,2068,7598, 86,1494,1730, # 5190 3936, 491,1588, 745, 897,2948, 843,3340,3937,2757,2870,3273,1768, 998,2217,2069, # 5206 397,1826,1195,1969,3659,2993,3341, 284,7599,3782,2500,2137,2119,1903,7600,3938, # 5222 2150,3939,4151,1036,3443,1904, 114,2559,4152, 209,1527,7601,7602,2949,2831,2625, # 5238 2385,2719,3139, 812,2560,7603,3274,7604,1559, 737,1884,3660,1210, 885, 28,2686, # 5254 3553,3783,7605,4153,1004,1779,4418,7606, 346,1981,2218,2687,4419,3784,1742, 797, # 5270 1642,3940,1933,1072,1384,2151, 896,3941,3275,3661,3197,2871,3554,7607,2561,1958, # 5286 4420,2450,1785,7608,7609,7610,3942,4154,1005,1308,3662,4155,2720,4421,4422,1528, # 5302 2600, 161,1178,4156,1982, 987,4423,1101,4157, 
631,3943,1157,3198,2420,1343,1241, # 5318 1016,2239,2562, 372, 877,2339,2501,1160, 555,1934, 911,3944,7611, 466,1170, 169, # 5334 1051,2907,2688,3663,2474,2994,1182,2011,2563,1251,2626,7612, 992,2340,3444,1540, # 5350 2721,1201,2070,2401,1996,2475,7613,4424, 528,1922,2188,1503,1873,1570,2364,3342, # 5366 3276,7614, 557,1073,7615,1827,3445,2087,2266,3140,3039,3084, 767,3085,2786,4425, # 5382 1006,4158,4426,2341,1267,2176,3664,3199, 778,3945,3200,2722,1597,2657,7616,4427, # 5398 7617,3446,7618,7619,7620,3277,2689,1433,3278, 131, 95,1504,3946, 723,4159,3141, # 5414 1841,3555,2758,2189,3947,2027,2104,3665,7621,2995,3948,1218,7622,3343,3201,3949, # 5430 4160,2576, 248,1634,3785, 912,7623,2832,3666,3040,3786, 654, 53,7624,2996,7625, # 5446 1688,4428, 777,3447,1032,3950,1425,7626, 191, 820,2120,2833, 971,4429, 931,3202, # 5462 135, 664, 783,3787,1997, 772,2908,1935,3951,3788,4430,2909,3203, 282,2723, 640, # 5478 1372,3448,1127, 922, 325,3344,7627,7628, 711,2044,7629,7630,3952,2219,2787,1936, # 5494 3953,3345,2220,2251,3789,2300,7631,4431,3790,1258,3279,3954,3204,2138,2950,3955, # 5510 3956,7632,2221, 258,3205,4432, 101,1227,7633,3280,1755,7634,1391,3281,7635,2910, # 5526 2056, 893,7636,7637,7638,1402,4161,2342,7639,7640,3206,3556,7641,7642, 878,1325, # 5542 1780,2788,4433, 259,1385,2577, 744,1183,2267,4434,7643,3957,2502,7644, 684,1024, # 5558 4162,7645, 472,3557,3449,1165,3282,3958,3959, 322,2152, 881, 455,1695,1152,1340, # 5574 660, 554,2153,4435,1058,4436,4163, 830,1065,3346,3960,4437,1923,7646,1703,1918, # 5590 7647, 932,2268, 122,7648,4438, 947, 677,7649,3791,2627, 297,1905,1924,2269,4439, # 5606 2317,3283,7650,7651,4164,7652,4165, 84,4166, 112, 989,7653, 547,1059,3961, 701, # 5622 3558,1019,7654,4167,7655,3450, 942, 639, 457,2301,2451, 993,2951, 407, 851, 494, # 5638 4440,3347, 927,7656,1237,7657,2421,3348, 573,4168, 680, 921,2911,1279,1874, 285, # 5654 790,1448,1983, 719,2167,7658,7659,4441,3962,3963,1649,7660,1541, 563,7661,1077, # 5670 7662,3349,3041,3451, 
511,2997,3964,3965,3667,3966,1268,2564,3350,3207,4442,4443, # 5686 7663, 535,1048,1276,1189,2912,2028,3142,1438,1373,2834,2952,1134,2012,7664,4169, # 5702 1238,2578,3086,1259,7665, 700,7666,2953,3143,3668,4170,7667,4171,1146,1875,1906, # 5718 4444,2601,3967, 781,2422, 132,1589, 203, 147, 273,2789,2402, 898,1786,2154,3968, # 5734 3969,7668,3792,2790,7669,7670,4445,4446,7671,3208,7672,1635,3793, 965,7673,1804, # 5750 2690,1516,3559,1121,1082,1329,3284,3970,1449,3794, 65,1128,2835,2913,2759,1590, # 5766 3795,7674,7675, 12,2658, 45, 976,2579,3144,4447, 517,2528,1013,1037,3209,7676, # 5782 3796,2836,7677,3797,7678,3452,7679,2602, 614,1998,2318,3798,3087,2724,2628,7680, # 5798 2580,4172, 599,1269,7681,1810,3669,7682,2691,3088, 759,1060, 489,1805,3351,3285, # 5814 1358,7683,7684,2386,1387,1215,2629,2252, 490,7685,7686,4173,1759,2387,2343,7687, # 5830 4448,3799,1907,3971,2630,1806,3210,4449,3453,3286,2760,2344, 874,7688,7689,3454, # 5846 3670,1858, 91,2914,3671,3042,3800,4450,7690,3145,3972,2659,7691,3455,1202,1403, # 5862 3801,2954,2529,1517,2503,4451,3456,2504,7692,4452,7693,2692,1885,1495,1731,3973, # 5878 2365,4453,7694,2029,7695,7696,3974,2693,1216, 237,2581,4174,2319,3975,3802,4454, # 5894 4455,2694,3560,3457, 445,4456,7697,7698,7699,7700,2761, 61,3976,3672,1822,3977, # 5910 7701, 687,2045, 935, 925, 405,2660, 703,1096,1859,2725,4457,3978,1876,1367,2695, # 5926 3352, 918,2105,1781,2476, 334,3287,1611,1093,4458, 564,3146,3458,3673,3353, 945, # 5942 2631,2057,4459,7702,1925, 872,4175,7703,3459,2696,3089, 349,4176,3674,3979,4460, # 5958 3803,4177,3675,2155,3980,4461,4462,4178,4463,2403,2046, 782,3981, 400, 251,4179, # 5974 1624,7704,7705, 277,3676, 299,1265, 476,1191,3804,2121,4180,4181,1109, 205,7706, # 5990 2582,1000,2156,3561,1860,7707,7708,7709,4464,7710,4465,2565, 107,2477,2157,3982, # 6006 3460,3147,7711,1533, 541,1301, 158, 753,4182,2872,3562,7712,1696, 370,1088,4183, # 6022 4466,3563, 579, 327, 440, 162,2240, 269,1937,1374,3461, 968,3043, 56,1396,3090, # 6038 
2106,3288,3354,7713,1926,2158,4467,2998,7714,3564,7715,7716,3677,4468,2478,7717, # 6054 2791,7718,1650,4469,7719,2603,7720,7721,3983,2661,3355,1149,3356,3984,3805,3985, # 6070 7722,1076, 49,7723, 951,3211,3289,3290, 450,2837, 920,7724,1811,2792,2366,4184, # 6086 1908,1138,2367,3806,3462,7725,3212,4470,1909,1147,1518,2423,4471,3807,7726,4472, # 6102 2388,2604, 260,1795,3213,7727,7728,3808,3291, 708,7729,3565,1704,7730,3566,1351, # 6118 1618,3357,2999,1886, 944,4185,3358,4186,3044,3359,4187,7731,3678, 422, 413,1714, # 6134 3292, 500,2058,2345,4188,2479,7732,1344,1910, 954,7733,1668,7734,7735,3986,2404, # 6150 4189,3567,3809,4190,7736,2302,1318,2505,3091, 133,3092,2873,4473, 629, 31,2838, # 6166 2697,3810,4474, 850, 949,4475,3987,2955,1732,2088,4191,1496,1852,7737,3988, 620, # 6182 3214, 981,1242,3679,3360,1619,3680,1643,3293,2139,2452,1970,1719,3463,2168,7738, # 6198 3215,7739,7740,3361,1828,7741,1277,4476,1565,2047,7742,1636,3568,3093,7743, 869, # 6214 2839, 655,3811,3812,3094,3989,3000,3813,1310,3569,4477,7744,7745,7746,1733, 558, # 6230 4478,3681, 335,1549,3045,1756,4192,3682,1945,3464,1829,1291,1192, 470,2726,2107, # 6246 2793, 913,1054,3990,7747,1027,7748,3046,3991,4479, 982,2662,3362,3148,3465,3216, # 6262 3217,1946,2794,7749, 571,4480,7750,1830,7751,3570,2583,1523,2424,7752,2089, 984, # 6278 4481,3683,1959,7753,3684, 852, 923,2795,3466,3685, 969,1519, 999,2048,2320,1705, # 6294 7754,3095, 615,1662, 151, 597,3992,2405,2321,1049, 275,4482,3686,4193, 568,3687, # 6310 3571,2480,4194,3688,7755,2425,2270, 409,3218,7756,1566,2874,3467,1002, 769,2840, # 6326 194,2090,3149,3689,2222,3294,4195, 628,1505,7757,7758,1763,2177,3001,3993, 521, # 6342 1161,2584,1787,2203,2406,4483,3994,1625,4196,4197, 412, 42,3096, 464,7759,2632, # 6358 4484,3363,1760,1571,2875,3468,2530,1219,2204,3814,2633,2140,2368,4485,4486,3295, # 6374 1651,3364,3572,7760,7761,3573,2481,3469,7762,3690,7763,7764,2271,2091, 460,7765, # 6390 4487,7766,3002, 962, 588,3574, 289,3219,2634,1116, 
52,7767,3047,1796,7768,7769, # 6406 7770,1467,7771,1598,1143,3691,4198,1984,1734,1067,4488,1280,3365, 465,4489,1572, # 6422 510,7772,1927,2241,1812,1644,3575,7773,4490,3692,7774,7775,2663,1573,1534,7776, # 6438 7777,4199, 536,1807,1761,3470,3815,3150,2635,7778,7779,7780,4491,3471,2915,1911, # 6454 2796,7781,3296,1122, 377,3220,7782, 360,7783,7784,4200,1529, 551,7785,2059,3693, # 6470 1769,2426,7786,2916,4201,3297,3097,2322,2108,2030,4492,1404, 136,1468,1479, 672, # 6486 1171,3221,2303, 271,3151,7787,2762,7788,2049, 678,2727, 865,1947,4493,7789,2013, # 6502 3995,2956,7790,2728,2223,1397,3048,3694,4494,4495,1735,2917,3366,3576,7791,3816, # 6518 509,2841,2453,2876,3817,7792,7793,3152,3153,4496,4202,2531,4497,2304,1166,1010, # 6534 552, 681,1887,7794,7795,2957,2958,3996,1287,1596,1861,3154, 358, 453, 736, 175, # 6550 478,1117, 905,1167,1097,7796,1853,1530,7797,1706,7798,2178,3472,2287,3695,3473, # 6566 3577,4203,2092,4204,7799,3367,1193,2482,4205,1458,2190,2205,1862,1888,1421,3298, # 6582 2918,3049,2179,3474, 595,2122,7800,3997,7801,7802,4206,1707,2636, 223,3696,1359, # 6598 751,3098, 183,3475,7803,2797,3003, 419,2369, 633, 704,3818,2389, 241,7804,7805, # 6614 7806, 838,3004,3697,2272,2763,2454,3819,1938,2050,3998,1309,3099,2242,1181,7807, # 6630 1136,2206,3820,2370,1446,4207,2305,4498,7808,7809,4208,1055,2605, 484,3698,7810, # 6646 3999, 625,4209,2273,3368,1499,4210,4000,7811,4001,4211,3222,2274,2275,3476,7812, # 6662 7813,2764, 808,2606,3699,3369,4002,4212,3100,2532, 526,3370,3821,4213, 955,7814, # 6678 1620,4214,2637,2427,7815,1429,3700,1669,1831, 994, 928,7816,3578,1260,7817,7818, # 6694 7819,1948,2288, 741,2919,1626,4215,2729,2455, 867,1184, 362,3371,1392,7820,7821, # 6710 4003,4216,1770,1736,3223,2920,4499,4500,1928,2698,1459,1158,7822,3050,3372,2877, # 6726 1292,1929,2506,2842,3701,1985,1187,2071,2014,2607,4217,7823,2566,2507,2169,3702, # 6742 2483,3299,7824,3703,4501,7825,7826, 666,1003,3005,1022,3579,4218,7827,4502,1813, # 6758 2253, 574,3822,1603, 295,1535, 
705,3823,4219, 283, 858, 417,7828,7829,3224,4503, # 6774 4504,3051,1220,1889,1046,2276,2456,4004,1393,1599, 689,2567, 388,4220,7830,2484, # 6790 802,7831,2798,3824,2060,1405,2254,7832,4505,3825,2109,1052,1345,3225,1585,7833, # 6806 809,7834,7835,7836, 575,2730,3477, 956,1552,1469,1144,2323,7837,2324,1560,2457, # 6822 3580,3226,4005, 616,2207,3155,2180,2289,7838,1832,7839,3478,4506,7840,1319,3704, # 6838 3705,1211,3581,1023,3227,1293,2799,7841,7842,7843,3826, 607,2306,3827, 762,2878, # 6854 1439,4221,1360,7844,1485,3052,7845,4507,1038,4222,1450,2061,2638,4223,1379,4508, # 6870 2585,7846,7847,4224,1352,1414,2325,2921,1172,7848,7849,3828,3829,7850,1797,1451, # 6886 7851,7852,7853,7854,2922,4006,4007,2485,2346, 411,4008,4009,3582,3300,3101,4509, # 6902 1561,2664,1452,4010,1375,7855,7856, 47,2959, 316,7857,1406,1591,2923,3156,7858, # 6918 1025,2141,3102,3157, 354,2731, 884,2224,4225,2407, 508,3706, 726,3583, 996,2428, # 6934 3584, 729,7859, 392,2191,1453,4011,4510,3707,7860,7861,2458,3585,2608,1675,2800, # 6950 919,2347,2960,2348,1270,4511,4012, 73,7862,7863, 647,7864,3228,2843,2255,1550, # 6966 1346,3006,7865,1332, 883,3479,7866,7867,7868,7869,3301,2765,7870,1212, 831,1347, # 6982 4226,4512,2326,3830,1863,3053, 720,3831,4513,4514,3832,7871,4227,7872,7873,4515, # 6998 7874,7875,1798,4516,3708,2609,4517,3586,1645,2371,7876,7877,2924, 669,2208,2665, # 7014 2429,7878,2879,7879,7880,1028,3229,7881,4228,2408,7882,2256,1353,7883,7884,4518, # 7030 3158, 518,7885,4013,7886,4229,1960,7887,2142,4230,7888,7889,3007,2349,2350,3833, # 7046 516,1833,1454,4014,2699,4231,4519,2225,2610,1971,1129,3587,7890,2766,7891,2961, # 7062 1422, 577,1470,3008,1524,3373,7892,7893, 432,4232,3054,3480,7894,2586,1455,2508, # 7078 2226,1972,1175,7895,1020,2732,4015,3481,4520,7896,2733,7897,1743,1361,3055,3482, # 7094 2639,4016,4233,4521,2290, 895, 924,4234,2170, 331,2243,3056, 166,1627,3057,1098, # 7110 7898,1232,2880,2227,3374,4522, 657, 403,1196,2372, 542,3709,3375,1600,4235,3483, # 7126 
7899,4523,2767,3230, 576, 530,1362,7900,4524,2533,2666,3710,4017,7901, 842,3834, # 7142 7902,2801,2031,1014,4018, 213,2700,3376, 665, 621,4236,7903,3711,2925,2430,7904, # 7158 2431,3302,3588,3377,7905,4237,2534,4238,4525,3589,1682,4239,3484,1380,7906, 724, # 7174 2277, 600,1670,7907,1337,1233,4526,3103,2244,7908,1621,4527,7909, 651,4240,7910, # 7190 1612,4241,2611,7911,2844,7912,2734,2307,3058,7913, 716,2459,3059, 174,1255,2701, # 7206 4019,3590, 548,1320,1398, 728,4020,1574,7914,1890,1197,3060,4021,7915,3061,3062, # 7222 3712,3591,3713, 747,7916, 635,4242,4528,7917,7918,7919,4243,7920,7921,4529,7922, # 7238 3378,4530,2432, 451,7923,3714,2535,2072,4244,2735,4245,4022,7924,1764,4531,7925, # 7254 4246, 350,7926,2278,2390,2486,7927,4247,4023,2245,1434,4024, 488,4532, 458,4248, # 7270 4025,3715, 771,1330,2391,3835,2568,3159,2159,2409,1553,2667,3160,4249,7928,2487, # 7286 2881,2612,1720,2702,4250,3379,4533,7929,2536,4251,7930,3231,4252,2768,7931,2015, # 7302 2736,7932,1155,1017,3716,3836,7933,3303,2308, 201,1864,4253,1430,7934,4026,7935, # 7318 7936,7937,7938,7939,4254,1604,7940, 414,1865, 371,2587,4534,4535,3485,2016,3104, # 7334 4536,1708, 960,4255, 887, 389,2171,1536,1663,1721,7941,2228,4027,2351,2926,1580, # 7350 7942,7943,7944,1744,7945,2537,4537,4538,7946,4539,7947,2073,7948,7949,3592,3380, # 7366 2882,4256,7950,4257,2640,3381,2802, 673,2703,2460, 709,3486,4028,3593,4258,7951, # 7382 1148, 502, 634,7952,7953,1204,4540,3594,1575,4541,2613,3717,7954,3718,3105, 948, # 7398 3232, 121,1745,3837,1110,7955,4259,3063,2509,3009,4029,3719,1151,1771,3838,1488, # 7414 4030,1986,7956,2433,3487,7957,7958,2093,7959,4260,3839,1213,1407,2803, 531,2737, # 7430 2538,3233,1011,1537,7960,2769,4261,3106,1061,7961,3720,3721,1866,2883,7962,2017, # 7446 120,4262,4263,2062,3595,3234,2309,3840,2668,3382,1954,4542,7963,7964,3488,1047, # 7462 2704,1266,7965,1368,4543,2845, 649,3383,3841,2539,2738,1102,2846,2669,7966,7967, # 7478 
1999,7968,1111,3596,2962,7969,2488,3842,3597,2804,1854,3384,3722,7970,7971,3385, # 7494 2410,2884,3304,3235,3598,7972,2569,7973,3599,2805,4031,1460, 856,7974,3600,7975, # 7510 2885,2963,7976,2886,3843,7977,4264, 632,2510, 875,3844,1697,3845,2291,7978,7979, # 7526 4544,3010,1239, 580,4545,4265,7980, 914, 936,2074,1190,4032,1039,2123,7981,7982, # 7542 7983,3386,1473,7984,1354,4266,3846,7985,2172,3064,4033, 915,3305,4267,4268,3306, # 7558 1605,1834,7986,2739, 398,3601,4269,3847,4034, 328,1912,2847,4035,3848,1331,4270, # 7574 3011, 937,4271,7987,3602,4036,4037,3387,2160,4546,3388, 524, 742, 538,3065,1012, # 7590 7988,7989,3849,2461,7990, 658,1103, 225,3850,7991,7992,4547,7993,4548,7994,3236, # 7606 1243,7995,4038, 963,2246,4549,7996,2705,3603,3161,7997,7998,2588,2327,7999,4550, # 7622 8000,8001,8002,3489,3307, 957,3389,2540,2032,1930,2927,2462, 870,2018,3604,1746, # 7638 2770,2771,2434,2463,8003,3851,8004,3723,3107,3724,3490,3390,3725,8005,1179,3066, # 7654 8006,3162,2373,4272,3726,2541,3163,3108,2740,4039,8007,3391,1556,2542,2292, 977, # 7670 2887,2033,4040,1205,3392,8008,1765,3393,3164,2124,1271,1689, 714,4551,3491,8009, # 7686 2328,3852, 533,4273,3605,2181, 617,8010,2464,3308,3492,2310,8011,8012,3165,8013, # 7702 8014,3853,1987, 618, 427,2641,3493,3394,8015,8016,1244,1690,8017,2806,4274,4552, # 7718 8018,3494,8019,8020,2279,1576, 473,3606,4275,3395, 972,8021,3607,8022,3067,8023, # 7734 8024,4553,4554,8025,3727,4041,4042,8026, 153,4555, 356,8027,1891,2888,4276,2143, # 7750 408, 803,2352,8028,3854,8029,4277,1646,2570,2511,4556,4557,3855,8030,3856,4278, # 7766 8031,2411,3396, 752,8032,8033,1961,2964,8034, 746,3012,2465,8035,4279,3728, 698, # 7782 4558,1892,4280,3608,2543,4559,3609,3857,8036,3166,3397,8037,1823,1302,4043,2706, # 7798 3858,1973,4281,8038,4282,3167, 823,1303,1288,1236,2848,3495,4044,3398, 774,3859, # 7814 8039,1581,4560,1304,2849,3860,4561,8040,2435,2161,1083,3237,4283,4045,4284, 344, # 7830 1173, 288,2311, 
454,1683,8041,8042,1461,4562,4046,2589,8043,8044,4563, 985, 894, # 7846 8045,3399,3168,8046,1913,2928,3729,1988,8047,2110,1974,8048,4047,8049,2571,1194, # 7862 425,8050,4564,3169,1245,3730,4285,8051,8052,2850,8053, 636,4565,1855,3861, 760, # 7878 1799,8054,4286,2209,1508,4566,4048,1893,1684,2293,8055,8056,8057,4287,4288,2210, # 7894 479,8058,8059, 832,8060,4049,2489,8061,2965,2490,3731, 990,3109, 627,1814,2642, # 7910 4289,1582,4290,2125,2111,3496,4567,8062, 799,4291,3170,8063,4568,2112,1737,3013, # 7926 1018, 543, 754,4292,3309,1676,4569,4570,4050,8064,1489,8065,3497,8066,2614,2889, # 7942 4051,8067,8068,2966,8069,8070,8071,8072,3171,4571,4572,2182,1722,8073,3238,3239, # 7958 1842,3610,1715, 481, 365,1975,1856,8074,8075,1962,2491,4573,8076,2126,3611,3240, # 7974 433,1894,2063,2075,8077, 602,2741,8078,8079,8080,8081,8082,3014,1628,3400,8083, # 7990 3172,4574,4052,2890,4575,2512,8084,2544,2772,8085,8086,8087,3310,4576,2891,8088, # 8006 4577,8089,2851,4578,4579,1221,2967,4053,2513,8090,8091,8092,1867,1989,8093,8094, # 8022 8095,1895,8096,8097,4580,1896,4054, 318,8098,2094,4055,4293,8099,8100, 485,8101, # 8038 938,3862, 553,2670, 116,8102,3863,3612,8103,3498,2671,2773,3401,3311,2807,8104, # 8054 3613,2929,4056,1747,2930,2968,8105,8106, 207,8107,8108,2672,4581,2514,8109,3015, # 8070 890,3614,3864,8110,1877,3732,3402,8111,2183,2353,3403,1652,8112,8113,8114, 941, # 8086 2294, 208,3499,4057,2019, 330,4294,3865,2892,2492,3733,4295,8115,8116,8117,8118, # 8102 #Everything below is of no interest for detection purpose 2515,1613,4582,8119,3312,3866,2516,8120,4058,8121,1637,4059,2466,4583,3867,8122, # 8118 2493,3016,3734,8123,8124,2192,8125,8126,2162,8127,8128,8129,8130,8131,8132,8133, # 8134 8134,8135,8136,8137,8138,8139,8140,8141,8142,8143,8144,8145,8146,8147,8148,8149, # 8150 8150,8151,8152,8153,8154,8155,8156,8157,8158,8159,8160,8161,8162,8163,8164,8165, # 8166 8166,8167,8168,8169,8170,8171,8172,8173,8174,8175,8176,8177,8178,8179,8180,8181, # 8182 
8182,8183,8184,8185,8186,8187,8188,8189,8190,8191,8192,8193,8194,8195,8196,8197, # 8198 8198,8199,8200,8201,8202,8203,8204,8205,8206,8207,8208,8209,8210,8211,8212,8213, # 8214 8214,8215,8216,8217,8218,8219,8220,8221,8222,8223,8224,8225,8226,8227,8228,8229, # 8230 8230,8231,8232,8233,8234,8235,8236,8237,8238,8239,8240,8241,8242,8243,8244,8245, # 8246 8246,8247,8248,8249,8250,8251,8252,8253,8254,8255,8256,8257,8258,8259,8260,8261, # 8262 8262,8263,8264,8265,8266,8267,8268,8269,8270,8271,8272,8273,8274,8275,8276,8277, # 8278 8278,8279,8280,8281,8282,8283,8284,8285,8286,8287,8288,8289,8290,8291,8292,8293, # 8294 8294,8295,8296,8297,8298,8299,8300,8301,8302,8303,8304,8305,8306,8307,8308,8309, # 8310 8310,8311,8312,8313,8314,8315,8316,8317,8318,8319,8320,8321,8322,8323,8324,8325, # 8326 8326,8327,8328,8329,8330,8331,8332,8333,8334,8335,8336,8337,8338,8339,8340,8341, # 8342 8342,8343,8344,8345,8346,8347,8348,8349,8350,8351,8352,8353,8354,8355,8356,8357, # 8358 8358,8359,8360,8361,8362,8363,8364,8365,8366,8367,8368,8369,8370,8371,8372,8373, # 8374 8374,8375,8376,8377,8378,8379,8380,8381,8382,8383,8384,8385,8386,8387,8388,8389, # 8390 8390,8391,8392,8393,8394,8395,8396,8397,8398,8399,8400,8401,8402,8403,8404,8405, # 8406 8406,8407,8408,8409,8410,8411,8412,8413,8414,8415,8416,8417,8418,8419,8420,8421, # 8422 8422,8423,8424,8425,8426,8427,8428,8429,8430,8431,8432,8433,8434,8435,8436,8437, # 8438 8438,8439,8440,8441,8442,8443,8444,8445,8446,8447,8448,8449,8450,8451,8452,8453, # 8454 8454,8455,8456,8457,8458,8459,8460,8461,8462,8463,8464,8465,8466,8467,8468,8469, # 8470 8470,8471,8472,8473,8474,8475,8476,8477,8478,8479,8480,8481,8482,8483,8484,8485, # 8486 8486,8487,8488,8489,8490,8491,8492,8493,8494,8495,8496,8497,8498,8499,8500,8501, # 8502 8502,8503,8504,8505,8506,8507,8508,8509,8510,8511,8512,8513,8514,8515,8516,8517, # 8518 8518,8519,8520,8521,8522,8523,8524,8525,8526,8527,8528,8529,8530,8531,8532,8533, # 8534 
8534,8535,8536,8537,8538,8539,8540,8541,8542,8543,8544,8545,8546,8547,8548,8549, # 8550 8550,8551,8552,8553,8554,8555,8556,8557,8558,8559,8560,8561,8562,8563,8564,8565, # 8566 8566,8567,8568,8569,8570,8571,8572,8573,8574,8575,8576,8577,8578,8579,8580,8581, # 8582 8582,8583,8584,8585,8586,8587,8588,8589,8590,8591,8592,8593,8594,8595,8596,8597, # 8598 8598,8599,8600,8601,8602,8603,8604,8605,8606,8607,8608,8609,8610,8611,8612,8613, # 8614 8614,8615,8616,8617,8618,8619,8620,8621,8622,8623,8624,8625,8626,8627,8628,8629, # 8630 8630,8631,8632,8633,8634,8635,8636,8637,8638,8639,8640,8641,8642,8643,8644,8645, # 8646 8646,8647,8648,8649,8650,8651,8652,8653,8654,8655,8656,8657,8658,8659,8660,8661, # 8662 8662,8663,8664,8665,8666,8667,8668,8669,8670,8671,8672,8673,8674,8675,8676,8677, # 8678 8678,8679,8680,8681,8682,8683,8684,8685,8686,8687,8688,8689,8690,8691,8692,8693, # 8694 8694,8695,8696,8697,8698,8699,8700,8701,8702,8703,8704,8705,8706,8707,8708,8709, # 8710 8710,8711,8712,8713,8714,8715,8716,8717,8718,8719,8720,8721,8722,8723,8724,8725, # 8726 8726,8727,8728,8729,8730,8731,8732,8733,8734,8735,8736,8737,8738,8739,8740,8741) # 8742 # flake8: noqa
gpl-3.0
alex-quiterio/pychess
lib/pychess/widgets/gamewidget.py
20
40406
""" This module handles the tabbed layout in PyChess """ from __future__ import absolute_import from gi.repository import GdkPixbuf from pychess.compat import StringIO from .BoardControl import BoardControl from .ChessClock import ChessClock from .MenuItemsDict import MenuItemsDict from pychess.System import glock, conf, prefix from pychess.System.Log import log from pychess.System.glock import glock_connect from pychess.System.prefix import addUserConfigPrefix from pychess.System.uistuff import makeYellow from pychess.Utils.GameModel import GameModel from pychess.Utils.IconLoader import load_icon from pychess.Utils.const import * from pychess.Utils.lutils import lmove from pychess.Utils.logic import playerHasMatingMaterial, isClaimableDraw from pychess.ic import get_infobarmessage_content, get_infobarmessage_content2 from pychess.ic.FICSObjects import get_player_tooltip_text from pychess.ic.ICGameModel import ICGameModel from pychess.widgets.InfoBar import InfoBar, InfoBarMessage, InfoBarMessageButton from .pydock.PyDockTop import PyDockTop from .pydock.__init__ import CENTER, EAST, SOUTH from gi.repository import Gtk from gi.repository import GObject import imp import os import traceback ################################################################################ # Initialize modul constants, and a few worker functions # ################################################################################ def createAlignment (top, right, bottom, left): align = Gtk.Alignment.new(.5, .5, 1, 1) align.set_property("top-padding", top) align.set_property("right-padding", right) align.set_property("bottom-padding", bottom) align.set_property("left-padding", left) return align def cleanNotebook (): notebook = Gtk.Notebook() notebook.set_show_tabs(False) notebook.set_show_border(False) return notebook def createImage (pixbuf): image = Gtk.Image() image.set_from_pixbuf(pixbuf) return image light_on = load_icon(16, "stock_3d-light-on", "weather-clear") light_off = 
load_icon(16, "stock_3d-light-off", "weather-clear-night") gtk_close = load_icon(16, "gtk-close") media_previous = load_icon(16, "gtk-media-previous-ltr") media_rewind = load_icon(16, "gtk-media-rewind-ltr") media_forward = load_icon(16, "gtk-media-forward-ltr") media_next = load_icon(16, "gtk-media-next-ltr") path = prefix.addDataPrefix("sidepanel") postfix = "Panel.py" files = [f[:-3] for f in os.listdir(path) if f.endswith(postfix)] sidePanels = [imp.load_module(f, *imp.find_module(f, [path])) for f in files] dockLocation = addUserConfigPrefix("pydock.xml") ################################################################################ # Initialize module variables # ################################################################################ widgets = None def setWidgets (w): global widgets widgets = w def getWidgets (): return widgets key2gmwidg = {} notebooks = {"board": cleanNotebook(), "statusbar": cleanNotebook(), "messageArea": cleanNotebook()} for panel in sidePanels: notebooks[panel.__name__] = cleanNotebook() docks = {"board": (Gtk.Label(label="Board"), notebooks["board"])} ################################################################################ # The holder class for tab releated widgets # ################################################################################ class GameWidget (GObject.GObject): __gsignals__ = { 'close_clicked': (GObject.SignalFlags.RUN_FIRST, None, ()), 'infront': (GObject.SignalFlags.RUN_FIRST, None, ()), 'title_changed': (GObject.SignalFlags.RUN_FIRST, None, (str,)), 'closed': (GObject.SignalFlags.RUN_FIRST, None, ()), } def __init__ (self, gamemodel): GObject.GObject.__init__(self) self.gamemodel = gamemodel self.cids = {} tabcontent, white_label, black_label, game_info_label = self.initTabcontents() boardvbox, board, infobar, clock = self.initBoardAndClock(gamemodel) statusbar, stat_hbox = self.initStatusbar(board) self.tabcontent = tabcontent self.player_name_labels = (white_label, black_label) 
self.game_info_label = game_info_label self.board = board self.statusbar = statusbar self.infobar = infobar infobar.connect("hide", self.infobar_hidden) self.game_ended_message = None self.clock = clock self.notebookKey = Gtk.Label(); self.notebookKey.set_size_request(0,0) self.boardvbox = boardvbox self.stat_hbox = stat_hbox self.menuitems = MenuItemsDict(self) gamemodel.connect("game_started", self.game_started) gamemodel.connect("game_ended", self.game_ended) gamemodel.connect("game_changed", self.game_changed) gamemodel.connect("game_paused", self.game_paused) gamemodel.connect("game_resumed", self.game_resumed) gamemodel.connect("moves_undone", self.moves_undone) gamemodel.connect("game_unended", self.game_unended) gamemodel.connect("game_saved", self.game_saved) gamemodel.connect("players_changed", self.players_changed) gamemodel.connect("analyzer_added", self.analyzer_added) gamemodel.connect("analyzer_removed", self.analyzer_removed) gamemodel.connect("analyzer_resumed", self.analyzer_resumed) gamemodel.connect("analyzer_paused", self.analyzer_paused) self.players_changed(gamemodel) if self.gamemodel.display_text: self.game_info_label.set_text(" " + self.gamemodel.display_text) if gamemodel.timed: gamemodel.timemodel.connect("zero_reached", self.zero_reached) if isinstance(gamemodel, ICGameModel): gamemodel.connection.bm.connect("player_lagged", self.player_lagged) gamemodel.connection.bm.connect("opp_not_out_of_time", self.opp_not_out_of_time) board.view.connect("shown_changed", self.shown_changed) # Some stuff in the sidepanels .load functions might change UI, so we # need glock # TODO: Really? 
glock.acquire() try: self.panels = [panel.Sidepanel().load(self) for panel in sidePanels] finally: glock.release() def _del (self): self.board._del() for obj in self.cids: if obj.handler_is_connected(self.cids[obj]): log.debug("GameWidget._del: disconnecting %s" % repr(obj)) obj.disconnect(self.cids[obj]) self.cids.clear() def _update_menu_abort (self): if self.gamemodel.isEngine2EngineGame(): self.menuitems["abort"].sensitive = True self.menuitems["abort"].tooltip = "" elif self.gamemodel.isObservationGame(): self.menuitems["abort"].sensitive = False elif isinstance(self.gamemodel, ICGameModel) \ and self.gamemodel.status in UNFINISHED_STATES: if self.gamemodel.ply < 2: self.menuitems["abort"].label = _("Abort") self.menuitems["abort"].tooltip = \ _("This game can be automatically aborted without rating loss because there has not yet been two moves made") else: self.menuitems["abort"].label = _("Offer Abort") self.menuitems["abort"].tooltip = \ _("Your opponent must agree to abort the game because there has been two or more moves made") self.menuitems["abort"].sensitive = True else: self.menuitems["abort"].sensitive = False self.menuitems["abort"].tooltip = "" def _update_menu_adjourn (self): self.menuitems["adjourn"].sensitive = \ isinstance(self.gamemodel, ICGameModel) and \ self.gamemodel.status in UNFINISHED_STATES and \ not self.gamemodel.isObservationGame() and \ not self.gamemodel.hasGuestPlayers() if isinstance(self.gamemodel, ICGameModel) and \ self.gamemodel.status in UNFINISHED_STATES and \ not self.gamemodel.isObservationGame() and \ self.gamemodel.hasGuestPlayers(): self.menuitems["adjourn"].tooltip = \ _("This game can not be adjourned because one or both players are guests") else: self.menuitems["adjourn"].tooltip = "" def _update_menu_draw (self): self.menuitems["draw"].sensitive = self.gamemodel.status in UNFINISHED_STATES \ and not self.gamemodel.isObservationGame() def can_win (color): if self.gamemodel.timed: return 
playerHasMatingMaterial(self.gamemodel.boards[-1], color) and \ self.gamemodel.timemodel.getPlayerTime(color) > 0 else: return playerHasMatingMaterial(self.gamemodel.boards[-1], color) if isClaimableDraw(self.gamemodel.boards[-1]) or not \ (can_win(self.gamemodel.players[0].color) or \ can_win(self.gamemodel.players[1].color)): self.menuitems["draw"].label = _("Claim Draw") def _update_menu_resign (self): self.menuitems["resign"].sensitive = self.gamemodel.status in UNFINISHED_STATES \ and not self.gamemodel.isObservationGame() def _update_menu_pause_and_resume (self): def game_is_pausable (): if self.gamemodel.isEngine2EngineGame() or \ (self.gamemodel.hasLocalPlayer() and \ (self.gamemodel.isLocalGame() or \ (isinstance(self.gamemodel, ICGameModel) and \ self.gamemodel.ply > 1))): return True else: return False self.menuitems["pause1"].sensitive = \ self.gamemodel.status == RUNNING and game_is_pausable() self.menuitems["resume1"].sensitive = \ self.gamemodel.status == PAUSED and game_is_pausable() # TODO: if IC game is over and game ended in adjournment # and opponent is available, enable Resume def _update_menu_undo (self): if self.gamemodel.isObservationGame(): self.menuitems["undo1"].sensitive = False elif isinstance(self.gamemodel, ICGameModel): if self.gamemodel.status in UNFINISHED_STATES and self.gamemodel.ply > 0: self.menuitems["undo1"].sensitive = True else: self.menuitems["undo1"].sensitive = False elif self.gamemodel.ply > 0 \ and self.gamemodel.status in UNDOABLE_STATES + (RUNNING,): self.menuitems["undo1"].sensitive = True else: self.menuitems["undo1"].sensitive = False def _update_menu_ask_to_move (self): if self.gamemodel.isObservationGame(): self.menuitems["ask_to_move"].sensitive = False elif isinstance(self.gamemodel, ICGameModel): self.menuitems["ask_to_move"].sensitive = False elif self.gamemodel.waitingplayer.__type__ == LOCAL \ and self.gamemodel.status in UNFINISHED_STATES \ and self.gamemodel.status != PAUSED: 
self.menuitems["ask_to_move"].sensitive = True else: self.menuitems["ask_to_move"].sensitive = False def _showHolding (self, holding): figurines = ["", ""] for color in (BLACK, WHITE): for piece in holding[color].keys(): count = holding[color][piece] figurines[color] += " " if count==0 else FAN_PIECES[color][piece]*count self.status(figurines[BLACK] + " " + figurines[WHITE]) def shown_changed (self, boardview, shown): # Help crazyhouse testing #if self.gamemodel.boards[-1].variant == CRAZYHOUSECHESS: # holding = self.gamemodel.getBoardAtPly(shown, boardview.variation).board.holding # self._showHolding(holding) if self.gamemodel.timemodel.hasTimes and \ (self.gamemodel.endstatus or self.gamemodel.status in (DRAW, WHITEWON, BLACKWON)) and \ boardview.shownIsMainLine(): wmovecount, color = divmod(shown + 1, 2) bmovecount = wmovecount -1 if color == WHITE else wmovecount if self.gamemodel.timemodel.hasBWTimes(bmovecount, wmovecount): self.clock.update(wmovecount, bmovecount) def game_started (self, gamemodel): if self.gamemodel.isLocalGame(): self.menuitems["abort"].label = _("Abort") self._update_menu_abort() self._update_menu_adjourn() self._update_menu_draw() if self.gamemodel.isLocalGame(): self.menuitems["pause1"].label = _("Pause") self.menuitems["resume1"].label = _("Resume") else: self.menuitems["pause1"].label = _("Offer Pause") self.menuitems["resume1"].label = _("Offer Resume") self._update_menu_pause_and_resume() self._update_menu_resign() if self.gamemodel.isLocalGame(): self.menuitems["undo1"].label = _("Undo") else: self.menuitems["undo1"].label = _("Offer Undo") self._update_menu_undo() self._update_menu_ask_to_move() if not gamemodel.timed and not gamemodel.timemodel.hasTimes: self.boardvbox.remove(self.clock.get_parent()) def game_ended (self, gamemodel, reason): for item in self.menuitems: if item not in self.menuitems.VIEW_MENU_ITEMS: self.menuitems[item].sensitive = False self._update_menu_undo() self._set_arrow(HINT, None) self._set_arrow(SPY, 
None) return False def game_changed (self, gamemodel): '''This runs when the game changes. It updates everything.''' self._update_menu_abort() self._update_menu_ask_to_move() self._update_menu_draw() self._update_menu_pause_and_resume() self._update_menu_undo() for analyzer_type in (HINT, SPY): # only clear arrows if analyzer is examining the last position if analyzer_type in gamemodel.spectators and \ gamemodel.spectators[analyzer_type].board == gamemodel.boards[-1]: self._set_arrow(analyzer_type, None) self.name_changed(gamemodel.players[0]) #We may need to add * to name return False def game_saved(self, gamemodel, uri): '''Run when the game is saved. Will remove * from title.''' self.name_changed(gamemodel.players[0]) #We may need to remove * in name return False def game_paused (self, gamemodel): self._update_menu_pause_and_resume() self._update_menu_undo() self._update_menu_ask_to_move() return False def game_resumed (self, gamemodel): self._update_menu_pause_and_resume() self._update_menu_undo() self._update_menu_ask_to_move() return False def moves_undone (self, gamemodel, moves): self.game_changed(gamemodel) return False def game_unended (self, gamemodel): self._update_menu_abort() self._update_menu_adjourn() self._update_menu_draw() self._update_menu_pause_and_resume() self._update_menu_resign() self._update_menu_undo() self._update_menu_ask_to_move() return False def _set_arrow (self, analyzer_type, coordinates): if self.gamemodel.isPlayingICSGame(): return if analyzer_type == HINT: self.board.view._set_greenarrow(coordinates) else: self.board.view._set_redarrow(coordinates) def _on_analyze (self, analyzer, analysis, analyzer_type): if not self.menuitems[analyzer_type + "_mode"].active: return if len(analysis) >= 1 and analysis[0] is not None: moves = analysis[0][0] if moves and (self.gamemodel.curplayer.__type__ == LOCAL or \ [player.__type__ for player in self.gamemodel.players] == [REMOTE, REMOTE] or \ self.gamemodel.status not in UNFINISHED_STATES): 
if moves[0].flag == DROP: board = analyzer.board piece = lmove.FCORD(moves[0].move) color = board.color if analyzer_type == HINT else 1-board.color cord0 = board.getHoldingCord(color, piece) self._set_arrow(analyzer_type, (cord0, moves[0].cord1)) else: self._set_arrow(analyzer_type, moves[0].cords) else: self._set_arrow(analyzer_type, None) return False def analyzer_added (self, gamemodel, analyzer, analyzer_type): self.cids[analyzer] = \ analyzer.connect("analyze", self._on_analyze, analyzer_type) #self.menuitems[analyzer_type + "_mode"].active = True self.menuitems[analyzer_type + "_mode"].sensitive = True return False def analyzer_removed (self, gamemodel, analyzer, analyzer_type): self._set_arrow(analyzer_type, None) #self.menuitems[analyzer_type + "_mode"].active = False self.menuitems[analyzer_type + "_mode"].sensitive = False try: if analyzer.handler_is_connected(self.cids[analyzer]): analyzer.disconnect(self.cids[analyzer]) del self.cids[analyzer] except KeyError: pass return False def analyzer_resumed (self, gamemodel, analyzer, analyzer_type): self.menuitems[analyzer_type + "_mode"].active = True self._on_analyze(analyzer, analyzer.getAnalysis(), analyzer_type) return False def analyzer_paused (self, gamemodel, analyzer, analyzer_type): self.menuitems[analyzer_type + "_mode"].active = False self._set_arrow(analyzer_type, None) return False def player_display_text (self, color=WHITE): if isinstance(self.gamemodel, ICGameModel): return self.gamemodel.ficsplayers[color].long_name( game_type=self.gamemodel.ficsgame.game_type) else: return repr(self.gamemodel.players[color]) @property def display_text (self): '''This will give you the name of the game.''' vs = " " + _("vs") + " " t = vs.join((self.player_display_text(color=WHITE), self.player_display_text(color=BLACK))) if self.gamemodel.display_text: t += " " + self.gamemodel.display_text return t def players_changed (self, gamemodel): log.debug("GameWidget.players_changed: starting %s" % repr(gamemodel)) for 
player in gamemodel.players: self.name_changed(player) # Notice that this may connect the same player many times. In # normal use that shouldn't be a problem. glock_connect(player, "name_changed", self.name_changed) log.debug("GameWidget.players_changed: returning") def name_changed (self, player): log.debug("GameWidget.name_changed: starting %s" % repr(player)) color = self.gamemodel.color(player) glock.acquire() try: self.player_name_labels[color].set_text( self.player_display_text(color=color)) if isinstance(self.gamemodel, ICGameModel) and \ player.__type__ == REMOTE: self.player_name_labels[color].set_tooltip_text( get_player_tooltip_text(self.gamemodel.ficsplayers[color], show_status=False)) finally: glock.release() self.emit('title_changed', self.display_text) log.debug("GameWidget.name_changed: returning") def zero_reached (self, timemodel, color): if self.gamemodel.status not in UNFINISHED_STATES: return if self.gamemodel.players[0].__type__ == LOCAL \ and self.gamemodel.players[1].__type__ == LOCAL: self.menuitems["call_flag"].sensitive = True return for player in self.gamemodel.players: opplayercolor = BLACK if player == self.gamemodel.players[WHITE] else WHITE if player.__type__ == LOCAL and opplayercolor == color: log.debug("gamewidget.zero_reached: LOCAL player=%s, color=%s" % \ (repr(player), str(color))) self.menuitems["call_flag"].sensitive = True break def player_lagged (self, bm, player): if player in self.gamemodel.ficsplayers: content = get_infobarmessage_content(player, _(" has lagged for 30 seconds"), self.gamemodel.ficsgame.game_type) def response_cb (infobar, response, message): message.dismiss() return False message = InfoBarMessage(Gtk.MessageType.INFO, content, response_cb) message.add_button(InfoBarMessageButton(Gtk.STOCK_CLOSE, Gtk.ResponseType.CANCEL)) with glock.glock: self.showMessage(message) return False def opp_not_out_of_time (self, bm): if self.gamemodel.remote_player.time <= 0: content = get_infobarmessage_content2( 
self.gamemodel.remote_ficsplayer, _(" is lagging heavily but hasn't disconnected"), _("Continue to wait for opponent, or try to adjourn the game?"), gametype=self.gamemodel.ficsgame.game_type) def response_cb (infobar, response, message): if response == 2: self.gamemodel.connection.client.run_command("adjourn") message.dismiss() return False message = InfoBarMessage(Gtk.MessageType.QUESTION, content, response_cb) message.add_button(InfoBarMessageButton(_("Wait"), Gtk.ResponseType.CANCEL)) message.add_button(InfoBarMessageButton(_("Adjourn"), 2)) with glock.glock: self.showMessage(message) return False def initTabcontents(self): tabcontent = createAlignment(0,0,0,0) hbox = Gtk.HBox() hbox.set_spacing(4) hbox.pack_start(createImage(light_off), False, True, 0) close_button = Gtk.Button() close_button.set_property("can-focus", False) close_button.add(createImage(gtk_close)) close_button.set_relief(Gtk.ReliefStyle.NONE) close_button.set_size_request(20, 18) close_button.connect("clicked", lambda w: self.emit("close_clicked")) hbox.pack_end(close_button, False, True, 0) text_hbox = Gtk.HBox() white_label = Gtk.Label(label="") text_hbox.pack_start(white_label, False, True, 0) text_hbox.pack_start(Gtk.Label(" %s " % _("vs")), False, True, 0) black_label = Gtk.Label(label="") text_hbox.pack_start(black_label, False, True, 0) gameinfo_label = Gtk.Label(label="") text_hbox.pack_start(gameinfo_label, False, True, 0) # label.set_alignment(0,.7) hbox.pack_end(text_hbox, True, True, 0) tabcontent.add(hbox) tabcontent.show_all() # Gtk doesn't show tab labels when the rest is return tabcontent, white_label, black_label, gameinfo_label def initBoardAndClock(self, gamemodel): boardvbox = Gtk.VBox() boardvbox.set_spacing(2) infobar = InfoBar() ccalign = createAlignment(0, 0, 0, 0) cclock = ChessClock() cclock.setModel(gamemodel.timemodel) ccalign.add(cclock) ccalign.set_size_request(-1, 32) boardvbox.pack_start(ccalign, False, True, 0) actionMenuDic = {} for item in ACTION_MENU_ITEMS: 
actionMenuDic[item] = widgets[item] board = BoardControl(gamemodel, actionMenuDic) boardvbox.pack_start(board, True, True, 0) return boardvbox, board, infobar, cclock def initStatusbar(self, board): def tip (widget, x, y, keyboard_mode, tooltip, text): l = Gtk.Label(label=text) tooltip.set_custom(l) l.show() return True stat_hbox = Gtk.HBox() page_vbox = Gtk.VBox() page_vbox.set_spacing(1) sep = Gtk.HSeparator() sep.set_size_request(-1, 2) page_hbox = Gtk.HBox() startbut = Gtk.Button() startbut.add(createImage(media_previous)) startbut.set_relief(Gtk.ReliefStyle.NONE) startbut.props.has_tooltip = True startbut.connect("query-tooltip", tip, _("Jump to initial position")) backbut = Gtk.Button() backbut.add(createImage(media_rewind)) backbut.set_relief(Gtk.ReliefStyle.NONE) backbut.props.has_tooltip = True backbut.connect("query-tooltip", tip, _("Step back one move")) forwbut = Gtk.Button() forwbut.add(createImage(media_forward)) forwbut.set_relief(Gtk.ReliefStyle.NONE) forwbut.props.has_tooltip = True forwbut.connect("query-tooltip", tip, _("Step forward one move")) endbut = Gtk.Button() endbut.add(createImage(media_next)) endbut.set_relief(Gtk.ReliefStyle.NONE) endbut.props.has_tooltip = True endbut.connect("query-tooltip", tip, _("Jump to latest position")) startbut.connect("clicked", lambda w: board.view.showFirst()) backbut.connect("clicked", lambda w: board.view.showPrev()) forwbut.connect("clicked", lambda w: board.view.showNext()) endbut.connect("clicked", lambda w: board.view.showLast()) page_hbox.pack_start(startbut, True, True, 0) page_hbox.pack_start(backbut, True, True, 0) page_hbox.pack_start(forwbut, True, True, 0) page_hbox.pack_start(endbut, True, True, 0) page_vbox.pack_start(sep, True, True, 0) page_vbox.pack_start(page_hbox, True, True, 0) statusbar = Gtk.Statusbar() stat_hbox.pack_start(page_vbox, False, True, 0) stat_hbox.pack_start(statusbar, True, True, 0) return statusbar, stat_hbox def setLocked (self, locked): """ Makes the board insensitive 
and turns off the tab ready indicator """ log.debug("GameWidget.setLocked: %s locked=%s" % (self.gamemodel.players, str(locked))) self.board.setLocked(locked) if not self.tabcontent.get_children(): return if len(self.tabcontent.get_child().get_children()) < 2: log.warning("GameWidget.setLocked: Not removing last tabcontent child") return glock.acquire() try: child = self.tabcontent.get_child() if child: child.remove(child.get_children()[0]) if not locked: #child.pack_start(createImage(light_on, True, True, 0), expand=False) child.pack_start(createImage(light_on), True, True, 0) else: #child.pack_start(createImage(light_off, True, True, 0), expand=False) child.pack_start(createImage(light_off), True, True, 0) self.tabcontent.show_all() finally: glock.release() log.debug("GameWidget.setLocked: %s: returning" % self.gamemodel.players) def status (self, message): glock.acquire() try: self.statusbar.pop(0) if message: #print "Setting statusbar to \"%s\"" % str(message) self.statusbar.push(0, message) finally: glock.release() def bringToFront (self): getheadbook().set_current_page(self.getPageNumber()) def isInFront(self): if not getheadbook(): return False return getheadbook().get_current_page() == self.getPageNumber() def getPageNumber (self): return getheadbook().page_num(self.notebookKey) def infobar_hidden (self, infobar): if self == cur_gmwidg(): notebooks["messageArea"].hide() def showMessage (self, message): self.infobar.push_message(message) if self == cur_gmwidg(): notebooks["messageArea"].show() def replaceMessages (self, message): """ Replace all messages with message """ self.infobar.clear_messages() self.showMessage(message) def clearMessages (self): self.infobar.clear_messages() if self == cur_gmwidg(): notebooks["messageArea"].hide() ################################################################################ # Main handling of gamewidgets # ################################################################################ def splitit(widget): if not 
hasattr(widget, 'get_children'): return for child in widget.get_children(): splitit(child) widget.remove(child) def delGameWidget (gmwidg): """ Remove the widget from the GUI after the game has been terminated """ log.debug("gamewidget.delGameWidget: starting %s" % repr(gmwidg)) gmwidg.emit("closed") called_from_preferences = False #wl = Gtk.window_list_toplevels() wl = Gtk.Window.list_toplevels() for window in wl: if window.is_active() and window == widgets["preferences"]: called_from_preferences = True break del key2gmwidg[gmwidg.notebookKey] pageNum = gmwidg.getPageNumber() headbook = getheadbook() headbook.remove_page(pageNum) for notebook in notebooks.values(): notebook.remove_page(pageNum) if headbook.get_n_pages() == 1 and conf.get("hideTabs", False): show_tabs(False) if headbook.get_n_pages() == 0: mainvbox = widgets["mainvbox"] centerVBox = mainvbox.get_children()[2] for child in centerVBox.get_children(): centerVBox.remove(child) mainvbox.remove(centerVBox) mainvbox.remove(mainvbox.get_children()[1]) mainvbox.pack_end(background, True, True, 0) background.show() if not called_from_preferences: # If the last (but not the designGW) gmwidg was closed # and we are FICS-ing, present the FICS lounge from pychess.ic.ICLogon import dialog try: dialog.lounge.present() except AttributeError: pass gmwidg._del() def _ensureReadForGameWidgets (): mainvbox = widgets["mainvbox"] if len(mainvbox.get_children()) == 3: return global background background = widgets["mainvbox"].get_children()[1] mainvbox.remove(background) # Initing headbook align = createAlignment (4, 4, 0, 4) align.set_property("yscale", 0) headbook = Gtk.Notebook() headbook.set_scrollable(True) align.add(headbook) mainvbox.pack_start(align, False, True, 0) show_tabs(not conf.get("hideTabs", False)) # Initing center centerVBox = Gtk.VBox() # The message area centerVBox.pack_start(notebooks["messageArea"], False, True, 0) def ma_switch_page (notebook, gpointer, page_num): notebook.props.visible = 
notebook.get_nth_page(page_num).get_child().props.visible notebooks["messageArea"].connect("switch-page", ma_switch_page) # The dock global dock, dockAlign dock = PyDockTop("main") dockAlign = createAlignment(4,4,0,4) dockAlign.add(dock) centerVBox.pack_start(dockAlign, True, True, 0) dockAlign.show() dock.show() for panel in sidePanels: hbox = Gtk.HBox() pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(panel.__icon__, 16, 16) icon = Gtk.Image.new_from_pixbuf(pixbuf) label = Gtk.Label(label=panel.__title__) label.set_size_request(0, 0) label.set_alignment(0, 1) hbox.pack_start(icon, False, False, 0) hbox.pack_start(label, True, True, 0) hbox.set_spacing(2) hbox.show_all() def cb (widget, x, y, keyboard_mode, tooltip, title, desc, filename): table = Gtk.Table(2,2) table.set_row_spacings(2) table.set_col_spacings(6) table.set_border_width(4) pixbuf = GdkPixbuf.Pixbuf.new_from_file_at_size(filename, 56, 56) image = Gtk.Image.new_from_pixbuf(pixbuf) image.set_alignment(0, 0) table.attach(image, 0,1,0,2) titleLabel = Gtk.Label() titleLabel.set_markup("<b>%s</b>" % title) titleLabel.set_alignment(0, 0) table.attach(titleLabel, 1,2,0,1) descLabel = Gtk.Label(label=desc) descLabel.props.wrap = True table.attach(descLabel, 1,2,1,2) tooltip.set_custom(table) table.show_all() return True hbox.props.has_tooltip = True hbox.connect("query-tooltip", cb, panel.__title__, panel.__desc__, panel.__icon__) docks[panel.__name__] = (hbox, notebooks[panel.__name__]) if os.path.isfile(dockLocation): try: dock.loadFromXML(dockLocation, docks) except Exception as e: stringio = StringIO() traceback.print_exc(file=stringio) error = stringio.getvalue() log.error("Dock loading error: %s\n%s" % (e, error)) md = Gtk.MessageDialog(widgets["window1"], type=Gtk.MessageType.ERROR, buttons=Gtk.ButtonsType.CLOSE) md.set_markup(_("<b><big>PyChess was unable to load your panel settings</big></b>")) md.format_secondary_text(_("Your panel settings have been reset. 
If this problem repeats, you should report it to the developers")) md.run() md.hide() os.remove(dockLocation) for title, panel in docks.values(): title.unparent() panel.unparent() if not os.path.isfile(dockLocation): leaf = dock.dock(docks["board"][1], CENTER, Gtk.Label(label=docks["board"][0]), "board") docks["board"][1].show_all() leaf.setDockable(False) # S epanel = leaf.dock(docks["bookPanel"][1], SOUTH, docks["bookPanel"][0], "bookPanel") epanel.default_item_height = 45 epanel = epanel.dock(docks["engineOutputPanel"][1], CENTER, docks["engineOutputPanel"][0], "engineOutputPanel") # NE leaf = leaf.dock(docks["annotationPanel"][1], EAST, docks["annotationPanel"][0], "annotationPanel") leaf = leaf.dock(docks["historyPanel"][1], CENTER, docks["historyPanel"][0], "historyPanel") leaf = leaf.dock(docks["scorePanel"][1], CENTER, docks["scorePanel"][0], "scorePanel") # SE leaf = leaf.dock(docks["chatPanel"][1], SOUTH, docks["chatPanel"][0], "chatPanel") leaf = leaf.dock(docks["commentPanel"][1], CENTER, docks["commentPanel"][0], "commentPanel") def unrealize (dock): # unhide the panel before saving so its configuration is saved correctly notebooks["board"].get_parent().get_parent().zoomDown() dock.saveToXML(dockLocation) dock._del() dock.connect("unrealize", unrealize) # The status bar notebooks["statusbar"].set_border_width(4) centerVBox.pack_start(notebooks["statusbar"], False, True, 0) mainvbox.pack_start(centerVBox, True, True, 0) centerVBox.show_all() mainvbox.show() # Connecting headbook to other notebooks def hb_switch_page (notebook, gpointer, page_num): for notebook in notebooks.values(): notebook.set_current_page(page_num) headbook.connect("switch-page", hb_switch_page) if hasattr(headbook, "set_tab_reorderable"): def page_reordered (widget, child, new_num, headbook): old_num = notebooks["board"].page_num(key2gmwidg[child].boardvbox) if old_num == -1: log.error('Games and labels are out of sync!') else: for notebook in notebooks.values(): 
notebook.reorder_child(notebook.get_nth_page(old_num), new_num) headbook.connect("page-reordered", page_reordered, headbook) def attachGameWidget (gmwidg): log.debug("attachGameWidget: %s" % gmwidg) _ensureReadForGameWidgets() headbook = getheadbook() key2gmwidg[gmwidg.notebookKey] = gmwidg headbook.append_page(gmwidg.notebookKey, gmwidg.tabcontent) gmwidg.notebookKey.show_all() #headbook.set_tab_label_packing(gmwidg.notebookKey, True, True, Gtk.PACK_START) if hasattr(headbook, "set_tab_reorderable"): headbook.set_tab_reorderable (gmwidg.notebookKey, True) def callback (notebook, gpointer, page_num, gmwidg): if notebook.get_nth_page(page_num) == gmwidg.notebookKey: gmwidg.emit("infront") headbook.connect_after("switch-page", callback, gmwidg) gmwidg.emit("infront") align = createAlignment(4,4,0,4) align.show() align.add(gmwidg.infobar) notebooks["messageArea"].append_page(align, None) notebooks["board"].append_page(gmwidg.boardvbox, None) gmwidg.boardvbox.show_all() for panel, instance in zip(sidePanels, gmwidg.panels): notebooks[panel.__name__].append_page(instance, None) instance.show_all() notebooks["statusbar"].append_page(gmwidg.stat_hbox, None) gmwidg.stat_hbox.show_all() # We should always show tabs if more than one exists if headbook.get_n_pages() == 2: show_tabs(True) headbook.set_current_page(-1) if headbook.get_n_pages() == 1 and not widgets["show_sidepanels"].get_active(): zoomToBoard(True) def cur_gmwidg (): headbook = getheadbook() if headbook == None: return None notebookKey = headbook.get_nth_page(headbook.get_current_page()) return key2gmwidg[notebookKey] def getheadbook (): if len(widgets["mainvbox"].get_children()) == 2: # If the headbook hasn't been added yet return None return widgets["mainvbox"].get_children()[1].get_child() def zoomToBoard (viewZoomed): if not notebooks["board"].get_parent(): return if viewZoomed: notebooks["board"].get_parent().get_parent().zoomUp() else: notebooks["board"].get_parent().get_parent().zoomDown() def show_tabs 
(show): if show: widgets["mainvbox"].get_children()[1].show_all() else: widgets["mainvbox"].get_children()[1].hide() def tabsCallback (none): head = getheadbook() if not head: return if head.get_n_pages() == 1: show_tabs(not conf.get("hideTabs", False)) conf.notify_add("hideTabs", tabsCallback) ################################################################################ # Handling of the special sidepanels-design-gamewidget used in preferences # ################################################################################ designGW = None def showDesignGW(): global designGW if not designGW: designGW = GameWidget(GameModel()) if isDesignGWShown(): return getWidgets()["show_sidepanels"].set_active(True) getWidgets()["show_sidepanels"].set_sensitive(False) attachGameWidget(designGW) def hideDesignGW(): if isDesignGWShown(): delGameWidget(designGW) getWidgets()["show_sidepanels"].set_sensitive(True) def isDesignGWShown(): return designGW in key2gmwidg.values()
gpl-3.0
tedder/ansible
test/units/modules/network/dellos10/test_dellos10_config.py
68
6392
# # (c) 2016 Red Hat Inc. # # (c) 2017 Dell EMC. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from units.compat.mock import patch from ansible.modules.network.dellos10 import dellos10_config from units.modules.utils import set_module_args from .dellos10_module import TestDellos10Module, load_fixture class TestDellos10ConfigModule(TestDellos10Module): module = dellos10_config def setUp(self): super(TestDellos10ConfigModule, self).setUp() self.mock_get_config = patch('ansible.modules.network.dellos10.dellos10_config.get_config') self.get_config = self.mock_get_config.start() self.mock_load_config = patch('ansible.modules.network.dellos10.dellos10_config.load_config') self.load_config = self.mock_load_config.start() self.mock_run_commands = patch('ansible.modules.network.dellos10.dellos10_config.run_commands') self.run_commands = self.mock_run_commands.start() def tearDown(self): super(TestDellos10ConfigModule, self).tearDown() self.mock_get_config.stop() self.mock_load_config.stop() self.mock_run_commands.stop() def load_fixtures(self, commands=None): config_file = 'dellos10_config_config.cfg' self.get_config.return_value = load_fixture(config_file) self.load_config.return_value = None def test_dellos10_config_unchanged(self): src = 
load_fixture('dellos10_config_config.cfg') set_module_args(dict(src=src)) self.execute_module() def test_dellos10_config_src(self): src = load_fixture('dellos10_config_src.cfg') set_module_args(dict(src=src)) commands = ['hostname foo', 'interface ethernet1/1/2', 'no ip address'] self.execute_module(changed=True, commands=commands) def test_dellos10_config_backup(self): set_module_args(dict(backup=True)) result = self.execute_module() self.assertIn('__backup__', result) def test_dellos10_config_save(self): set_module_args(dict(save=True)) self.execute_module(changed=True) self.assertEqual(self.run_commands.call_count, 1) self.assertEqual(self.get_config.call_count, 0) self.assertEqual(self.load_config.call_count, 0) args = self.run_commands.call_args[0][1] self.assertDictContainsSubset({'command': 'copy running-config startup-config'}, args[0]) # self.assertIn('copy running-config startup-config\r', args) def test_dellos10_config_lines_wo_parents(self): set_module_args(dict(lines=['hostname foo'])) commands = ['hostname foo'] self.execute_module(changed=True, commands=commands) def test_dellos10_config_lines_w_parents(self): set_module_args(dict(lines=['shutdown'], parents=['interface ethernet1/1/2'])) commands = ['interface ethernet1/1/2', 'shutdown'] self.execute_module(changed=True, commands=commands) def test_dellos10_config_before(self): set_module_args(dict(lines=['hostname foo'], before=['snmp-server contact bar'])) commands = ['snmp-server contact bar', 'hostname foo'] self.execute_module(changed=True, commands=commands, sort=False) def test_dellos10_config_after(self): set_module_args(dict(lines=['hostname foo'], after=['snmp-server contact bar'])) commands = ['hostname foo', 'snmp-server contact bar'] self.execute_module(changed=True, commands=commands, sort=False) def test_dellos10_config_before_after_no_change(self): set_module_args(dict(lines=['hostname router'], before=['snmp-server contact bar'], after=['snmp-server location chennai'])) 
self.execute_module() def test_dellos10_config_config(self): config = 'hostname localhost' set_module_args(dict(lines=['hostname router'], config=config)) commands = ['hostname router'] self.execute_module(changed=True, commands=commands) def test_dellos10_config_replace_block(self): lines = ['description test string', 'test string'] parents = ['interface ethernet1/1/2'] set_module_args(dict(lines=lines, replace='block', parents=parents)) commands = parents + lines self.execute_module(changed=True, commands=commands) def test_dellos10_config_match_none(self): lines = ['hostname router'] set_module_args(dict(lines=lines, match='none')) self.execute_module(changed=True, commands=lines) def test_dellos10_config_match_none(self): lines = ['ip address 1.2.3.4/24', 'description test string'] parents = ['interface ethernet1/1/2'] set_module_args(dict(lines=lines, parents=parents, match='none')) commands = parents + lines self.execute_module(changed=True, commands=commands, sort=False) def test_dellos10_config_match_strict(self): lines = ['ip address 1.2.3.4/24', 'description test string', 'shutdown'] parents = ['interface ethernet1/1/2'] set_module_args(dict(lines=lines, parents=parents, match='strict')) commands = parents + ['shutdown'] self.execute_module(changed=True, commands=commands, sort=False) def test_dellos10_config_match_exact(self): lines = ['ip address 1.2.3.4/24', 'description test string', 'shutdown'] parents = ['interface ethernet1/1/2'] set_module_args(dict(lines=lines, parents=parents, match='exact')) commands = parents + lines self.execute_module(changed=True, commands=commands, sort=False)
gpl-3.0
willingc/oh-mainline
vendor/packages/celery/celery/bin/celeryd.py
18
7813
# -*- coding: utf-8 -*- """celeryd .. program:: celeryd .. cmdoption:: -c, --concurrency Number of child processes processing the queue. The default is the number of CPUs available on your system. .. cmdoption:: -f, --logfile Path to log file. If no logfile is specified, `stderr` is used. .. cmdoption:: -l, --loglevel Logging level, choose between `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`, or `FATAL`. .. cmdoption:: -n, --hostname Set custom hostname. .. cmdoption:: -B, --beat Also run the `celerybeat` periodic task scheduler. Please note that there must only be one instance of this service. .. cmdoption:: -Q, --queues List of queues to enable for this worker, separated by comma. By default all configured queues are enabled. Example: `-Q video,image` .. cmdoption:: -I, --include Comma separated list of additional modules to import. Example: -I foo.tasks,bar.tasks .. cmdoption:: -s, --schedule Path to the schedule database if running with the `-B` option. Defaults to `celerybeat-schedule`. The extension ".db" will be appended to the filename. .. cmdoption:: --scheduler Scheduler class to use. Default is celery.beat.PersistentScheduler .. cmdoption:: -E, --events Send events that can be captured by monitors like `celerymon`. .. cmdoption:: --purge, --discard Discard all waiting tasks before the daemon is started. **WARNING**: This is unrecoverable, and the tasks will be deleted from the messaging server. .. cmdoption:: --time-limit Enables a hard time limit (in seconds) for tasks. .. cmdoption:: --soft-time-limit Enables a soft time limit (in seconds) for tasks. .. cmdoption:: --maxtasksperchild Maximum number of tasks a pool worker can execute before it's terminated and replaced by a new worker. 
""" from __future__ import absolute_import import sys try: from multiprocessing import freeze_support except ImportError: # pragma: no cover freeze_support = lambda: True # noqa from celery.bin.base import Command, Option class WorkerCommand(Command): namespace = "celeryd" enable_config_from_cmdline = True supports_args = False def run(self, *args, **kwargs): kwargs.pop("app", None) # Pools like eventlet/gevent needs to patch libs as early # as possible. from celery import concurrency kwargs["pool"] = concurrency.get_implementation( kwargs.get("pool") or self.app.conf.CELERYD_POOL) return self.app.Worker(**kwargs).run() def get_options(self): conf = self.app.conf return ( Option('-c', '--concurrency', default=conf.CELERYD_CONCURRENCY, action="store", dest="concurrency", type="int", help="Number of worker threads/processes"), Option('-P', '--pool', default=conf.CELERYD_POOL, action="store", dest="pool", type="str", help="Pool implementation: " "processes (default), eventlet, gevent, " "solo or threads."), Option('--purge', '--discard', default=False, action="store_true", dest="discard", help="Discard all waiting tasks before the server is" "started. WARNING: There is no undo operation " "and the tasks will be deleted."), Option('-f', '--logfile', default=conf.CELERYD_LOG_FILE, action="store", dest="logfile", help="Path to log file."), Option('-l', '--loglevel', default=conf.CELERYD_LOG_LEVEL, action="store", dest="loglevel", help="Choose between DEBUG/INFO/WARNING/ERROR/CRITICAL"), Option('-n', '--hostname', default=None, action="store", dest="hostname", help="Set custom host name. E.g. 'foo.example.com'."), Option('-B', '--beat', default=False, action="store_true", dest="run_clockservice", help="Also run the celerybeat periodic task scheduler. 
" "NOTE: Only one instance of celerybeat must be" "running at any one time."), Option('-s', '--schedule', default=conf.CELERYBEAT_SCHEDULE_FILENAME, action="store", dest="schedule", help="Path to the schedule database if running with the -B " "option. The extension '.db' will be appended to the " "filename. Default: %s" % ( conf.CELERYBEAT_SCHEDULE_FILENAME, )), Option('--scheduler', default=None, action="store", dest="scheduler_cls", help="Scheduler class. Default is " "celery.beat.PersistentScheduler"), Option('-S', '--statedb', default=conf.CELERYD_STATE_DB, action="store", dest="db", help="Path to the state database. The extension '.db' will " "be appended to the filename. Default: %s" % ( conf.CELERYD_STATE_DB, )), Option('-E', '--events', default=conf.CELERY_SEND_EVENTS, action="store_true", dest="events", help="Send events so the worker can be monitored by " "celeryev, celerymon and other monitors.."), Option('--time-limit', default=conf.CELERYD_TASK_TIME_LIMIT, action="store", type="int", dest="task_time_limit", help="Enables a hard time limit (in seconds) for tasks."), Option('--soft-time-limit', default=conf.CELERYD_TASK_SOFT_TIME_LIMIT, action="store", type="int", dest="task_soft_time_limit", help="Enables a soft time limit (in seconds) for tasks."), Option('--maxtasksperchild', default=conf.CELERYD_MAX_TASKS_PER_CHILD, action="store", type="int", dest="max_tasks_per_child", help="Maximum number of tasks a pool worker can execute" "before it's terminated and replaced by a new worker."), Option('--queues', '-Q', default=[], action="store", dest="queues", help="Comma separated list of queues to consume from. " "By default all configured queues are used. " "Example: -Q video,image"), Option('--include', '-I', default=[], action="store", dest="include", help="Comma separated list of additional modules to import. " "Example: -I foo.tasks,bar.tasks"), Option('--pidfile', default=None, help="Optional file used to store the workers pid. 
" "The worker will not start if this file already exists " "and the pid is still alive."), Option('--autoscale', default=None, help="Enable autoscaling by providing " "max_concurrency,min_concurrency. Example: " "--autoscale=10,3 (always keep 3 processes, " "but grow to 10 if necessary)."), ) def main(): freeze_support() worker = WorkerCommand() worker.execute_from_commandline() def windows_main(): sys.stderr.write(""" The celeryd command does not work on Windows. Instead, please use: ..> python -m celery.bin.celeryd You can also supply arguments: ..> python -m celery.bin.celeryd --concurrency=10 --loglevel=DEBUG """.strip()) if __name__ == "__main__": # pragma: no cover main()
agpl-3.0
jhaux/tensorflow
tensorflow/contrib/keras/python/keras/utils/np_utils.py
73
1816
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Numpy-related utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np def to_categorical(y, num_classes=None): """Converts a class vector (integers) to binary class matrix. E.g. for use with categorical_crossentropy. Arguments: y: class vector to be converted into a matrix (integers from 0 to num_classes). num_classes: total number of classes. Returns: A binary matrix representation of the input. """ y = np.array(y, dtype='int').ravel() if not num_classes: num_classes = np.max(y) + 1 n = y.shape[0] categorical = np.zeros((n, num_classes)) categorical[np.arange(n), y] = 1 return categorical def normalize(x, axis=-1, order=2): """Normalizes a Numpy array. Arguments: x: Numpy array to normalize. axis: axis along which to normalize. order: Normalization order (e.g. 2 for L2 norm). Returns: A normalized copy of the array. """ l2 = np.atleast_1d(np.linalg.norm(x, order, axis)) l2[l2 == 0] = 1 return x / np.expand_dims(l2, axis)
apache-2.0
googleapis/googleapis-gen
google/cloud/translate/v3beta1/translation-v3beta1-py/google/cloud/translate_v3beta1/services/translation_service/pagers.py
1
5924
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from typing import Any, AsyncIterable, Awaitable, Callable, Iterable, Sequence, Tuple, Optional from google.cloud.translate_v3beta1.types import translation_service class ListGlossariesPager: """A pager for iterating through ``list_glossaries`` requests. This class thinly wraps an initial :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse` object, and provides an ``__iter__`` method to iterate through its ``glossaries`` field. If there are more pages, the ``__iter__`` method will make additional ``ListGlossaries`` requests and continue to iterate through the ``glossaries`` field on the corresponding responses. All the usual :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__(self, method: Callable[..., translation_service.ListGlossariesResponse], request: translation_service.ListGlossariesRequest, response: translation_service.ListGlossariesResponse, *, metadata: Sequence[Tuple[str, str]] = ()): """Instantiate the pager. Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.translate_v3beta1.types.ListGlossariesRequest): The initial request object. 
response (google.cloud.translate_v3beta1.types.ListGlossariesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = translation_service.ListGlossariesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property def pages(self) -> Iterable[translation_service.ListGlossariesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = self._method(self._request, metadata=self._metadata) yield self._response def __iter__(self) -> Iterable[translation_service.Glossary]: for page in self.pages: yield from page.glossaries def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response) class ListGlossariesAsyncPager: """A pager for iterating through ``list_glossaries`` requests. This class thinly wraps an initial :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse` object, and provides an ``__aiter__`` method to iterate through its ``glossaries`` field. If there are more pages, the ``__aiter__`` method will make additional ``ListGlossaries`` requests and continue to iterate through the ``glossaries`` field on the corresponding responses. All the usual :class:`google.cloud.translate_v3beta1.types.ListGlossariesResponse` attributes are available on the pager. If multiple requests are made, only the most recent response is retained, and thus used for attribute lookup. """ def __init__(self, method: Callable[..., Awaitable[translation_service.ListGlossariesResponse]], request: translation_service.ListGlossariesRequest, response: translation_service.ListGlossariesResponse, *, metadata: Sequence[Tuple[str, str]] = ()): """Instantiates the pager. 
Args: method (Callable): The method that was originally called, and which instantiated this pager. request (google.cloud.translate_v3beta1.types.ListGlossariesRequest): The initial request object. response (google.cloud.translate_v3beta1.types.ListGlossariesResponse): The initial response object. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ self._method = method self._request = translation_service.ListGlossariesRequest(request) self._response = response self._metadata = metadata def __getattr__(self, name: str) -> Any: return getattr(self._response, name) @property async def pages(self) -> AsyncIterable[translation_service.ListGlossariesResponse]: yield self._response while self._response.next_page_token: self._request.page_token = self._response.next_page_token self._response = await self._method(self._request, metadata=self._metadata) yield self._response def __aiter__(self) -> AsyncIterable[translation_service.Glossary]: async def async_generator(): async for page in self.pages: for response in page.glossaries: yield response return async_generator() def __repr__(self) -> str: return '{0}<{1!r}>'.format(self.__class__.__name__, self._response)
apache-2.0
liorvh/infernal-twin
build/pillow/Tests/test_file_ppm.py
11
1028
from helper import unittest, PillowTestCase from PIL import Image # sample ppm stream test_file = "Tests/images/hopper.ppm" data = open(test_file, "rb").read() class TestFilePpm(PillowTestCase): def test_sanity(self): im = Image.open(test_file) im.load() self.assertEqual(im.mode, "RGB") self.assertEqual(im.size, (128, 128)) self.assertEqual(im.format, "PPM") def test_16bit_pgm(self): im = Image.open('Tests/images/16_bit_binary.pgm') im.load() self.assertEqual(im.mode, 'I') self.assertEqual(im.size, (20, 100)) tgt = Image.open('Tests/images/16_bit_binary_pgm.png') self.assert_image_equal(im, tgt) def test_16bit_pgm_write(self): im = Image.open('Tests/images/16_bit_binary.pgm') im.load() f = self.tempfile('temp.pgm') im.save(f, 'PPM') reloaded = Image.open(f) self.assert_image_equal(im, reloaded) if __name__ == '__main__': unittest.main() # End of file
gpl-3.0
Juniper/tempest
tempest/tests/lib/services/compute/test_baremetal_nodes_client.py
7
2912
# Copyright 2015 NEC Corporation. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import copy from tempest.lib.services.compute import baremetal_nodes_client from tempest.tests.lib import fake_auth_provider from tempest.tests.lib.services import base class TestBareMetalNodesClient(base.BaseServiceTest): FAKE_NODE_INFO = {'cpus': '8', 'disk_gb': '64', 'host': '10.0.2.15', 'id': 'Identifier', 'instance_uuid': "null", 'interfaces': [ { "address": "20::01", "datapath_id": "null", "id": 1, "port_no": None } ], 'memory_mb': '8192', 'task_state': None} def setUp(self): super(TestBareMetalNodesClient, self).setUp() fake_auth = fake_auth_provider.FakeAuthProvider() self.baremetal_nodes_client = (baremetal_nodes_client. 
BaremetalNodesClient (fake_auth, 'compute', 'regionOne')) def _test_bareMetal_nodes(self, operation='list', bytes_body=False): if operation != 'list': expected = {"node": self.FAKE_NODE_INFO} function = self.baremetal_nodes_client.show_baremetal_node else: node_info = copy.deepcopy(self.FAKE_NODE_INFO) del node_info['instance_uuid'] expected = {"nodes": [node_info]} function = self.baremetal_nodes_client.list_baremetal_nodes self.check_service_client_function( function, 'tempest.lib.common.rest_client.RestClient.get', expected, bytes_body, 200, baremetal_node_id='Identifier') def test_list_bareMetal_nodes_with_str_body(self): self._test_bareMetal_nodes() def test_list_bareMetal_nodes_with_bytes_body(self): self._test_bareMetal_nodes(bytes_body=True) def test_show_bareMetal_node_with_str_body(self): self._test_bareMetal_nodes('show') def test_show_bareMetal_node_with_bytes_body(self): self._test_bareMetal_nodes('show', True)
apache-2.0
matt-kwong/grpc
src/python/grpcio_tests/tests/http2/negative_http2_client.py
6
5342
# Copyright 2016 gRPC authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """The Python client used to test negative http2 conditions.""" import argparse import grpc import time from src.proto.grpc.testing import test_pb2 from src.proto.grpc.testing import messages_pb2 def _validate_payload_type_and_length(response, expected_type, expected_length): if response.payload.type is not expected_type: raise ValueError('expected payload type %s, got %s' % (expected_type, type(response.payload.type))) elif len(response.payload.body) != expected_length: raise ValueError('expected payload body size %d, got %d' % (expected_length, len(response.payload.body))) def _expect_status_code(call, expected_code): if call.code() != expected_code: raise ValueError('expected code %s, got %s' % (expected_code, call.code())) def _expect_status_details(call, expected_details): if call.details() != expected_details: raise ValueError('expected message %s, got %s' % (expected_details, call.details())) def _validate_status_code_and_details(call, expected_code, expected_details): _expect_status_code(call, expected_code) _expect_status_details(call, expected_details) # common requests _REQUEST_SIZE = 314159 _RESPONSE_SIZE = 271828 _SIMPLE_REQUEST = messages_pb2.SimpleRequest( response_type=messages_pb2.COMPRESSABLE, response_size=_RESPONSE_SIZE, payload=messages_pb2.Payload(body=b'\x00' * _REQUEST_SIZE)) def _goaway(stub): first_response = stub.UnaryCall(_SIMPLE_REQUEST) 
_validate_payload_type_and_length(first_response, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) time.sleep(1) second_response = stub.UnaryCall(_SIMPLE_REQUEST) _validate_payload_type_and_length(second_response, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) def _rst_after_header(stub): resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, "Received RST_STREAM with error code 0") def _rst_during_data(stub): resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, "Received RST_STREAM with error code 0") def _rst_after_data(stub): resp_future = stub.UnaryCall.future(_SIMPLE_REQUEST) _validate_status_code_and_details(resp_future, grpc.StatusCode.INTERNAL, "Received RST_STREAM with error code 0") def _ping(stub): response = stub.UnaryCall(_SIMPLE_REQUEST) _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) def _max_streams(stub): # send one req to ensure server sets MAX_STREAMS response = stub.UnaryCall(_SIMPLE_REQUEST) _validate_payload_type_and_length(response, messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) # give the streams a workout futures = [] for _ in range(15): futures.append(stub.UnaryCall.future(_SIMPLE_REQUEST)) for future in futures: _validate_payload_type_and_length( future.result(), messages_pb2.COMPRESSABLE, _RESPONSE_SIZE) def _run_test_case(test_case, stub): if test_case == 'goaway': _goaway(stub) elif test_case == 'rst_after_header': _rst_after_header(stub) elif test_case == 'rst_during_data': _rst_during_data(stub) elif test_case == 'rst_after_data': _rst_after_data(stub) elif test_case == 'ping': _ping(stub) elif test_case == 'max_streams': _max_streams(stub) else: raise ValueError("Invalid test case: %s" % test_case) def _args(): parser = argparse.ArgumentParser() parser.add_argument( '--server_host', help='the host to which to connect', type=str, default="127.0.0.1") 
parser.add_argument( '--server_port', help='the port to which to connect', type=int, default="8080") parser.add_argument( '--test_case', help='the test case to execute', type=str, default="goaway") return parser.parse_args() def _stub(server_host, server_port): target = '{}:{}'.format(server_host, server_port) channel = grpc.insecure_channel(target) grpc.channel_ready_future(channel).result() return test_pb2.TestServiceStub(channel) def main(): args = _args() stub = _stub(args.server_host, args.server_port) _run_test_case(args.test_case, stub) if __name__ == '__main__': main()
apache-2.0
ptisserand/ansible
lib/ansible/plugins/action/junos.py
7
4677
# # (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import sys import copy from ansible import constants as C from ansible.module_utils._text import to_text from ansible.module_utils.connection import Connection from ansible.module_utils.network.common.utils import load_provider from ansible.module_utils.network.junos.junos import junos_provider_spec from ansible.plugins.loader import connection_loader, module_loader from ansible.plugins.action.normal import ActionModule as _ActionModule try: from __main__ import display except ImportError: from ansible.utils.display import Display display = Display() class ActionModule(_ActionModule): def run(self, tmp=None, task_vars=None): del tmp # tmp no longer has any effect module = module_loader._load_module_source(self._task.action, module_loader.find_plugin(self._task.action)) if not getattr(module, 'USE_PERSISTENT_CONNECTION', False): return super(ActionModule, self).run(task_vars=task_vars) socket_path = None if self._play_context.connection == 'local': provider = load_provider(junos_provider_spec, self._task.args) pc = copy.deepcopy(self._play_context) pc.network_os = 'junos' pc.remote_addr = provider['host'] or self._play_context.remote_addr if self._task.action == 'junos_netconf' or (provider['transport'] == 'cli' 
and self._task.action == 'junos_command'): pc.connection = 'network_cli' pc.port = int(provider['port'] or self._play_context.port or 22) else: pc.connection = 'netconf' pc.port = int(provider['port'] or self._play_context.port or 830) pc.remote_user = provider['username'] or self._play_context.connection_user pc.password = provider['password'] or self._play_context.password pc.private_key_file = provider['ssh_keyfile'] or self._play_context.private_key_file pc.timeout = int(provider['timeout'] or C.PERSISTENT_COMMAND_TIMEOUT) display.vvv('using connection plugin %s (was local)' % pc.connection, pc.remote_addr) connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin) socket_path = connection.run() display.vvvv('socket_path: %s' % socket_path, pc.remote_addr) if not socket_path: return {'failed': True, 'msg': 'unable to open shell. Please see: ' + 'https://docs.ansible.com/ansible/network_debug_troubleshooting.html#unable-to-open-shell'} task_vars['ansible_socket'] = socket_path elif self._play_context.connection in ('netconf', 'network_cli'): provider = self._task.args.get('provider', {}) if any(provider.values()): display.warning('provider is unnecessary when using connection=%s and will be ignored' % self._play_context.connection) else: return {'failed': True, 'msg': 'Connection type %s is not valid for this module' % self._play_context.connection} if (self._play_context.connection == 'local' and pc.connection == 'network_cli') or self._play_context.connection == 'network_cli': # make sure we are in the right cli context which should be # enable mode and not config module if socket_path is None: socket_path = self._connection.socket_path conn = Connection(socket_path) out = conn.get_prompt() while to_text(out, errors='surrogate_then_replace').strip().endswith('#'): display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr) conn.send_command('exit') out = conn.get_prompt() result = super(ActionModule, 
self).run(None, task_vars) return result
gpl-3.0
fbradyirl/home-assistant
homeassistant/components/wwlln/geo_location.py
1
6590
"""Support for WWLLN geo location events.""" from datetime import timedelta import logging from aiowwlln.errors import WWLLNError from homeassistant.components.geo_location import GeolocationEvent from homeassistant.const import ( ATTR_ATTRIBUTION, CONF_LATITUDE, CONF_LONGITUDE, CONF_RADIUS, CONF_UNIT_SYSTEM, CONF_UNIT_SYSTEM_IMPERIAL, LENGTH_KILOMETERS, LENGTH_MILES, ) from homeassistant.core import callback from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, async_dispatcher_send, ) from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.dt import utc_from_timestamp from .const import CONF_WINDOW, DATA_CLIENT, DOMAIN _LOGGER = logging.getLogger(__name__) ATTR_EXTERNAL_ID = "external_id" ATTR_PUBLICATION_DATE = "publication_date" DEFAULT_ATTRIBUTION = "Data provided by the WWLLN" DEFAULT_EVENT_NAME = "Lightning Strike: {0}" DEFAULT_ICON = "mdi:flash" DEFAULT_UPDATE_INTERVAL = timedelta(minutes=10) SIGNAL_DELETE_ENTITY = "delete_entity_{0}" async def async_setup_entry(hass, entry, async_add_entities): """Set up WWLLN based on a config entry.""" client = hass.data[DOMAIN][DATA_CLIENT][entry.entry_id] manager = WWLLNEventManager( hass, async_add_entities, client, entry.data[CONF_LATITUDE], entry.data[CONF_LONGITUDE], entry.data[CONF_RADIUS], entry.data[CONF_WINDOW], entry.data[CONF_UNIT_SYSTEM], ) await manager.async_init() class WWLLNEventManager: """Define a class to handle WWLLN events.""" def __init__( self, hass, async_add_entities, client, latitude, longitude, radius, window_seconds, unit_system, ): """Initialize.""" self._async_add_entities = async_add_entities self._client = client self._hass = hass self._latitude = latitude self._longitude = longitude self._managed_strike_ids = set() self._radius = radius self._strikes = {} self._window = timedelta(seconds=window_seconds) self._unit_system = unit_system if unit_system == CONF_UNIT_SYSTEM_IMPERIAL: self._unit = LENGTH_MILES else: self._unit = 
LENGTH_KILOMETERS @callback def _create_events(self, ids_to_create): """Create new geo location events.""" events = [] for strike_id in ids_to_create: strike = self._strikes[strike_id] event = WWLLNEvent( strike["distance"], strike["lat"], strike["long"], self._unit, strike_id, strike["unixTime"], ) events.append(event) self._async_add_entities(events) @callback def _remove_events(self, ids_to_remove): """Remove old geo location events.""" for strike_id in ids_to_remove: async_dispatcher_send(self._hass, SIGNAL_DELETE_ENTITY.format(strike_id)) async def async_init(self): """Schedule regular updates based on configured time interval.""" async def update(event_time): """Update.""" await self.async_update() await self.async_update() async_track_time_interval(self._hass, update, DEFAULT_UPDATE_INTERVAL) async def async_update(self): """Refresh data.""" _LOGGER.debug("Refreshing WWLLN data") try: self._strikes = await self._client.within_radius( self._latitude, self._longitude, self._radius, unit=self._unit_system, window=self._window, ) except WWLLNError as err: _LOGGER.error("Error while updating WWLLN data: %s", err) return new_strike_ids = set(self._strikes) ids_to_remove = self._managed_strike_ids.difference(new_strike_ids) self._remove_events(ids_to_remove) ids_to_create = new_strike_ids.difference(self._managed_strike_ids) self._create_events(ids_to_create) class WWLLNEvent(GeolocationEvent): """Define a lightning strike event.""" def __init__( self, distance, latitude, longitude, unit, strike_id, publication_date ): """Initialize entity with data provided.""" self._distance = distance self._latitude = latitude self._longitude = longitude self._publication_date = publication_date self._remove_signal_delete = None self._strike_id = strike_id self._unit_of_measurement = unit @property def device_state_attributes(self): """Return the device state attributes.""" attributes = {} for key, value in ( (ATTR_EXTERNAL_ID, self._strike_id), (ATTR_ATTRIBUTION, 
DEFAULT_ATTRIBUTION), (ATTR_PUBLICATION_DATE, utc_from_timestamp(self._publication_date)), ): attributes[key] = value return attributes @property def distance(self): """Return distance value of this external event.""" return self._distance @property def icon(self): """Return the icon to use in the front-end.""" return DEFAULT_ICON @property def latitude(self): """Return latitude value of this external event.""" return self._latitude @property def longitude(self): """Return longitude value of this external event.""" return self._longitude @property def name(self): """Return the name of the event.""" return DEFAULT_EVENT_NAME.format(self._strike_id) @property def source(self) -> str: """Return source value of this external event.""" return DOMAIN @property def should_poll(self): """Disable polling.""" return False @property def unit_of_measurement(self): """Return the unit of measurement.""" return self._unit_of_measurement @callback def _delete_callback(self): """Remove this entity.""" self._remove_signal_delete() self.hass.async_create_task(self.async_remove()) async def async_added_to_hass(self): """Call when entity is added to hass.""" self._remove_signal_delete = async_dispatcher_connect( self.hass, SIGNAL_DELETE_ENTITY.format(self._strike_id), self._delete_callback, )
apache-2.0
abdoosh00/edx-platform
lms/djangoapps/bulk_email/migrations/0002_change_field_names.py
182
5798
# -*- coding: utf-8 -*- from south.db import db from south.v2 import SchemaMigration class Migration(SchemaMigration): def forwards(self, orm): # Renaming field 'CourseEmail.to' db.rename_column('bulk_email_courseemail', 'to', 'to_option') # Renaming field 'CourseEmail.hash' db.rename_column('bulk_email_courseemail', 'hash', 'slug') # Adding field 'CourseEmail.text_message' db.add_column('bulk_email_courseemail', 'text_message', self.gf('django.db.models.fields.TextField')(null=True, blank=True), keep_default=False) def backwards(self, orm): # Renaming field 'CourseEmail.to_option' db.rename_column('bulk_email_courseemail', 'to_option', 'to') # Renaming field 'CourseEmail.slug' db.rename_column('bulk_email_courseemail', 'slug', 'hash') # Deleting field 'CourseEmail.text_message' db.delete_column('bulk_email_courseemail', 'text_message') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, 'bulk_email.courseemail': { 'Meta': {'object_name': 'CourseEmail'}, 'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'html_message': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}), 'sender': ('django.db.models.fields.related.ForeignKey', [], {'default': '1', 'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '128', 'db_index': 'True'}), 'subject': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}), 'text_message': ('django.db.models.fields.TextField', [], {'null': 
'True', 'blank': 'True'}), 'to_option': ('django.db.models.fields.CharField', [], {'default': "'myself'", 'max_length': '64'}) }, 'bulk_email.optout': { 'Meta': {'unique_together': "(('email', 'course_id'),)", 'object_name': 'Optout'}, 'course_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'email': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['bulk_email']
agpl-3.0
aselle/tensorflow
tensorflow/python/debug/wrappers/grpc_wrapper.py
29
8652
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Debugger wrapper session that sends debug data to file:// URLs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import signal
import sys
import traceback

import six

# Google-internal import(s).
from tensorflow.python.debug.lib import common
from tensorflow.python.debug.wrappers import framework


def publish_traceback(debug_server_urls, graph, feed_dict, fetches,
                      old_graph_version):
  """Publish traceback and source code if graph version is new.

  `graph.version` is compared with `old_graph_version`. If the former is
  higher (i.e., newer), the graph traceback and the associated source code
  is sent to the debug server at the specified gRPC URLs.

  Args:
    debug_server_urls: A single gRPC debug server URL as a `str` or a `list`
      of debug server URLs.
    graph: A Python `tf.Graph` object.
    feed_dict: Feed dictionary given to the `Session.run()` call.
    fetches: Fetches from the `Session.run()` call.
    old_graph_version: Old graph version to compare to.

  Returns:
    If `graph.version > old_graph_version`, the new graph version as an `int`.
    Else, the `old_graph_version` is returned.
  """
  # TODO(cais): Consider moving this back to the top, after grpc becomes a
  # pip dependency of tensorflow or tf_debug.
  # pylint:disable=g-import-not-at-top
  from tensorflow.python.debug.lib import source_remote
  # pylint:enable=g-import-not-at-top

  # Nothing new to publish if the graph has not changed since the last push.
  if graph.version <= old_graph_version:
    return old_graph_version

  run_key = common.get_run_key(feed_dict, fetches)
  source_remote.send_graph_tracebacks(
      debug_server_urls, run_key, traceback.extract_stack(), graph,
      send_source=True)
  return graph.version


class GrpcDebugWrapperSession(framework.NonInteractiveDebugWrapperSession):
  """Debug Session wrapper that send debug data to gRPC stream(s)."""

  def __init__(self,
               sess,
               grpc_debug_server_addresses,
               watch_fn=None,
               thread_name_filter=None,
               log_usage=True):
    """Constructor of DumpingDebugWrapperSession.

    Args:
      sess: The TensorFlow `Session` object being wrapped.
      grpc_debug_server_addresses: (`str` or `list` of `str`) Single or a list
        of the gRPC debug server addresses, in the format of
        <host:port>, with or without the "grpc://" prefix. For example:
          "localhost:7000",
          ["localhost:7000", "192.168.0.2:8000"]
      watch_fn: (`Callable`) A Callable that can be used to define per-run
        debug ops and watched tensors. See the doc of
        `NonInteractiveDebugWrapperSession.__init__()` for details.
      thread_name_filter: Regular-expression white list for threads on which
        the wrapper session will be active. See doc of
        `BaseDebugWrapperSession` for more details.
      log_usage: (`bool`) whether the usage of this class is to be logged.

    Raises:
      TypeError: If `grpc_debug_server_addresses` is not a `str` or a `list`
        of `str`.
    """
    if log_usage:
      pass  # No logging for open-source.

    framework.NonInteractiveDebugWrapperSession.__init__(
        self, sess, watch_fn=watch_fn, thread_name_filter=thread_name_filter)

    # Accept either a single address string or a list of address strings;
    # validate the element types before normalizing any of them.
    if isinstance(grpc_debug_server_addresses, str):
      addresses = [grpc_debug_server_addresses]
    elif isinstance(grpc_debug_server_addresses, list):
      for address in grpc_debug_server_addresses:
        if not isinstance(address, str):
          raise TypeError(
              "Expected type str in list grpc_debug_server_addresses, "
              "received type %s" % type(address))
      addresses = grpc_debug_server_addresses
    else:
      raise TypeError(
          "Expected type str or list in grpc_debug_server_addresses, "
          "received type %s" % type(grpc_debug_server_addresses))

    self._grpc_debug_server_urls = [
        self._normalize_grpc_url(address) for address in addresses]

  def prepare_run_debug_urls(self, fetches, feed_dict):
    """Implementation of abstract method in superclass.

    See doc of `NonInteractiveDebugWrapperSession.prepare_run_debug_urls()`
    for details.

    Args:
      fetches: Same as the `fetches` argument to `Session.run()`
      feed_dict: Same as the `feed_dict` argument to `Session.run()`

    Returns:
      debug_urls: (`str` or `list` of `str`) file:// debug URLs to be used in
        this `Session.run()` call.
    """
    # The gRPC URLs were fixed at construction time; every run uses them.
    return self._grpc_debug_server_urls

  def _normalize_grpc_url(self, address):
    # Prepend the "grpc://" scheme unless the caller already supplied it.
    if address.startswith(common.GRPC_URL_PREFIX):
      return address
    return common.GRPC_URL_PREFIX + address


def _signal_handler(unused_signal, unused_frame):
  # Re-prompt until the user gives a recognizable answer; empty input or
  # "Y"/"y" means quit, "N"/"n" means keep running.
  answer = None
  while answer not in ("", "Y", "y", "N", "n"):
    answer = six.moves.input(
        "\nSIGINT received. Quit program? (Y/n): ").strip()
  if answer in ("", "Y", "y"):
    sys.exit(0)


def register_signal_handler():
  try:
    signal.signal(signal.SIGINT, _signal_handler)
  except ValueError:
    # This can happen if we are not in the MainThread.
    pass


class TensorBoardDebugWrapperSession(GrpcDebugWrapperSession):
  """A tfdbg Session wrapper that can be used with TensorBoard Debugger Plugin.

  This wrapper is the same as `GrpcDebugWrapperSession`, except that it uses a
  predefined `watch_fn` that
  1) uses `DebugIdentity` debug ops with the `gated_grpc` attribute set to
     `True` to allow the interactive enabling and disabling of tensor
     breakpoints.
  2) watches all tensors in the graph.
  This saves the need for the user to define a `watch_fn`.
  """

  def __init__(self,
               sess,
               grpc_debug_server_addresses,
               thread_name_filter=None,
               send_traceback_and_source_code=True,
               log_usage=True):
    """Constructor of TensorBoardDebugWrapperSession.

    Args:
      sess: The `tf.Session` instance to be wrapped.
      grpc_debug_server_addresses: gRPC address(es) of debug server(s), as a
        `str` or a `list` of `str`s. E.g., "localhost:2333",
        "grpc://localhost:2333", ["192.168.0.7:2333", "192.168.0.8:2333"].
      thread_name_filter: Optional filter for thread names.
      send_traceback_and_source_code: Whether traceback of graph elements and
        the source code are to be sent to the debug server(s).
      log_usage: Whether the usage of this class is to be logged (if
        applicable).
    """

    def _watch_everything_gated(fetches, feeds):
      # Per-run watch options: gate every tensor behind gated-gRPC
      # DebugIdentity ops so breakpoints can be toggled interactively.
      del fetches, feeds  # Unused.
      return framework.WatchOptions(
          debug_ops=["DebugIdentity(gated_grpc=true)"])

    super(TensorBoardDebugWrapperSession, self).__init__(
        sess,
        grpc_debug_server_addresses,
        watch_fn=_watch_everything_gated,
        thread_name_filter=thread_name_filter,
        log_usage=log_usage)

    self._send_traceback_and_source_code = send_traceback_and_source_code
    # Keeps track of the latest version of Python graph object that has been
    # sent to the debug servers.
    self._sent_graph_version = -1

    register_signal_handler()

  def run(self,
          fetches,
          feed_dict=None,
          options=None,
          run_metadata=None,
          callable_runner=None,
          callable_runner_args=None,
          callable_options=None):
    # Push graph tracebacks/source before each run so the debug servers always
    # see the newest graph version.
    if self._send_traceback_and_source_code:
      self._sent_graph_version = publish_traceback(
          self._grpc_debug_server_urls, self.graph, feed_dict, fetches,
          self._sent_graph_version)
    return super(TensorBoardDebugWrapperSession, self).run(
        fetches,
        feed_dict=feed_dict,
        options=options,
        run_metadata=run_metadata,
        callable_runner=callable_runner,
        callable_runner_args=callable_runner_args,
        callable_options=callable_options)
apache-2.0
Ozmodian/Wordpress_local
eb/linux/python3/scli/operation/version_operations.py
8
5953
#!/usr/bin/env python
#==============================================================================
# Copyright 2012 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Amazon Software License (the "License"). You may not use
# this file except in compliance with the License. A copy of the License is
# located at
#
#       http://aws.amazon.com/asl/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, express or
# implied. See the License for the specific language governing permissions
# and limitations under the License.
#==============================================================================

import logging as _logging

from lib.utility import shell_utils
from lib.elasticbeanstalk.exception import AlreadyExistException
from scli import prompt
from scli.constants import ParameterSource, ServiceDefault
from scli.operation.base import OperationBase, OperationResult
from scli.parameter import Parameter, ParameterName
from scli.resources import CreateApplicationVersionOpMessage, \
    PushApplicationVersionOpMessage, RecordApplicationVersionOpMessage

log = _logging.getLogger('cli.op')


class CreateApplicationVersionOperation(OperationBase):
    """Register the default (sample) application version for an application."""

    _input_parameters = {
        ParameterName.AwsAccessKeyId,
        ParameterName.AwsSecretAccessKey,
        ParameterName.ServiceEndpoint,
        ParameterName.Region,
        ParameterName.SolutionStack,
        ParameterName.ApplicationName,
    }

    _output_parameters = set()

    # Create Sample Application Version
    def execute(self, parameter_pool):
        client = self._get_eb_client(parameter_pool)
        app_name = parameter_pool.get_value(ParameterName.ApplicationName, False)

        try:
            response = client.create_application_version(
                app_name, ServiceDefault.DEFAULT_VERSION_NAME)
        except AlreadyExistException:
            # Treat "already exists" as a benign outcome: report it and
            # return a result without a request id or payload.
            log.info('Version "{0}" of Application "{1}" already exists.'.format(
                ServiceDefault.DEFAULT_VERSION_NAME, app_name))
            msg = CreateApplicationVersionOpMessage.AlreadyExist.format(
                ServiceDefault.DEFAULT_VERSION_NAME)
            prompt.info(msg)
            return OperationResult(self, None, msg, None)

        # Success path: log the API result and surface a confirmation message.
        log.info('Received response for CreateApplicationVersion call.')
        self._log_api_result(self.__class__.__name__,
                             'CreateApplicationVersion',
                             response.result)
        msg = CreateApplicationVersionOpMessage.Succeed.format(
            ServiceDefault.DEFAULT_VERSION_NAME)
        prompt.info(msg)
        return OperationResult(self, response.request_id, msg, response.result)


class PushApplicationVersionOperation(OperationBase):
    """Push the local git workspace to an existing environment via git-aws-push."""

    _input_parameters = {
        ParameterName.AwsAccessKeyId,
        ParameterName.AwsSecretAccessKey,
        ParameterName.ServiceEndpoint,
        ParameterName.Region,
        ParameterName.ApplicationName,
        ParameterName.EnvironmentName
    }

    _output_parameters = set()

    def execute(self, parameter_pool):
        client = self._get_eb_client(parameter_pool)
        app_name = parameter_pool.get_value(ParameterName.ApplicationName, False)
        env_name = parameter_pool.get_value(ParameterName.EnvironmentName, False)

        response = client.describe_environments(app_name, env_name,
                                                include_deleted=False)
        if not response.result:
            # Pushing requires a live (non-deleted) environment.
            prompt.error(
                PushApplicationVersionOpMessage.EnvNotExist.format(env_name))
        else:
            shell_utils.git_aws_push(False)

        return OperationResult(self, None, None, None)


class RecordApplicationVersionOperation(OperationBase):
    """Look up the deployed version label of an environment and record it."""

    _input_parameters = {
        ParameterName.AwsAccessKeyId,
        ParameterName.AwsSecretAccessKey,
        ParameterName.ServiceEndpoint,
        ParameterName.Region,
        ParameterName.ApplicationName,
        ParameterName.EnvironmentName
    }

    _output_parameters = {
        ParameterName.ApplicationVersionName
    }

    def execute(self, parameter_pool):
        client = self._get_eb_client(parameter_pool)
        app_name = parameter_pool.get_value(ParameterName.ApplicationName, False)
        env_name = parameter_pool.get_value(ParameterName.EnvironmentName, False)

        response = client.describe_environments(app_name, env_name,
                                                include_deleted=False)
        if response.result:  # If have result
            # Record the version label of the first matching environment into
            # the parameter pool so downstream operations can use it.
            version_label = response.result[0].version_label
            log.info('Retrieved application version {0} for environment {1}'.format(
                version_label, env_name))
            prompt.info(
                RecordApplicationVersionOpMessage.Succeed.format(version_label))
            parameter_pool.put(
                Parameter(ParameterName.ApplicationVersionName,
                          version_label,
                          ParameterSource.OperationOutput),
                True)

        return OperationResult(self, response.request_id, None, response.result)
gpl-2.0
tgrammat/ML-Data_Challenges
Dato-tutorials/anomaly-detection/visualization_helper_functions.py
3
11251
# libraries required import graphlab.aggregate as agg from matplotlib import pyplot as plt import seaborn as sns def item_freq_plot(data_sf, item_column, hue=None, topk=None, pct_threshold=None ,reverse=False, seaborn_style='whitegrid', seaborn_palette='deep', color='b', **kwargs): '''Function for topk item frequency plot: Parameters ---------- data_sf: SFrame SFrame for plotting. If x and y are absent, this is interpreted as wide-form. Otherwise it is expected to be long-form. item_column: string The attribute name the frequency counts of which we want to visualize hue: seaborn barplot name of variable in vector data, optional Inputs for plotting long-form data. See seaborn examples for interpretation. topk: int, optional The number of most frequent items pct_threshold: float in [0,100] range, optional Lower frequency limit below which all the grouby counted items will be ignored. seaborn_style: dict, None, or one of {darkgrid, whitegrid, dark, white, ticks} Set the aesthetic style of the plots through the seaborn module. A dictionary of parameters or the name of a preconfigured set. seaborn_palette: {deep, muted, pastel, dark, bright, colorblind} Change how matplotlib color shorthands are interpreted. Calling this will change how shorthand codes like 'b' or 'g' are interpreted by matplotlib in subsequent plots. color: matplotlib color, optional Color for all of the elements, or seed for light_palette() when using hue nesting in seaborn.barplot(). kwargs : key, value mappings Other keyword arguments which are passed through (a)seaborn.countplot API and/or (b)plt.bar at draw time. 
''' # set seaborn style sns.set(style=seaborn_style) # compute the item counts: (1) apply groupby count operation, # (2) check whether a nested grouping exist or not if hue is not None: item_counts = data_sf.groupby([item_column,hue], agg.COUNT()) hue_order = list(data_sf[hue].unique()) hue_length = len(hue_order) else: item_counts = data_sf.groupby(item_column, agg.COUNT()) hue_order=None hue_length=1 # compute frequencies pcts = (item_counts['Count'] / float(item_counts['Count'].sum())) * 100 item_counts['Percent'] = pcts # apply a percentage threshold if any if((pct_threshold is not None) & (pct_threshold < 100)): item_counts = item_counts[item_counts['Percent'] >= pct_threshold] elif((pct_threshold is not None) & (pct_threshold >=1)): print 'The frequency threshold was unacceptably high.',\ 'and have been removed from consideration.',\ 'If you want to use this flag please choose a value lower than one.' # print the number of remaining item counts print 'Number of Unique Items: %d' % len(item_counts) # determine the ysize per item ysize_per_item = 0.5 * hue_length # apply topk/sort operations if((topk is not None) & (topk < len(item_counts))): item_counts = item_counts.topk('Percent', k=topk, reverse=reverse) ysize = ysize_per_item * topk print 'Number of Most Frequent Items, Visualized: %d' % topk else: item_counts = item_counts.sort('Percent', ascending=False) ysize = ysize_per_item * len(item_counts) print 'Number of Most Frequent Items, Visualized: %d' % len(item_counts) # transform the item_counts SFrame into a Pandas DataFrame item_counts_df = item_counts.to_dataframe() # initialize the matplotlib figure ax = plt.figure(figsize=(7, ysize)) # plot the Freq Percentages of the topk Items ax = sns.barplot(x='Percent', y=item_column, hue=hue, data=item_counts_df, order=list(item_counts_df[item_column]), hue_order=hue_order, orient='h', color='b', palette='deep') # add informative axis labels # make final plot adjustments xmax = max(item_counts['Percent']) 
ax.set(xlim=(0, xmax), ylabel= item_column, xlabel='Most Frequent Items\n(% of total occurences)') if hue is not None: ax.legend(ncol=hue_length, loc="lower right", frameon=True) sns.despine(left=True, bottom=True) def segments_countplot(data_sf, x=None, y=None, hue=None, order=None, hue_order=None, figsize_tuple= None, title=None, seaborn_style='whitegrid', seaborn_palette='deep', color='b', **kwargs): '''Function for fancy seaborn barplot: Parameters ---------- data_sf: SFrame SFrame for plotting. If x and y are absent, this is interpreted as wide-form. Otherwise it is expected to be long-form. x, y, hue: seaborn countplot names of variables in data or vector data, optional Inputs for plotting long-form data. See examples for interpretation. order, hue_order: seaborn countplot lists of strings, optional Order to plot the categorical levels in, otherwise the levels are inferred from the data objects. figsize_tuple: tuple of integers, optional, default: None width, height in inches. If not provided, defaults to rc figure.figsize. title: string Provides the countplot title. seaborn_style: dict, None, or one of {darkgrid, whitegrid, dark, white, ticks} Set the aesthetic style of the plots through the seaborn module. A dictionary of parameters or the name of a preconfigured set. seaborn_palette: {deep, muted, pastel, dark, bright, colorblind} Change how matplotlib color shorthands are interpreted. Calling this will change how shorthand codes like 'b' or 'g' are interpreted by matplotlib in subsequent plots. color: matplotlib color, optional Color for all of the elements, or seed for light_palette() when using hue nesting in seaborn.barplot(). kwargs : key, value mappings Other keyword arguments which are passed through (a)seaborn.countplot API and/or (b)plt.bar at draw time. 
''' # define the plotting style sns.set(style=seaborn_style) # initialize the matplotlib figure plt.figure(figsize=figsize_tuple) # transform the SFrame into a Pandas DataFrame data_df = data_sf.to_dataframe() # plot the segments counts ax = sns.countplot(x=x, y=y, hue=hue, data=data_df, order=order, hue_order=hue_order, orient='v', palette=seaborn_palette, color=color, **kwargs) # add informative axis labels, title # make final plot adjustments plt.title(title, {'fontweight': 'bold'}) sns.despine(left=True, bottom=True) plt.show() def univariate_summary_statistics_plot(data_sf, attribs_list, nsubplots_inrow=3, subplots_wspace=0.5, seaborn_style='whitegrid', seaborn_palette='deep', color='b', **kwargs): '''Function for fancy univariate summary plot: Parameters ---------- data_sf: SFrame SFrame of interest attribs_list: list of strings Provides the list of SFrame attributes the univariate plots of which we want to draw nsubplots_inrow: int Determines the desired number of subplots per row. seaborn_style: dict, None, or one of {darkgrid, whitegrid, dark, white, ticks} Set the aesthetic style of the plots through the seaborn module. A dictionary of parameters or the name of a preconfigured set. seaborn_palette: {deep, muted, pastel, dark, bright, colorblind} Change how matplotlib color shorthands are interpreted. Calling this will change how shorthand codes like 'b' or 'g' are interpreted by matplotlib in subsequent plots. color: matplotlib color, optional Color for all of the elements, or seed for light_palette() when using hue nesting in seaborn.barplot(). 
''' import graphlab as gl # transform the SFrame into a Pandas DataFrame if isinstance(data_sf, gl.data_structures.sframe.SFrame): data_df = data_sf.to_dataframe() else: data_df = data_sf # define the plotting style sns.set(style=seaborn_style) # remove any offending attributes for a univariate summary statistics # filtering function def is_appropriate_attrib(attrib): if(data_df[attrib].dtype != 'datetime64[ns]'): return True else: return False # apply the filtering function attribs_list_before = attribs_list attribs_list = list(filter(is_appropriate_attrib, attribs_list)) xattribs_list =list([attrib for\ attrib in attribs_list_before if(attrib not in attribs_list)]) if(len(xattribs_list) !=0): print 'These attributes are not appropriate for a univariate summary statistics,',\ 'and have been removed from consideration:' print xattribs_list, '\n' # initialize the matplotlib figure nattribs = len(attribs_list) # compute the sublots nrows nrows = ((nattribs-1)/nsubplots_inrow) + 1 # compute the subplots ncols if(nattribs >= nsubplots_inrow): ncols = nsubplots_inrow else: ncols = nattribs # compute the subplots ysize row_ysize = 9 ysize = nrows * row_ysize # set figure dimensions plt.rcParams['figure.figsize'] = (14, ysize) #fig = plt.figure(figsize=(14, ysize)) # draw the relavant univariate plots for each attribute of interest num_plot = 1 for attrib in attribs_list: if(data_df[attrib].dtype == object): plt.subplot(nrows, ncols, num_plot) sns.countplot(y=attrib, data=data_df, palette=seaborn_palette, color=color, **kwargs) plt.xticks(rotation=45) plt.ylabel(attrib, {'fontweight': 'bold'}) elif((data_df[attrib].dtype == float) | (data_df[attrib].dtype == int)): plt.subplot(nrows, ncols, num_plot) sns.boxplot(y=attrib, data=data_df, palette=seaborn_palette, color=color, **kwargs) plt.ylabel(attrib, {'fontweight': 'bold'}) num_plot +=1 # final plot adjustments sns.despine(left=True, bottom=True) if subplots_wspace < 0.2: print 'Subplots White Space was less than 
default, 0.2.' print 'The default vaule is going to be used: \'subplots_wspace=0.2\'' subplots_wspace =0.2 plt.subplots_adjust(wspace=subplots_wspace) plt.show() # print the corresponding summary statistic print '\n', 'Univariate Summary Statistics:\n' summary = data_df[attribs_list].describe(include='all') print summary def plot_time_series(timestamp, values, title, **kwargs): plt.rcParams['figure.figsize'] = 14, 7 plt.plot_date(timestamp, values, fmt='g-', tz='utc', **kwargs) plt.title(title) plt.xlabel('Year') plt.ylabel('Dollars per Barrel') plt.rcParams.update({'font.size': 16})
apache-2.0
sumanau7/Ele_CC_Sumanau
lib/IPython/core/formatters.py
5
32800
# -*- coding: utf-8 -*- """Display formatters. Inheritance diagram: .. inheritance-diagram:: IPython.core.formatters :parts: 3 """ # Copyright (c) IPython Development Team. # Distributed under the terms of the Modified BSD License. import abc import inspect import json import sys import traceback import warnings from decorator import decorator from traitlets.config.configurable import Configurable from IPython.core.getipython import get_ipython from IPython.utils.sentinel import Sentinel from IPython.lib import pretty from traitlets import ( Bool, Dict, Integer, Unicode, CUnicode, ObjectName, List, ForwardDeclaredInstance, ) from IPython.utils.py3compat import ( with_metaclass, string_types, unicode_type, ) #----------------------------------------------------------------------------- # The main DisplayFormatter class #----------------------------------------------------------------------------- def _safe_get_formatter_method(obj, name): """Safely get a formatter method - Classes cannot have formatter methods, only instance - protect against proxy objects that claim to have everything """ if inspect.isclass(obj): # repr methods only make sense on instances, not classes return None method = pretty._safe_getattr(obj, name, None) if callable(method): # obj claims to have repr method... if callable(pretty._safe_getattr(obj, '_ipython_canary_method_should_not_exist_', None)): # ...but don't trust proxy objects that claim to have everything return None return method class DisplayFormatter(Configurable): # When set to true only the default plain text formatter will be used. plain_text_only = Bool(False, config=True) def _plain_text_only_changed(self, name, old, new): warnings.warn("""DisplayFormatter.plain_text_only is deprecated. It will be removed in IPython 5.0 Use DisplayFormatter.active_types = ['text/plain'] for the same effect. 
""", DeprecationWarning) if new: self.active_types = ['text/plain'] else: self.active_types = self.format_types active_types = List(Unicode(), config=True, help="""List of currently active mime-types to display. You can use this to set a white-list for formats to display. Most users will not need to change this value. """) def _active_types_default(self): return self.format_types def _active_types_changed(self, name, old, new): for key, formatter in self.formatters.items(): if key in new: formatter.enabled = True else: formatter.enabled = False ipython_display_formatter = ForwardDeclaredInstance('FormatterABC') def _ipython_display_formatter_default(self): return IPythonDisplayFormatter(parent=self) # A dict of formatter whose keys are format types (MIME types) and whose # values are subclasses of BaseFormatter. formatters = Dict() def _formatters_default(self): """Activate the default formatters.""" formatter_classes = [ PlainTextFormatter, HTMLFormatter, MarkdownFormatter, SVGFormatter, PNGFormatter, PDFFormatter, JPEGFormatter, LatexFormatter, JSONFormatter, JavascriptFormatter ] d = {} for cls in formatter_classes: f = cls(parent=self) d[f.format_type] = f return d def format(self, obj, include=None, exclude=None): """Return a format data dict for an object. By default all format types will be computed. The following MIME types are currently implemented: * text/plain * text/html * text/markdown * text/latex * application/json * application/javascript * application/pdf * image/png * image/jpeg * image/svg+xml Parameters ---------- obj : object The Python object whose format data will be computed. include : list or tuple, optional A list of format type strings (MIME types) to include in the format data dict. If this is set *only* the format types included in this list will be computed. exclude : list or tuple, optional A list of format type string (MIME types) to exclude in the format data dict. 
If this is set all format types will be computed, except for those included in this argument. Returns ------- (format_dict, metadata_dict) : tuple of two dicts format_dict is a dictionary of key/value pairs, one of each format that was generated for the object. The keys are the format types, which will usually be MIME type strings and the values and JSON'able data structure containing the raw data for the representation in that format. metadata_dict is a dictionary of metadata about each mime-type output. Its keys will be a strict subset of the keys in format_dict. """ format_dict = {} md_dict = {} if self.ipython_display_formatter(obj): # object handled itself, don't proceed return {}, {} for format_type, formatter in self.formatters.items(): if include and format_type not in include: continue if exclude and format_type in exclude: continue md = None try: data = formatter(obj) except: # FIXME: log the exception raise # formatters can return raw data or (data, metadata) if isinstance(data, tuple) and len(data) == 2: data, md = data if data is not None: format_dict[format_type] = data if md is not None: md_dict[format_type] = md return format_dict, md_dict @property def format_types(self): """Return the format types (MIME types) of the active formatters.""" return list(self.formatters.keys()) #----------------------------------------------------------------------------- # Formatters for specific format types (text, html, svg, etc.) #----------------------------------------------------------------------------- def _safe_repr(obj): """Try to return a repr of an object always returns a string, at least. 
""" try: return repr(obj) except Exception as e: return "un-repr-able object (%r)" % e class FormatterWarning(UserWarning): """Warning class for errors in formatters""" @decorator def catch_format_error(method, self, *args, **kwargs): """show traceback on failed format call""" try: r = method(self, *args, **kwargs) except NotImplementedError: # don't warn on NotImplementedErrors return None except Exception: exc_info = sys.exc_info() ip = get_ipython() if ip is not None: ip.showtraceback(exc_info) else: traceback.print_exception(*exc_info) return None return self._check_return(r, args[0]) class FormatterABC(with_metaclass(abc.ABCMeta, object)): """ Abstract base class for Formatters. A formatter is a callable class that is responsible for computing the raw format data for a particular format type (MIME type). For example, an HTML formatter would have a format type of `text/html` and would return the HTML representation of the object when called. """ # The format type of the data returned, usually a MIME type. format_type = 'text/plain' # Is the formatter enabled... enabled = True @abc.abstractmethod def __call__(self, obj): """Return a JSON'able representation of the object. If the object cannot be formatted by this formatter, warn and return None. """ return repr(obj) def _mod_name_key(typ): """Return a (__module__, __name__) tuple for a type. Used as key in Formatter.deferred_printers. """ module = getattr(typ, '__module__', None) name = getattr(typ, '__name__', None) return (module, name) def _get_type(obj): """Return the type of an instance (old and new-style)""" return getattr(obj, '__class__', None) or type(obj) _raise_key_error = Sentinel('_raise_key_error', __name__, """ Special value to raise a KeyError Raise KeyError in `BaseFormatter.pop` if passed as the default value to `pop` """) class BaseFormatter(Configurable): """A base formatter class that is configurable. This formatter should usually be used as the base class of all formatters. 
It is a traited :class:`Configurable` class and includes an extensible API for users to determine how their objects are formatted. The following logic is used to find a function to format an given object. 1. The object is introspected to see if it has a method with the name :attr:`print_method`. If is does, that object is passed to that method for formatting. 2. If no print method is found, three internal dictionaries are consulted to find print method: :attr:`singleton_printers`, :attr:`type_printers` and :attr:`deferred_printers`. Users should use these dictionaries to register functions that will be used to compute the format data for their objects (if those objects don't have the special print methods). The easiest way of using these dictionaries is through the :meth:`for_type` and :meth:`for_type_by_name` methods. If no function/callable is found to compute the format data, ``None`` is returned and this format type is not used. """ format_type = Unicode('text/plain') _return_type = string_types enabled = Bool(True, config=True) print_method = ObjectName('__repr__') # The singleton printers. # Maps the IDs of the builtin singleton objects to the format functions. singleton_printers = Dict(config=True) # The type-specific printers. # Map type objects to the format functions. type_printers = Dict(config=True) # The deferred-import type-specific printers. # Map (modulename, classname) pairs to the format functions. 
deferred_printers = Dict(config=True) @catch_format_error def __call__(self, obj): """Compute the format for an object.""" if self.enabled: # lookup registered printer try: printer = self.lookup(obj) except KeyError: pass else: return printer(obj) # Finally look for special method names method = _safe_get_formatter_method(obj, self.print_method) if method is not None: return method() return None else: return None def __contains__(self, typ): """map in to lookup_by_type""" try: self.lookup_by_type(typ) except KeyError: return False else: return True def _check_return(self, r, obj): """Check that a return value is appropriate Return the value if so, None otherwise, warning if invalid. """ if r is None or isinstance(r, self._return_type) or \ (isinstance(r, tuple) and r and isinstance(r[0], self._return_type)): return r else: warnings.warn( "%s formatter returned invalid type %s (expected %s) for object: %s" % \ (self.format_type, type(r), self._return_type, _safe_repr(obj)), FormatterWarning ) def lookup(self, obj): """Look up the formatter for a given instance. Parameters ---------- obj : object instance Returns ------- f : callable The registered formatting callable for the type. Raises ------ KeyError if the type has not been registered. """ # look for singleton first obj_id = id(obj) if obj_id in self.singleton_printers: return self.singleton_printers[obj_id] # then lookup by type return self.lookup_by_type(_get_type(obj)) def lookup_by_type(self, typ): """Look up the registered formatter for a type. Parameters ---------- typ : type or '__module__.__name__' string for a type Returns ------- f : callable The registered formatting callable for the type. Raises ------ KeyError if the type has not been registered. """ if isinstance(typ, string_types): typ_key = tuple(typ.rsplit('.',1)) if typ_key not in self.deferred_printers: # We may have it cached in the type map. We will have to # iterate over all of the types to check. 
for cls in self.type_printers: if _mod_name_key(cls) == typ_key: return self.type_printers[cls] else: return self.deferred_printers[typ_key] else: for cls in pretty._get_mro(typ): if cls in self.type_printers or self._in_deferred_types(cls): return self.type_printers[cls] # If we have reached here, the lookup failed. raise KeyError("No registered printer for {0!r}".format(typ)) def for_type(self, typ, func=None): """Add a format function for a given type. Parameters ----------- typ : type or '__module__.__name__' string for a type The class of the object that will be formatted using `func`. func : callable A callable for computing the format data. `func` will be called with the object to be formatted, and will return the raw data in this formatter's format. Subclasses may use a different call signature for the `func` argument. If `func` is None or not specified, there will be no change, only returning the current value. Returns ------- oldfunc : callable The currently registered callable. If you are registering a new formatter, this will be the previous value (to enable restoring later). """ # if string given, interpret as 'pkg.module.class_name' if isinstance(typ, string_types): type_module, type_name = typ.rsplit('.', 1) return self.for_type_by_name(type_module, type_name, func) try: oldfunc = self.lookup_by_type(typ) except KeyError: oldfunc = None if func is not None: self.type_printers[typ] = func return oldfunc def for_type_by_name(self, type_module, type_name, func=None): """Add a format function for a type specified by the full dotted module and name of the type, rather than the type of the object. Parameters ---------- type_module : str The full dotted name of the module the type is defined in, like ``numpy``. type_name : str The name of the type (the class name), like ``dtype`` func : callable A callable for computing the format data. `func` will be called with the object to be formatted, and will return the raw data in this formatter's format. 
Subclasses may use a different call signature for the `func` argument. If `func` is None or unspecified, there will be no change, only returning the current value. Returns ------- oldfunc : callable The currently registered callable. If you are registering a new formatter, this will be the previous value (to enable restoring later). """ key = (type_module, type_name) try: oldfunc = self.lookup_by_type("%s.%s" % key) except KeyError: oldfunc = None if func is not None: self.deferred_printers[key] = func return oldfunc def pop(self, typ, default=_raise_key_error): """Pop a formatter for the given type. Parameters ---------- typ : type or '__module__.__name__' string for a type default : object value to be returned if no formatter is registered for typ. Returns ------- obj : object The last registered object for the type. Raises ------ KeyError if the type is not registered and default is not specified. """ if isinstance(typ, string_types): typ_key = tuple(typ.rsplit('.',1)) if typ_key not in self.deferred_printers: # We may have it cached in the type map. We will have to # iterate over all of the types to check. for cls in self.type_printers: if _mod_name_key(cls) == typ_key: old = self.type_printers.pop(cls) break else: old = default else: old = self.deferred_printers.pop(typ_key) else: if typ in self.type_printers: old = self.type_printers.pop(typ) else: old = self.deferred_printers.pop(_mod_name_key(typ), default) if old is _raise_key_error: raise KeyError("No registered value for {0!r}".format(typ)) return old def _in_deferred_types(self, cls): """ Check if the given class is specified in the deferred type registry. Successful matches will be moved to the regular type registry for future use. """ mod = getattr(cls, '__module__', None) name = getattr(cls, '__name__', None) key = (mod, name) if key in self.deferred_printers: # Move the printer over to the regular registry. 
printer = self.deferred_printers.pop(key) self.type_printers[cls] = printer return True return False class PlainTextFormatter(BaseFormatter): """The default pretty-printer. This uses :mod:`IPython.lib.pretty` to compute the format data of the object. If the object cannot be pretty printed, :func:`repr` is used. See the documentation of :mod:`IPython.lib.pretty` for details on how to write pretty printers. Here is a simple example:: def dtype_pprinter(obj, p, cycle): if cycle: return p.text('dtype(...)') if hasattr(obj, 'fields'): if obj.fields is None: p.text(repr(obj)) else: p.begin_group(7, 'dtype([') for i, field in enumerate(obj.descr): if i > 0: p.text(',') p.breakable() p.pretty(field) p.end_group(7, '])') """ # The format type of data returned. format_type = Unicode('text/plain') # This subclass ignores this attribute as it always need to return # something. enabled = Bool(True, config=False) max_seq_length = Integer(pretty.MAX_SEQ_LENGTH, config=True, help="""Truncate large collections (lists, dicts, tuples, sets) to this size. Set to 0 to disable truncation. """ ) # Look for a _repr_pretty_ methods to use for pretty printing. print_method = ObjectName('_repr_pretty_') # Whether to pretty-print or not. pprint = Bool(True, config=True) # Whether to be verbose or not. verbose = Bool(False, config=True) # The maximum width. max_width = Integer(79, config=True) # The newline character. newline = Unicode('\n', config=True) # format-string for pprinting floats float_format = Unicode('%r') # setter for float precision, either int or direct format-string float_precision = CUnicode('', config=True) def _float_precision_changed(self, name, old, new): """float_precision changed, set float_format accordingly. float_precision can be set by int or str. This will set float_format, after interpreting input. If numpy has been imported, numpy print precision will also be set. integer `n` sets format to '%.nf', otherwise, format set directly. 
An empty string returns to defaults (repr for float, 8 for numpy). This parameter can be set via the '%precision' magic. """ if '%' in new: # got explicit format string fmt = new try: fmt%3.14159 except Exception: raise ValueError("Precision must be int or format string, not %r"%new) elif new: # otherwise, should be an int try: i = int(new) assert i >= 0 except ValueError: raise ValueError("Precision must be int or format string, not %r"%new) except AssertionError: raise ValueError("int precision must be non-negative, not %r"%i) fmt = '%%.%if'%i if 'numpy' in sys.modules: # set numpy precision if it has been imported import numpy numpy.set_printoptions(precision=i) else: # default back to repr fmt = '%r' if 'numpy' in sys.modules: import numpy # numpy default is 8 numpy.set_printoptions(precision=8) self.float_format = fmt # Use the default pretty printers from IPython.lib.pretty. def _singleton_printers_default(self): return pretty._singleton_pprinters.copy() def _type_printers_default(self): d = pretty._type_pprinters.copy() d[float] = lambda obj,p,cycle: p.text(self.float_format%obj) return d def _deferred_printers_default(self): return pretty._deferred_type_pprinters.copy() #### FormatterABC interface #### @catch_format_error def __call__(self, obj): """Compute the pretty representation of the object.""" if not self.pprint: return repr(obj) else: # handle str and unicode on Python 2 # io.StringIO only accepts unicode, # cStringIO doesn't handle unicode on py2, # StringIO allows str, unicode but only ascii str stream = pretty.CUnicodeIO() printer = pretty.RepresentationPrinter(stream, self.verbose, self.max_width, self.newline, max_seq_length=self.max_seq_length, singleton_pprinters=self.singleton_printers, type_pprinters=self.type_printers, deferred_pprinters=self.deferred_printers) printer.pretty(obj) printer.flush() return stream.getvalue() class HTMLFormatter(BaseFormatter): """An HTML formatter. 
To define the callables that compute the HTML representation of your objects, define a :meth:`_repr_html_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be a valid HTML snippet that could be injected into an existing DOM. It should *not* include the ```<html>`` or ```<body>`` tags. """ format_type = Unicode('text/html') print_method = ObjectName('_repr_html_') class MarkdownFormatter(BaseFormatter): """A Markdown formatter. To define the callables that compute the Markdown representation of your objects, define a :meth:`_repr_markdown_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be a valid Markdown. """ format_type = Unicode('text/markdown') print_method = ObjectName('_repr_markdown_') class SVGFormatter(BaseFormatter): """An SVG formatter. To define the callables that compute the SVG representation of your objects, define a :meth:`_repr_svg_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be valid SVG enclosed in ```<svg>``` tags, that could be injected into an existing DOM. It should *not* include the ```<html>`` or ```<body>`` tags. """ format_type = Unicode('image/svg+xml') print_method = ObjectName('_repr_svg_') class PNGFormatter(BaseFormatter): """A PNG formatter. To define the callables that compute the PNG representation of your objects, define a :meth:`_repr_png_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be raw PNG data, *not* base64 encoded. """ format_type = Unicode('image/png') print_method = ObjectName('_repr_png_') _return_type = (bytes, unicode_type) class JPEGFormatter(BaseFormatter): """A JPEG formatter. 
To define the callables that compute the JPEG representation of your objects, define a :meth:`_repr_jpeg_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be raw JPEG data, *not* base64 encoded. """ format_type = Unicode('image/jpeg') print_method = ObjectName('_repr_jpeg_') _return_type = (bytes, unicode_type) class LatexFormatter(BaseFormatter): """A LaTeX formatter. To define the callables that compute the LaTeX representation of your objects, define a :meth:`_repr_latex_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be a valid LaTeX equation, enclosed in either ```$```, ```$$``` or another LaTeX equation environment. """ format_type = Unicode('text/latex') print_method = ObjectName('_repr_latex_') class JSONFormatter(BaseFormatter): """A JSON string formatter. To define the callables that compute the JSONable representation of your objects, define a :meth:`_repr_json_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be a JSONable list or dict. JSON scalars (None, number, string) are not allowed, only dict or list containers. """ format_type = Unicode('application/json') _return_type = (list, dict) print_method = ObjectName('_repr_json_') def _check_return(self, r, obj): """Check that a return value is appropriate Return the value if so, None otherwise, warning if invalid. 
""" if r is None: return md = None if isinstance(r, tuple): # unpack data, metadata tuple for type checking on first element r, md = r # handle deprecated JSON-as-string form from IPython < 3 if isinstance(r, string_types): warnings.warn("JSON expects JSONable list/dict containers, not JSON strings", FormatterWarning) r = json.loads(r) if md is not None: # put the tuple back together r = (r, md) return super(JSONFormatter, self)._check_return(r, obj) class JavascriptFormatter(BaseFormatter): """A Javascript formatter. To define the callables that compute the Javascript representation of your objects, define a :meth:`_repr_javascript_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be valid Javascript code and should *not* be enclosed in ```<script>``` tags. """ format_type = Unicode('application/javascript') print_method = ObjectName('_repr_javascript_') class PDFFormatter(BaseFormatter): """A PDF formatter. To define the callables that compute the PDF representation of your objects, define a :meth:`_repr_pdf_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. The return value of this formatter should be raw PDF data, *not* base64 encoded. """ format_type = Unicode('application/pdf') print_method = ObjectName('_repr_pdf_') _return_type = (bytes, unicode_type) class IPythonDisplayFormatter(BaseFormatter): """A Formatter for objects that know how to display themselves. To define the callables that compute the representation of your objects, define a :meth:`_ipython_display_` method or use the :meth:`for_type` or :meth:`for_type_by_name` methods to register functions that handle this. Unlike mime-type displays, this method should not return anything, instead calling any appropriate display methods itself. This display formatter has highest priority. If it fires, no other display formatter will be called. 
""" print_method = ObjectName('_ipython_display_') _return_type = (type(None), bool) @catch_format_error def __call__(self, obj): """Compute the format for an object.""" if self.enabled: # lookup registered printer try: printer = self.lookup(obj) except KeyError: pass else: printer(obj) return True # Finally look for special method names method = _safe_get_formatter_method(obj, self.print_method) if method is not None: method() return True FormatterABC.register(BaseFormatter) FormatterABC.register(PlainTextFormatter) FormatterABC.register(HTMLFormatter) FormatterABC.register(MarkdownFormatter) FormatterABC.register(SVGFormatter) FormatterABC.register(PNGFormatter) FormatterABC.register(PDFFormatter) FormatterABC.register(JPEGFormatter) FormatterABC.register(LatexFormatter) FormatterABC.register(JSONFormatter) FormatterABC.register(JavascriptFormatter) FormatterABC.register(IPythonDisplayFormatter) def format_display_data(obj, include=None, exclude=None): """Return a format data dict for an object. By default all format types will be computed. The following MIME types are currently implemented: * text/plain * text/html * text/markdown * text/latex * application/json * application/javascript * application/pdf * image/png * image/jpeg * image/svg+xml Parameters ---------- obj : object The Python object whose format data will be computed. Returns ------- format_dict : dict A dictionary of key/value pairs, one or each format that was generated for the object. The keys are the format types, which will usually be MIME type strings and the values and JSON'able data structure containing the raw data for the representation in that format. include : list or tuple, optional A list of format type strings (MIME types) to include in the format data dict. If this is set *only* the format types included in this list will be computed. exclude : list or tuple, optional A list of format type string (MIME types) to exclue in the format data dict. 
If this is set all format types will be computed, except for those included in this argument. """ from IPython.core.interactiveshell import InteractiveShell InteractiveShell.instance().display_formatter.format( obj, include, exclude )
apache-2.0
kaksmet/servo
tests/wpt/harness/wptrunner/browsers/firefox.py
24
9200
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

import os
import subprocess
import sys

import mozinfo
from mozprocess import ProcessHandler
from mozprofile import FirefoxProfile, Preferences
from mozprofile.permissions import ServerLocations
from mozrunner import FirefoxRunner
from mozcrash import mozcrash

from .base import get_free_port, Browser, ExecutorBrowser, require_arg, cmd_arg, browser_command
from ..executors import executor_kwargs as base_executor_kwargs
from ..executors.executormarionette import MarionetteTestharnessExecutor, MarionetteRefTestExecutor
from ..environment import hostnames

# Directory containing this module; used to locate server-locations.txt.
# (The original wrapped this in a one-argument os.path.join, which is a no-op.)
here = os.path.split(__file__)[0]

# Registration data consumed by the wptrunner product loader: maps hook
# names to the names of the callables/classes defined in this module.
__wptrunner__ = {"product": "firefox",
                 "check_args": "check_args",
                 "browser": "FirefoxBrowser",
                 "executor": {"testharness": "MarionetteTestharnessExecutor",
                              "reftest": "MarionetteRefTestExecutor"},
                 "browser_kwargs": "browser_kwargs",
                 "executor_kwargs": "executor_kwargs",
                 "env_options": "env_options",
                 "run_info_extras": "run_info_extras"}


def check_args(**kwargs):
    """Validate command-line kwargs.

    A Firefox binary is always required; certutil is additionally required
    whenever any SSL type is enabled.
    """
    require_arg(kwargs, "binary")
    if kwargs["ssl_type"] != "none":
        require_arg(kwargs, "certutil_binary")


def browser_kwargs(**kwargs):
    """Extract the subset of kwargs used to construct FirefoxBrowser."""
    return {"binary": kwargs["binary"],
            "prefs_root": kwargs["prefs_root"],
            "debug_info": kwargs["debug_info"],
            "symbols_path": kwargs["symbols_path"],
            "stackwalk_binary": kwargs["stackwalk_binary"],
            "certutil_binary": kwargs["certutil_binary"],
            "ca_certificate_path": kwargs["ssl_env"].ca_cert_path(),
            "e10s": kwargs["gecko_e10s"]}


def executor_kwargs(test_type, server_config, cache_manager, run_info_data,
                    **kwargs):
    """Build executor kwargs, applying Firefox-specific timeout scaling.

    When no explicit timeout multiplier is given, debug builds and/or e10s
    reftest runs get a larger multiplier (they are slower).
    """
    executor_kwargs = base_executor_kwargs(test_type, server_config,
                                           cache_manager, **kwargs)
    executor_kwargs["close_after_done"] = True
    if kwargs["timeout_multiplier"] is None:
        if kwargs["gecko_e10s"] and test_type == "reftest":
            if run_info_data["debug"]:
                executor_kwargs["timeout_multiplier"] = 4
            else:
                executor_kwargs["timeout_multiplier"] = 2
        elif run_info_data["debug"]:
            executor_kwargs["timeout_multiplier"] = 3
    return executor_kwargs


def env_options():
    """Test-environment options used when driving Firefox."""
    return {"host": "127.0.0.1",
            "external_host": "web-platform.test",
            "bind_hostname": "false",
            "certificate_domain": "web-platform.test",
            "supports_debugger": True}


def run_info_extras(**kwargs):
    """Extra run-info properties recorded for this product."""
    return {"e10s": kwargs["gecko_e10s"]}


class FirefoxBrowser(Browser):
    """Controls a Firefox process for wptrunner via Marionette."""

    # Marionette ports already handed out; class-level so that concurrent
    # browser instances do not pick the same port.
    used_ports = set()

    def __init__(self, logger, binary, prefs_root, debug_info=None,
                 symbols_path=None, stackwalk_binary=None, certutil_binary=None,
                 ca_certificate_path=None, e10s=False):
        Browser.__init__(self, logger)
        self.binary = binary
        self.prefs_root = prefs_root
        self.marionette_port = None
        self.runner = None
        self.debug_info = debug_info
        self.profile = None
        self.symbols_path = symbols_path
        self.stackwalk_binary = stackwalk_binary
        self.ca_certificate_path = ca_certificate_path
        self.certutil_binary = certutil_binary
        self.e10s = e10s

    def start(self):
        """Create a test profile and launch Firefox with Marionette enabled."""
        self.marionette_port = get_free_port(2828, exclude=self.used_ports)
        self.used_ports.add(self.marionette_port)

        env = os.environ.copy()
        # Prevent the browser from making non-local network connections.
        env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1"

        locations = ServerLocations(filename=os.path.join(here, "server-locations.txt"))

        preferences = self.load_prefs()

        self.profile = FirefoxProfile(locations=locations,
                                      preferences=preferences)
        self.profile.set_preferences({"marionette.defaultPrefs.enabled": True,
                                      "marionette.defaultPrefs.port": self.marionette_port,
                                      "dom.disable_open_during_load": False,
                                      "network.dns.localDomains": ",".join(hostnames)})

        if self.e10s:
            self.profile.set_preferences({"browser.tabs.remote.autostart": True})

        if self.ca_certificate_path is not None:
            self.setup_ssl()

        debug_args, cmd = browser_command(self.binary,
                                          [cmd_arg("marionette"), "about:blank"],
                                          self.debug_info)

        self.runner = FirefoxRunner(profile=self.profile,
                                    binary=cmd[0],
                                    cmdargs=cmd[1:],
                                    env=env,
                                    process_class=ProcessHandler,
                                    process_args={"processOutputLine": [self.on_output]})

        self.logger.debug("Starting Firefox")

        self.runner.start(debug_args=debug_args,
                          interactive=self.debug_info and self.debug_info.interactive)
        self.logger.debug("Firefox Started")

    def load_prefs(self):
        """Read the base preferences file; return [] when it is missing."""
        prefs_path = os.path.join(self.prefs_root, "prefs_general.js")
        if os.path.exists(prefs_path):
            preferences = Preferences.read_prefs(prefs_path)
        else:
            self.logger.warning("Failed to find base prefs file in %s" % prefs_path)
            preferences = []
        return preferences

    def stop(self):
        """Stop the browser process if one is running."""
        self.logger.debug("Stopping browser")
        if self.runner is not None:
            try:
                self.runner.stop()
            except OSError:
                # This can happen on Windows if the process is already dead
                pass

    def pid(self):
        """Return the browser process id, or None when it is unavailable."""
        # Guard against being called before start(): the original assumed
        # self.runner was always set and raised AttributeError on None.
        if self.runner is None or self.runner.process_handler is None:
            return None

        try:
            return self.runner.process_handler.pid
        except AttributeError:
            return None

    def on_output(self, line):
        """Write a line of output from the firefox process to the log"""
        self.logger.process_output(self.pid(),
                                   line.decode("utf8", "replace"),
                                   command=" ".join(self.runner.command))

    def is_alive(self):
        """Return True when a runner exists and its process is running."""
        if self.runner:
            return self.runner.is_running()
        return False

    def cleanup(self):
        """Release resources; stopping the browser is all that is required."""
        self.stop()

    def executor_browser(self):
        """Return the browser class and kwargs the test executor should use."""
        assert self.marionette_port is not None
        return ExecutorBrowser, {"marionette_port": self.marionette_port}

    def log_crash(self, process, test):
        """Log any minidumps left in the profile by `process` during `test`."""
        dump_dir = os.path.join(self.profile.profile, "minidumps")

        mozcrash.log_crashes(self.logger,
                             dump_dir,
                             symbols_path=self.symbols_path,
                             stackwalk_binary=self.stackwalk_binary,
                             process=process,
                             test=test)

    def setup_ssl(self):
        """Create a certificate database to use in the test profile. This is configured
        to trust the CA Certificate that has signed the web-platform.test server
        certificate."""
        self.logger.info("Setting up ssl")

        # Make sure the certutil libraries from the source tree are loaded when using a
        # local copy of certutil
        # TODO: Maybe only set this if certutil won't launch?
        env = os.environ.copy()
        certutil_dir = os.path.dirname(self.binary)
        if mozinfo.isMac:
            env_var = "DYLD_LIBRARY_PATH"
        elif mozinfo.isUnix:
            env_var = "LD_LIBRARY_PATH"
        else:
            env_var = "PATH"
        env[env_var] = (os.path.pathsep.join([certutil_dir, env[env_var]])
                        if env_var in env else certutil_dir).encode(
                            sys.getfilesystemencoding() or 'utf-8', 'replace')

        def certutil(*args):
            # Run certutil and forward its combined stdout/stderr to the log.
            cmd = [self.certutil_binary] + list(args)
            self.logger.process_output("certutil",
                                       subprocess.check_output(cmd,
                                                               env=env,
                                                               stderr=subprocess.STDOUT),
                                       " ".join(cmd))

        pw_path = os.path.join(self.profile.profile, ".crtdbpw")
        with open(pw_path, "w") as f:
            # Use empty password for certificate db
            f.write("\n")

        cert_db_path = self.profile.profile

        # Create a new certificate db
        certutil("-N", "-d", cert_db_path, "-f", pw_path)

        # Add the CA certificate to the database and mark as trusted to issue server certs
        certutil("-A", "-d", cert_db_path, "-f", pw_path, "-t", "CT,,",
                 "-n", "web-platform-tests", "-i", self.ca_certificate_path)

        # List all certs in the database
        certutil("-L", "-d", cert_db_path)
mpl-2.0
espadrine/opera
chromium/src/third_party/WebKit/Tools/Scripts/webkitpy/thirdparty/mod_pywebsocket/common.py
139
10217
# Copyright 2012, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """This file must not depend on any module specific to the WebSocket protocol. """ from mod_pywebsocket import http_header_util # Additional log level definitions. LOGLEVEL_FINE = 9 # Constants indicating WebSocket protocol version. 
# Several draft revisions of the protocol were wire-compatible and therefore
# announce the same version number, hence the repeated values below
# (e.g. HyBi 08 through 12 all announce version 8; HyBi 13-17 announce 13).
VERSION_HIXIE75 = -1
VERSION_HYBI00 = 0
VERSION_HYBI01 = 1
VERSION_HYBI02 = 2
VERSION_HYBI03 = 2
VERSION_HYBI04 = 4
VERSION_HYBI05 = 5
VERSION_HYBI06 = 6
VERSION_HYBI07 = 7
VERSION_HYBI08 = 8
VERSION_HYBI09 = 8
VERSION_HYBI10 = 8
VERSION_HYBI11 = 8
VERSION_HYBI12 = 8
VERSION_HYBI13 = 13
VERSION_HYBI14 = 13
VERSION_HYBI15 = 13
VERSION_HYBI16 = 13
VERSION_HYBI17 = 13

# Constants indicating WebSocket protocol latest version.
VERSION_HYBI_LATEST = VERSION_HYBI13

# Port numbers (defaults for ws:// and wss:// match HTTP and HTTPS).
DEFAULT_WEB_SOCKET_PORT = 80
DEFAULT_WEB_SOCKET_SECURE_PORT = 443

# Schemes
WEB_SOCKET_SCHEME = 'ws'
WEB_SOCKET_SECURE_SCHEME = 'wss'

# Frame opcodes defined in the spec.
OPCODE_CONTINUATION = 0x0
OPCODE_TEXT = 0x1
OPCODE_BINARY = 0x2
# Opcodes with the 0x8 bit set are control frames; see is_control_opcode().
OPCODE_CLOSE = 0x8
OPCODE_PING = 0x9
OPCODE_PONG = 0xa

# UUIDs used by HyBi 04 and later opening handshake and frame masking.
WEBSOCKET_ACCEPT_UUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'

# Opening handshake header names and expected values.
UPGRADE_HEADER = 'Upgrade'
WEBSOCKET_UPGRADE_TYPE = 'websocket'
# The pre-IETF Hixie-75 handshake used a different capitalization.
WEBSOCKET_UPGRADE_TYPE_HIXIE75 = 'WebSocket'
CONNECTION_HEADER = 'Connection'
UPGRADE_CONNECTION_TYPE = 'Upgrade'
HOST_HEADER = 'Host'
ORIGIN_HEADER = 'Origin'
SEC_WEBSOCKET_ORIGIN_HEADER = 'Sec-WebSocket-Origin'
SEC_WEBSOCKET_KEY_HEADER = 'Sec-WebSocket-Key'
SEC_WEBSOCKET_ACCEPT_HEADER = 'Sec-WebSocket-Accept'
SEC_WEBSOCKET_VERSION_HEADER = 'Sec-WebSocket-Version'
SEC_WEBSOCKET_PROTOCOL_HEADER = 'Sec-WebSocket-Protocol'
SEC_WEBSOCKET_EXTENSIONS_HEADER = 'Sec-WebSocket-Extensions'
SEC_WEBSOCKET_DRAFT_HEADER = 'Sec-WebSocket-Draft'
SEC_WEBSOCKET_KEY1_HEADER = 'Sec-WebSocket-Key1'
SEC_WEBSOCKET_KEY2_HEADER = 'Sec-WebSocket-Key2'
SEC_WEBSOCKET_LOCATION_HEADER = 'Sec-WebSocket-Location'

# Extensions
DEFLATE_FRAME_EXTENSION = 'deflate-frame'
PERFRAME_COMPRESSION_EXTENSION = 'perframe-compress'
PERMESSAGE_COMPRESSION_EXTENSION = 'permessage-compress'
PERMESSAGE_DEFLATE_EXTENSION = 'permessage-deflate'
X_WEBKIT_DEFLATE_FRAME_EXTENSION = 'x-webkit-deflate-frame'
X_WEBKIT_PERMESSAGE_COMPRESSION_EXTENSION = 'x-webkit-permessage-compress'
MUX_EXTENSION = 'mux_DO_NOT_USE'

# Status codes
# Code STATUS_NO_STATUS_RECEIVED, STATUS_ABNORMAL_CLOSURE, and
# STATUS_TLS_HANDSHAKE are pseudo codes to indicate specific error cases.
# Could not be used for codes in actual closing frames.
# Application level errors must use codes in the range
# STATUS_USER_REGISTERED_BASE to STATUS_USER_PRIVATE_MAX. The codes in the
# range STATUS_USER_REGISTERED_BASE to STATUS_USER_REGISTERED_MAX are managed
# by IANA. Usually application must define user protocol level errors in the
# range STATUS_USER_PRIVATE_BASE to STATUS_USER_PRIVATE_MAX.
STATUS_NORMAL_CLOSURE = 1000
STATUS_GOING_AWAY = 1001
STATUS_PROTOCOL_ERROR = 1002
STATUS_UNSUPPORTED_DATA = 1003
STATUS_NO_STATUS_RECEIVED = 1005
STATUS_ABNORMAL_CLOSURE = 1006
STATUS_INVALID_FRAME_PAYLOAD_DATA = 1007
STATUS_POLICY_VIOLATION = 1008
STATUS_MESSAGE_TOO_BIG = 1009
STATUS_MANDATORY_EXTENSION = 1010
STATUS_INTERNAL_ENDPOINT_ERROR = 1011
STATUS_TLS_HANDSHAKE = 1015
STATUS_USER_REGISTERED_BASE = 3000
STATUS_USER_REGISTERED_MAX = 3999
STATUS_USER_PRIVATE_BASE = 4000
STATUS_USER_PRIVATE_MAX = 4999

# Following definitions are aliases to keep compatibility. Applications must
# not use these obsoleted definitions anymore.
STATUS_NORMAL = STATUS_NORMAL_CLOSURE
STATUS_UNSUPPORTED = STATUS_UNSUPPORTED_DATA
STATUS_CODE_NOT_AVAILABLE = STATUS_NO_STATUS_RECEIVED
STATUS_ABNORMAL_CLOSE = STATUS_ABNORMAL_CLOSURE
STATUS_INVALID_FRAME_PAYLOAD = STATUS_INVALID_FRAME_PAYLOAD_DATA
STATUS_MANDATORY_EXT = STATUS_MANDATORY_EXTENSION

# HTTP status codes
HTTP_STATUS_BAD_REQUEST = 400
HTTP_STATUS_FORBIDDEN = 403
HTTP_STATUS_NOT_FOUND = 404


def is_control_opcode(opcode):
    """Return True if opcode denotes a control frame (i.e. 0x8 bit set)."""
    return (opcode >> 3) == 1


class ExtensionParameter(object):
    """Holds information about an extension which is exchanged on extension
    negotiation in opening handshake.
    """

    def __init__(self, name):
        self._name = name
        # TODO(tyoshino): Change the data structure to more efficient one such
        # as dict when the spec changes to say like
        # - Parameter names must be unique
        # - The order of parameters is not significant
        self._parameters = []

    def name(self):
        """Return the extension token (e.g. 'permessage-deflate')."""
        return self._name

    def add_parameter(self, name, value):
        """Append a (name, value) pair; value may be None for a valueless
        parameter."""
        self._parameters.append((name, value))

    def get_parameters(self):
        """Return the list of (name, value) pairs in insertion order."""
        return self._parameters

    def get_parameter_names(self):
        """Return the parameter names in insertion order."""
        return [name for name, unused_value in self._parameters]

    def has_parameter(self, name):
        """Return True if a parameter with the given name is present."""
        return any(param_name == name
                   for param_name, unused_value in self._parameters)

    def get_parameter_value(self, name):
        """Return the value of the first parameter named `name`.

        Returns None when the parameter is absent; note None is also the
        stored value of a valueless parameter, so use has_parameter() to
        distinguish the two cases.
        """
        for param_name, param_value in self._parameters:
            if param_name == name:
                return param_value
        return None


class ExtensionParsingException(Exception):
    """Raised when a Sec-WebSocket-Extensions value cannot be parsed."""

    def __init__(self, name):
        super(ExtensionParsingException, self).__init__(name)


def _parse_extension_param(state, definition, allow_quoted_string):
    """Parse one 'name[=value]' extension parameter and add it to
    `definition`.

    Raises ExtensionParsingException when the parameter is malformed.
    """
    param_name = http_header_util.consume_token(state)

    if param_name is None:
        raise ExtensionParsingException('No valid parameter name found')

    http_header_util.consume_lwses(state)

    if not http_header_util.consume_string(state, '='):
        # Valueless parameter: store None as its value.
        definition.add_parameter(param_name, None)
        return

    http_header_util.consume_lwses(state)

    if allow_quoted_string:
        # TODO(toyoshim): Add code to validate that parsed param_value is token
        param_value = http_header_util.consume_token_or_quoted_string(state)
    else:
        param_value = http_header_util.consume_token(state)
    if param_value is None:
        raise ExtensionParsingException(
            'No valid parameter value found on the right-hand side of '
            'parameter %r' % param_name)

    definition.add_parameter(param_name, param_value)


def _parse_extension(state, allow_quoted_string):
    """Parse one extension entry ('token *( ";" param )').

    Returns an ExtensionParameter, or None when no extension token is
    present at the current position.
    """
    extension_token = http_header_util.consume_token(state)
    if extension_token is None:
        return None

    extension = ExtensionParameter(extension_token)

    while True:
        http_header_util.consume_lwses(state)

        if not http_header_util.consume_string(state, ';'):
            break

        http_header_util.consume_lwses(state)

        try:
            _parse_extension_param(state, extension, allow_quoted_string)
        # Fixed: the original used the Python-2-only 'except X, e' syntax,
        # which is a SyntaxError on Python 3.
        except ExtensionParsingException as e:
            raise ExtensionParsingException(
                'Failed to parse parameter for %r (%r)' %
                (extension_token, e))

    return extension


def parse_extensions(data, allow_quoted_string=False):
    """Parses Sec-WebSocket-Extensions header value returns a list of
    ExtensionParameter objects.

    Leading LWSes must be trimmed. Raises ExtensionParsingException when the
    value is malformed or contains no valid extension entry.
    """
    state = http_header_util.ParsingState(data)

    extension_list = []
    while True:
        extension = _parse_extension(state, allow_quoted_string)
        if extension is not None:
            extension_list.append(extension)

        http_header_util.consume_lwses(state)

        if http_header_util.peek(state) is None:
            break

        if not http_header_util.consume_string(state, ','):
            raise ExtensionParsingException(
                'Failed to parse Sec-WebSocket-Extensions header: '
                'Expected a comma but found %r' %
                http_header_util.peek(state))

        http_header_util.consume_lwses(state)

    if not extension_list:
        raise ExtensionParsingException(
            'No valid extension entry found')

    return extension_list


def format_extension(extension):
    """Formats an ExtensionParameter object."""
    formatted_params = [extension.name()]
    for param_name, param_value in extension.get_parameters():
        if param_value is None:
            formatted_params.append(param_name)
        else:
            quoted_value = http_header_util.quote_if_necessary(param_value)
            formatted_params.append('%s=%s' % (param_name, quoted_value))
    return '; '.join(formatted_params)


def format_extensions(extension_list):
    """Formats a list of ExtensionParameter objects into a header value."""
    return ', '.join(format_extension(extension)
                     for extension in extension_list)


# vi:sts=4 sw=4 et
bsd-3-clause
lijiabogithub/QUANTAXIS
QUANTAXIS/QACmd/strategy_sample_simple.py
1
1511
# encoding: UTF-8
"""Minimal strategy sample showing how to wire a QUANTAXIS backtest.

Defines a skeleton backtest (init/start/signal hooks left empty) plus a
message center that fans signal events out to a set of listeners.
"""
import QUANTAXIS as QA
from QUANTAXIS.QAFetch.QAQuery import QA_fetch_data
from pymongo import MongoClient
from QUANTAXIS.QAUtil import QA_util_date_stamp, QA_util_log_info
from QUANTAXIS.QAMarket import QA_QAMarket_bid, QA_Market
from QUANTAXIS.QABacktest.QABacktest import QA_Backtest
from QUANTAXIS.QAARP import QAAccount, QAPortfolio, QARisk
from QUANTAXIS.QASignal import QA_signal_send
from QUANTAXIS.QASignal import (QA_Signal_eventManager, QA_Signal_events,
                                QA_Signal_Listener, QA_Signal_Sender,
                                QA_signal_usual_model)
import pandas
# Import only the name actually used instead of the original wildcard
# `from threading import *`, which polluted the module namespace.
from threading import Timer


class SampleBacktest(QA_Backtest):
    """Skeleton backtest.

    Renamed from the original lowercase ``backtest``: that class name was
    immediately shadowed by the instance assignment at the bottom of the
    script (``backtest = backtest()``), which made the class unreachable
    afterwards and the sample impossible to re-instantiate.
    """

    def QA_backtest_init(self):
        # Hook: load data / set up accounts before the run.
        pass

    def QA_backtest_start(self):
        # Hook: the actual backtest loop.
        pass

    def signal_handle(self):
        # Hook: react to trading signals.
        pass

    def message_center(self, name, listener_name):
        """Bind event *name* to one listener per entry in *listener_name*,
        start the event manager, and schedule a single send.

        NOTE(review): the sender/listener callbacks are empty stubs, so this
        only demonstrates the wiring, not real signal handling.
        """
        class QASS(QA_Signal_Sender):
            def QAS_send(self):
                pass

        class QASL(QA_Signal_Listener):
            def QA_receive(self, event):
                pass

        event_manager = QA_Signal_eventManager()
        # Subscribe: bind the event *name* to each listener's handler.
        # (The original iterated range(0, len(listener_name), 1) and used a
        # misspelled local `listner`; iterate the sequence directly.)
        for label in listener_name:
            listener = QASL(label)
            event_manager.AddEventListener(name, listener.QA_receive)
        event_manager.Start()
        public_acc = QASS(event_manager)
        # Fire QAS_send once, one second from now.
        timer = Timer(1, public_acc.QAS_send)
        timer.start()


# Run the sample.  Keep the module-level name `backtest` pointing at the
# instance, exactly as in the original script.
backtest = SampleBacktest()
backtest.QA_backtest_init()
backtest.QA_backtest_start()
mit
sahildua2305/eden
modules/s3cfg.py
2
69474
# -*- coding: utf-8 -*- """ Deployment Settings @requires: U{B{I{gluon}} <http://web2py.com>} @copyright: 2009-2013 (c) Sahana Software Foundation @license: MIT Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" __all__ = ["S3Config"] try: # Python 2.7 from collections import OrderedDict except: # Python 2.6 from gluon.contrib.simplejson.ordered_dict import OrderedDict from gluon import current, URL, TR, TD from gluon.storage import Storage class S3Config(Storage): """ Deployment Settings Helper Class """ def __init__(self): self.auth = Storage() self.auth.email_domains = [] self.base = Storage() self.database = Storage() # @ToDo: Move to self.ui self.frontpage = Storage() self.frontpage.rss = [] self.fin = Storage() self.L10n = Storage() self.mail = Storage() self.msg = Storage() self.search = Storage() self.security = Storage() self.ui = Storage() self.cap = Storage() self.gis = Storage() self.hrm = Storage() self.inv = Storage() self.irs = Storage() self.org = Storage() self.pr = Storage() self.proc = Storage() self.project = Storage() self.req = Storage() self.supply = Storage() self.hms = Storage() # ------------------------------------------------------------------------- # Template def get_template(self): """ Which deployment template to use for config.py, parser.py, menus.py, etc http://eden.sahanafoundation.org/wiki/BluePrint/Templates """ return self.base.get("template", "default") def exec_template(self, path): """ Execute the template """ from gluon.fileutils import read_file from gluon.restricted import restricted code = read_file(path) restricted(code, layer=path) return # ------------------------------------------------------------------------- # Theme def get_theme(self): """ Which templates folder to use for views/layout.html """ return self.base.get("theme", "default") def get_base_xtheme(self): """ Whether there is a custom Ext theme or simply use the default xtheme-gray - specified as <themefolder>/xtheme-<filename>.css """ return self.base.get("xtheme", None) # ------------------------------------------------------------------------- def is_cd_version(self): """ Whether we're running from a non-writable CD """ return self.base.get("cd_version", 
False) # ------------------------------------------------------------------------- def get_google_analytics_tracking_id(self): """ Google Analytics Key """ return self.base.get("google_analytics_tracking_id", None) # ------------------------------------------------------------------------- def get_youtube_video_id(self): """ YouTube ID """ return self.base.get("youtube_id", None) # ------------------------------------------------------------------------- # Authentication settings def get_auth_hmac_key(self): """ salt to encrypt passwords - normally randomised during 1st run """ return self.auth.get("hmac_key", "akeytochange") def get_auth_password_min_length(self): """ To set the Minimum Password Length """ return self.auth.get("password_min_length", int(4)) def get_auth_facebook(self): """ Read the FaceBook OAuth settings - if configured, then it is assumed that FaceBook Authentication is enabled """ id = self.auth.get("facebook_id", False) secret = self.auth.get("facebook_secret", False) if id and secret: return dict(id=id, secret=secret) else: return False def get_auth_gmail_domains(self): """ List of domains which can use GMail SMTP for Authentication """ return self.auth.get("gmail_domains", []) def get_auth_google(self): """ Read the Google OAuth settings - if configured, then it is assumed that Google Authentication is enabled """ id = self.auth.get("google_id", False) secret = self.auth.get("google_secret", False) if id and secret: return dict(id=id, secret=secret) else: return False def get_auth_openid(self): """ Use OpenID for Authentication """ return self.auth.get("openid", False) def get_security_self_registration(self): return self.security.get("self_registration", True) def get_auth_registration_requires_verification(self): return self.auth.get("registration_requires_verification", False) def get_auth_registration_requires_approval(self): return self.auth.get("registration_requires_approval", False) def get_auth_always_notify_approver(self): return 
self.auth.get("always_notify_approver", True) def get_auth_login_next(self): """ Which page to go to after login """ return self.auth.get("login_next", URL(c="default", f="index")) def get_auth_show_link(self): return self.auth.get("show_link", True) def get_auth_registration_link_user_to(self): """ Link User accounts to none or more of: * Staff * Volunteer * Member """ return self.auth.get("registration_link_user_to", None) def get_auth_registration_link_user_to_default(self): """ Link User accounts to none or more of: * Staff * Volunteer * Member """ return self.auth.get("registration_link_user_to_default", None) def get_auth_opt_in_team_list(self): return self.auth.get("opt_in_team_list", []) def get_auth_opt_in_to_email(self): return self.get_auth_opt_in_team_list() != [] def get_auth_opt_in_default(self): return self.auth.get("opt_in_default", False) def get_auth_registration_requests_mobile_phone(self): return self.auth.get("registration_requests_mobile_phone", False) def get_auth_registration_mobile_phone_mandatory(self): " Make the selection of Mobile Phone Mandatory during registration " return self.auth.get("registration_mobile_phone_mandatory", False) def get_auth_registration_requests_organisation(self): " Have the registration form request the Organisation " return self.auth.get("registration_requests_organisation", False) def get_auth_registration_organisation_required(self): " Make the selection of Organisation required during registration " return self.auth.get("registration_organisation_required", False) def get_auth_registration_organisation_hidden(self): " Hide the Organisation field in the registration form unless an email is entered which isn't whitelisted " return self.auth.get("registration_organisation_hidden", False) def get_auth_registration_organisation_default(self): " Default the Organisation during registration " return self.auth.get("registration_organisation_default", None) def get_auth_registration_organisation_id_default(self): " 
Default the Organisation during registration - will return the organisation_id" name = self.auth.get("registration_organisation_default", None) if name: otable = current.s3db.org_organisation orow = current.db(otable.name == name).select(otable.id).first() if orow: organisation_id = orow.id else: organisation_id = otable.insert(name = name) else: organisation_id = None return organisation_id def get_auth_registration_requests_organisation_group(self): " Have the registration form request the Organisation Group " return self.auth.get("registration_requests_organisation_group", False) def get_auth_registration_organisation_group_required(self): " Make the selection of Organisation Group required during registration " return self.auth.get("registration_organisation_group_required", False) def get_auth_registration_requests_site(self): " Have the registration form request the Site " return self.auth.get("registration_requests_site", False) def get_auth_registration_site_required(self): " Make the selection of site required during registration " return self.auth.get("registration_site_required", False) def get_auth_registration_requests_image(self): """ Have the registration form request an Image """ return self.auth.get("registration_requests_image", False) def get_auth_registration_pending(self): """ Message someone gets when they register & they need approving """ return self.auth.get("registration_pending", "Registration is still pending approval from Approver (%s) - please wait until confirmation received." % \ self.get_mail_approver()) def get_auth_registration_pending_approval(self): """ Message someone gets when they register & they need approving """ return self.auth.get("registration_pending_approval", "Thank you for validating your email. Your user account is still pending for approval by the system administator (%s). You will get a notification by email when your account is activated." 
% \ self.get_mail_approver()) def get_auth_registration_roles(self): """ A dictionary of realms, with lists of role UUIDs, to assign to newly-registered users Use key = 0 to have the roles not restricted to a realm """ return self.auth.get("registration_roles", []) def get_auth_terms_of_service(self): """ Force users to accept Terms of Servcie before Registering an account - uses <template>/views/tos.html """ return self.auth.get("terms_of_service", False) def get_auth_registration_volunteer(self): """ Redirect the newly-registered user to their volunteer details page """ return self.auth.get("registration_volunteer", False) def get_auth_record_approval(self): """ Use record approval (False by default) """ return self.auth.get("record_approval", False) def get_auth_record_approval_required_for(self): """ Which tables record approval is required for """ return self.auth.get("record_approval_required_for", []) def get_auth_realm_entity(self): """ Hook to determine the owner entity of a record """ return self.auth.get("realm_entity", None) def get_auth_person_realm_human_resource_site_then_org(self): """ Should we set pr_person.realm_entity to that of hrm_human_resource.site_id$pe_id """ return self.auth.get("person_realm_human_resource_site_then_org", False) def get_auth_person_realm_member_org(self): """ Sets pr_person.realm_entity to organisation.pe_id of member_member """ return self.auth.get("person_realm_member_org", False) def get_auth_role_modules(self): """ Which modules are includes in the Role Manager - to assign discrete permissions to via UI """ T = current.T return self.auth.get("role_modules", OrderedDict([ ("staff", "Staff"), ("vol", "Volunteers"), ("member", "Members"), ("inv", "Warehouses"), ("asset", "Assets"), ("project", "Projects"), ("survey", "Assessments"), ("irs", "Incidents") ])) def get_auth_access_levels(self): """ Access levels for the Role Manager UI """ T = current.T return self.auth.get("access_levels", OrderedDict([ ("reader", 
"Reader"), ("data_entry", "Data Entry"), ("editor", "Editor"), ("super", "Super Editor") ])) def get_auth_set_presence_on_login(self): return self.auth.get("set_presence_on_login", False) def get_auth_ignore_levels_for_presence(self): return self.auth.get("ignore_levels_for_presence", ["L0"]) def get_auth_create_unknown_locations(self): return self.auth.get("create_unknown_locations", False) def get_auth_show_utc_offset(self): return self.auth.get("show_utc_offset", True) def get_security_archive_not_delete(self): return self.security.get("archive_not_delete", True) def get_security_audit_read(self): return self.security.get("audit_read", False) def get_security_audit_write(self): return self.security.get("audit_write", False) def get_security_policy(self): " Default is Simple Security Policy " return self.security.get("policy", 1) def get_security_strict_ownership(self): """ Ownership-rule for records without owner: True = not owned by any user (strict ownership, default) False = owned by any authenticated user """ return self.security.get("strict_ownership", True) def get_security_map(self): return self.security.get("map", False) # ------------------------------------------------------------------------- # Base settings def get_instance_name(self): """ Instance Name - for management scripts. e.g. 
prod or test """ return self.base.get("instance_name", "") def get_system_name(self): """ System Name - for the UI & Messaging """ return self.base.get("system_name", current.T("Sahana Eden Humanitarian Management Platform")) def get_system_name_short(self): """ System Name (Short Version) - for the UI & Messaging """ return self.base.get("system_name_short", "Sahana Eden") def get_base_debug(self): """ Debug mode: Serve CSS/JS in separate uncompressed files """ return self.base.get("debug", False) def get_base_migrate(self): """ Whether to allow Web2Py to migrate the SQL database to the new structure """ return self.base.get("migrate", True) def get_base_fake_migrate(self): """ Whether to have Web2Py create the .table files to match the expected SQL database structure """ return self.base.get("fake_migrate", False) def get_base_prepopulate(self): """ Whether to prepopulate the database &, if so, which set of data to use for this """ return self.base.get("prepopulate", 1) def get_base_guided_tour(self): """ Whether the guided tours are enabled """ return self.base.get("guided_tour", False) def get_base_public_url(self): """ The Public URL for the site - for use in email links, etc """ return self.base.get("public_url", "http://127.0.0.1:8000") def get_base_cdn(self): """ Should we use CDNs (Content Distribution Networks) to serve some common CSS/JS? """ return self.base.get("cdn", False) def get_base_session_memcache(self): """ Should we store sessions in a Memcache service to allow sharing between multiple instances? 
""" return self.base.get("session_memcache", False) def get_base_solr_url(self): """ URL to connect to solr server """ return self.base.get("solr_url", False) def get_import_callback(self, tablename, callback): """ Lookup callback to use for imports in the following order: - custom [create, update]_onxxxx - default [create, update]_onxxxx - custom onxxxx - default onxxxx NB: Currently only onaccept is actually used """ callbacks = self.base.get("import_callbacks", []) if tablename in callbacks: callbacks = callbacks[tablename] if callback in callbacks: return callbacks[callback] get_config = current.s3db.get_config default = get_config(tablename, callback) if default: return default if callback[:2] != "on": callback = callback[7:] if callback in callbacks: return callbacks[callback] default = get_config(tablename, callback) if default: return default # ------------------------------------------------------------------------- # Database settings def get_database_type(self): return self.database.get("db_type", "sqlite").lower() def get_database_string(self): db_type = self.database.get("db_type", "sqlite").lower() pool_size = self.database.get("pool_size", 30) if (db_type == "sqlite"): db_string = "sqlite://storage.db" elif (db_type == "mysql"): db_string = "mysql://%s:%s@%s:%s/%s" % \ (self.database.get("username", "sahana"), self.database.get("password", "password"), self.database.get("host", "localhost"), self.database.get("port", None) or "3306", self.database.get("database", "sahana")) elif (db_type == "postgres"): db_string = "postgres://%s:%s@%s:%s/%s" % \ (self.database.get("username", "sahana"), self.database.get("password", "password"), self.database.get("host", "localhost"), self.database.get("port", None) or "5432", self.database.get("database", "sahana")) else: from gluon import HTTP raise HTTP(501, body="Database type '%s' not recognised - please correct file models/000_config.py." 
% db_type) return (db_string, pool_size) # ------------------------------------------------------------------------- # Finance settings # @ToDo: Make these customisable per Organisation # => Move to a Table like hrm_course def get_fin_currencies(self): T = current.T currencies = { "EUR" :T("Euros"), "GBP" :T("Great British Pounds"), "USD" :T("United States Dollars"), } return self.fin.get("currencies", currencies) def get_fin_currency_default(self): return self.fin.get("currency_default", "USD") # Dollars def get_fin_currency_writable(self): return self.fin.get("currency_writable", True) # ------------------------------------------------------------------------- # GIS (Map) Settings # def get_gis_api_bing(self): """ API key for Bing """ return self.gis.get("api_bing", None) def get_gis_api_google(self): """ API key for Google - needed for Earth, MapMaker & GeoCoder - defaults to localhost """ return self.gis.get("api_google", "ABQIAAAAgB-1pyZu7pKAZrMGv3nksRTpH3CbXHjuCVmaTc5MkkU4wO1RRhQWqp1VGwrG8yPE2KhLCPYhD7itFw") def get_gis_api_yahoo(self): """ API key for Yahoo - deprecated """ return self.gis.get("api_yahoo", None) def get_gis_building_name(self): " Display Building Name when selecting Locations " return self.gis.get("building_name", True) def get_gis_check_within_parent_boundaries(self): """ Whether location Lat/Lons should be within the boundaries of the parent """ return self.gis.get("check_within_parent_boundaries", True) def get_gis_countries(self): """ Which country codes should be accessible to the location selector? """ return self.gis.get("countries", []) def get_gis_display_l0(self): return self.gis.get("display_L0", False) def get_gis_display_l1(self): return self.gis.get("display_L1", True) def get_gis_duplicate_features(self): """ Display duplicate features either side of the International date line? 
""" return self.gis.get("duplicate_features", False) def get_gis_edit_group(self): " Edit Location Groups " return self.gis.get("edit_GR", False) def get_gis_geocode_imported_addresses(self): " Should Addresses imported from CSV be passed to a Geocoder to try and automate Lat/Lon? " return self.gis.get("geocode_imported_addresses", False) def get_gis_geoserver_url(self): return self.gis.get("geoserver_url", "") def get_gis_geoserver_username(self): return self.gis.get("geoserver_username", "admin") def get_gis_geoserver_password(self): return self.gis.get("geoserver_password", "") def get_gis_latlon_selector(self): " Display Lat/Lon form fields when selecting Locations " return self.gis.get("latlon_selector", True) def get_gis_layer_metadata(self): " Use CMS to provide Metadata on Map Layers " return self.has_module("cms") and self.gis.get("layer_metadata", False) def get_gis_layer_properties(self): " Display Layer Properties Tool above Map's Layer Tree " return self.gis.get("layer_properties", True) def get_gis_layer_tree_base(self): " Display Base Layers folder in the Map's Layer Tree " return self.gis.get("layer_tree_base", True) def get_gis_layer_tree_overlays(self): " Display Overlays folder in the Map's Layer Tree " return self.gis.get("layer_tree_overlays", True) def get_gis_layer_tree_expanded(self): " Display folders in the Map's Layer Tree Open by default " return self.gis.get("layer_tree_expanded", True) def get_gis_layer_tree_radio(self): " Use a radio button for custom folders in the Map's Layer Tree " return self.gis.get("layer_tree_radio", False) def get_gis_layers_label(self): " Label for the Map's Layer Tree " return self.gis.get("layers_label", "Layers") def get_gis_map_height(self): """ Height of the Embedded Map Change this if-required for your theme NB API can override this in specific modules """ return self.gis.get("map_height", 600) def get_gis_map_width(self): """ Width of the Embedded Map Change this if-required for your theme NB API can 
override this in specific modules """ return self.gis.get("map_width", 1000) def get_gis_map_selector(self): " Display a Map-based tool to select Locations " return self.gis.get("map_selector", True) def get_gis_marker_max_height(self): return self.gis.get("marker_max_height", 35) def get_gis_marker_max_width(self): return self.gis.get("marker_max_width", 30) def get_gis_legend(self): """ Should we display a Legend on the Map? - set to True to show a GeoExt Legend (default) - set to False to not show a Legend - set to "float" to use a floating DIV """ return self.gis.get("legend", True) def get_gis_menu(self): """ Should we display a menu of GIS configurations? - set to False to not show the menu (default) - set to the label to use for the menu to enable it e.g. T("Events") or T("Regions") """ return self.gis.get("menu", False) def get_gis_mouse_position(self): """ What style of Coordinates for the current Mouse Position should be shown on the Map? 'normal', 'mgrs' or False """ return self.gis.get("mouse_position", "normal") def get_gis_nav_controls(self): """ Should the Map Toolbar display Navigation Controls? """ return self.gis.get("nav_controls", True) def get_gis_label_overlays(self): """ Label for the Map Overlays in the Layer Tree """ return self.gis.get("label_overlays", "Overlays") def get_gis_overview(self): """ Should the Map display an Overview Map? """ return self.gis.get("overview", True) def get_gis_permalink(self): """ Should the Map display a Permalink control? 
""" return self.gis.get("permalink", True) def get_gis_poi_resources(self): """ List of resources (tablenames) to import/export as PoIs from Admin Locations - KML & OpenStreetMap formats """ return self.gis.get("poi_resources", ["cr_shelter", "hms_hospital", "org_office"]) def get_gis_print_service(self): """ URL for a Print Service """ return self.gis.get("print_service", "") def get_gis_simplify_tolerance(self): """ Default Tolerance for the Simplification of Polygons - a lower value means less simplification, which is suitable for higher-resolution local activities - a higher value is suitable for global views """ return self.gis.get("simplify_tolerance", 0.01) def get_gis_scaleline(self): """ Should the Map display a ScaleLine control? """ return self.gis.get("scaleline", True) def get_gis_spatialdb(self): """ Does the database have Spatial extensions? """ db_type = self.get_database_type() if db_type != "postgres": # Only Postgres supported currently return False else: return self.gis.get("spatialdb", False) def get_gis_toolbar(self): """ Should the main Map display a Toolbar? """ return self.gis.get("toolbar", True) def get_gis_zoomcontrol(self): """ Should the Map display a Zoom control? 
""" return self.gis.get("zoomcontrol", True) # ------------------------------------------------------------------------- # L10N Settings def get_L10n_default_language(self): return self.L10n.get("default_language", "en") def get_L10n_display_toolbar(self): return self.L10n.get("display_toolbar", True) def get_L10n_languages(self): return self.L10n.get("languages", OrderedDict([ ("ar", "العربية"), ("zh-cn", "中文 (简体)"), ("zh-tw", "中文 (繁體)"), ("en", "English"), ("fr", "Français"), ("de", "Deutsch"), ("el", "ελληνικά"), ("it", "Italiano"), ("ja", "日本語"), ("ko", "한국어"), ("pt", "Português"), ("pt-br", "Português (Brasil)"), ("ru", "русский"), ("es", "Español"), ("tl", "Tagalog"), ("ur", "اردو"), ("vi", "Tiếng Việt"), ])) def get_L10n_languages_readonly(self): return self.L10n.get("languages_readonly", True) def get_L10n_religions(self): """ Religions used in Person Registry @ToDo: find a better code http://eden.sahanafoundation.org/ticket/594 """ T = current.T return self.L10n.get("religions", { "none":T("none"), "christian":T("Christian"), "muslim":T("Muslim"), "jewish":T("Jewish"), "buddhist":T("Buddhist"), "hindu":T("Hindu"), "bahai":T("Bahai"), "other":T("other") }) def get_L10n_date_format(self): return self.L10n.get("date_format", "%Y-%m-%d") def get_L10n_time_format(self): return self.L10n.get("time_format", "%H:%M") def get_L10n_datetime_separator(self): return self.L10n.get("datetime_separator", " ") def get_L10n_datetime_format(self): return "%s%s%s" % (self.get_L10n_date_format(), self.get_L10n_datetime_separator(), self.get_L10n_time_format() ) def get_L10n_utc_offset(self): return self.L10n.get("utc_offset", "UTC +0000") def get_L10n_firstDOW(self): return self.L10n.get("firstDOW", 1) def get_L10n_lat_lon_format(self): """ This is used to format latitude and longitude fields when they are displayed by eden. 
The format string may include the following placeholders: - %d -- Degress (integer) - %m -- Minutes (integer) - %s -- Seconds (double) - %f -- Degrees in decimal (double) """ return self.L10n.get("lat_lon_display_format", "%f") def get_L10n_default_country_code(self): """ Default Telephone Country Code """ return self.L10n.get("default_country_code", 1) def get_L10n_mandatory_lastname(self): return self.L10n.get("mandatory_lastname", False) def get_L10n_thousands_separator(self): return self.L10n.get("thousands_separator", " ") def get_L10n_thousands_grouping(self): return self.L10n.get("thousands_grouping", 3) def get_L10n_decimal_separator(self): return self.L10n.get("decimal_separator", ",") def get_L10n_translate_cms_series(self): """ Whether to translate CMS Series names """ return self.L10n.get("translate_cms_series", False) def get_L10n_translate_gis_location(self): """ Whether to translate Location names """ return self.L10n.get("translate_gis_location", False) def get_L10n_pootle_url(self): """ URL for Pootle server """ return self.L10n.get("pootle_url", "http://pootle.sahanafoundation.org/") def get_L10n_pootle_username(self): """ Username for Pootle server """ return self.L10n.get("pootle_username", False) def get_L10n_pootle_password(self): """ Password for Pootle server """ return self.L10n.get("pootle_password", False) # ------------------------------------------------------------------------- # PDF settings def get_paper_size(self): return self.base.get("paper_size", "A4") def get_pdf_logo(self): return self.ui.get("pdf_logo", None) # Optical Character Recognition (OCR) def get_pdf_excluded_fields(self, resourcename): excluded_fields_dict = { "hms_hospital" : [ "hrm_human_resource", ], "pr_group" : [ "pr_group_membership", ], } excluded_fields =\ excluded_fields_dict.get(resourcename, []) return excluded_fields # ------------------------------------------------------------------------- # UI Settings @staticmethod def default_formstyle(id, label, 
widget, comment, hidden=False): """ Provide the default Sahana Eden Form Style Label above the Inputs: http://uxmovement.com/design-articles/faster-with-top-aligned-labels Things that need to be looked at for custom formstyles: * subheadings (s3forms.py) * S3AddPersonWidget (s3widgets.py) * S3EmbedComponentWidget (s3widgets.py) """ row = [] if hidden: _class = "hide" else: _class = "" # Label on the 1st row row.append(TR(TD(label, _class="w2p_fl"), TD(""), _id=id + "1", _class=_class)) # Widget & Comment on the 2nd Row row.append(TR(widget, TD(comment, _class="w2p_fc"), _id=id, _class=_class)) return tuple(row) def get_ui_formstyle(self): return self.ui.get("formstyle", self.default_formstyle) # ------------------------------------------------------------------------- def get_ui_auth_user_represent(self): """ Should the auth_user created_by/modified_by be represented by Name or Email? - defaults to email """ return self.ui.get("auth_user_represent", "email") def get_ui_autocomplete(self): """ Currently Unused """ return self.ui.get("autocomplete", False) def get_ui_confirm(self): """ For Delete actions Workaround for this Bug in Selenium with FF4: http://code.google.com/p/selenium/issues/detail?id=1604 """ return self.ui.get("confirm", True) def get_ui_crud_form(self, tablename): """ Get custom crud_forms for diffent tables """ return self.ui.get("crud_form_%s" % tablename, None) def ui_customize(self, tablename, **attr): """ Customize a Controller """ customize = self.ui.get("customize_%s" % tablename) if customize: return customize(**attr) else: return attr def get_ui_export_formats(self): """ Which export formats should we display? 
- specify a list of export formats to restrict """ return self.ui.get("export_formats", ["have", "kml", "map", "pdf", "rss", "xls", "xml"]) def get_ui_hide_report_filter_options(self): """ Show report filter options form by default """ return self.ui.get("hide_report_filter_options", False) def get_ui_hide_report_options(self): """ Hide report options form by default """ return self.ui.get("hide_report_options", True) def get_ui_interim_save(self): """ Render interim-save button in CRUD forms by default """ return self.ui.get("interim_save", False) def get_ui_label_attachments(self): """ Label for attachments tab """ return current.T(self.ui.get("label_attachments", "Attachments")) def get_ui_label_camp(self): """ 'Camp' instead of 'Shelter'? """ return self.ui.get("camp", False) def get_ui_label_cluster(self): """ UN-style deployment? """ return self.ui.get("cluster", False) def get_ui_label_mobile_phone(self): """ Label for the Mobile Phone field e.g. 'Cell Phone' """ return current.T(self.ui.get("label_mobile_phone", "Mobile Phone")) def get_ui_label_postcode(self): """ Label for the Postcode field e.g. 'ZIP Code' """ return current.T(self.ui.get("label_postcode", "Postcode")) def get_ui_label_read(self): """ Label for buttons in list views which lead to a Read-only 'Display' page """ return self.ui.get("read_label", "Open") def get_ui_label_update(self): """ Label for buttons in list views which lead to an Editable 'Update' page """ return self.ui.get("update_label", "Open") def get_ui_navigate_away_confirm(self): return self.ui.get("navigate_away_confirm", True) def get_ui_search_submit_button(self): """ Class for submit buttons in search views """ return self.ui.get("search_submit_button", "search-button") def get_ui_social_buttons(self): """ Display social media Buttons in the footer? 
- requires support in the Theme """ return self.ui.get("social_buttons", False) def get_ui_summary(self): """ Default Summary Page Configuration (can also be configured per-resource using s3db.configure) @example: settings.ui.summary = [ { "name": "table", # the section name "label": "Table", # the section label, will # automatically be translated "common": False, # show this section on all tabs "translate": True, # turn automatic label translation on/off "widgets": [ # list of widgets for this section { "method": "datatable", # widget method, either a # name that resolves into # a S3Method, or a callable # to render the widget "filterable": True, # Whether the widget can # be filtered by the summary # filter form } ] } ] """ return self.ui.get("summary", None) def get_ui_filter_auto_submit(self): """ Time in milliseconds after the last filter option change to automatically update the filter target(s), set to 0 to disable """ return self.ui.get("filter_auto_submit", 0) def get_ui_report_auto_submit(self): """ Time in milliseconds after the last filter option change to automatically update the filter target(s), set to 0 to disable """ return self.ui.get("report_auto_submit", 0) # ========================================================================= # Messaging # ------------------------------------------------------------------------- # Mail settings def get_mail_server(self): return self.mail.get("server", "127.0.0.1:25") def get_mail_server_login(self): return self.mail.get("login", False) def get_mail_server_tls(self): """ Does the Mail Server use TLS? 
- default Debian is False - GMail is True """ return self.mail.get("tls", False) def get_mail_sender(self): """ The From Address for all Outbound Emails """ return self.mail.get("sender", None) def get_mail_approver(self): """ The default Address to send Requests for New Users to be Approved OR UUID of Role of users who should receive Requests for New Users to be Approved - unless overridden by per-domain entries in auth_organsiation """ return self.mail.get("approver", "useradmin@example.org") def get_mail_limit(self): """ A daily limit to the number of messages which can be sent """ return self.mail.get("limit", None) # ------------------------------------------------------------------------- # Parser def get_msg_parser(self): """ Which template folder to use to load parser.py """ return self.msg.get("parser", "default") # ------------------------------------------------------------------------- # Twitter def get_msg_twitter_oauth_consumer_key(self): return self.msg.get("twitter_oauth_consumer_key", "") def get_msg_twitter_oauth_consumer_secret(self): return self.msg.get("twitter_oauth_consumer_secret", "") # ------------------------------------------------------------------------- # Notifications def get_msg_notify_subject(self): """ Template for the subject line in update notifications. Available placeholders: $S = System Name (long) $s = System Name (short) $r = Resource Name Use {} to separate the placeholder from immediately following identifier characters (like: ${placeholder}text). """ return self.msg.get("notify_subject", "$s %s: $r" % current.T("Update Notification")) def get_msg_notify_email_format(self): """ The preferred email format for update notifications, "text" or "html". 
""" return self.msg.get("notify_email_format", "text") def get_msg_notify_renderer(self): """ Custom content renderer function for update notifications, function() """ return self.msg.get("notify_renderer", None) # ------------------------------------------------------------------------- # Outbox settings def get_msg_max_send_retries(self): """ Maximum number of retries to send a message before it is regarded as permanently failing; set to None to retry forever. """ return self.msg.get("max_send_retries", 9) # ------------------------------------------------------------------------- # Save Search and Subscription def get_search_max_results(self): """ The maximum number of results to return in an Autocomplete Search - more than this will prompt the user to enter a more exact match Lower this number to get extra performance from an overloaded server. """ return self.search.get("max_results", 200) # ------------------------------------------------------------------------- # Save Search and Subscription def get_search_save_widget(self): """ Enable the Saved Search widget """ return self.search.get("save_widget", True) # ------------------------------------------------------------------------- # Filter Manager Widget def get_search_filter_manager(self): """ Enable the filter manager widget """ return self.search.get("filter_manager", True) def get_search_filter_manager_allow_delete(self): """ Allow deletion of saved filters """ return self.search.get("filter_manager_allow_delete", True) def get_search_filter_manager_save(self): """ Text for saved filter save-button """ return self.search.get("filter_manager_save", None) def get_search_filter_manager_update(self): """ Text for saved filter update-button """ return self.search.get("filter_manager_update", None) def get_search_filter_manager_delete(self): """ Text for saved filter delete-button """ return self.search.get("filter_manager_delete", None) def get_search_filter_manager_load(self): """ Text for saved filter 
load-button """ return self.search.get("filter_manager_load", None) # ========================================================================= # Modules # ------------------------------------------------------------------------- # CAP def get_cap_identifier_prefix(self): """ Prefix to be prepended to identifiers of CAP alerts """ return self.cap.get("identifier_prefix", "") def get_cap_identifier_suffix(self): """ Suffix to be appended to identifiers of CAP alerts """ return self.cap.get("identifier_suffix", "") def get_cap_codes(self): """ Default codes for CAP alerts should return a list of dicts: [ {"key": "<ValueName>, "value": "<Value>", "comment": "<Help string>", "mutable": True|False}, ...] """ return self.cap.get("codes", []) def get_cap_event_codes(self): """ Default alert codes for CAP info segments should return a list of dicts: [ {"key": "<ValueName>, "value": "<Value>", "comment": "<Help string>", "mutable": True|False}, ...] """ return self.cap.get("event_codes", []) def get_cap_parameters(self): """ Default parameters for CAP info segments should return a list of dicts: [ {"key": "<ValueName>, "value": "<Value>", "comment": "<Help string>", "mutable": True|False}, ...] """ return self.cap.get("parameters", []) def get_cap_geocodes(self): """ Default geocodes. should return a list of dicts: [ {"key": "<ValueName>, "value": "<Value>", "comment": "<Help string>", "mutable": True|False}, ...] """ return self.cap.get("geocodes", []) def get_cap_base64(self): """ Should CAP resources be base64 encoded and embedded in the alert message? """ return self.cap.get("base64", False) def get_cap_languages(self): """ Languages for CAP info segments. This gets filled in the drop-down for selecting languages. These values should conform to RFC 3066. 
For a full list of languages and their codes, see: http://www.i18nguy.com/unicode/language-identifiers.html """ return self.cap.get("languages", OrderedDict([ ("ar", "العربية"), ("en", "English"), ("fr", "Français"), ("pt", "Português"), ("ru", "русский"), ("es", "Español") ])) def get_cap_priorities(self): """ Settings for CAP priorities Should be an ordered dict of the format OrderedDict([ ("<value>, "<Translated title>", <urgency>, <severity>, <certainty>, <color>), ... ]) """ T = current.T return self.cap.get("priorities", [ ("Urgent", T("Urgent"), "Immediate", "Extreme", "Observed", "red"), ("High", T("High"), "Expected", "Severe", "Observed", "orange"), ("Low", T("Low"), "Expected", "Moderate", "Observed", "green") ]) # ------------------------------------------------------------------------- # Human Resource Management def get_hrm_staff_label(self): """ Label for 'Staff' e.g. 'Contacts' """ return current.T(self.hrm.get("staff_label", "Staff")) def get_hrm_organisation_label(self): """ Label for Organisations in Human Resources """ return current.T(self.hrm.get("organisation_label", "Organization")) def get_hrm_email_required(self): """ If set to True then Staff & Volunteers require an email address """ return self.hrm.get("email_required", True) def get_hrm_org_required(self): """ If set to True then Staff & Volunteers require an Organisation """ return self.hrm.get("org_required", True) def get_hrm_deletable(self): """ If set to True then HRM records are deletable rather than just being able to be marked as obsolete """ return self.hrm.get("deletable", True) def get_hrm_filter_certificates(self): """ If set to True then Certificates are filtered by (Root) Organisation & hence certificates from other Organisations cannot be added to an HR's profile (except by Admins) """ return self.hrm.get("filter_certificates", False) def get_hrm_multiple_job_titles(self): """ If set to True then HRs can have multiple Job Titles """ return self.hrm.get("multi_job_titles", 
False) def get_hrm_show_staff(self): """ If set to True then show 'Staff' options when HRM enabled - needs a separate setting as vol requires hrm, but we may only wish to show Volunteers """ return self.hrm.get("show_staff", True) def get_hrm_skill_types(self): """ If set to True then Skill Types are exposed to the UI - each skill_type needs it's own set of competency levels If set to False then Skill Types are hidden from the UI - all skills use the same skill_type & hence the same set of competency levels """ return self.hrm.get("skill_types", False) def get_hrm_staff_experience(self): """ Whether to use Experience for Staff &, if so, which table to use - options are: False, "experience" """ return self.hrm.get("staff_experience", "experience") def get_hrm_vol_experience(self): """ Whether to use Experience for Volunteers &, if so, which table to use - options are: False, "experience" or "programme" """ return self.hrm.get("vol_experience", "programme") def get_hrm_show_organisation(self): """ Whether Human Resource representations should include the Organisation """ return self.hrm.get("show_organisation", False) def get_hrm_teams(self): """ Whether Human Resources should use Teams & what to call them """ return self.hrm.get("teams", "Team") def get_hrm_use_awards(self): """ Whether Volunteers should use Awards """ return self.hrm.get("use_awards", True) def get_hrm_use_certificates(self): """ Whether Human Resources should use Certificates """ return self.hrm.get("use_certificates", True) def get_hrm_use_credentials(self): """ Whether Human Resources should use Credentials """ return self.hrm.get("use_credentials", True) def get_hrm_use_description(self): """ Whether Human Resources should use Physical Description """ return self.hrm.get("use_description", True) def get_hrm_use_education(self): """ Whether Human Resources should show Education """ return self.hrm.get("use_education", False) def get_hrm_use_id(self): """ Whether Human Resources should use Staff 
ID """ return self.hrm.get("use_id", True) def get_hrm_use_skills(self): """ Whether Human Resources should use Skills """ return self.hrm.get("use_skills", True) def get_hrm_use_trainings(self): """ Whether Human Resources should use Trainings """ return self.hrm.get("use_trainings", True) # ------------------------------------------------------------------------- # Inventory Management Settings # def get_inv_collapse_tabs(self): return self.inv.get("collapse_tabs", True) def get_inv_facility_label(self): return self.inv.get("facility_label", current.T("Warehouse")) def get_inv_direct_stock_edits(self): """ Can Stock levels be adjusted directly? - defaults to False """ return self.inv.get("direct_stock_edits", False) def get_inv_send_show_mode_of_transport(self): """ Show mode of transport on Sent Shipments """ return self.inv.get("show_mode_of_transport", False) def get_inv_send_show_org(self): """ Show Organisation on Sent Shipments """ return self.inv.get("send_show_org", True) def get_inv_send_show_time_in(self): """ Show Time In on Sent Shipments """ return self.inv.get("send_show_time_in", False) def get_inv_stock_count(self): """ Call Stock Adjustments 'Stock Counts' """ return self.inv.get("stock_count", True) def get_inv_track_pack_values(self): """ Whether or not Pack values are tracked """ return self.inv.get("track_pack_values", True) def get_inv_item_status(self): """ Item Statuses which can also be Sent Shipment Types """ T = current.T return self.inv.get("item_status", { 0: current.messages["NONE"], 1: T("Dump"), 2: T("Sale"), 3: T("Reject"), 4: T("Surplus") }) def get_inv_shipment_name(self): """ Get the name of Shipments - currently supported options are: * shipment * order """ return self.inv.get("shipment_name", "shipment") def get_inv_shipment_types(self): """ Shipment types which are common to both Send & Receive """ return self.inv.get("shipment_types", { 0 : current.messages["NONE"], 11: current.T("Internal Shipment"), }) def 
get_inv_send_types(self): """ Shipment types which are just for Send """ return self.inv.get("send_types", { 21: current.T("Distribution"), }) def get_inv_send_type_default(self): """ Which Shipment type is default """ return self.inv.get("send_type_default", 0) def get_inv_recv_types(self): """ Shipment types which are just for Receive """ T = current.T return self.inv.get("recv_types", { #31: T("Other Warehouse"), Same as Internal Shipment 32: T("Donation"), #33: T("Foreign Donation"), 34: T("Purchase"), }) def get_inv_send_form_name(self): return self.inv.get("send_form_name", "Waybill") def get_inv_send_ref_field_name(self): return self.inv.get("send_ref_field_name", "Waybill Number") def get_inv_send_shortname(self): return self.inv.get("send_shortname", "WB") def get_inv_recv_form_name(self): return self.inv.get("recv_form_name", "Goods Received Note") def get_inv_recv_shortname(self): return self.inv.get("recv_shortname", "GRN") # ------------------------------------------------------------------------- # IRS def get_irs_vehicle(self): """ Use Vehicles to respond to Incident Reports """ return self.irs.get("vehicle", False) # ------------------------------------------------------------------------- # Organisation def get_org_branches(self): """ Whether to support Organisation branches or not """ return self.org.get("branches", True) def get_org_site_code_len(self): """ Length of auto-generated Codes for Facilities (org_site) """ return self.org.get("site_code_len", 10) def get_org_site_label(self): """ Label for site_id fields """ return current.T(self.org.get("site_label", "Facility")) def get_org_site_inv_req_tabs(self): """ Whether Sites should have Tabs for Inv/Req """ return self.org.get("site_inv_req_tabs", True) def get_org_site_autocomplete(self): """ Whether site_id fields should use an Autocomplete instead of a dropdown """ return self.org.get("site_autocomplete", False) def get_org_site_address_autocomplete(self): """ Whether site_id Autocomplete 
fields should search Address fields as well as name """ return self.org.get("site_address_autocomplete", False) def get_org_site_last_contacted(self): """ Whether to display the last_contacted field for a Site """ return self.org.get("site_last_contacted", False) def get_org_summary(self): """ Whether to use Summary fields for Organisation/Office: # National/International staff """ return self.org.get("summary", False) def set_org_dependent_field(self, tablename=None, fieldname=None, enable_field=True): """ Enables/Disables optional fields according to a user's Organisation - must specify either field or tablename/fieldname (e.g. for virtual fields) """ enabled = False dependent_fields = self.org.get("dependent_fields", None) if dependent_fields: org_name_list = dependent_fields.get("%s.%s" % (tablename, fieldname), None) if org_name_list: auth = current.auth if auth.s3_has_role(auth.get_system_roles().ADMIN): # Admins see all fields unless disabled for all orgs in this deployment enabled = True else: s3db = current.s3db otable = s3db.org_organisation root_org_id = auth.root_org() root_org = current.db(otable.id == root_org_id).select(otable.name, limitby=(0, 1), cache=s3db.cache ).first() if root_org: enabled = root_org.name in org_name_list if enable_field: field = current.s3db[tablename][fieldname] field.readable = enabled field.writable = enabled return enabled # ------------------------------------------------------------------------- # Persons def get_pr_request_dob(self): """ Include Date of Birth in the AddPersonWidget """ return self.pr.get("request_dob", True) def get_pr_request_gender(self): """ Include Gender in the AddPersonWidget """ return self.pr.get("request_gender", True) def get_pr_select_existing(self): """ Whether the AddPersonWidget allows selecting existing PRs - set to True if Persons can be found in multiple contexts - set to False if just a single context """ return self.pr.get("select_existing", True) def 
get_pr_import_update_requires_email(self): """ During imports, records are only updated if the import item contains a (matching) email address """ return self.pr.get("import_update_requires_email", True) # ------------------------------------------------------------------------- # Proc def get_proc_form_name(self): return self.proc.get("form_name", "Purchase Order") def get_proc_shortname(self): return self.proc.get("form_name", "PO") # ------------------------------------------------------------------------- # Projects def get_project_mode_3w(self): """ Enable 3W mode in the projects module """ return self.project.get("mode_3w", False) def get_project_mode_task(self): """ Enable Tasks mode in the projects module """ return self.project.get("mode_task", False) def get_project_mode_drr(self): """ Enable DRR extensions in the projects module """ return self.project.get("mode_drr", False) def get_project_activities(self): """ Use Activities in Projects """ return self.project.get("activities", False) def get_project_codes(self): """ Use Codes in Projects """ return self.project.get("codes", False) def get_project_community(self): """ Label project_location as 'Community' """ return self.project.get("community", False) #def get_project_locations_from_countries(self): # """ # Create a project_location for each country that a Project is # implemented in # """ # return self.project.get("locations_from_countries", False) def get_project_milestones(self): """ Use Milestones in Projects """ return self.project.get("milestones", False) def get_project_sectors(self): """ Use Sectors in Projects """ return self.project.get("sectors", True) def get_project_theme_percentages(self): """ Use Theme Percentages in Projects """ return self.project.get("theme_percentages", False) def get_project_multiple_budgets(self): """ Use Multiple Budgets in Projects """ return self.project.get("multiple_budgets", False) def get_project_multiple_organisations(self): """ Use Multiple Organisations 
in Projects """ return self.project.get("multiple_organisations", False) def get_project_organisation_roles(self): T = current.T return self.project.get("organisation_roles", { 1: T("Lead Implementer"), # T("Host National Society") 2: T("Partner"), # T("Partner National Society") 3: T("Donor"), #4: T("Customer"), # T("Beneficiary")? #5: T("Supplier") # T("Beneficiary")? }) def get_project_organisation_lead_role(self): return self.project.get("organisation_lead_role", 1) # ------------------------------------------------------------------------- # Request Settings def get_req_type_inv_label(self): return current.T(self.req.get("type_inv_label", "Warehouse Stock")) def get_req_type_hrm_label(self): return current.T(self.req.get("type_hrm_label", "People")) def get_req_requester_label(self): return current.T(self.req.get("requester_label", "Requester")) def get_req_requester_optional(self): return self.req.get("requester_optional", False) def get_req_requester_from_site(self): return self.req.get("requester_from_site", False) def get_req_date_writable(self): """ Whether Request Date should be manually editable """ return self.req.get("date_writable", True) def get_req_status_writable(self): """ Whether Request Status should be manually editable """ return self.req.get("status_writable", True) def get_req_item_quantities_writable(self): """ Whether Item Quantities should be manually editable """ return self.req.get("item_quantities_writable", False) def get_req_skill_quantities_writable(self): """ Whether People Quantities should be manually editable """ return self.req.get("skill_quantities_writable", False) def get_req_multiple_req_items(self): """ Can a Request have multiple line items? - e.g. 
ICS says that each request should be just for items of a single Type """ return self.req.get("multiple_req_items", True) def get_req_show_quantity_transit(self): return self.req.get("show_quantity_transit", True) def get_req_inline_forms(self): """ Whether Requests module should use inline forms for Items """ return self.req.get("inline_forms", True) def get_req_prompt_match(self): """ Whether a Requester is prompted to match each line item in an Item request """ return self.req.get("prompt_match", True) def get_req_summary(self): """ Whether to use Summary Needs for Sites (Office/Facility currently): """ return self.req.get("summary", False) def get_req_use_commit(self): """ Whether there is a Commit step in Requests Management """ return self.req.get("use_commit", True) def get_req_ask_security(self): """ Should Requests ask whether Security is required? """ return self.req.get("ask_security", False) def get_req_ask_transport(self): """ Should Requests ask whether Transportation is required? """ return self.req.get("ask_transport", False) def get_req_items_ask_purpose(self): """ Should Requests for Items ask for Purpose? """ return self.req.get("items_ask_purpose", True) def get_req_req_crud_strings(self, type = None): return self.req.get("req_crud_strings") and \ self.req.req_crud_strings.get(type, None) def get_req_use_req_number(self): return self.req.get("use_req_number", True) def get_req_generate_req_number(self): return self.req.get("generate_req_number", True) def get_req_req_type(self): """ The Types of Request which can be made. Select one or more from: * People * Stock * Other tbc: Assets, Shelter, Food """ return self.req.get("req_type", ["Stock", "People", "Other"]) def get_req_form_name(self): return self.req.get("req_form_name", "Requisition Form") def get_req_shortname(self): return self.req.get("req_shortname", "REQ") def get_req_restrict_on_complete(self): """ To restrict adding new commits to the Completed commits. 
""" return self.req.get("req_restrict_on_complete", False) # ------------------------------------------------------------------------- # Supply def get_supply_catalog_default(self): return self.inv.get("catalog_default", "Default") def get_supply_use_alt_name(self): return self.supply.get("use_alt_name", True) # ------------------------------------------------------------------------- # Hospital Registry def get_hms_track_ctc(self): return self.hms.get("track_ctc", False) def get_hms_activity_reports(self): return self.hms.get("activity_reports", False) # ------------------------------------------------------------------------- # Active modules list def has_module(self, module_name): if not self.modules: # Provide a minimal list of core modules _modules = [ "default", # Default "admin", # Admin "gis", # GIS "pr", # Person Registry "org" # Organization Registry ] else: _modules = self.modules return module_name in _modules # END =========================================================================
mit
renyi533/tensorflow
tensorflow/python/ops/proto_ops.py
12
1323
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= # pylint: disable=wildcard-import,unused-import """Protocol Buffer encoding and decoding from tensors.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.python.framework import ops from tensorflow.python.ops.gen_decode_proto_ops import decode_proto_v2 as decode_proto from tensorflow.python.ops.gen_encode_proto_ops import encode_proto from tensorflow.python.util.tf_export import tf_export tf_export("io.decode_proto")(decode_proto) tf_export("io.encode_proto")(encode_proto) ops.NotDifferentiable("DecodeProtoV2") ops.NotDifferentiable("EncodeProto")
apache-2.0
neurospin/pylearn-epac
epac/tests/utils.py
1
2333
#!/usr/bin/env python # -*- coding: utf-8 -*- """ Created on 20 June 2013 @author: jinpeng.li@cea.fr @author: edouard.duchesnay@cea.fr @author: benoit.da_mota@inria.fr """ import numpy as np import copy def _is_numeric(obj): return isinstance(obj, (int, long, float, complex)) def _is_dict_or_array_or_list(obj): if type(obj) is np.ndarray: return True if type(obj) is list: return True if type(obj) is dict: return True return False def _is_array_or_list(obj): if type(obj) is np.ndarray: return True if type(obj) is list: return True return False def isequal(obj1, obj2): _EPSILON = 0.00001 if _is_numeric(obj1): if (np.absolute(obj1 - obj2) > _EPSILON): return False else: return True elif (isinstance(obj1, dict)): for key in obj1.keys(): if not isequal(obj1[key], obj2[key]): return False return True elif (_is_array_or_list(obj1)): obj1 = np.asarray(list(obj1)) obj2 = np.asarray(list(obj2)) for index in xrange(len(obj1.flat)): if not isequal(obj1.flat[index], obj2.flat[index]): return False return True else: return obj1 == obj2 def compare_leaf_res(leaf_res1, leaf_res2): for i in range(len(leaf_res1)): for key in leaf_res1[i][leaf_res1[i].keys()[0]].keys(): return (np.all(leaf_res1[i][leaf_res1[i].keys()[0]][key] == leaf_res2[i][leaf_res2[i].keys()[0]][key])) def compare_two_node(node1, node2): leaf_res1 = [] for leaf1 in node1.walk_leaves(): res = copy.copy(leaf1.load_results()) leaf_res1.append(res) leaf_res2 = [] for leaf2 in node2.walk_leaves(): res = copy.copy(leaf2.load_results()) leaf_res2.append(res) return compare_leaf_res(leaf_res1, leaf_res2) def comp_2wf_reduce_res(wf1, wf2): res_wf1 = wf1.reduce() res_wf2 = wf2.reduce() return isequal(res_wf1, res_wf2) def displayres(d, indent=0): print repr(d) # for key, value in d.iteritems(): # print '\t' * indent + str(key) # if isinstance(value, dict): # displayres(value, indent + 1) # else: # print '\t' * (indent + 1) + str(value)
bsd-3-clause
guodongxiaren/thrift
contrib/fb303/py/fb303/FacebookBase.py
173
1917
#!/usr/bin/env python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

import time
import FacebookService
import thrift.reflection.limited
from ttypes import fb_status


class FacebookBase(FacebookService.Iface):
  """Minimal base implementation of the fb303 FacebookService interface.

  Tracks the service name, the time the service came up, and a dict of
  named integer counters.  Option/profile/reflection hooks are stubbed
  out for subclasses to override.
  """

  def __init__(self, name):
    self.name = name
    # Seconds since the epoch at which the service was constructed.
    self.alive = int(time.time())
    self.counters = {}

  def getName(self):
    return self.name

  def getVersion(self):
    return ''

  def getStatus(self):
    return fb_status.ALIVE

  def getCounters(self):
    return self.counters

  def resetCounter(self, key):
    self.counters[key] = 0

  def getCounter(self, key):
    # `in` replaces the removed-in-py3 dict.has_key; unknown keys read as 0.
    if key in self.counters:
      return self.counters[key]
    return 0

  def incrementCounter(self, key):
    self.counters[key] = self.getCounter(key) + 1

  def setOption(self, key, value):
    pass

  def getOption(self, key):
    return ""

  # BUG FIX: getOptions was defined twice; the duplicate silently
  # shadowed the first identical definition and has been removed.
  def getOptions(self):
    return {}

  def aliveSince(self):
    return self.alive

  def getCpuProfile(self, duration):
    return ""

  def getLimitedReflection(self):
    return thrift.reflection.limited.Service()

  def reinitialize(self):
    pass

  def shutdown(self):
    pass
apache-2.0
mae/tech-hive
node_modules/node-gyp/gyp/tools/graphviz.py
2679
2878
#!/usr/bin/env python # Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Using the JSON dumped by the dump-dependency-json generator, generate input suitable for graphviz to render a dependency graph of targets.""" import collections import json import sys def ParseTarget(target): target, _, suffix = target.partition('#') filename, _, target = target.partition(':') return filename, target, suffix def LoadEdges(filename, targets): """Load the edges map from the dump file, and filter it to only show targets in |targets| and their depedendents.""" file = open('dump.json') edges = json.load(file) file.close() # Copy out only the edges we're interested in from the full edge list. target_edges = {} to_visit = targets[:] while to_visit: src = to_visit.pop() if src in target_edges: continue target_edges[src] = edges[src] to_visit.extend(edges[src]) return target_edges def WriteGraph(edges): """Print a graphviz graph to stdout. |edges| is a map of target to a list of other targets it depends on.""" # Bucket targets by file. files = collections.defaultdict(list) for src, dst in edges.items(): build_file, target_name, toolset = ParseTarget(src) files[build_file].append(src) print 'digraph D {' print ' fontsize=8' # Used by subgraphs. print ' node [fontsize=8]' # Output nodes by file. We must first write out each node within # its file grouping before writing out any edges that may refer # to those nodes. for filename, targets in files.items(): if len(targets) == 1: # If there's only one node for this file, simplify # the display by making it a box without an internal node. target = targets[0] build_file, target_name, toolset = ParseTarget(target) print ' "%s" [shape=box, label="%s\\n%s"]' % (target, filename, target_name) else: # Group multiple nodes together in a subgraph. 
print ' subgraph "cluster_%s" {' % filename print ' label = "%s"' % filename for target in targets: build_file, target_name, toolset = ParseTarget(target) print ' "%s" [label="%s"]' % (target, target_name) print ' }' # Now that we've placed all the nodes within subgraphs, output all # the edges between nodes. for src, dsts in edges.items(): for dst in dsts: print ' "%s" -> "%s"' % (src, dst) print '}' def main(): if len(sys.argv) < 2: print >>sys.stderr, __doc__ print >>sys.stderr print >>sys.stderr, 'usage: %s target1 target2...' % (sys.argv[0]) return 1 edges = LoadEdges('dump.json', sys.argv[1:]) WriteGraph(edges) return 0 if __name__ == '__main__': sys.exit(main())
mit
dprince/python-prettytable
prettytable.py
1
44037
#!/usr/bin/env python # # Copyright (c) 2009, Luke Maurits <luke@maurits.id.au> # All rights reserved. # With contributions from: # * Chris Clark # * Klein Stephane # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * The name of the author may not be used to endorse or promote products # derived from this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF # SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. 
__version__ = "TRUNK"

import copy
import csv
import itertools
import random
import sys
import textwrap
import unicodedata

# Python 2/3 compatibility shims: give py2 names to py3 builtins and vice versa.
py3k = sys.version_info[0] >= 3
if py3k:
    unicode = str
    basestring = str
    itermap = map
    iterzip = zip
    uni_chr = chr
else:
    itermap = itertools.imap
    iterzip = itertools.izip
    uni_chr = unichr

# html.escape is only available from Python 3.2; fall back to cgi.escape.
if py3k and sys.version_info[1] >= 2:
    from html import escape
else:
    from cgi import escape

# hrule styles
FRAME = 0
ALL = 1
NONE = 2

# Table styles
DEFAULT = 10
MSWORD_FRIENDLY = 11
PLAIN_COLUMNS = 12
RANDOM = 20


def _get_size(text):
    """Return (width, height) of *text* in terminal cells.

    Width is the widest line (measured in display cells, so wide CJK
    characters count double); height is the number of lines."""
    lines = text.split("\n")
    height = len(lines)
    width = max([_str_block_width(line) for line in lines])
    return (width, height)


class PrettyTable(object):

    def __init__(self, field_names=None, **kwargs):
        """Return a new PrettyTable instance

        Arguments:

        encoding - Unicode encoding scheme used to decode any encoded input
        field_names - list or tuple of field names
        fields - list or tuple of field names to include in displays
        start - index of first data row to include in output
        end - index of last data row to include in output PLUS ONE (list slice style)
        header - print a header showing field names (True or False)
        header_style - stylisation to apply to field names in header
            ("cap", "title", "upper", "lower" or None)
        border - print a border around the table (True or False)
        hrules - controls printing of horizontal rules after rows.
            Allowed values: FRAME, ALL, NONE
        int_format - controls formatting of integer data
        float_format - controls formatting of floating point data
        padding_width - number of spaces on either side of column data
            (only used if left and right paddings are None)
        left_padding_width - number of spaces on left hand side of column data
        right_padding_width - number of spaces on right hand side of column data
        vertical_char - single character string used to draw vertical lines
        horizontal_char - single character string used to draw horizontal lines
        junction_char - single character string used to draw line junctions
        sortby - name of field to sort rows by
        sort_key - sorting key function, applied to data points before sorting
        reversesort - True or False to sort in descending or ascending order"""

        if "encoding" in kwargs:
            self.encoding = kwargs["encoding"]
        else:
            self.encoding = "UTF-8"

        # Data
        self._field_names = []
        self._align = {}
        self._max_width = {}
        self._rows = []
        if field_names:
            self.field_names = field_names
        else:
            self._widths = []
            self._rows = []

        # Options: every recognised keyword is validated if supplied,
        # otherwise defaulted to None so the assignments below can run
        # uniformly.
        self._options = "start end fields header border sortby reversesort sort_key attributes format hrules".split()
        self._options.extend("int_format float_format padding_width left_padding_width right_padding_width".split())
        self._options.extend("vertical_char horizontal_char junction_char header_style".split())
        for option in self._options:
            if option in kwargs:
                self._validate_option(option, kwargs[option])
            else:
                kwargs[option] = None

        self._start = kwargs["start"] or 0
        self._end = kwargs["end"] or None
        self._fields = kwargs["fields"] or None
        # BUG FIX: the original used `kwargs["header"] or True`, which
        # silently discarded an explicit header=False. Test membership in
        # (True, False) so callers can actually disable the header/border.
        if kwargs["header"] in (True, False):
            self._header = kwargs["header"]
        else:
            self._header = True
        self._header_style = kwargs["header_style"] or None
        if kwargs["border"] in (True, False):
            self._border = kwargs["border"]
        else:
            self._border = True
        self._hrules = kwargs["hrules"] or FRAME
        self._sortby = kwargs["sortby"] or None
        self._reversesort = kwargs["reversesort"] or False
        self._sort_key = kwargs["sort_key"] or (lambda x: x)
        self._int_format = kwargs["int_format"] or {}
        self._float_format = kwargs["float_format"] or {}
        # BUG FIX: `kwargs["padding_width"] or 1` turned an explicit 0 into 1,
        # and `... or None` turned explicit 0 paddings into None. Compare
        # against None so a zero-width padding is honoured.
        if kwargs["padding_width"] is None:
            self._padding_width = 1
        else:
            self._padding_width = kwargs["padding_width"]
        self._left_padding_width = kwargs["left_padding_width"]
        self._right_padding_width = kwargs["right_padding_width"]
        self._vertical_char = kwargs["vertical_char"] or self._unicode("|")
        self._horizontal_char = kwargs["horizontal_char"] or self._unicode("-")
        self._junction_char = kwargs["junction_char"] or self._unicode("+")
        self._format = kwargs["format"] or False
        self._attributes = kwargs["attributes"] or {}

    def _unicode(self, value):
        """Coerce *value* to unicode text, decoding bytes with self.encoding."""
        if not isinstance(value, basestring):
            value = str(value)
        if not isinstance(value, unicode):
            value = unicode(value, self.encoding, "strict")
        return value

    def _justify(self, text, width, align):
        """Pad *text* to *width* display cells: "l" left, "r" right, else centre."""
        excess = width - _str_block_width(text)
        if align == "l":
            return text + excess * " "
        elif align == "r":
            return excess * " " + text
        else:
            if excess % 2:
                # Uneven padding
                # Put more space on right if text is of odd length...
                if _str_block_width(text) % 2:
                    return (excess // 2) * " " + text + (excess // 2 + 1) * " "
                # and more space on left if text is of even length
                else:
                    return (excess // 2 + 1) * " " + text + (excess // 2) * " "
                # Why distribute extra space this way?  To match the behaviour
                # of the inbuilt str.center() method.
            else:
                # Equal padding on either side
                return (excess // 2) * " " + text + (excess // 2) * " "

    def __getattr__(self, name):
        # Virtual read-only attributes derived from the current data.
        if name == "rowcount":
            return len(self._rows)
        elif name == "colcount":
            if self._field_names:
                return len(self._field_names)
            elif self._rows:
                return len(self._rows[0])
            else:
                return 0
        else:
            raise AttributeError(name)

    def __getitem__(self, index):
        # Slicing/indexing returns a NEW table restricted to the chosen rows.
        newtable = copy.deepcopy(self)
        if isinstance(index, slice):
            newtable._rows = self._rows[index]
        elif isinstance(index, int):
            newtable._rows = [self._rows[index], ]
        else:
            raise Exception("Index %s is invalid, must be an integer or slice" % str(index))
        return newtable

    if py3k:
        def __str__(self):
            return self.__unicode__()
    else:
        def __str__(self):
            return self.__unicode__().encode(self.encoding)

    def __unicode__(self):
        return self.get_string()

    ##############################
    # ATTRIBUTE VALIDATORS       #
    ##############################

    # The method _validate_option is all that should be used elsewhere in the
    # code base to validate options.  It will call the appropriate validation
    # method for that option.  The individual validation methods should never
    # need to be called directly (although nothing bad will happen if they
    # *are*).
    # Validation happens in TWO places.
    # Firstly, in the property setters defined in the ATTRIBUTE MANAGEMENT
    # section.
# Secondly, in the _get_options method, where keyword arguments are mixed with persistent settings def _validate_option(self, option, val): if option in ("field_names"): self._validate_field_names(val) elif option in ("start", "end", "max_width", "padding_width", "left_padding_width", "right_padding_width", "format"): self._validate_nonnegative_int(option, val) elif option in ("sortby"): self._validate_field_name(option, val) elif option in ("sort_key"): self._validate_function(option, val) elif option in ("hrules"): self._validate_hrules(option, val) elif option in ("fields"): self._validate_all_field_names(option, val) elif option in ("header", "border", "reversesort"): self._validate_true_or_false(option, val) elif option in ("header_style"): self._validate_header_style(val) # elif option in ("int_format"): # self._validate_int_format(option, val) # elif option in ("float_format"): # self._validate_float_format(option, val) elif option in ("vertical_char", "horizontal_char", "junction_char"): self._validate_single_char(option, val) elif option in ("attributes"): self._validate_attributes(option, val) else: raise Exception("Unrecognised option: %s!" 
% option) def _validate_field_names(self, val): # Check for appropriate length if self._field_names: try: assert len(val) == len(self._field_names) except AssertionError: raise Exception("Field name list has incorrect number of values, (actual) %d!=%d (expected)" % (len(val), len(self._field_names))) if self._rows: try: assert len(val) == len(self._rows[0]) except AssertionError: raise Exception("Field name list has incorrect number of values, (actual) %d!=%d (expected)" % (len(val), len(self._rows[0]))) # Check for uniqueness try: assert len(val) == len(set(val)) except AssertionError: raise Exception("Field names must be unique!") def _validate_header_style(self, val): try: assert val in ("cap", "title", "upper", "lower", None) except AssertionError: raise Exception("Invalid header style, use cap, title, upper, lower or None!") def _validate_align(self, val): try: assert val in ["l","c","r"] except AssertionError: raise Exception("Alignment %s is invalid, use l, c or r!" % val) def _validate_nonnegative_int(self, name, val): try: assert int(val) >= 0 except AssertionError: raise Exception("Invalid value for %s: %s!" % (name, self._unicode(val))) def _validate_true_or_false(self, name, val): try: assert val in (True, False) except AssertionError: raise Exception("Invalid value for %s! Must be True or False." % name) def _validate_int_format(self, name, val): if val == "": return try: assert type(val) in (str, unicode) assert val.isdigit() except AssertionError: raise Exception("Invalid value for %s! Must be an integer format string." % name) def _validate_float_format(self, name, val): if val == "": return try: assert type(val) in (str, unicode) assert "." in val bits = val.split(".") assert len(bits) <= 2 assert bits[0] == "" or bits[0].isdigit() assert bits[1] == "" or bits[1].isdigit() except AssertionError: raise Exception("Invalid value for %s! Must be a float format string." 
% name) def _validate_function(self, name, val): try: assert hasattr(val, "__call__") except AssertionError: raise Exception("Invalid value for %s! Must be a function." % name) def _validate_hrules(self, name, val): try: assert val in (ALL, FRAME, NONE) except AssertionError: raise Exception("Invalid value for %s! Must be ALL, FRAME or NONE." % name) def _validate_field_name(self, name, val): try: assert val in self._field_names except AssertionError: raise Exception("Invalid field name: %s!" % val) def _validate_all_field_names(self, name, val): try: for x in val: self._validate_field_name(name, x) except AssertionError: raise Exception("fields must be a sequence of field names!") def _validate_single_char(self, name, val): try: assert _str_block_width(val) == 1 except AssertionError: raise Exception("Invalid value for %s! Must be a string of length 1." % name) def _validate_attributes(self, name, val): try: assert isinstance(val, dict) except AssertionError: raise Exception("attributes must be a dictionary of name/value pairs!") ############################## # ATTRIBUTE MANAGEMENT # ############################## def _get_field_names(self): return self._field_names """The names of the fields Arguments: fields - list or tuple of field names""" def _set_field_names(self, val): val = [self._unicode(x) for x in val] self._validate_option("field_names", val) if self._field_names: old_names = self._field_names[:] self._field_names = val if self._align and old_names: for old_name, new_name in zip(old_names, val): self._align[new_name] = self._align[old_name] for old_name in old_names: self._align.pop(old_name) else: for field in self._field_names: self._align[field] = "c" field_names = property(_get_field_names, _set_field_names) def _get_align(self): return self._align def _set_align(self, val): self._validate_align(val) for field in self._field_names: self._align[field] = val align = property(_get_align, _set_align) def _get_max_width(self): return self._max_width 
# Remaining PrettyTable methods (the collapsed SOURCE lost the class
# indentation), followed by the module-level width helpers, table
# factories and demo main().

def _set_max_width(self, val):
    self._validate_option("max_width", val)
    for field in self._field_names:
        self._max_width[field] = val

max_width = property(_get_max_width, _set_max_width)

def _get_start(self):
    """Start index of the range of rows to print

    Arguments:

    start - index of first data row to include in output"""
    return self._start

def _set_start(self, val):
    self._validate_option("start", val)
    self._start = val

start = property(_get_start, _set_start)

def _get_end(self):
    """End index of the range of rows to print

    Arguments:

    end - index of last data row to include in output PLUS ONE (list slice style)"""
    return self._end

def _set_end(self, val):
    self._validate_option("end", val)
    self._end = val

end = property(_get_end, _set_end)

def _get_sortby(self):
    """Name of field by which to sort rows

    Arguments:

    sortby - field name to sort by"""
    return self._sortby

def _set_sortby(self, val):
    self._validate_option("sortby", val)
    self._sortby = val

sortby = property(_get_sortby, _set_sortby)

def _get_reversesort(self):
    """Controls direction of sorting (ascending vs descending)

    Arguments:

    reversesort - set to True to sort by descending order, or False to sort by ascending order"""
    return self._reversesort

def _set_reversesort(self, val):
    self._validate_option("reversesort", val)
    self._reversesort = val

reversesort = property(_get_reversesort, _set_reversesort)

def _get_sort_key(self):
    """Sorting key function, applied to data points before sorting

    Arguments:

    sort_key - a function which takes one argument and returns something to be sorted"""
    return self._sort_key

def _set_sort_key(self, val):
    self._validate_option("sort_key", val)
    self._sort_key = val

sort_key = property(_get_sort_key, _set_sort_key)

def _get_header(self):
    """Controls printing of table header with field names

    Arguments:

    header - print a header showing field names (True or False)"""
    return self._header

def _set_header(self, val):
    self._validate_option("header", val)
    self._header = val

header = property(_get_header, _set_header)

def _get_header_style(self):
    """Controls stylisation applied to field names in header

    Arguments:

    header_style - stylisation to apply to field names in header ("cap", "title", "upper", "lower" or None)"""
    return self._header_style

def _set_header_style(self, val):
    self._validate_header_style(val)
    self._header_style = val

header_style = property(_get_header_style, _set_header_style)

def _get_border(self):
    """Controls printing of border around table

    Arguments:

    border - print a border around the table (True or False)"""
    return self._border

def _set_border(self, val):
    self._validate_option("border", val)
    self._border = val

border = property(_get_border, _set_border)

def _get_hrules(self):
    """Controls printing of horizontal rules after rows

    Arguments:

    hrules - horizontal rules style.  Allowed values: FRAME, ALL, NONE"""
    return self._hrules

def _set_hrules(self, val):
    self._validate_option("hrules", val)
    self._hrules = val

hrules = property(_get_hrules, _set_hrules)

def _get_int_format(self):
    """Controls formatting of integer data

    Arguments:

    int_format - integer format string"""
    return self._int_format

def _set_int_format(self, val):
    # Validation intentionally disabled upstream:
    # self._validate_option("int_format", val)
    for field in self._field_names:
        self._int_format[field] = val

int_format = property(_get_int_format, _set_int_format)

def _get_float_format(self):
    """Controls formatting of floating point data

    Arguments:

    float_format - floating point format string"""
    return self._float_format

def _set_float_format(self, val):
    # Validation intentionally disabled upstream:
    # self._validate_option("float_format", val)
    for field in self._field_names:
        self._float_format[field] = val

float_format = property(_get_float_format, _set_float_format)

def _get_padding_width(self):
    """The number of empty spaces between a column's edge and its content

    Arguments:

    padding_width - number of spaces, must be a positive integer"""
    return self._padding_width

def _set_padding_width(self, val):
    self._validate_option("padding_width", val)
    self._padding_width = val

padding_width = property(_get_padding_width, _set_padding_width)

def _get_left_padding_width(self):
    """The number of empty spaces between a column's left edge and its content

    Arguments:

    left_padding - number of spaces, must be a positive integer"""
    return self._left_padding_width

def _set_left_padding_width(self, val):
    self._validate_option("left_padding_width", val)
    self._left_padding_width = val

left_padding_width = property(_get_left_padding_width, _set_left_padding_width)

def _get_right_padding_width(self):
    """The number of empty spaces between a column's right edge and its content

    Arguments:

    right_padding - number of spaces, must be a positive integer"""
    return self._right_padding_width

def _set_right_padding_width(self, val):
    self._validate_option("right_padding_width", val)
    self._right_padding_width = val

right_padding_width = property(_get_right_padding_width, _set_right_padding_width)

def _get_vertical_char(self):
    """The character used when printing table borders to draw vertical lines

    Arguments:

    vertical_char - single character string used to draw vertical lines"""
    return self._vertical_char

def _set_vertical_char(self, val):
    val = self._unicode(val)
    self._validate_option("vertical_char", val)
    self._vertical_char = val

vertical_char = property(_get_vertical_char, _set_vertical_char)

def _get_horizontal_char(self):
    """The character used when printing table borders to draw horizontal lines

    Arguments:

    horizontal_char - single character string used to draw horizontal lines"""
    return self._horizontal_char

def _set_horizontal_char(self, val):
    val = self._unicode(val)
    self._validate_option("horizontal_char", val)
    self._horizontal_char = val

horizontal_char = property(_get_horizontal_char, _set_horizontal_char)

def _get_junction_char(self):
    """The character used when printing table borders to draw line junctions

    Arguments:

    junction_char - single character string used to draw line junctions"""
    return self._junction_char

def _set_junction_char(self, val):
    val = self._unicode(val)
    # BUG FIX: this setter validated under the option name "vertical_char",
    # so an invalid junction character produced a misleading error message.
    self._validate_option("junction_char", val)
    self._junction_char = val

junction_char = property(_get_junction_char, _set_junction_char)

def _get_format(self):
    """Controls whether or not HTML tables are formatted to match styling options

    Arguments:

    format - True or False"""
    return self._format

def _set_format(self, val):
    self._validate_option("format", val)
    self._format = val

format = property(_get_format, _set_format)

def _get_attributes(self):
    """A dictionary of HTML attribute name/value pairs to be included in the <table> tag when printing HTML

    Arguments:

    attributes - dictionary of attributes"""
    return self._attributes

def _set_attributes(self, val):
    self._validate_option("attributes", val)
    self._attributes = val

attributes = property(_get_attributes, _set_attributes)

##############################
#       OPTION MIXER         #
##############################

def _get_options(self, kwargs):
    """Merge per-call keyword arguments over the table's persistent settings.

    Any option supplied in *kwargs* is validated and wins; everything else
    falls back to the corresponding `self._<option>` attribute."""
    options = {}
    for option in self._options:
        if option in kwargs:
            self._validate_option(option, kwargs[option])
            options[option] = kwargs[option]
        else:
            options[option] = getattr(self, "_" + option)
    return options

##############################
#     PRESET STYLE LOGIC     #
##############################

def set_style(self, style):
    """Apply one of the preset styles (DEFAULT, MSWORD_FRIENDLY,
    PLAIN_COLUMNS or RANDOM) to this table."""
    if style == DEFAULT:
        self._set_default_style()
    elif style == MSWORD_FRIENDLY:
        self._set_msword_style()
    elif style == PLAIN_COLUMNS:
        self._set_columns_style()
    elif style == RANDOM:
        self._set_random_style()
    else:
        raise Exception("Invalid pre-set style!")

def _set_default_style(self):
    self.header = True
    self.border = True
    self._hrules = FRAME
    self.padding_width = 1
    self.left_padding_width = 1
    self.right_padding_width = 1
    self.vertical_char = "|"
    self.horizontal_char = "-"
    self.junction_char = "+"

def _set_msword_style(self):
    self.header = True
    self.border = True
    self._hrules = NONE
    self.padding_width = 1
    self.left_padding_width = 1
    self.right_padding_width = 1
    self.vertical_char = "|"

def _set_columns_style(self):
    self.header = True
    self.border = False
    self.padding_width = 1
    self.left_padding_width = 0
    self.right_padding_width = 8

def _set_random_style(self):
    # Just for fun!
    self.header = random.choice((True, False))
    self.border = random.choice((True, False))
    self._hrules = random.choice((ALL, FRAME, NONE))
    self.left_padding_width = random.randint(0, 5)
    self.right_padding_width = random.randint(0, 5)
    self.vertical_char = random.choice("~!@#$%^&*()_+|-=\\{}[];':\",./;<>?")
    self.horizontal_char = random.choice("~!@#$%^&*()_+|-=\\{}[];':\",./;<>?")
    self.junction_char = random.choice("~!@#$%^&*()_+|-=\\{}[];':\",./;<>?")

##############################
#    DATA INPUT METHODS      #
##############################

def add_row(self, row):
    """Add a row to the table

    Arguments:

    row - row of data, should be a list with as many elements as the table
    has fields"""
    if self._field_names and len(row) != len(self._field_names):
        raise Exception("Row has incorrect number of values, (actual) %d!=%d (expected)" % (len(row), len(self._field_names)))
    if not self._field_names:
        # Auto-name fields "Field 1" ... "Field n" for an unnamed table.
        self.field_names = [("Field %d" % (n + 1)) for n in range(0, len(row))]
    self._rows.append(list(row))

def del_row(self, row_index):
    """Delete a row from the table

    Arguments:

    row_index - The index of the row you want to delete.  Indexing starts at 0."""
    if row_index > len(self._rows) - 1:
        raise Exception("Cant delete row at index %d, table only has %d rows!" % (row_index, len(self._rows)))
    del self._rows[row_index]

def add_column(self, fieldname, column, align="c"):
    """Add a column to the table.

    Arguments:

    fieldname - name of the field to contain the new column of data
    column - column of data, should be a list with as many elements as the
    table has rows
    align - desired alignment for this column - "l" for left, "c" for centre
    and "r" for right"""
    if len(self._rows) in (0, len(column)):
        self._validate_align(align)
        self._field_names.append(fieldname)
        self._align[fieldname] = align
        for i in range(0, len(column)):
            if len(self._rows) < i + 1:
                self._rows.append([])
            self._rows[i].append(column[i])
    else:
        raise Exception("Column length %d does not match number of rows %d!" % (len(column), len(self._rows)))

def clear_rows(self):
    """Delete all rows from the table but keep the current field names"""
    self._rows = []

def clear(self):
    """Delete all rows and field names from the table, maintaining nothing but styling options"""
    self._rows = []
    self._field_names = []
    self._widths = []

##############################
#     MISC PUBLIC METHODS    #
##############################

def copy(self):
    """Return an independent deep copy of this table."""
    return copy.deepcopy(self)

##############################
#    MISC PRIVATE METHODS    #
##############################

def _format_value(self, field, value):
    """Apply any per-field int/float format string, then coerce to unicode."""
    if isinstance(value, int) and field in self._int_format:
        value = self._unicode(("{0:" + self._int_format[field] + "}").format(value))
    elif isinstance(value, float) and field in self._float_format:
        value = self._unicode(("{0:" + self._float_format[field] + "}").format(value))
    return self._unicode(value)

def _compute_widths(self, rows, options):
    """Compute display-cell width for every column, capped by max_width."""
    if options["header"]:
        widths = [_get_size(field)[0] for field in self._field_names]
    else:
        widths = len(self.field_names) * [0]
    for row in rows:
        for index, value in enumerate(row):
            fieldname = self.field_names[index]
            if fieldname in self.max_width:
                widths[index] = max(widths[index], min(_get_size(value)[0], self.max_width[fieldname]))
            else:
                widths[index] = max(widths[index], _get_size(value)[0])
    self._widths = widths

def _get_padding_widths(self, options):
    """Return (left, right) padding; explicit side paddings override padding_width."""
    if options["left_padding_width"] is not None:
        lpad = options["left_padding_width"]
    else:
        lpad = options["padding_width"]
    if options["right_padding_width"] is not None:
        rpad = options["right_padding_width"]
    else:
        rpad = options["padding_width"]
    return lpad, rpad

def _get_rows(self, options):
    """Return only those data rows that should be printed, based on slicing and sorting.

    Arguments:

    options - dictionary of option settings."""
    # Make a copy of only those rows in the slice range
    rows = copy.deepcopy(self._rows[options["start"]:options["end"]])
    # Sort if necessary
    if options["sortby"]:
        sortindex = self._field_names.index(options["sortby"])
        # Decorate
        rows = [[row[sortindex]] + row for row in rows]
        # Sort
        rows.sort(reverse=options["reversesort"], key=options["sort_key"])
        # Undecorate
        rows = [row[1:] for row in rows]
    return rows

def _format_row(self, row, options):
    return [self._format_value(field, value) for (field, value) in zip(self._field_names, row)]

def _format_rows(self, rows, options):
    return [self._format_row(row, options) for row in rows]

##############################
# PLAIN TEXT STRING METHODS  #
##############################

def get_string(self, **kwargs):
    """Return string representation of table in current state.

    Arguments:

    start - index of first data row to include in output
    end - index of last data row to include in output PLUS ONE (list slice style)
    fields - names of fields (columns) to include
    header - print a header showing field names (True or False)
    border - print a border around the table (True or False)
    hrules - controls printing of horizontal rules after rows.  Allowed values: FRAME, ALL, NONE
    int_format - controls formatting of integer data
    float_format - controls formatting of floating point data
    padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
    left_padding_width - number of spaces on left hand side of column data
    right_padding_width - number of spaces on right hand side of column data
    vertical_char - single character string used to draw vertical lines
    horizontal_char - single character string used to draw horizontal lines
    junction_char - single character string used to draw line junctions
    sortby - name of field to sort rows by
    sort_key - sorting key function, applied to data points before sorting
    reversesort - True or False to sort in descending or ascending order"""

    options = self._get_options(kwargs)

    lines = []

    # Don't think too hard about an empty table
    # Is this the desired behaviour?  Maybe we should still print the header?
    if self.rowcount == 0:
        return ""

    # Get the rows we need to print, taking into account slicing, sorting,
    # etc.
    rows = self._get_rows(options)

    # Turn all data in all rows into Unicode, formatted as desired
    formatted_rows = self._format_rows(rows, options)

    # Compute column widths
    self._compute_widths(formatted_rows, options)

    # Add header or top of border
    self._hrule = self._stringify_hrule(options)
    if options["header"]:
        lines.append(self._stringify_header(options))
    elif options["border"] and options["hrules"] != NONE:
        lines.append(self._hrule)

    # Add rows
    for row in formatted_rows:
        lines.append(self._stringify_row(row, options))

    # Add bottom of border (hrules == FRAME, which is 0, so "not hrules";
    # when hrules == ALL the last row already appended its own rule)
    if options["border"] and not options["hrules"]:
        lines.append(self._hrule)

    return self._unicode("\n").join(lines)

def _stringify_hrule(self, options):
    if not options["border"]:
        return ""
    lpad, rpad = self._get_padding_widths(options)
    bits = [options["junction_char"]]
    for field, width in zip(self._field_names, self._widths):
        if options["fields"] and field not in options["fields"]:
            continue
        bits.append((width + lpad + rpad) * options["horizontal_char"])
        bits.append(options["junction_char"])
    return "".join(bits)

def _stringify_header(self, options):
    bits = []
    lpad, rpad = self._get_padding_widths(options)
    if options["border"]:
        if options["hrules"] != NONE:
            bits.append(self._hrule)
            bits.append("\n")
        bits.append(options["vertical_char"])
    for field, width, in zip(self._field_names, self._widths):
        if options["fields"] and field not in options["fields"]:
            continue
        if self._header_style == "cap":
            fieldname = field.capitalize()
        elif self._header_style == "title":
            fieldname = field.title()
        elif self._header_style == "upper":
            fieldname = field.upper()
        elif self._header_style == "lower":
            fieldname = field.lower()
        else:
            fieldname = field
        bits.append(" " * lpad + self._justify(fieldname, width, self._align[field]) + " " * rpad)
        if options["border"]:
            bits.append(options["vertical_char"])
    if options["border"] and options["hrules"] != NONE:
        bits.append("\n")
        bits.append(self._hrule)
    return "".join(bits)

def _stringify_row(self, row, options):
    # First pass: wrap any cell line wider than the column width.
    for index, field, value, width, in zip(range(0, len(row)), self._field_names, row, self._widths):
        # Enforce max widths
        lines = value.split("\n")
        new_lines = []
        for line in lines:
            if _str_block_width(line) > width:
                line = textwrap.fill(line, width)
            new_lines.append(line)
        lines = new_lines
        value = "\n".join(lines)
        row[index] = value

    # The row is rendered as row_height physical lines (the tallest cell).
    row_height = 0
    for c in row:
        h = _get_size(c)[1]
        if h > row_height:
            row_height = h

    bits = []
    lpad, rpad = self._get_padding_widths(options)
    for y in range(0, row_height):
        bits.append([])
        if options["border"]:
            bits[y].append(self.vertical_char)

    for field, value, width, in zip(self._field_names, row, self._widths):
        lines = value.split("\n")
        if len(lines) < row_height:
            lines = lines + ([""] * (row_height - len(lines)))
        y = 0
        for l in lines:
            if options["fields"] and field not in options["fields"]:
                continue
            bits[y].append(" " * lpad + self._justify(l, width, self._align[field]) + " " * rpad)
            if options["border"]:
                bits[y].append(self.vertical_char)
            y += 1

    if options["border"] and options["hrules"] == ALL:
        bits[row_height - 1].append("\n")
        bits[row_height - 1].append(self._hrule)

    for y in range(0, row_height):
        bits[y] = "".join(bits[y])

    return "\n".join(bits)

##############################
#     HTML STRING METHODS    #
##############################

def get_html_string(self, **kwargs):
    """Return string representation of HTML formatted version of table in current state.

    Arguments:

    start - index of first data row to include in output
    end - index of last data row to include in output PLUS ONE (list slice style)
    fields - names of fields (columns) to include
    header - print a header showing field names (True or False)
    border - print a border around the table (True or False)
    hrules - controls printing of horizontal rules after rows.  Allowed values: FRAME, ALL, NONE
    int_format - controls formatting of integer data
    float_format - controls formatting of floating point data
    padding_width - number of spaces on either side of column data (only used if left and right paddings are None)
    left_padding_width - number of spaces on left hand side of column data
    right_padding_width - number of spaces on right hand side of column data
    sortby - name of field to sort rows by
    sort_key - sorting key function, applied to data points before sorting
    attributes - dictionary of name/value pairs to include as HTML attributes in the <table> tag"""

    options = self._get_options(kwargs)

    if options["format"]:
        string = self._get_formatted_html_string(options)
    else:
        string = self._get_simple_html_string(options)

    return string

def _get_simple_html_string(self, options):
    lines = []

    open_tag = []
    open_tag.append("<table")
    if options["border"]:
        open_tag.append(" border=\"1\"")
    if options["attributes"]:
        for attr_name in options["attributes"]:
            open_tag.append(" %s=\"%s\"" % (attr_name, options["attributes"][attr_name]))
    open_tag.append(">")
    lines.append("".join(open_tag))

    # Headers
    if options["header"]:
        lines.append("    <tr>")
        for field in self._field_names:
            if options["fields"] and field not in options["fields"]:
                continue
            lines.append("        <th>%s</th>" % escape(field).replace("\n", "<br />"))
        lines.append("    </tr>")

    # Data
    rows = self._get_rows(options)
    formatted_rows = self._format_rows(rows, options)
    for row in formatted_rows:
        lines.append("    <tr>")
        for field, datum in zip(self._field_names, row):
            if options["fields"] and field not in options["fields"]:
                continue
            lines.append("        <td>%s</td>" % escape(datum).replace("\n", "<br />"))
        lines.append("    </tr>")

    lines.append("</table>")

    return self._unicode("\n").join(lines)

def _get_formatted_html_string(self, options):
    lines = []
    lpad, rpad = self._get_padding_widths(options)

    open_tag = []
    open_tag.append("<table")
    if options["border"]:
        open_tag.append(" border=\"1\"")
    if options["hrules"] == NONE:
        open_tag.append(" frame=\"vsides\" rules=\"cols\"")
    if options["attributes"]:
        for attr_name in options["attributes"]:
            open_tag.append(" %s=\"%s\"" % (attr_name, options["attributes"][attr_name]))
    open_tag.append(">")
    lines.append("".join(open_tag))

    # Headers
    if options["header"]:
        lines.append("    <tr>")
        for field in self._field_names:
            if options["fields"] and field not in options["fields"]:
                continue
            lines.append("        <th style=\"padding-left: %dem; padding-right: %dem; text-align: center\">%s</th>" % (lpad, rpad, escape(field).replace("\n", "<br />")))
        lines.append("    </tr>")

    # Data
    rows = self._get_rows(options)
    formatted_rows = self._format_rows(rows, options)
    aligns = []
    for field in self._field_names:
        aligns.append({"l": "left", "r": "right", "c": "center"}[self._align[field]])
    for row in formatted_rows:
        lines.append("    <tr>")
        for field, datum, align in zip(self._field_names, row, aligns):
            if options["fields"] and field not in options["fields"]:
                continue
            lines.append("        <td style=\"padding-left: %dem; padding-right: %dem; text-align: %s\">%s</td>" % (lpad, rpad, align, escape(datum).replace("\n", "<br />")))
        lines.append("    </tr>")
    lines.append("</table>")

    return self._unicode("\n").join(lines)

##############################
# UNICODE WIDTH FUNCTIONS    #
##############################

def _char_block_width(char):
    """Return the terminal cell width of the code point *char* (an int)."""
    # Basic Latin, which is probably the most common case
    # if char in xrange(0x0021, 0x007e):
    # if char >= 0x0021 and char <= 0x007e:
    if 0x0021 <= char <= 0x007e:
        return 1
    # Chinese, Japanese, Korean (common)
    if 0x4e00 <= char <= 0x9fff:
        return 2
    # Hangul
    if 0xac00 <= char <= 0xd7af:
        return 2
    # Combining?
    if unicodedata.combining(uni_chr(char)):
        return 0
    # Hiragana and Katakana
    if 0x3040 <= char <= 0x309f or 0x30a0 <= char <= 0x30ff:
        return 2
    # Full-width Latin characters
    if 0xff01 <= char <= 0xff60:
        return 2
    # CJK punctuation
    if 0x3000 <= char <= 0x303e:
        return 2
    # Backspace and delete
    if char in (0x0008, 0x007f):
        return -1
    # Other control characters
    # BUG FIX: the original tested `char in (0x0000, 0x001f)`, which matched
    # only NUL and US; a range test over the whole C0 block was clearly
    # intended (cf. the range tests above).
    elif 0x0000 <= char <= 0x001f:
        return 0
    # Take a guess
    return 1

def _str_block_width(val):
    """Return the total terminal cell width of the string *val*."""
    return sum(itermap(_char_block_width, itermap(ord, val)))

##############################
#      TABLE FACTORIES       #
##############################

def from_csv(fp, field_names=None):
    """Build a PrettyTable from the CSV data in file object *fp*.

    If *field_names* is None the first CSV row supplies the headers."""
    dialect = csv.Sniffer().sniff(fp.read(1024))
    fp.seek(0)
    reader = csv.reader(fp, dialect)

    table = PrettyTable()
    if field_names:
        table.field_names = field_names
    else:
        table.field_names = [x.strip() for x in next(reader)]

    for row in reader:
        table.add_row([x.strip() for x in row])

    return table

def from_db_cursor(cursor):
    """Build a PrettyTable from an already-executed DB-API cursor."""
    table = PrettyTable()
    table.field_names = [col[0] for col in cursor.description]
    for row in cursor.fetchall():
        table.add_row(row)
    return table

##############################
#    MAIN (TEST FUNCTION)    #
##############################

def main():
    x = PrettyTable(["City name", "Area", "Population", "Annual Rainfall"])
    x.sortby = "Population"
    x.reversesort = True
    x.int_format["Area"] = "04d"
    x.float_format = "6.1f"
    x.align["City name"] = "l"  # Left align city names
    x.add_row(["Adelaide", 1295, 1158259, 600.5])
    x.add_row(["Brisbane", 5905, 1857594, 1146.4])
    x.add_row(["Darwin", 112, 120900, 1714.7])
    x.add_row(["Hobart", 1357, 205556, 619.5])
    x.add_row(["Sydney", 2058, 4336374, 1214.8])
    x.add_row(["Melbourne", 1566, 3806092, 646.9])
    x.add_row(["Perth", 5386, 1554769, 869.4])
    print(x)

if __name__ == "__main__":
    main()
bsd-3-clause
DeltaEpsilon-HackFMI2/FMICalendar-REST
venv/lib/python2.7/site-packages/django/contrib/gis/db/models/sql/compiler.py
93
13247
try: from itertools import zip_longest except ImportError: from itertools import izip_longest as zip_longest from django.utils.six.moves import zip from django.db.backends.util import truncate_name, typecast_timestamp from django.db.models.sql import compiler from django.db.models.sql.constants import MULTI from django.utils import six SQLCompiler = compiler.SQLCompiler class GeoSQLCompiler(compiler.SQLCompiler): def get_columns(self, with_aliases=False): """ Return the list of columns to use in the select statement. If no columns have been specified, returns all columns relating to fields in the model. If 'with_aliases' is true, any column names that are duplicated (without the table names) are given unique aliases. This is needed in some cases to avoid ambiguitity with nested queries. This routine is overridden from Query to handle customized selection of geometry columns. """ qn = self.quote_name_unless_alias qn2 = self.connection.ops.quote_name result = ['(%s) AS %s' % (self.get_extra_select_format(alias) % col[0], qn2(alias)) for alias, col in six.iteritems(self.query.extra_select)] aliases = set(self.query.extra_select.keys()) if with_aliases: col_aliases = aliases.copy() else: col_aliases = set() if self.query.select: only_load = self.deferred_to_columns() # This loop customized for GeoQuery. 
for col, field in zip(self.query.select, self.query.select_fields): if isinstance(col, (list, tuple)): alias, column = col table = self.query.alias_map[alias].table_name if table in only_load and column not in only_load[table]: continue r = self.get_field_select(field, alias, column) if with_aliases: if col[1] in col_aliases: c_alias = 'Col%d' % len(col_aliases) result.append('%s AS %s' % (r, c_alias)) aliases.add(c_alias) col_aliases.add(c_alias) else: result.append('%s AS %s' % (r, qn2(col[1]))) aliases.add(r) col_aliases.add(col[1]) else: result.append(r) aliases.add(r) col_aliases.add(col[1]) else: result.append(col.as_sql(qn, self.connection)) if hasattr(col, 'alias'): aliases.add(col.alias) col_aliases.add(col.alias) elif self.query.default_cols: cols, new_aliases = self.get_default_columns(with_aliases, col_aliases) result.extend(cols) aliases.update(new_aliases) max_name_length = self.connection.ops.max_name_length() result.extend([ '%s%s' % ( self.get_extra_select_format(alias) % aggregate.as_sql(qn, self.connection), alias is not None and ' AS %s' % qn(truncate_name(alias, max_name_length)) or '' ) for alias, aggregate in self.query.aggregate_select.items() ]) # This loop customized for GeoQuery. for (table, col), field in zip(self.query.related_select_cols, self.query.related_select_fields): r = self.get_field_select(field, table, col) if with_aliases and col in col_aliases: c_alias = 'Col%d' % len(col_aliases) result.append('%s AS %s' % (r, c_alias)) aliases.add(c_alias) col_aliases.add(c_alias) else: result.append(r) aliases.add(r) col_aliases.add(col) self._select_aliases = aliases return result def get_default_columns(self, with_aliases=False, col_aliases=None, start_alias=None, opts=None, as_pairs=False, local_only=False): """ Computes the default columns for selecting every field in the base model. Will sometimes be called to pull in related models (e.g. 
via select_related), in which case "opts" and "start_alias" will be given to provide a starting point for the traversal. Returns a list of strings, quoted appropriately for use in SQL directly, as well as a set of aliases used in the select statement (if 'as_pairs' is True, returns a list of (alias, col_name) pairs instead of strings as the first component and None as the second component). This routine is overridden from Query to handle customized selection of geometry columns. """ result = [] if opts is None: opts = self.query.model._meta aliases = set() only_load = self.deferred_to_columns() if start_alias: seen = {None: start_alias} for field, model in opts.get_fields_with_model(): # For local fields (even if through proxy) the model should # be None. if model == opts.concrete_model: model = None if local_only and model is not None: continue if start_alias: try: alias = seen[model] except KeyError: link_field = opts.get_ancestor_link(model) alias = self.query.join((start_alias, model._meta.db_table, link_field.column, model._meta.pk.column)) seen[model] = alias else: # If we're starting from the base model of the queryset, the # aliases will have already been set up in pre_sql_setup(), so # we can save time here. alias = self.query.included_inherited_models[model] table = self.query.alias_map[alias].table_name if table in only_load and field.column not in only_load[table]: continue if as_pairs: result.append((alias, field.column)) aliases.add(alias) continue # This part of the function is customized for GeoQuery. We # see if there was any custom selection specified in the # dictionary, and set up the selection format appropriately. 
field_sel = self.get_field_select(field, alias) if with_aliases and field.column in col_aliases: c_alias = 'Col%d' % len(col_aliases) result.append('%s AS %s' % (field_sel, c_alias)) col_aliases.add(c_alias) aliases.add(c_alias) else: r = field_sel result.append(r) aliases.add(r) if with_aliases: col_aliases.add(field.column) return result, aliases def resolve_columns(self, row, fields=()): """ This routine is necessary so that distances and geometries returned from extra selection SQL get resolved appropriately into Python objects. """ values = [] aliases = list(self.query.extra_select) # Have to set a starting row number offset that is used for # determining the correct starting row index -- needed for # doing pagination with Oracle. rn_offset = 0 if self.connection.ops.oracle: if self.query.high_mark is not None or self.query.low_mark: rn_offset = 1 index_start = rn_offset + len(aliases) # Converting any extra selection values (e.g., geometries and # distance objects added by GeoQuerySet methods). values = [self.query.convert_values(v, self.query.extra_select_fields.get(a, None), self.connection) for v, a in zip(row[rn_offset:index_start], aliases)] if self.connection.ops.oracle or getattr(self.query, 'geo_values', False): # We resolve the rest of the columns if we're on Oracle or if # the `geo_values` attribute is defined. for value, field in zip_longest(row[index_start:], fields): values.append(self.query.convert_values(value, field, self.connection)) else: values.extend(row[index_start:]) return tuple(values) #### Routines unique to GeoQuery #### def get_extra_select_format(self, alias): sel_fmt = '%s' if hasattr(self.query, 'custom_select') and alias in self.query.custom_select: sel_fmt = sel_fmt % self.query.custom_select[alias] return sel_fmt def get_field_select(self, field, alias=None, column=None): """ Returns the SELECT SQL string for the given field. 
Figures out if any custom selection SQL is needed for the column The `alias` keyword may be used to manually specify the database table where the column exists, if not in the model associated with this `GeoQuery`. Similarly, `column` may be used to specify the exact column name, rather than using the `column` attribute on `field`. """ sel_fmt = self.get_select_format(field) if field in self.query.custom_select: field_sel = sel_fmt % self.query.custom_select[field] else: field_sel = sel_fmt % self._field_column(field, alias, column) return field_sel def get_select_format(self, fld): """ Returns the selection format string, depending on the requirements of the spatial backend. For example, Oracle and MySQL require custom selection formats in order to retrieve geometries in OGC WKT. For all other fields a simple '%s' format string is returned. """ if self.connection.ops.select and hasattr(fld, 'geom_type'): # This allows operations to be done on fields in the SELECT, # overriding their values -- used by the Oracle and MySQL # spatial backends to get database values as WKT, and by the # `transform` method. sel_fmt = self.connection.ops.select # Because WKT doesn't contain spatial reference information, # the SRID is prefixed to the returned WKT to ensure that the # transformed geometries have an SRID different than that of the # field -- this is only used by `transform` for Oracle and # SpatiaLite backends. if self.query.transformed_srid and ( self.connection.ops.oracle or self.connection.ops.spatialite ): sel_fmt = "'SRID=%d;'||%s" % (self.query.transformed_srid, sel_fmt) else: sel_fmt = '%s' return sel_fmt # Private API utilities, subject to change. def _field_column(self, field, table_alias=None, column=None): """ Helper function that returns the database column for the given field. The table and column are returned (quoted) in the proper format, e.g., `"geoapp_city"."point"`. 
If `table_alias` is not specified, the database table associated with the model of this `GeoQuery` will be used. If `column` is specified, it will be used instead of the value in `field.column`. """ if table_alias is None: table_alias = self.query.model._meta.db_table return "%s.%s" % (self.quote_name_unless_alias(table_alias), self.connection.ops.quote_name(column or field.column)) class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler): pass class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler): pass class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler): pass class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler): pass class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler): """ This is overridden for GeoDjango to properly cast date columns, since `GeoQuery.resolve_columns` is used for spatial values. See #14648, #16757. """ def results_iter(self): if self.connection.ops.oracle: from django.db.models.fields import DateTimeField fields = [DateTimeField()] else: needs_string_cast = self.connection.features.needs_datetime_string_cast offset = len(self.query.extra_select) for rows in self.execute_sql(MULTI): for row in rows: date = row[offset] if self.connection.ops.oracle: date = self.resolve_columns(row, fields)[offset] elif needs_string_cast: date = typecast_timestamp(str(date)) yield date
mit
memsharded/conan
conans/test/integration/test_package_config_test.py
1
5057
import unittest from conans.test.utils.tools import TestClient test_conanfile = """from conans import ConanFile class test_packageConan(ConanFile): name = "conan_test_package" options = {"shared": [True, False]} default_options = "shared=False" def configure(self): self.output.info("shared (configure): %s" % (self.options.shared)) for package in self.requires: self.options[package.split('/', 1)[0]].shared = self.options.shared def requirements(self): self.output.info("shared (requirements): %s" % (self.options.shared)) def build(self): self.output.info("shared (build): %s" % (self.options.shared)) def test(self): self.output.info("shared (test): %s" % (self.options.shared)) """ create_conanfile = """from conans import ConanFile class test_packageConan(ConanFile): options = {"shared": [True, False]} default_options = "shared=False" def build(self): self.output.info("shared (build): %s" % (self.options["conan_package"].shared)) def test(self): self.output.info("shared (test): %s" % (self.options["conan_package"].shared)) """ conanfile = """from conans import ConanFile class PkgConan(ConanFile): name = "conan_package" version = "0.1" options = {"shared": [True, False]} default_options = "shared=False" def configure(self): self.output.info("shared (configure): %s" % str(self.options.shared)) def requirements(self): self.output.info("shared (requirements): %s" % str(self.options.shared)) def build(self): self.output.info("shared (build): %s" % str(self.options.shared)) """ class TestPackageConfigTest(unittest.TestCase): def test_package_test(self): client = TestClient() client.save({"conanfile.py": conanfile, "test_package/conanfile.py": test_conanfile}) client.run("create . 
lasote/stable -o conan_test_package:shared=True") self.assertIn("conan_package/0.1@lasote/stable (test package): shared (configure): True", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (requirements): True", client.out) self.assertIn("conan_package/0.1@lasote/stable: shared (configure): True", client.out) self.assertIn("conan_package/0.1@lasote/stable: shared (configure): True", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (build): True", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (test): True", client.out) self.assertNotIn("False", client.out) client.run("create . lasote/stable -o conan_test_package:shared=False") self.assertIn("conan_package/0.1@lasote/stable (test package): shared (configure): False", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (requirements): False", client.out) self.assertIn("conan_package/0.1@lasote/stable: shared (configure): False", client.out) self.assertIn("conan_package/0.1@lasote/stable: shared (configure): False", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (build): False", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (test): False", client.out) self.assertNotIn("True", client.out) def create_test(self): client = TestClient() client.save({"conanfile.py": conanfile, "test_package/conanfile.py": create_conanfile}) client.run("create . lasote/stable -o conan_package:shared=True") self.assertIn("conan_package/0.1@lasote/stable: shared (configure): True", client.out) self.assertIn("conan_package/0.1@lasote/stable: shared (configure): True", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (build): True", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (test): True", client.out) self.assertNotIn("False", client.out) client.run("create . 
lasote/stable -o shared=False") self.assertIn("conan_package/0.1@lasote/stable: shared (configure): False", client.out) self.assertIn("conan_package/0.1@lasote/stable: shared (configure): False", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (build): False", client.out) self.assertIn("conan_package/0.1@lasote/stable (test package): shared (test): False", client.out) self.assertNotIn("True", client.out)
mit
thomaslundgaard/pimp
src/serverInterface.py
1
14654
# -*- coding: utf-8 -*- # Pimp - A mpd-frontend to be used as a jukebox at parties. # Copyright (C) 2010 Peter Bjørn # Copyright (C) 2010 Thomas Lundgaard # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import random import socket import sqlite3 import time from datetime import datetime from PyQt4 import QtCore, QtGui from mpd import * from settings import Settings from dbUpdate import * from helperFunctions import * class ServerInterfaceError(Exception): def __init__(self): Exception.__init__(self) class AddToPlaylistError(Exception): def __init__(self, message): Exception.__init__(self, message) class ServerInterface(QtCore.QObject): sigConnected = QtCore.pyqtSignal() sigDisconnected = QtCore.pyqtSignal() sigDbUpdated = QtCore.pyqtSignal() sigStatusChanged = QtCore.pyqtSignal('PyQt_PyObject','PyQt_PyObject') # changeList , mpdStatusDict def __init__(self, parent=None): QtCore.QObject.__init__(self, parent) self.client = MPDClient() self.settings = Settings() self.mpdServer = str(self.settings.value("mpdServer")) self.mpdPort = str(self.settings.value("mpdPort")) self.mpdPassword = str(self.settings.value("mpdPassword")) self.lastState=-9999 self.lastSongid=-9999 self.lastTime=-9999 self.lastPlaylist=-9999 self.lastVolume=-1 self.connected = False self.shuffleList = [] self.timerId = False if self.settings.value("playOnConnect") == "True": self.autoAdd = True else: self.autoAdd = False 
self.trackDB = None self.sigConnected.connect(self._onConnected) self.sigStatusChanged.connect(self._onStatusChanged) QtGui.qApp.aboutToQuit.connect(self._onAppQuit) def connect(self): try: self.client.connect(host=self.mpdServer, port=self.mpdPort) except socket.error: print datetime.now().isoformat(" ") + \ ": Unable to connect to MPD: Socket error (will try again in 2 sec)" QtCore.QTimer.singleShot(2000, self.connect) return if self.mpdPassword != "": try: self.client.password(self.mpdPassword) except CommandError: print datetime.now().isoformat(" ") + \ ": Unable to connect to MPD: Invalid password" return if not self.trackDB: # this is the first connection self.dbUpdate() else: self.connected = True self.sigConnected.emit() def dbUpdate(self): self.trackDB = sqlite3.connect(':memory:') self.dbUpdateDialog = DbUpdateDialog() self.dbUpdateDialog.ui.mpdupdatePixmap.show() self.dbUpdateDialog.setModal(True) self.dbUpdateDialog.show() self.dbUpdateWorker = DbUpdateWorker(self.client) self.dbUpdateWorker.sigRemoteUpdateFinished.connect( \ self.onRemoteUpdateFinished) self.dbUpdateWorker.sigDbDownloaded.connect(self.onDbDownloaded) self.dbUpdateWorker.sigDbUpdateFailed.connect(self.onDbUpdateFailed) self.dbUpdateWorker.start() def onRemoteUpdateFinished(self): self.dbUpdateDialog.ui.mpdupdatePixmap.setEnabled(True) self.dbUpdateDialog.ui.sqlupdatePixmap.show() def onDbUpdateFailed(self): self.trackDB = None self.dbUpdateDialog.accept() self._lostConnection() @QtCore.pyqtSlot('PyQt_PyObject') def onDbDownloaded(self, tracks): settings = Settings() if settings.value("excludeLongTracks") == "True": excluding = True maxTrackSeconds = int(settings.value("maxTrackLength")) * 60 else: excluding = False cursor = self.trackDB.cursor() cursor.execute("drop table if exists tracks") cursor.execute('''create table if not exists tracks (title text, artist text, file text, time integer, tag text) ''') for t in tracks: if excluding and int(t['time']) > maxTrackSeconds: continue 
cursor.execute('''insert into tracks(title, artist, file, time, tag) values( ?, ?, ?, ?, ?) ''',\ (t['title'], t['artist'], t['file'], t['time'], t['tag']) ) self.trackDB.commit() cursor.close() self.dbUpdateDialog.ui.sqlupdatePixmap.setEnabled(True) self.dbUpdateDialog.accept() self.connected = True self.sigConnected.emit() self.sigDbUpdated.emit() def play(self): if not self.connected: raise ServerInterfaceError() if int(self.status()['playlistlength']) == 0: self.addRandomTrack() try: return self.client.play() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def pause(self): if not self.connected: raise ServerInterfaceError() try: return self.client.pause() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def stop(self): if not self.connected: raise ServerInterfaceError() try: return self.client.stop() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def playPause(self): if self.status()['state'] == 'play': return self.pause() else: return self.play() def next(self): if not self.connected: raise ServerInterfaceError() try: return self.client.next() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def add(self, filename): if not self.connected: raise ServerInterfaceError() try: return self.client.add(filename.encode("utf-8")) except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def clear(self): if not self.connected: raise ServerInterfaceError() try: return self.client.clear() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def status(self): if not self.connected: raise ServerInterfaceError() try: return self.client.status() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def setvol(self, volume): if not self.connected: raise ServerInterfaceError() try: return 
self.client.setvol(volume) except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def currentsong(self): if not self.connected: raise ServerInterfaceError() try: return self.client.currentsong() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def playlistinfo(self): if not self.connected: raise ServerInterfaceError() try: return self.client.playlistinfo() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def listall(self): if not self.connected: raise ServerInterfaceError() try: return self.client.listall() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def listallinfo(self): if not self.connected: raise ServerInterfaceError() try: return self.client.listallinfo() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def deleteid(self, id): if not self.connected: raise ServerInterfaceError() try: return self.client.deleteid(id) except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def update(self): if not self.connected: raise ServerInterfaceError() try: return self.client.update() except (socket.error, ConnectionError): self._lostConnection() raise ServerInterfaceError() def clearExceptCurrent(self): try: playlist = self.playlistinfo() status = self.status() if status['state'] == "play": for item in playlist: if item['id'] != status['songid']: self.deleteid(item['id']) else: self.autoAdd = False self.clear() # clear completely if not playing except ServerInterfaceError: pass def addToPlaylist(self, filename): for item in self.playlistinfo(): if parseTrackInfo(item)['file'] == filename: raise AddToPlaylistError("Track already in playlist!") playlistLength = int(self.status()['playlistlength']) if playlistLength >= int(self.settings.value("maxPlaylist")): raise AddToPlaylistError("Playlist full!") else: self.add(filename) 
self.play() def addRandomTrack(self): if len(self.shuffleList) <= 0: cursor = self.trackDB.cursor() cursor.execute("select file from tracks") self.shuffleList = [item[0] for item in cursor] cursor.close() random.shuffle(self.shuffleList) if self.shuffleList: # shuffleList can be empty if no tracks in mpd db self.add(self.shuffleList.pop()) def searchDBtag(self, anded, *argwords): keywords = [ '%' + word + '%' for word in argwords] if anded: lop = 'and' else: lop='or' cursor = self.trackDB.cursor() query = """ select * from tracks where tag like ?""" for i in range(len(keywords) - 1): query += " %s tag like ?" % lop query += " order by tag asc" cursor.execute(query, tuple(keywords)) for row in cursor: yield {'title': row[0],\ 'artist': row[1],\ 'file': row[2],\ 'time': row[3],\ 'tag': row[4], } cursor.close() def timerEvent(self, event): changeList = [] try: status = self.status() except ServerInterfaceError: return if status['playlist'] != self.lastPlaylist: changeList.append('playlist') self.lastPlaylist = status['playlist'] if 'songid' in status and status['songid'] != self.lastSongid: changeList.append('song') self.lastSongid = status['songid'] if 'time' in status and status['time'] != self.lastTime: changeList.append('time') self.lastTime = status['time'] if status['state'] != self.lastState: changeList.append('state') self.lastState = status['state'] if status['volume'] != self.lastVolume: changeList.append('volume') self.lastVolume = status['volume'] if changeList: self.sigStatusChanged.emit(changeList, status) def _onAppQuit(self): if self.settings.value("stopOnQuit") == "True": try: self.stop() except ServerInterfaceError: pass @QtCore.pyqtSlot('PyQt_PyObject','PyQt_PyObject') def _onStatusChanged(self, changeList, status): if not self.autoAdd: self.autoAdd = True return if 'state' in changeList and status['state'] == 'stop' and \ int(status['playlistlength']) == 0: self.play() # play() adds random track if 'time' in changeList: timeBeforePlAdd = 
int(status['xfade']) + 2 elapsed, total = status['time'].split(":") elapsed = int(elapsed) total = int(total) if total-elapsed<=timeBeforePlAdd and \ int(status['playlistlength'])<=1: self.addRandomTrack() def _onConnected(self): self.timerId = self.startTimer(400) try: self.client.random(0) self.client.repeat(1) try: self.client.single(0) self.client.consume(1) except AttributeError: # Ugly hack: python-mpd doesn't support these commands (yet), # so we just add them self.client._commands["consume"] = self.client._commands["play"] self.client._commands["single"] = self.client._commands["play"] self.client.single(0) self.client.consume(1) except (socket.error, ConnectionError): self._lostConnection() return if self.settings.value("playOnConnect") == "True": self.play() def _lostConnection(self): print datetime.now().isoformat(" ") + \ ": Lost connection to MPD. Trying to reconnect..." if self.timerId: self.killTimer(self.timerId) self. timerId = False self.lastState = -9999 self.lastSong = -9999 self.lastTime = -9999 self.lastPlaylist = -9999 self.connected = False self.sigDisconnected.emit() self.client.disconnect() self.connect()
gpl-3.0
stasiek/robotframework
utest/running/test_handlers.py
18
13066
import unittest import sys import inspect from robot.running.handlers import _PythonHandler, _JavaHandler, DynamicHandler from robot import utils from robot.utils.asserts import * from robot.running.testlibraries import TestLibrary from robot.running.dynamicmethods import ( GetKeywordArguments, GetKeywordDocumentation, RunKeyword) from robot.errors import DataError from classes import NameLibrary, DocLibrary, ArgInfoLibrary from ArgumentsPython import ArgumentsPython if utils.JYTHON: import ArgumentsJava def _get_handler_methods(lib): attrs = [getattr(lib, a) for a in dir(lib) if not a.startswith('_')] return [a for a in attrs if inspect.ismethod(a)] def _get_java_handler_methods(lib): # This hack assumes that all java handlers used start with 'a_' -- easier # than excluding 'equals' etc. otherwise return [a for a in _get_handler_methods(lib) if a.__name__.startswith('a_') ] class LibraryMock: def __init__(self, name='MyLibrary', scope='GLOBAL'): self.name = self.orig_name = name self.scope = scope class TestPythonHandler(unittest.TestCase): def test_name(self): for method in _get_handler_methods(NameLibrary()): handler = _PythonHandler(LibraryMock('mylib'), method.__name__, method) assert_equals(handler.name, method.__doc__) assert_equals(handler.longname, 'mylib.'+method.__doc__) def test_docs(self): for method in _get_handler_methods(DocLibrary()): handler = _PythonHandler(LibraryMock(), method.__name__, method) assert_equals(handler.doc, method.expected_doc) assert_equals(handler.shortdoc, method.expected_shortdoc) def test_arguments(self): for method in _get_handler_methods(ArgInfoLibrary()): handler = _PythonHandler(LibraryMock(), method.__name__, method) args = handler.arguments argspec = (args.positional, args.defaults, args.varargs, args.kwargs) expected = eval(method.__doc__) assert_equals(argspec, expected, method.__name__) def test_arg_limits(self): for method in _get_handler_methods(ArgumentsPython()): handler = _PythonHandler(LibraryMock(), 
method.__name__, method) exp_mina, exp_maxa = eval(method.__doc__) assert_equals(handler.arguments.minargs, exp_mina) assert_equals(handler.arguments.maxargs, exp_maxa) def test_getarginfo_getattr(self): handlers = TestLibrary('classes.GetattrLibrary').handlers assert_equals(len(handlers), 3) for handler in handlers: assert_true(handler.name in ['Foo','Bar','Zap']) assert_equals(handler.arguments.minargs, 0) assert_equals(handler.arguments.maxargs, sys.maxint) class TestDynamicHandlerCreation(unittest.TestCase): def test_none_doc(self): self._assert_doc(None, '') def test_empty_doc(self): self._assert_doc('') def test_non_empty_doc(self): self._assert_doc('This is some documentation') def test_non_ascii_doc(self): self._assert_doc(u'P\xe4iv\xe4\xe4') if not utils.IRONPYTHON: def test_with_utf8_doc(self): doc = u'P\xe4iv\xe4\xe4' self._assert_doc(doc.encode('UTF-8'), doc) def test_invalid_doc_type(self): self._assert_fails('Return value must be string.', doc=True) def test_none_argspec(self): self._assert_spec(None, maxargs=sys.maxint, vararg='varargs', kwarg=False) def test_none_argspec_when_kwargs_supported(self): self._assert_spec(None, maxargs=sys.maxint, vararg='varargs', kwarg='kwargs') def test_empty_argspec(self): self._assert_spec([]) def test_mandatory_args(self): for argspec in [['arg'], ['arg1', 'arg2', 'arg3']]: self._assert_spec(argspec, len(argspec), len(argspec), argspec) def test_only_default_args(self): self._assert_spec(['defarg1=value', 'defarg2=defvalue'], 0, 2, ['defarg1', 'defarg2'], ['value', 'defvalue']) def test_default_value_may_contain_equal_sign(self): self._assert_spec(['d=foo=bar'], 0, 1, ['d'], ['foo=bar']) def test_varargs(self): self._assert_spec(['*vararg'], 0, sys.maxint, vararg='vararg') def test_kwargs(self): self._assert_spec(['**kwarg'], 0, 0, kwarg='kwarg') def test_varargs_and_kwargs(self): self._assert_spec(['*vararg', '**kwarg'], 0, sys.maxint, vararg='vararg', kwarg='kwarg') def test_integration(self): 
self._assert_spec(['arg', 'default=value'], 1, 2, ['arg', 'default'], ['value']) self._assert_spec(['arg', 'default=value', '*var'], 1, sys.maxint, ['arg', 'default'], ['value'], 'var') self._assert_spec(['arg', 'default=value', '**kw'], 1, 2, ['arg', 'default'], ['value'], None, 'kw') self._assert_spec(['arg', 'default=value', '*var', '**kw'], 1, sys.maxint, ['arg', 'default'], ['value'], 'var', 'kw') def test_invalid_argspec_type(self): for argspec in [True, [1, 2]]: self._assert_fails("Return value must be list of strings.", argspec) def test_mandatory_arg_after_default_arg(self): for argspec in [['d=v', 'arg'], ['a', 'b', 'c=v', 'd']]: self._assert_fails('Invalid argument specification: ' 'Non-default argument after default arguments.', argspec) def test_positional_after_vararg(self): for argspec in [['*foo', 'arg'], ['arg', '*var', 'arg'], ['a', 'b=d', '*var', 'c'], ['*var', '*vararg']]: self._assert_fails('Invalid argument specification: ' 'Positional argument after varargs.', argspec) def test_kwarg_not_last(self): for argspec in [['**foo', 'arg'], ['arg', '**kw', 'arg'], ['a', 'b=d', '**kw', 'c'], ['**kw', '*vararg'], ['**kw', '**kwarg']]: self._assert_fails('Invalid argument specification: ' 'Only last argument can be kwargs.', argspec) def test_missing_kwargs_support(self): self._assert_fails("Too few 'run_keyword' method parameters" " for **kwargs support.", ['**kwargs']) def _assert_doc(self, doc, expected=None): expected = doc if expected is None else expected assert_equals(self._create_handler(doc=doc).doc, expected) def _assert_spec(self, argspec, minargs=0, maxargs=0, positional=[], defaults=[], vararg=None, kwarg=None): if kwarg is None: kwargs_support_modes = [True, False] elif kwarg is False: kwargs_support_modes = [False] kwarg = None else: kwargs_support_modes = [True] for kwargs_support in kwargs_support_modes: arguments = self._create_handler(argspec, kwargs_support=kwargs_support ).arguments assert_equals(arguments.minargs, minargs) 
assert_equals(arguments.maxargs, maxargs) assert_equals(arguments.positional, positional) assert_equals(arguments.defaults, defaults) assert_equals(arguments.varargs, vararg) assert_equals(arguments.kwargs, kwarg) def _assert_fails(self, error, argspec=None, doc=None): assert_raises_with_msg(DataError, error, self._create_handler, argspec, doc) def _create_handler(self, argspec=None, doc=None, kwargs_support=False): lib = LibraryMock('TEST CASE') if kwargs_support: lib.run_keyword = lambda name, args, kwargs: None else: lib.run_keyword = lambda name, args: None lib.run_keyword.__name__ = 'run_keyword' doc = GetKeywordDocumentation(lib)._handle_return_value(doc) argspec = GetKeywordArguments(lib)._handle_return_value(argspec) return DynamicHandler(lib, 'mock', RunKeyword(lib), doc, argspec) if utils.JYTHON: handlers = dict((method.__name__, method) for method in _get_java_handler_methods(ArgumentsJava('Arg', ['varargs']))) class TestJavaHandler(unittest.TestCase): def test_arg_limits_no_defaults_or_varargs(self): for count in [0, 1, 3]: method = handlers['a_%d' % count] handler = _JavaHandler(LibraryMock(), method.__name__, method) assert_equals(handler.arguments.minargs, count) assert_equals(handler.arguments.maxargs, count) def test_arg_limits_with_varargs(self): for count in [0, 1]: method = handlers['a_%d_n' % count] handler = _JavaHandler(LibraryMock(), method.__name__, method) assert_equals(handler.arguments.minargs, count) assert_equals(handler.arguments.maxargs, sys.maxint) def test_arg_limits_with_defaults(self): # defaults i.e. 
multiple signatures for mina, maxa in [(0, 1), (1, 3)]: method = handlers['a_%d_%d' % (mina, maxa)] handler = _JavaHandler(LibraryMock(), method.__name__, method) assert_equals(handler.arguments.minargs, mina) assert_equals(handler.arguments.maxargs, maxa) class TestArgumentCoercer(unittest.TestCase): def setUp(self): self.lib = TestLibrary('ArgTypeCoercion', ['42', 'true']) def test_coercion_in_constructor(self): instance = self.lib.get_instance() assert_equals(instance.myInt, 42) assert_equals(instance.myBool, True) def test_coercing_to_integer(self): self._test_coercion(self._handler_named('intArgument'), ['1'], [1]) def test_coercing_to_boolean(self): handler = self._handler_named('booleanArgument') self._test_coercion(handler, ['True'], [True]) self._test_coercion(handler, ['FALSE'], [ False]) def test_coercing_to_real_number(self): self._test_coercion(self._handler_named('doubleArgument'), ['1.42'], [1.42]) self._test_coercion(self._handler_named('floatArgument'), ['-9991.098'], [-9991.098]) def test_coercion_with_compatible_types(self): self._test_coercion(self._handler_named('coercableKeywordWithCompatibleTypes'), ['9999', '-42', 'FaLsE', '31.31'], [9999, -42, False, 31.31]) def test_arguments_that_are_not_strings_are_not_coerced(self): self._test_coercion(self._handler_named('intArgument'), [self.lib], [self.lib]) self._test_coercion(self._handler_named('booleanArgument'), [42], [42]) def test_coercion_fails_with_reasonable_message(self): exp_msg = 'Argument at position 1 cannot be coerced to %s.' 
self._test_coercion_fails(self._handler_named('intArgument'), exp_msg % 'integer') self._test_coercion_fails(self._handler_named('booleanArgument'), exp_msg % 'boolean') self._test_coercion_fails(self._handler_named('floatArgument'), exp_msg % 'floating point number') def test_no_arg_no_coercion(self): self._test_coercion(self._handler_named('noArgument'), [], []) def test_coercing_multiple_arguments(self): self._test_coercion(self._handler_named('coercableKeyword'), ['10.0', '42', 'tRUe'], [10.0, 42, True]) def test_coercion_is_not_done_with_conflicting_signatures(self): self._test_coercion(self._handler_named('unCoercableKeyword'), ['True', '42'], ['True', '42']) def test_coercable_and_uncoercable_args_in_same_kw(self): self._test_coercion(self._handler_named('coercableAndUnCoercableArgs'), ['1', 'False', '-23', '0'], ['1', False, -23, '0']) def _handler_named(self, name): return self.lib.handlers[name] def _test_coercion(self, handler, args, expected): assert_equals(handler._arg_coercer.coerce(args, {}), expected) def _test_coercion_fails(self, handler, expected_message): assert_raises_with_msg(ValueError, expected_message, handler._arg_coercer.coerce, ['invalid'], {}) if __name__ == '__main__': unittest.main()
apache-2.0
UCL-INGI/INGInious
inginious/frontend/pages/api/auth_methods.py
1
1476
# -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.

""" Auth methods """
from inginious.frontend.pages.api._api_page import APIPage


class APIAuthMethods(APIPage):
    """ Endpoint /api/v0/auth_methods """

    def API_GET(self):
        """
        Returns all the auth methods available. (200 OK)

        Response: a list of auth methods. Each auth method is a dict with:

            id      id of the auth method
            name    the name of the authentication method, typically
                    displayed by the webapp
            input   a list of input descriptors for this method; each input
                    is a dict with three fields: ``id`` (the id of the input,
                    to be returned as id in the POST request of
                    /api/v0/authentication), ``name`` (the placeholder for
                    the input) and ``type`` (text or password).
        """
        # Legacy endpoint kept for retro-compatibility only: it always
        # advertises the single hard-coded login/password method below.
        login_input = {"id": "login", "name": "Login", "type": "text"}
        password_input = {"id": "password", "name": "Password", "type": "password"}
        account_method = {
            "id": 0,
            "name": "INGInious account",
            "input": [login_input, password_input]
        }
        return 200, [account_method]
agpl-3.0
HiSPARC/station-software
user/python/Lib/json/tests/__init__.py
145
2452
"""Test package for the :mod:`json` library (Python 2).

Builds two parallel :class:`unittest.TestCase` base classes, ``PyTest`` and
``CTest``, so that each test module in this package can run its checks once
against the pure-Python implementation and once against the ``_json`` C
accelerator.
"""
import os
import sys
import json
import doctest
import unittest

from test import test_support

# import json with and without accelerations
# (import_fresh_module re-imports the package: "fresh" forces the C module to
# be reloaded, "blocked" hides it so the pure-Python fallback is used)
cjson = test_support.import_fresh_module('json', fresh=['_json'])
pyjson = test_support.import_fresh_module('json', blocked=['_json'])

# create two base classes that will be used by the other tests
class PyTest(unittest.TestCase):
    # pure-Python implementation under test
    json = pyjson
    loads = staticmethod(pyjson.loads)
    dumps = staticmethod(pyjson.dumps)

@unittest.skipUnless(cjson, 'requires _json')
class CTest(unittest.TestCase):
    # C-accelerated implementation under test; the guard avoids an
    # AttributeError at class-creation time when _json is unavailable
    # (the skipUnless decorator then skips the whole class anyway)
    if cjson is not None:
        json = cjson
        loads = staticmethod(cjson.loads)
        dumps = staticmethod(cjson.dumps)

# test PyTest and CTest checking if the functions come from the right module
class TestPyTest(PyTest):
    def test_pyjson(self):
        # each hot-path function must come from the pure-Python submodules
        self.assertEqual(self.json.scanner.make_scanner.__module__,
                         'json.scanner')
        self.assertEqual(self.json.decoder.scanstring.__module__,
                         'json.decoder')
        self.assertEqual(self.json.encoder.encode_basestring_ascii.__module__,
                         'json.encoder')

class TestCTest(CTest):
    def test_cjson(self):
        # each hot-path function must come from the _json C extension
        self.assertEqual(self.json.scanner.make_scanner.__module__, '_json')
        self.assertEqual(self.json.decoder.scanstring.__module__, '_json')
        self.assertEqual(self.json.encoder.c_make_encoder.__module__, '_json')
        self.assertEqual(self.json.encoder.encode_basestring_ascii.__module__,
                         '_json')


# directory containing this package; scanned for test modules below
here = os.path.dirname(__file__)

def test_suite():
    """Return a suite with the doctests plus every test_*.py module here."""
    suite = additional_tests()
    loader = unittest.TestLoader()
    for fn in os.listdir(here):
        if fn.startswith("test") and fn.endswith(".py"):
            modname = "json.tests." + fn[:-3]
            __import__(modname)
            module = sys.modules[modname]
            suite.addTests(loader.loadTestsFromModule(module))
    return suite

def additional_tests():
    """Return the doctest-based suite plus the sanity checks above."""
    suite = unittest.TestSuite()
    for mod in (json, json.encoder, json.decoder):
        suite.addTest(doctest.DocTestSuite(mod))
    suite.addTest(TestPyTest('test_pyjson'))
    suite.addTest(TestCTest('test_cjson'))
    return suite

def main():
    """Run the full suite with a plain text runner (script entry point)."""
    suite = test_suite()
    runner = unittest.TextTestRunner()
    runner.run(suite)

if __name__ == '__main__':
    # when run directly, make the enclosing Lib/ directory importable so
    # "json.tests.*" resolves to this source tree rather than the installed one
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    main()
gpl-3.0
dmordom/nipype
nipype/interfaces/slicer/tests/test_auto_LabelMapSmoothing.py
5
1363
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT from nipype.testing import assert_equal from nipype.interfaces.slicer.surface import LabelMapSmoothing def test_LabelMapSmoothing_inputs(): input_map = dict(args=dict(argstr='%s', ), environ=dict(nohash=True, usedefault=True, ), gaussianSigma=dict(argstr='--gaussianSigma %f', ), ignore_exception=dict(nohash=True, usedefault=True, ), inputVolume=dict(argstr='%s', position=-2, ), labelToSmooth=dict(argstr='--labelToSmooth %d', ), maxRMSError=dict(argstr='--maxRMSError %f', ), numberOfIterations=dict(argstr='--numberOfIterations %d', ), outputVolume=dict(argstr='%s', hash_files=False, position=-1, ), terminal_output=dict(mandatory=True, nohash=True, ), ) inputs = LabelMapSmoothing.input_spec() for key, metadata in input_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(inputs.traits()[key], metakey), value def test_LabelMapSmoothing_outputs(): output_map = dict(outputVolume=dict(position=-1, ), ) outputs = LabelMapSmoothing.output_spec() for key, metadata in output_map.items(): for metakey, value in metadata.items(): yield assert_equal, getattr(outputs.traits()[key], metakey), value
bsd-3-clause
ravindrapanda/tensorflow
tensorflow/contrib/timeseries/python/timeseries/state_space_models/varma.py
28
8893
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Multivariate autoregressive model (vector autoregression).

Implements the following model (num_blocks = max(ar_order, ma_order + 1)):

  y(t, 1) = \sum_{i=1}^{ar_order} ar_coefs[i] * y(t - 1, i)
  y(t, i) = y(t - 1, i - 1) + ma_coefs[i - 1] * e(t) for 1 < i < num_blocks
  y(t, num_blocks) = y(t - 1, num_blocks - 1) + e(t)

Where e(t) are Gaussian with zero mean and learned covariance.

Each element of ar_coefs and ma_coefs is a [num_features x num_features]
matrix. Each y(t, i) is a vector of length num_features. Indices in the above
equations are one-based. Initial conditions y(0, i) come from prior state
(which may either be learned or left as a constant with high prior
covariance).

If ar_order > ma_order, the observation model is:
  y(t, 1) + observation_noise(t)
If ma_order >= ar_order, it is (to observe the moving average component):
  y(t, 1) + y(t, num_blocks) + observation_noise(t)

Where observation_noise(t) are Gaussian with zero mean and learned covariance.

This implementation uses a formulation which puts all of the autoregressive
coefficients in the transition equation for the observed component, which
enables learning using truncated backpropagation. Noise is not applied
directly to the observed component (with the exception of standard observation
noise), which further aids learning of the autoregressive coefficients when
VARMA is in an ensemble with other models (in which case having an observation
noise term is usually unavoidable).
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.timeseries.python.timeseries import math_utils
from tensorflow.contrib.timeseries.python.timeseries.state_space_models import state_space_model

from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variable_scope


class VARMA(state_space_model.StateSpaceModel):
  """A VARMA model implementation as a special case of the state space model."""

  def __init__(self,
               autoregressive_order,
               moving_average_order,
               configuration=state_space_model.StateSpaceModelConfiguration()):
    """Construct a VARMA model.

    The size of the latent state for this model is:
      num_features * max(autoregressive_order, moving_average_order + 1)
    Square matrices of this size are constructed and multiplied.

    Args:
      autoregressive_order: The maximum autoregressive lag.
      moving_average_order: The maximum moving average lag, after which
        transient deviations are expected to return to their long-term mean.
      configuration: A StateSpaceModelConfiguration object.
    """
    self.ar_order = autoregressive_order
    self.ma_order = moving_average_order
    # One "block" per lag kept in the latent state; the +1 on ma_order leaves
    # room for the deterministic noise-accumulation component.
    self.state_num_blocks = max(autoregressive_order, moving_average_order + 1)
    super(VARMA, self).__init__(configuration=configuration)
    self.state_dimension = self.state_num_blocks * self.num_features

  def _define_parameters(self, observation_transition_tradeoff_log=None):
    """Create the AR/MA coefficient variables, then defer to the base class.

    AR coefficients start at zero (no autoregression); MA coefficients start
    at identity matrices (noise passed through unchanged).
    """
    with variable_scope.variable_scope(self._variable_scope):
      # TODO(allenl): Evaluate parameter transformations for AR/MA coefficients
      # which improve interpretability/stability.
      self.ar_coefs = variable_scope.get_variable(
          name="ar_coefs",
          shape=[self.num_features, self.num_features, self.ar_order],
          dtype=self.dtype,
          initializer=init_ops.zeros_initializer())
      self.ma_coefs = variable_scope.get_variable(
          name="ma_coefs",
          initializer=array_ops.tile(
              linalg_ops.eye(self.num_features, dtype=self.dtype)[None, :, :],
              [self.ma_order, 1, 1]),
          dtype=self.dtype)
    super(VARMA, self)._define_parameters(
        observation_transition_tradeoff_log=observation_transition_tradeoff_log)

  def get_state_transition(self):
    """Construct state transition matrix from VARMA parameters.

    Returns:
      the state transition matrix. It has shape
        [self.state_dimension, self.state_dimension].
    """
    # Pad any unused AR blocks with zeros. The extra state is necessary if
    # ma_order >= ar_order.
    ar_coefs_padded = array_ops.reshape(
        array_ops.pad(self.ar_coefs,
                      [[0, 0], [0, 0],
                       [0, self.state_num_blocks - self.ar_order]]),
        [self.num_features, self.state_dimension])
    # Identity block shifted right by num_features: moves each lag block one
    # slot deeper in the state at every transition.
    shift_matrix = array_ops.pad(
        linalg_ops.eye(
            (self.state_num_blocks - 1) * self.num_features, dtype=self.dtype),
        [[0, 0], [0, self.num_features]])
    return array_ops.concat([ar_coefs_padded, shift_matrix], axis=0)

  def get_noise_transform(self):
    """Construct state noise transform matrix from VARMA parameters.

    Returns:
      the state noise transform matrix. It has shape
        [self.state_dimension, self.num_features].
    """
    # Noise is broadcast, through the moving average coefficients, to
    # un-observed parts of the latent state.
    ma_coefs_padded = array_ops.reshape(
        array_ops.pad(self.ma_coefs,
                      [[self.state_num_blocks - 1 - self.ma_order, 0], [0, 0],
                       [0, 0]]),
        [(self.state_num_blocks - 1) * self.num_features, self.num_features],
        name="noise_transform")
    # Deterministically apply noise to the oldest component.
    return array_ops.concat(
        [ma_coefs_padded,
         linalg_ops.eye(self.num_features, dtype=self.dtype)],
        axis=0)

  def get_observation_model(self, times):
    """Construct observation model matrix from VARMA parameters.

    Args:
      times: A [batch size] vector indicating the times observation models are
          requested for. Unused.
    Returns:
      the observation model matrix. It has shape
        [self.num_features, self.state_dimension].
    """
    del times  # StateSpaceModel will broadcast along the batch dimension
    if self.ar_order > self.ma_order or self.state_num_blocks < 2:
      return array_ops.pad(
          linalg_ops.eye(self.num_features, dtype=self.dtype),
          [[0, 0], [0, self.num_features * (self.state_num_blocks - 1)]],
          name="observation_model")
    else:
      # Add a second observed component which "catches" the accumulated moving
      # average errors as they reach the end of the state. If ar_order >
      # ma_order, this is unnecessary, since accumulated errors cycle naturally.
      return array_ops.concat(
          [
              array_ops.pad(
                  linalg_ops.eye(self.num_features, dtype=self.dtype),
                  [[0, 0], [0,
                            self.num_features * (self.state_num_blocks - 2)]]),
              linalg_ops.eye(self.num_features, dtype=self.dtype)
          ],
          axis=1,
          name="observation_model")

  def get_state_transition_noise_covariance(
      self, minimum_initial_variance=1e-5):
    """Return a learnable transition noise covariance with a data-driven init.

    Args:
      minimum_initial_variance: Floor on the variance used to seed the
          initial (log) noise scale, guarding against log(0).
    Returns:
      A variable covariance matrix over the state noise dimension.
    """
    # Most state space models use only an explicit observation noise term to
    # model deviations from expectations, and so a low initial transition noise
    # parameter is helpful there. Since deviations from expectations are also
    # modeled as transition noise in VARMA, we set its initial value based on a
    # slight over-estimate empirical observation noise.
    if self._input_statistics is not None:
      feature_variance = self._scale_variance(
          self._input_statistics.series_start_moments.variance)
      initial_transition_noise_scale = math_ops.log(
          math_ops.maximum(
              math_ops.reduce_mean(feature_variance), minimum_initial_variance))
    else:
      initial_transition_noise_scale = 0.
    # The noise covariance lives in the (possibly smaller) noise space defined
    # by the noise transform, not the full state space.
    state_noise_transform = ops.convert_to_tensor(
        self.get_noise_transform(), dtype=self.dtype)
    state_noise_dimension = state_noise_transform.get_shape()[1].value
    return math_utils.variable_covariance_matrix(
        state_noise_dimension, "state_transition_noise",
        dtype=self.dtype,
        initial_overall_scale_log=initial_transition_noise_scale)
apache-2.0
sameetb-cuelogic/edx-platform-test
common/djangoapps/track/views/tests/test_segmentio.py
13
21647
"""Ensure we can parse events sent to us from the segment.io webhook integration""" from datetime import datetime import json from ddt import ddt, data, unpack from mock import sentinel from django.contrib.auth.models import User from django.test.client import RequestFactory from django.test.utils import override_settings from track.middleware import TrackMiddleware from track.tests import EventTrackingTestCase from track.views import segmentio SECRET = 'anything' ENDPOINT = '/segmentio/test/event' USER_ID = 10 MOBILE_SHIM_PROCESSOR = [ { 'ENGINE': 'track.shim.LegacyFieldMappingProcessor' }, { 'ENGINE': 'track.shim.VideoEventProcessor' } ] def expect_failure_with_message(message): """Ensure the test raises an exception and does not emit an event""" def test_decorator(func): def test_decorated(self, *args, **kwargs): self.assertRaisesRegexp(segmentio.EventValidationError, message, func, self, *args, **kwargs) self.assert_no_events_emitted() return test_decorated return test_decorator @ddt @override_settings( TRACKING_SEGMENTIO_WEBHOOK_SECRET=SECRET, TRACKING_IGNORE_URL_PATTERNS=[ENDPOINT], TRACKING_SEGMENTIO_ALLOWED_TYPES=['track'], TRACKING_SEGMENTIO_DISALLOWED_SUBSTRING_NAMES=['.bi.'], TRACKING_SEGMENTIO_SOURCE_MAP={'test-app': 'mobile'}, EVENT_TRACKING_PROCESSORS=MOBILE_SHIM_PROCESSOR, ) class SegmentIOTrackingTestCase(EventTrackingTestCase): """Test processing of segment.io events""" def setUp(self): super(SegmentIOTrackingTestCase, self).setUp() self.maxDiff = None # pylint: disable=invalid-name self.request_factory = RequestFactory() def test_get_request(self): request = self.request_factory.get(ENDPOINT) response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 405) self.assert_no_events_emitted() @override_settings( TRACKING_SEGMENTIO_WEBHOOK_SECRET=None ) def test_no_secret_config(self): request = self.request_factory.post(ENDPOINT) response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 401) 
self.assert_no_events_emitted() def test_no_secret_provided(self): request = self.request_factory.post(ENDPOINT) response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 401) self.assert_no_events_emitted() def test_secret_mismatch(self): request = self.create_request(key='y') response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 401) self.assert_no_events_emitted() def create_request(self, key=None, **kwargs): """Create a fake request that emulates a request from the segment.io servers to ours""" if key is None: key = SECRET request = self.request_factory.post(ENDPOINT + "?key=" + key, **kwargs) if 'data' in kwargs: request.json = json.loads(kwargs['data']) return request @data('identify', 'Group', 'Alias', 'Page', 'identify', 'screen') @expect_failure_with_message(segmentio.WARNING_IGNORED_TYPE) def test_segmentio_ignore_actions(self, action): self.post_segmentio_event(action=action) @data('edx.bi.some_name', 'EDX.BI.CAPITAL_NAME') @expect_failure_with_message(segmentio.WARNING_IGNORED_TYPE) def test_segmentio_ignore_names(self, name): self.post_segmentio_event(name=name) def post_segmentio_event(self, **kwargs): """Post a fake segment.io event to the view that processes it""" request = self.create_request( data=self.create_segmentio_event_json(**kwargs), content_type='application/json' ) segmentio.track_segmentio_event(request) def create_segmentio_event(self, **kwargs): """Populate a fake segment.io event with data of interest""" action = kwargs.get('action', 'Track') sample_event = { "userId": kwargs.get('user_id', USER_ID), "event": "Did something", "properties": { 'name': kwargs.get('name', str(sentinel.name)), 'data': kwargs.get('data', {}), 'context': { 'course_id': kwargs.get('course_id') or '', 'app_name': 'edx.mobile.android', } }, "channel": 'server', "context": { "library": { "name": kwargs.get('library_name', 'test-app'), "version": "unknown" }, "app": { "version": "1.0.1", }, 'userAgent': 
str(sentinel.user_agent), }, "receivedAt": "2014-08-27T16:33:39.100Z", "timestamp": "2014-08-27T16:33:39.215Z", "type": action.lower(), "projectId": "u0j33yjkr8", "messageId": "qy52hwp4", "version": 2, "integrations": {}, "options": { "library": "unknown", "providers": {} }, "action": action } if 'context' in kwargs: sample_event['properties']['context'].update(kwargs['context']) return sample_event def create_segmentio_event_json(self, **kwargs): """Return a json string containing a fake segment.io event""" return json.dumps(self.create_segmentio_event(**kwargs)) @expect_failure_with_message(segmentio.WARNING_IGNORED_SOURCE) def test_segmentio_ignore_unknown_libraries(self): self.post_segmentio_event(library_name='foo') @expect_failure_with_message(segmentio.ERROR_USER_NOT_EXIST) def test_no_user_for_user_id(self): self.post_segmentio_event(user_id=40) @expect_failure_with_message(segmentio.ERROR_INVALID_USER_ID) def test_invalid_user_id(self): self.post_segmentio_event(user_id='foobar') @data('foo/bar/baz', 'course-v1:foo+bar+baz') def test_success(self, course_id): middleware = TrackMiddleware() request = self.create_request( data=self.create_segmentio_event_json(data={'foo': 'bar'}, course_id=course_id), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) middleware.process_request(request) # The middleware normally emits an event, make sure it doesn't in this case. 
self.assert_no_events_emitted() try: response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 200) expected_event = { 'accept_language': '', 'referer': '', 'username': str(sentinel.username), 'ip': '', 'session': '', 'event_source': 'mobile', 'event_type': str(sentinel.name), 'name': str(sentinel.name), 'event': {'foo': 'bar'}, 'agent': str(sentinel.user_agent), 'page': None, 'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"), 'host': 'testserver', 'context': { 'application': { 'name': 'edx.mobile.android', 'version': '1.0.1', }, 'user_id': USER_ID, 'course_id': course_id, 'org_id': u'foo', 'path': ENDPOINT, 'client': { 'library': { 'name': 'test-app', 'version': 'unknown' }, 'app': { 'version': '1.0.1', }, }, 'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"), }, } finally: middleware.process_response(request, None) self.assertEqualUnicode(self.get_event(), expected_event) def test_invalid_course_id(self): request = self.create_request( data=self.create_segmentio_event_json(course_id='invalid'), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) segmentio.track_segmentio_event(request) self.assert_events_emitted() @expect_failure_with_message(segmentio.ERROR_MISSING_NAME) def test_missing_name(self): sample_event_raw = self.create_segmentio_event() del sample_event_raw['properties']['name'] request = self.create_request( data=json.dumps(sample_event_raw), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) segmentio.track_segmentio_event(request) @expect_failure_with_message(segmentio.ERROR_MISSING_DATA) def test_missing_data(self): sample_event_raw = self.create_segmentio_event() del sample_event_raw['properties']['data'] request = self.create_request( data=json.dumps(sample_event_raw), content_type='application/json' ) User.objects.create(pk=USER_ID, 
username=str(sentinel.username)) segmentio.track_segmentio_event(request) @expect_failure_with_message(segmentio.ERROR_MISSING_TIMESTAMP) def test_missing_timestamp(self): sample_event_raw = self.create_event_without_fields('timestamp') request = self.create_request( data=json.dumps(sample_event_raw), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) segmentio.track_segmentio_event(request) @expect_failure_with_message(segmentio.ERROR_MISSING_RECEIVED_AT) def test_missing_received_at(self): sample_event_raw = self.create_event_without_fields('receivedAt') request = self.create_request( data=json.dumps(sample_event_raw), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) segmentio.track_segmentio_event(request) def create_event_without_fields(self, *fields): """Create a fake event and remove some fields from it""" event = self.create_segmentio_event() for field in fields: if field in event: del event[field] return event def test_string_user_id(self): User.objects.create(pk=USER_ID, username=str(sentinel.username)) self.post_segmentio_event(user_id=str(USER_ID)) self.assert_events_emitted() def test_hiding_failure(self): sample_event_raw = self.create_event_without_fields('timestamp') request = self.create_request( data=json.dumps(sample_event_raw), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 200) self.assert_no_events_emitted() @data( ('edx.video.played', 'play_video'), ('edx.video.paused', 'pause_video'), ('edx.video.stopped', 'stop_video'), ('edx.video.loaded', 'load_video'), ('edx.video.position.changed', 'seek_video'), ('edx.video.transcript.shown', 'show_transcript'), ('edx.video.transcript.hidden', 'hide_transcript'), ) @unpack def test_video_event(self, name, event_type): course_id = 'foo/bar/baz' middleware = 
TrackMiddleware() input_payload = { 'current_time': 132.134456, 'module_id': 'i4x://foo/bar/baz/some_module', 'code': 'mobile' } if name == 'edx.video.loaded': del input_payload['current_time'] request = self.create_request( data=self.create_segmentio_event_json( name=name, data=input_payload, context={ 'open_in_browser_url': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity/2', 'course_id': course_id, 'application': { 'name': 'edx.mobileapp.android', 'version': '29', 'component': 'videoplayer' } }), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) middleware.process_request(request) try: response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 200) expected_event_without_payload = { 'accept_language': '', 'referer': '', 'username': str(sentinel.username), 'ip': '', 'session': '', 'event_source': 'mobile', 'event_type': event_type, 'name': name, 'agent': str(sentinel.user_agent), 'page': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity', 'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"), 'host': 'testserver', 'context': { 'user_id': USER_ID, 'course_id': course_id, 'org_id': 'foo', 'path': ENDPOINT, 'client': { 'library': { 'name': 'test-app', 'version': 'unknown' }, 'app': { 'version': '1.0.1', }, }, 'application': { 'name': 'edx.mobileapp.android', 'version': '29', 'component': 'videoplayer' }, 'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"), }, } expected_payload = { 'currentTime': 132.134456, 'id': 'i4x-foo-bar-baz-some_module', 'code': 'mobile' } if name == 'edx.video.loaded': del expected_payload['currentTime'] finally: middleware.process_response(request, None) actual_event = dict(self.get_event()) payload = json.loads(actual_event.pop('event')) self.assertEqualUnicode(actual_event, expected_event_without_payload) self.assertEqualUnicode(payload, expected_payload) @data( # Verify 
positive slide case. Verify slide to onSlideSeek. Verify edx.video.seeked emitted from iOS v1.0.02 is changed to edx.video.position.changed. (1, 1, "seek_type", "slide", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'), # Verify negative slide case. Verify slide to onSlideSeek. Verify edx.video.seeked to edx.video.position.changed. (-2, -2, "seek_type", "slide", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'), # Verify +30 is changed to -30 which is incorrectly emitted in iOS v1.0.02. Verify skip to onSkipSeek (30, -30, "seek_type", "skip", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'), # Verify the correct case of -30 is also handled as well. Verify skip to onSkipSeek (-30, -30, "seek_type", "skip", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.iOS', '1.0.02'), # Verify positive slide case where onSkipSeek is changed to onSlideSkip. Verify edx.video.seeked emitted from Android v1.0.02 is changed to edx.video.position.changed. (1, 1, "type", "onSkipSeek", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'), # Verify positive slide case where onSkipSeek is changed to onSlideSkip. Verify edx.video.seeked emitted from Android v1.0.02 is changed to edx.video.position.changed. (-2, -2, "type", "onSkipSeek", "onSlideSeek", "edx.video.seeked", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'), # Verify positive skip case where onSkipSeek is not changed and does not become negative. (30, 30, "type", "onSkipSeek", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02'), # Verify positive skip case where onSkipSeek is not changed. 
(-30, -30, "type", "onSkipSeek", "onSkipSeek", "edx.video.position.changed", "edx.video.position.changed", 'edx.mobileapp.android', '1.0.02') ) @unpack def test_previous_builds(self, requested_skip_interval, expected_skip_interval, seek_type_key, seek_type, expected_seek_type, name, expected_name, platform, version, ): """ Test backwards compatibility of previous app builds iOS version 1.0.02: Incorrectly emits the skip back 30 seconds as +30 instead of -30. Android version 1.0.02: Skip and slide were both being returned as a skip. Skip or slide is determined by checking if the skip time is == -30 Additionally, for both of the above mentioned versions, edx.video.seeked was sent instead of edx.video.position.changed """ course_id = 'foo/bar/baz' middleware = TrackMiddleware() input_payload = { "code": "mobile", "new_time": 89.699177437, "old_time": 119.699177437, seek_type_key: seek_type, "requested_skip_interval": requested_skip_interval, 'module_id': 'i4x://foo/bar/baz/some_module', } request = self.create_request( data=self.create_segmentio_event_json( name=name, data=input_payload, context={ 'open_in_browser_url': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity/2', 'course_id': course_id, 'application': { 'name': platform, 'version': version, 'component': 'videoplayer' } }, ), content_type='application/json' ) User.objects.create(pk=USER_ID, username=str(sentinel.username)) middleware.process_request(request) try: response = segmentio.segmentio_event(request) self.assertEquals(response.status_code, 200) expected_event_without_payload = { 'accept_language': '', 'referer': '', 'username': str(sentinel.username), 'ip': '', 'session': '', 'event_source': 'mobile', 'event_type': "seek_video", 'name': expected_name, 'agent': str(sentinel.user_agent), 'page': 'https://testserver/courses/foo/bar/baz/courseware/Week_1/Activity', 'time': datetime.strptime("2014-08-27T16:33:39.215Z", "%Y-%m-%dT%H:%M:%S.%fZ"), 'host': 'testserver', 'context': { 'user_id': 
USER_ID, 'course_id': course_id, 'org_id': 'foo', 'path': ENDPOINT, 'client': { 'library': { 'name': 'test-app', 'version': 'unknown' }, 'app': { 'version': '1.0.1', }, }, 'application': { 'name': platform, 'version': version, 'component': 'videoplayer' }, 'received_at': datetime.strptime("2014-08-27T16:33:39.100Z", "%Y-%m-%dT%H:%M:%S.%fZ"), }, } expected_payload = { "code": "mobile", "new_time": 89.699177437, "old_time": 119.699177437, "type": expected_seek_type, "requested_skip_interval": expected_skip_interval, 'id': 'i4x-foo-bar-baz-some_module', } finally: middleware.process_response(request, None) actual_event = dict(self.get_event()) payload = json.loads(actual_event.pop('event')) self.assertEqualUnicode(actual_event, expected_event_without_payload) self.assertEqualUnicode(payload, expected_payload)
agpl-3.0
zding5/Microblog-Flask
flask/lib/python2.7/site-packages/migrate/tests/versioning/test_cfgparse.py
63
1112
#!/usr/bin/python # -*- coding: utf-8 -*- from migrate.versioning import cfgparse from migrate.versioning.repository import * from migrate.versioning.template import Template from migrate.tests import fixture class TestConfigParser(fixture.Base): def test_to_dict(self): """Correctly interpret config results as dictionaries""" parser = cfgparse.Parser(dict(default_value=42)) self.assertTrue(len(parser.sections()) == 0) parser.add_section('section') parser.set('section','option','value') self.assertEqual(parser.get('section', 'option'), 'value') self.assertEqual(parser.to_dict()['section']['option'], 'value') def test_table_config(self): """We should be able to specify the table to be used with a repository""" default_text = Repository.prepare_config(Template().get_repository(), 'repository_name', {}) specified_text = Repository.prepare_config(Template().get_repository(), 'repository_name', {'version_table': '_other_table'}) self.assertNotEqual(default_text, specified_text)
mit
dahaic/outerspace
server/lib/ige/ospace/IPlanet.py
2
55054
# # Copyright 2001 - 2016 Ludek Smid [http://www.ospace.net/] # # This file is part of Outer Space. # # Outer Space is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # Outer Space is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Outer Space; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA # import copy import math import random from xml.dom.minidom import Node import ige import Const import Rules import Utils import ShipUtils from ige import log from ige.IObject import IObject, public from ige.IDataHolder import IDataHolder class IPlanet(IObject): typeID = Const.T_PLANET def init(self, obj): IObject.init(self, obj) # obj.x = 0.0 obj.y = 0.0 obj.plDiameter = 0 obj.plType = u'-' obj.plMin = 0 obj.plBio = 0 obj.plEn = 0 obj.plEnv = 0 obj.plSlots = 0 obj.plMaxSlots = 0 obj.plStratRes = 0 obj.plDisease = 0 obj.plStarting = 0 obj.orbit = 0 obj.storPop = 0 obj.slots = [] obj.lastPirCapture = 0 # storage obj.storBio = 0 obj.storEn = 0 obj.minBio = Rules.colonyMinBio obj.minEn = Rules.colonyMinEn obj.maxBio = 0 obj.maxEn = 0 # changes/prod obj.prodQueue = [] obj.globalQueue = 0 obj.changeBio = 0 obj.changeEn = 0 obj.changePop = 0 obj.changeEnv = 0 obj.prodProd = 0 obj.effProdProd = 0 obj.prodSci = 0 obj.effProdSci = 0 obj.unemployedPop = 0 # eating / housing obj.popEatBio = 10 obj.popEatEn = 0 obj.maxPop = 0 # extra goodies obj.solarmod = 0 obj.scannerPwr = 0 obj.signature = 75 obj.autoMinStor = 1 obj.morale = Rules.maxMorale obj.changeMorale = 0.0 obj.moraleTrgt = 0.0 # 
moraleModifiers [ base morale by distance from homeworld, from buildings, from population, from unemployment, summary 1+2+3+4 ] obj.moraleModifiers = [0.0, 0.0, 0.0, 0.0, 0.0] obj.revoltLen = 0 obj.combatExp = 0 obj.isMilitary = 0 obj.refuelMax = 0 obj.refuelInc = 0 obj.repairShip = 0.0 obj.upgradeShip = 0.0 obj.trainShipInc = 0 obj.trainShipMax = 0 obj.fleetSpeedBoost = 1.0 obj.ownerSince = 0 obj.shield = 0 #current planetary shield level obj.maxShield = 0 #structural max sheild (best structure method) obj.prevShield = 0 #previous turn's shield level (for client growth calculation) @public(Const.AL_FULL) def startConstruction(self, tran, obj, techID, quantity, targetID, isShip, reportFinished, demolishStruct): if len(obj.prodQueue) > Rules.maxProdQueueLen: raise ige.GameException('Queue is full.') if quantity < 1: raise ige.GameException("Quantity must be greater than 0") player = tran.db[obj.owner] if not player.techs.has_key(techID) and isShip == 0: raise ige.GameException('You do not own this kind of technology.') if not player.shipDesigns.has_key(techID) and isShip == 1: raise ige.GameException('You do not own this ship design.') if targetID not in tran.db[obj.compOf].planets: raise ige.GameException('You can build only in the same system.') if isShip: tech = player.shipDesigns[techID] if tech.upgradeTo: raise ige.GameException("You cannot build obsolete ship design.") else: tech = Rules.techs[techID] if not (tech.isStructure or tech.isProject): raise ige.GameException('You cannot construct this technology.') if not tech.validateConstrHandler(tran, obj, tran.db[targetID], tech): raise ige.GameException('Conditions for construction are not satisfied.') neededSR = {} for sr in tech.buildSRes: nSR = neededSR.get(sr, 0) + quantity * tech.buildSRes[sr] if player.stratRes.get(sr, 0) < nSR: raise ige.GameException("You do not own enough of required strategic resource(s)") neededSR[sr] = nSR # consume strategic resources for sr in neededSR: player.stratRes[sr] -= 
neededSR[sr] # start construction item = IDataHolder() item.techID = techID item.currProd = 0 item.currTurn = 0 item.quantity = int(quantity) item.targetID = targetID item.changePerc = 0 item.isShip = bool(isShip) item.reportFin = bool(reportFinished) item.demolishStruct = demolishStruct item.type = Const.T_TASK obj.prodQueue.append(item) return obj.prodQueue, player.stratRes @public(Const.AL_FULL) def changeConstruction(self, tran, obj, index, quantity): if index < 0 or index >= len(obj.prodQueue): raise ige.GameException("No such item in the construction queue.") if quantity < 1: raise ige.GameException("Quantity must be greater than 0") player = tran.db[obj.owner] item = obj.prodQueue[index] if item.isShip: tech = player.shipDesigns[item.techID] else: tech = Rules.techs[item.techID] quantityChange = quantity - obj.prodQueue[index].quantity neededSR = {} for sr in tech.buildSRes: nSR = neededSR.get(sr, 0) + quantityChange * tech.buildSRes[sr] if player.stratRes.get(sr, 0) < nSR: raise ige.GameException("You do not own enough of required strategic resource(s)") neededSR[sr] = nSR # consume strategic resources for sr in neededSR: player.stratRes[sr] -= neededSR[sr] obj.prodQueue[index].quantity = quantity return obj.prodQueue, player.stratRes @public(Const.AL_FULL) def abortConstruction(self, tran, obj, index): if index >= len(obj.prodQueue): raise ige.GameException('No such item in the construction queue.') # Free strategic resources player = tran.db[obj.owner] item = obj.prodQueue[index] if item.isShip: tech = player.shipDesigns[item.techID] else: tech = Rules.techs[item.techID] for sr in tech.buildSRes: player.stratRes[sr] = player.stratRes.get(sr, 0) + item.quantity * tech.buildSRes[sr] # delete task del obj.prodQueue[index] return obj.prodQueue, player.stratRes @public(Const.AL_FULL) def moveConstrItem(self, tran, obj, index, rel): if index >= len(obj.prodQueue): raise ige.GameException('No such item in the construction queue.') if index + rel < 0 or index + 
rel >= len(obj.prodQueue): raise ige.GameException('Cannot move.') item = obj.prodQueue[index] del obj.prodQueue[index] obj.prodQueue.insert(index + rel, item) return obj.prodQueue @public(Const.AL_ADMIN) def changeOwner(self, tran, obj, ownerID, force = 0): oldOwnerID = obj.owner if obj.owner == ownerID: # the owner is the same return elif obj.owner != Const.OID_NONE and force == 0: # this planet is already owned! # TODO resolve conflict (based on player relations) raise ige.GameException('Planet is already owned by another commander.') elif obj.owner != Const.OID_NONE and force == 1: # remove planet from old owner try: oldOwner = tran.db[obj.owner] oldOwner.planets.remove(obj.oid) if tran.db.has_key(obj.owner): Utils.sendMessage(tran, obj, Const.MSG_LOST_PLANET, obj.oid, None) except Exception: log.warning("Cannot remove planet from owner", obj.oid, obj.owner) oldOwnerID = Const.OID_NONE # reset timer obj.ownerSince = tran.db[Const.OID_UNIVERSE].turn # add planet to new owner's empire if ownerID != Const.OID_NONE: newOwner = tran.db[ownerID] newOwner.planets.append(obj.oid) # reset some attributes obj.owner = ownerID obj.revoltLen = 0 # no revolt obj.prodQueue = [] # clear production queue obj.globalQueue = 0 # default global queue obj.autoMinStor = 1 # storage is set to auto if ownerID != Const.OID_NONE: # notify player Utils.sendMessage(tran, obj, Const.MSG_GAINED_PLANET, obj.oid, None) @public(Const.AL_FULL) def setStructOn(self, tran, obj, slotIdx, on): if slotIdx >= len(obj.slots) or slotIdx < 0: raise ige.GameException('No such structure.') if on: obj.slots[slotIdx][Const.STRUCT_IDX_STATUS] |= Const.STRUCT_STATUS_ON else: obj.slots[slotIdx][Const.STRUCT_IDX_STATUS] &= ~Const.STRUCT_STATUS_ON return obj.slots[slotIdx] @public(Const.AL_FULL) def demolishStruct(self, tran, obj, slotIdx): # TODO implement special button for demolishing structures when # planet surrenders #isCombat = tran.db[obj.compOf].combatCounter > 0 #if isCombat and len(obj.slots) < 
obj.plSlots: # raise ige.GameException("You cannot destroy this structure under fire - at least one slot is free.") if slotIdx >= len(obj.slots) or slotIdx < 0: raise ige.GameException('No such structure.') del obj.slots[slotIdx] return obj.slots @public(Const.AL_FULL) def moveStruct(self, tran, obj, slotIdx, rel): if slotIdx >= len(obj.slots) or slotIdx < 0: raise ige.GameException('No such structure.') if slotIdx + rel < 0 or slotIdx + rel >= len(obj.slots): raise ige.GameException('Cannot move.') struct = obj.slots[slotIdx] del obj.slots[slotIdx] obj.slots.insert(slotIdx + rel, struct) return obj.slots @public(Const.AL_ADMIN) def processINITPhase(self, tran, obj, data): # get rid of the NEW states for struct in obj.slots: struct[Const.STRUCT_IDX_STATUS] &= Const.STRUCT_STATUS_RESETFLGS def _getStructProdMod(self, obj, techProdMod): b, m, e, d = techProdMod return (b * obj.plBio + m * obj.plMin + e * obj.plEn + d * 100) / 100 def _getOpStatus(self, obj, techProdMod, techOper, techProd, stor): prodMod = self._getStructProdMod(obj, techProdMod) slope = techProd * prodMod - techOper if slope >= 0: # structure is self-sufficient in this aspect return 1.0 else: return min(stor / - slope, 1.0) def _getStructStatus(self, obj, struct, tech, maxHP): # find most limitating condition if not struct[Const.STRUCT_IDX_STATUS] & Const.STRUCT_STATUS_ON: return 0.0, 0.0 try: opStatusHP = min(1.0, float(struct[Const.STRUCT_IDX_HP]) / maxHP) except: opStatusHP = 0.0 log.warning('Invalid max HP of structure', Const.STRUCT_IDX_TECHID) opStatusBio = self._getOpStatus(obj, tech.prodBioMod, tech.operBio, tech.prodBio, obj.storBio) opStatusEn = self._getOpStatus(obj, tech.prodEnMod, tech.operEn, tech.prodEn, obj.storEn) opStatusPop = min(1.0, float(obj.unemployedPop) / tech.operWorkers) opStatus = min(opStatusHP, opStatusBio, opStatusEn, opStatusPop) if opStatus < 1.0: if opStatusBio == opStatus: struct[Const.STRUCT_IDX_STATUS] |= Const.STRUCT_STATUS_NOBIO if opStatusEn == opStatus: 
struct[Const.STRUCT_IDX_STATUS] |= Const.STRUCT_STATUS_NOEN if opStatusPop == opStatus: struct[Const.STRUCT_IDX_STATUS] |= Const.STRUCT_STATUS_NOPOP struct[Const.STRUCT_IDX_OPSTATUS] = int(100 * opStatus) return opStatusHP, min(opStatusBio, opStatusEn, opStatusPop) def _updateStructHP(self, obj, struct, tech, opStatuses, maxHP): # auto repair/damage # also damage structures on not owned planets opStatusHP, opStatusOther = opStatuses properHP = opStatusOther * maxHP if struct[Const.STRUCT_IDX_HP] < properHP: repairDiff = min(properHP - struct[Const.STRUCT_IDX_HP], Rules.repairRatioFunc(tech.buildProd) * maxHP) struct[Const.STRUCT_IDX_HP] += repairDiff struct[Const.STRUCT_IDX_STATUS] |= Const.STRUCT_STATUS_REPAIRING elif struct[Const.STRUCT_IDX_HP] > properHP: decayDiff = min(struct[Const.STRUCT_IDX_HP] - properHP, Rules.decayRatioFunc(tech.buildProd) * maxHP) struct[Const.STRUCT_IDX_STATUS] |= Const.STRUCT_STATUS_DETER # damage it a bit struct[Const.STRUCT_IDX_HP] -= decayDiff if obj.storPop > 0: # do not fall below 1 HP for populated planets so it won't destroy buildings struct[Const.STRUCT_IDX_HP] = max(struct[Const.STRUCT_IDX_HP], 1) if struct[Const.STRUCT_IDX_HP] <= 0: obj.slots.remove(struct) def _processStructs(self, tran, obj): for struct in obj.slots[:]: # skip structure if it was built this turn if struct[Const.STRUCT_IDX_STATUS] & Const.STRUCT_STATUS_NEW: continue tech = Rules.techs[struct[Const.STRUCT_IDX_TECHID]] # compute struct effectivity techEff = Utils.getTechEff(tran, struct[Const.STRUCT_IDX_TECHID], obj.owner) # morale does not affect hit points of structures maxHP = int(tech.maxHP * techEff) # auto regulation of min resources if obj.autoMinStor: obj.minBio += tech.operBio * Rules.autoMinStorTurns obj.minEn += tech.operEn * Rules.autoMinStorTurns # produce/consume resources opStatuses = self._getStructStatus(obj, struct, tech, maxHP) self._updateStructHP(obj, struct, tech, opStatuses, maxHP) opStatus = min(opStatuses) # solarmod effects ENV change 
and terraforming only if benificial if tech.solarMod * opStatus > 0: obj.solarmod = max(obj.solarmod, tech.solarMod * techEff * opStatus) elif tech.solarMod * opStatus < 0: obj.solarmod = min(obj.solarmod, tech.solarMod * techEff * opStatus) # bio prodMod = self._getStructProdMod(obj, tech.prodBioMod) obj.storBio += int(tech.prodBio * prodMod * techEff * opStatus) - int(tech.operBio * opStatus) # en prodMod = self._getStructProdMod(obj, tech.prodEnMod) obj.storEn += int(tech.prodEn * prodMod * techEff * opStatus) - int(tech.operEn * opStatus) obj.unemployedPop -= min(obj.unemployedPop, int(tech.operWorkers * opStatus)) obj.storPop += int(tech.prodPop * techEff * opStatus) obj.scannerPwr = max(int(tech.scannerPwr * techEff * opStatus), obj.scannerPwr) obj.scannerPwr = min(obj.scannerPwr, Rules.scannerMaxPwr) # rebellion and combat has common penalty prodMod = self._getStructProdMod(obj, tech.prodProdMod) obj.prodProd += int(tech.prodProd * prodMod * techEff * opStatus) # science prodMod = self._getStructProdMod(obj, tech.prodSciMod) obj.prodSci += int(tech.prodSci * prodMod * techEff * opStatus) # refuelling & repairing obj.refuelMax = max(obj.refuelMax, int(tech.refuelMax * techEff * opStatus)) # refuelling obj.refuelInc = max(obj.refuelInc, int(tech.refuelInc * techEff * opStatus)) # repair obj.repairShip += tech.repairShip * techEff * opStatus obj.upgradeShip += tech.upgradeShip * techEff * opStatus # train obj.trainShipMax = max(obj.trainShipMax, tech.trainShipMax) obj.trainShipInc = max(obj.trainShipInc, tech.trainShipInc * techEff * opStatus) # shielding obj.maxShield = max(tech.planetShield * techEff * opStatus, obj.maxShield) # stargates obj.fleetSpeedBoost = max(obj.fleetSpeedBoost, tech.fleetSpeedBoost * techEff * opStatus) # storage obj.maxBio += int(tech.storBio * techEff) obj.maxEn += int(tech.storEn * techEff) # each structure accomodate it's workers obj.maxPop += tech.operWorkers obj.maxPop += int(tech.storPop * techEff) obj.plEnv += int(tech.prodEnv 
* techEff * opStatus) # morale modifier of the building obj.moraleModifiers[1] += tech.moraleTrgt * techEff * opStatus def _processPopulation(self, obj, owner): if not obj.storPop: return # population reserve obj.maxPop += obj.plSlots * getattr(owner, "techLevel", 1) * Rules.tlPopReserve # max pop maxPop = obj.maxPop if obj.popEatBio: maxPop = min(maxPop, 1000.0 * obj.storBio / obj.popEatBio) if obj.popEatEn: maxPop = min(maxPop, 1000.0 * obj.storEn / obj.popEatEn) maxPop = int(maxPop) # eat pop = obj.storPop / 1000.0 wantBio = int(math.ceil(pop * obj.popEatBio)) wantEn = int(math.ceil(pop * obj.popEatEn)) # auto regulation of min resources if obj.autoMinStor: obj.minBio += wantBio * Rules.autoMinStorTurns obj.minEn += wantEn * Rules.autoMinStorTurns # consume resources obj.storBio -= min(obj.storBio, wantBio) obj.storEn -= min(obj.storEn, wantEn) # modify pop if obj.storPop > maxPop: # die obj.storPop -= max(int((obj.storPop - maxPop) * Rules.popDieRate), Rules.popMinDieRate) #if obj.storPop < maxPop: obj.storPop = maxPop # do not generate this message when construction has been destroyed # and do not lower morale too if obj.storPop < obj.maxPop: obj.morale = max(obj.morale - Rules.moraleLostNoFood,0) elif obj.storPop < maxPop: # born obj.storPop += max(min(int(obj.storPop * Rules.popGrowthRate), maxPop - obj.storPop), Rules.popMinGrowthRate) def _buildShip(self, tran, obj, item, owner): system = tran.db[obj.compOf] # find commander's fleet fleet = None # check if current system has any redirection hasRedirection = obj.compOf in owner.shipRedirections for fleetID in system.fleets: tmpFleet = tran.db[fleetID] if tmpFleet.owner == obj.owner and Utils.isIdleFleet(tmpFleet): fleet = tmpFleet break if not fleet or hasRedirection: fleet = self.new(Const.T_FLEET) tran.db.create(fleet) self.cmd(fleet).create(tran, fleet, system, obj.owner) self.cmd(fleet).addAction(tran, fleet, 0, Const.FLACTION_REDIRECT, Const.OID_NONE, None) # add ships to the fleet 
self.cmd(fleet).addNewShip(tran, fleet, item.techID) if item.reportFin and item.quantity == 1: Utils.sendMessage(tran, obj, Const.MSG_COMPLETED_SHIP, obj.oid, item.techID) def _buildStructure(self, tran, obj, item, tech, target): # if there is struct to demolish, find it, determine its CP value, and remove it validStructs = [struct for struct in target.slots if struct[Const.STRUCT_IDX_TECHID] == item.demolishStruct] if len(validStructs): assert item.demolishStruct != Const.OID_NONE oldStruct = validStructs[0] target.slots.remove(oldStruct) # replacement of structure means new struct will start in slightly more complete state oldStructTech = Rules.techs[item.demolishStruct] try: oldStructImpr = tran.db[obj.owner].techs[item.demolishStruct] except KeyError: oldStructImpr = 1 maxHP = oldStructTech.maxHP * Rules.techImprEff[oldStructImpr] structValue = oldStructTech.buildProd * float(oldStruct[Const.STRUCT_IDX_HP]) / maxHP newStruct = Rules.techs[item.techID] buildHPRatio = min(Rules.structTransferMaxRatio, Rules.structTransferWaste * structValue / newStruct.buildProd) else: buildHPRatio = Rules.structDefaultHpRatio if len(target.slots) < target.plSlots: target.slots.append(Utils.newStructure(tran, item.techID, obj.owner, hpRatio = buildHPRatio)) try: tech.finishConstrHandler(tran, obj, target, tech) except Exception: log.warning("Cannot execute finish constr handler") if item.reportFin and item.quantity == 1: Utils.sendMessage(tran, obj, Const.MSG_COMPLETED_STRUCTURE, target.oid, item.techID) else: # no free slot! 
Utils.sendMessage(tran, obj, Const.MSG_CANNOTBUILD_NOSLOT, target.oid, None) def _processProduction(self, tran, obj, owner): # produce items in construction queue if owner: moraleBonus = Rules.moraleProdBonus[int(obj.morale / Rules.moraleProdStep)] prod = obj.effProdProd = max(0, int(obj.prodProd * (owner.prodEff + moraleBonus))) if (obj.morale > 15 and prod == 0 and obj.prodProd > 0 and owner.prodEff > 0): #added for super-low moral bonus issues prod = obj.effProdProd = 1 else: prod = obj.prodProd explicitIdleProd = 0.0 # empty queue should be filled by global queue if len(obj.prodQueue) == 0 and prod: task = self.cmd(obj).popGlobalQueue(tran, obj) if task: obj.prodQueue.append(task) index = 0 while prod > 0 and index < len(obj.prodQueue): item = obj.prodQueue[index] # check if owner has this tech if not item.isShip and item.techID not in owner.techs: # bad tech del obj.prodQueue[index] # TODO send message # set target target = tran.db[item.targetID] # set tech and build conditions if item.isShip: tech = tran.db[obj.owner].shipDesigns[item.techID] mod = Rules.buildOnSamePlanetMod else: tech = Rules.techs[item.techID] # check validity of the project if not tech.validateConstrHandler(tran, obj, target, tech): index += 1 # message to player Utils.sendMessage(tran, obj, Const.MSG_INVALID_TASK, obj.oid, item.techID) continue # building on other planet is more expensive if item.targetID == obj.oid: mod = Rules.buildOnSamePlanetMod else: mod = Rules.buildOnAnotherPlanetMod # compute needs (do not consume resources under minimal storage) wantProd = min(int(tech.buildProd * mod / tech.buildTurns - item.currProd), prod) # production item.changePerc = wantProd * 10000 / (tech.buildProd * mod) # consume / produce if item.techID == Rules.Tech.IDLETASK and item.isShip == 0: explicitIdleProd += wantProd prod -= wantProd item.currProd += wantProd # check, if production is complete if item.currProd >= tech.buildProd * mod: # item is complete if item.isShip: self._buildShip(tran, 
obj, item, owner) elif tech.isStructure: self._buildStructure(tran, obj, item, tech, target) elif tech.isProject: tech.finishConstrHandler(tran, obj, target, tech) if item.reportFin and item.quantity == 1: Utils.sendMessage(tran, obj, Const.MSG_COMPLETED_PROJECT, target.oid, item.techID) else: raise ige.GameException('Unsupported type of technology %d ' % item.techID) # remove item from prod queue item.quantity -= 1 if item.quantity == 0: # remove item from the queue del obj.prodQueue[index] # was it last item in the queue? pop the global one! if index == len(obj.prodQueue): task = self.cmd(obj).popGlobalQueue(tran, obj) if task: obj.prodQueue.append(task) else: # try to produce another item item.currProd = 0 else: # item is not complete stop production index += 1 break # decay items not currently produced while index < len(obj.prodQueue): item = obj.prodQueue[index] item.currProd -= int(item.currProd * Rules.decayProdQueue) index += 1 # use excess raw CP to increase production elsewhere prod += explicitIdleProd if prod > 0.0: owner.prodIncreasePool += prod def _processEnvironmentChange(self, tran, obj, owner): downgradeTo = Rules.planetSpec[obj.plType].downgradeTo solarminus = min(0, obj.solarmod) solarplus = max(0, obj.solarmod) if downgradeTo is not None: if (Rules.planetSpec[downgradeTo].upgradeEnReqs[0] > obj.plEn + solarplus) or (Rules.planetSpec[downgradeTo].upgradeEnReqs[1] < obj.plEn + solarminus): # auto damage on plEn outside downgrade's upgrade range obj.plEnv -= Rules.envAutoMod if obj.plBio > Rules.planetSpec[obj.plType].maxBio: # auto damage on plBio > maxBio of class dEnv = int((obj.plBio - Rules.planetSpec[obj.plType].maxBio) * Rules.envAutoMod) if obj.plEnv > 0: obj.plEnv -= min(obj.plEnv, dEnv) else: obj.plEnv -= dEnv # small chance of self-upgrading spec = Rules.planetSpec[obj.plType] race = owner.race if owner else "H" chance = int((obj.plBio - spec.maxBio) * Rules.envSelfUpgradeChance[race]) if Utils.rand(0, 10001) < chance and spec.upgradeTo 
and \ obj.plEn + solarplus >= spec.upgradeEnReqs[0] and \ obj.plEn + solarminus <= spec.upgradeEnReqs[1]: log.debug('IPlanet', obj.oid, 'Upgraded to', spec.upgradeTo) obj.plType = spec.upgradeTo Utils.sendMessage(tran, obj, Const.MSG_UPGRADED_PLANET_ECO, obj.oid, spec.upgradeTo) while obj.plEnv >= Rules.envInterval: #@log.debug('IPlanet', obj.oid, 'Env improved') obj.plEnv -= Rules.envInterval obj.changeEnv += Rules.envInterval if obj.plBio < 200: obj.plBio += 1 while obj.plEnv < 0: if obj.plBio > 0: obj.plBio -= 1 obj.plEnv += Rules.envInterval obj.changeEnv -= Rules.envInterval else: obj.changeEnv += obj.plEnv obj.plEnv = 0 # downgrade planet if necessary if obj.plBio < Rules.planetSpec[obj.plType].minBio: downgradeTo = Rules.planetSpec[obj.plType].downgradeTo if downgradeTo: log.debug('IPlanet', obj.oid, 'Downgraded to', downgradeTo) obj.plType = downgradeTo Utils.sendMessage(tran, obj, Const.MSG_DOWNGRADED_PLANET_ECO, obj.oid, downgradeTo) # record changes obj.changeBio += obj.storBio obj.changeEn += obj.storEn obj.changePop += obj.storPop obj.changeEnv += obj.plEnv @public(Const.AL_ADMIN) def processPRODPhase(self, tran, obj, data): # max storage obj.maxPop = obj.plSlots * Rules.popPerSlot + Rules.popBaseStor obj.maxBio = obj.plSlots * Rules.bioPerSlot + Rules.bioBaseStor obj.maxEn = obj.plSlots * Rules.enPerSlot + Rules.enBaseStor # refuel & repair obj.refuelMax = 0 obj.refuelInc = 0 obj.repairShip = 0.0 obj.upgradeShip = 0.0 # train obj.trainShipInc = 0 obj.trainShipMax = 0 obj.fleetSpeedBoost = 1.0 # if obj.storPop <= 0 and not obj.slots and obj.owner == Const.OID_NONE: # do not process this planet return obj.scannerPwr = Rules.scannerMinPwr obj.prodProd = Rules.basePlanetProdProd obj.prodSci = 0 obj.changeBio = - obj.storBio obj.changeEn = - obj.storEn obj.changePop = - obj.storPop obj.changeEnv = - obj.plEnv obj.changeMorale = - obj.morale # parent objects system = tran.db[obj.compOf] galaxy = tran.db[system.compOf] # collect strategic resources owner = 
tran.db.get(obj.owner, None) if owner and obj.plStratRes != Const.SR_NONE: turn = tran.db[Const.OID_UNIVERSE].turn if turn % Rules.stratResRate == 0: owner.stratRes[obj.plStratRes] = owner.stratRes.get(obj.plStratRes, 0) + Rules.stratResAmountBig Utils.sendMessage(tran, obj, Const.MSG_EXTRACTED_STRATRES, obj.oid, obj.plStratRes) # compute base morale if owner: homePlanet = tran.db[owner.planets[0]] dist = int(math.sqrt((homePlanet.x - obj.x) ** 2 + (homePlanet.y - obj.y) ** 2)) moraleTrgt = -37.5 * dist / owner.govPwrCtrlRange + 107.5 obj.moraleModifiers[0] = max(Rules.minMoraleTrgt, min(moraleTrgt, Rules.maxMorale)) #@log.debug(obj.oid, "Morale target", obj.moraleTrgt, "dist", dist, owner.govPwrCtrlRange) # auto regulation of min resources if obj.autoMinStor: obj.minBio = obj.minEn = 0 # combat? isCombat = system.combatCounter > 0 obj.unemployedPop = obj.storPop # ok, reset max pop obj.maxPop = 0 # process all structures obj.maxShield = 0 obj.solarmod = 0 #@log.debug("Morale bonus/penalty for planet", obj.oid, moraleBonus) # reset of "morale modifier by buildings" value obj.moraleModifiers[1] = 0 self._processStructs(tran, obj) if obj.revoltLen > 0 or isCombat: # no services available if distressed obj.refuelInc = obj.repairShip = obj.upgradeShip = obj.trainShipMax = obj.trainShipInc = 0 # do shield self generation obj.prevShield = obj.shield #for planet display of shield growth if obj.maxShield < obj.shield: obj.shield = obj.maxShield if obj.maxShield > obj.shield and not isCombat: regenTemp = max(1, Rules.plShieldRegen* obj.maxShield) #always regen at at least 1 obj.shield = min(obj.shield + regenTemp, obj.maxShield) #don't let it regen over shieldMax # pass scanner/... 
to the system obj.scannerPwr = min(obj.scannerPwr * (2.0 - galaxy.emrLevel), Rules.scannerMaxPwr) system.scannerPwrs[obj.owner] = max(obj.scannerPwr, system.scannerPwrs.get(obj.owner, 0)) self._processPopulation(obj, owner) self._processProduction(tran, obj, owner) self._processEnvironmentChange(tran, obj, owner) # auto regulation of min resources if obj.autoMinStor: obj.minBio = min(obj.minBio, obj.maxBio / 2) obj.minEn = min(obj.minEn, obj.maxEn / 2) # science if owner: moraleBonus = Rules.moraleProdBonus[int(obj.morale / Rules.moraleProdStep)] obj.effProdSci = max(0, int(obj.prodSci * (owner.sciEff + moraleBonus))) owner.sciPoints += obj.effProdSci # planet with no population cannot have an owner # and planet with no owner cannot have population if (obj.storPop <= 0 and obj.owner != Const.OID_NONE) or obj.owner == Const.OID_NONE: self.cmd(obj).changeOwner(tran, obj, Const.OID_NONE, force = 1) obj.storPop = 0 @public(Const.AL_ADMIN) def processACTIONPhase(self, tran, obj, data): return @public(Const.AL_ADMIN) def processFINALPhase(self, tran, obj, data): if obj.storPop <= 0 and not obj.slots and obj.owner == Const.OID_NONE: # do not process this planet return # reset of "morale modifier by population" value obj.moraleModifiers[2] = 0 system = tran.db[obj.compOf] galaxy = tran.db[system.compOf] if galaxy.timeEnabled: owner = tran.db.get(obj.owner, None) # too much population affects morale (if there is more than base population) if obj.storPop > Rules.moraleBasePop: obj.moraleModifiers[2] -= Rules.moraleHighPopPenalty * obj.storPop / Rules.moraleBasePop elif obj.storPop <= Rules.moraleLowPop: obj.moraleModifiers[2] += Rules.moraleLowPopBonus else: # gradually removing LowPop bonus as we approach BasePop - big jumps are awful game # mechanic moraleBonusRange = Rules.moraleBasePop - Rules.moraleLowPop moraleBonus = float(obj.storPop - Rules.moraleLowPop) / moraleBonusRange obj.moraleModifiers[2] += int(Rules.moraleLowPopBonus * (1 - moraleBonus) ) # there is effect 
of unemployed population # if there is none, there is a hit, if there is what's necessary, there is a bonus # effect between the two is linear idealUnemployedPop = obj.plSlots * getattr(owner, "techLevel", 1) * Rules.tlPopReserve moraleBonusRange = Rules.unemployedMoraleHigh - Rules.unemployedMoraleLow unemployedRatio = min(1.0, float(obj.unemployedPop) / idealUnemployedPop) obj.moraleModifiers[3] = Rules.unemployedMoraleLow + int(moraleBonusRange * unemployedRatio) # count final morale values obj.moraleModifiers[4] = obj.moraleModifiers[0] +obj.moraleModifiers[1] + obj.moraleModifiers[2] + obj.moraleModifiers[3] obj.moraleTrgt = obj.moraleModifiers[4] obj.moraleTrgt = max(0.0, min(obj.moraleTrgt, Rules.maxMorale)) if obj.morale > int(obj.moraleTrgt): obj.morale -= max(1.0, (obj.morale - obj.moraleTrgt) * Rules.moraleChngPerc) elif obj.morale < int(obj.moraleTrgt) and system.combatCounter == 0: obj.morale += max(1.0, (obj.moraleTrgt - obj.morale) * Rules.moraleChngPerc) #@log.debug('IPlanet', 'Mor Mor trgt/reb thr', obj.morale, obj.moraleTrgt) # revolt? if obj.revoltLen > 0: obj.revoltLen += 1 if obj.morale < Rules.revoltThr and obj.owner != Const.OID_NONE and obj.revoltLen == 0: chance = (Rules.revoltThr - obj.morale) * Rules.moralePerPointChance #@log.debug('IPlanet', 'Start revolt? mor, mor trgt, reb thr, chance', obj.morale, obj.moraleTrgt, chance) if Utils.rand(0, 101) <= chance: # rebelion starts #@log.debug('IPlanet', 'Revolt on', obj.oid) obj.revoltLen = 1 Utils.sendMessage(tran, obj, Const.MSG_REVOLT_STARTED, obj.oid, None) elif obj.revoltLen > 0 and obj.morale > Rules.revoltThr: chance = (obj.morale - Rules.revoltThr) * Rules.moralePerPointChance #@log.debug('IPlanet', 'Stop revolt? 
mor, mor trgt, reb thr, chance', obj.morale, obj.moraleTrgt, chance) if Utils.rand(0, 101) <= chance: # revolt ends obj.revoltLen = 0 Utils.sendMessage(tran, obj, Const.MSG_REVOLT_ENDED, obj.oid, None) obj.morale = max(0.0, min(Rules.maxMorale, obj.morale)) obj.changeMorale += obj.morale # when rebelling destroy some resources if obj.revoltLen > 0: obj.storBio -= int(obj.storBio * Rules.revoltDestrBio) obj.storEn -= int(obj.storEn * Rules.revoltDestrEn) # storage obj.storBio = min(obj.storBio, obj.maxBio) obj.storEn = min(obj.storEn, obj.maxEn) # collect stats if obj.owner != Const.OID_NONE: player = tran.db[obj.owner] player.stats.storPop += obj.storPop player.stats.prodProd += obj.prodProd player.stats.effProdProd += obj.effProdProd player.stats.prodSci += obj.prodSci player.stats.effProdSci += obj.effProdSci player.stats.structs += len(obj.slots) player.stats.slots += obj.plSlots # morale computation homePlanet = tran.db[player.planets[0]] dist = int(math.sqrt((homePlanet.x - obj.x) ** 2 + (homePlanet.y - obj.y) ** 2)) player.tmpPopDistr[dist] = player.tmpPopDistr.get(dist, 0) + obj.storPop def getScanInfos(self, tran, obj, scanPwr, player): if scanPwr >= Rules.level1InfoScanPwr: result = IDataHolder() result._type = Const.T_SCAN result.scanPwr = scanPwr result.oid = obj.oid result.signature = obj.signature result.type = obj.type result.orbit = obj.orbit result.compOf = obj.compOf result.x = obj.x result.y = obj.y result.plType = obj.plType if scanPwr >= Rules.level2InfoScanPwr: result.plDiameter = obj.plDiameter if getattr(obj, "plType", 'X') != 'G': result.plMin = obj.plMin result.plBio = obj.plBio result.plEn = obj.plEn result.plSlots = obj.plSlots result.plStratRes = obj.plStratRes result.plMaxSlots = obj.plMaxSlots if scanPwr >= Rules.level3InfoScanPwr: result.name = obj.name result.storPop = obj.storPop result.owner = obj.owner if scanPwr >= Rules.level4InfoScanPwr: # TODO provide less information result.hasRefuel = (obj.refuelInc > 0) #simple detect if 
docks exist for problems dialog result.slots = obj.slots result.shield = obj.shield result.prevShield = -1 result.maxShield = -1 if scanPwr >= Rules.partnerScanPwr: result.maxShield = obj.maxShield result.prevShield = obj.prevShield result.refuelMax = obj.refuelMax result.refuelInc = obj.refuelInc result.scannerPwr = obj.scannerPwr result.trainShipInc = obj.trainShipInc result.trainShipMax = obj.trainShipMax result.upgradeShip = obj.upgradeShip result.repairShip = obj.repairShip result.fleetSpeedBoost = obj.fleetSpeedBoost return [result] def loadDOMNode(self, tran, obj, xoff, yoff, orbit, node): obj.x = xoff obj.y = yoff obj.orbit = orbit for elem in node.childNodes: if elem.nodeType == Node.ELEMENT_NODE: name = elem.tagName if name == 'properties': self.loadDOMAttrs(obj, elem) elif name == 'startingpoint': galaxy = tran.db[tran.db[obj.compOf].compOf] galaxy.startingPos.append(obj.oid) galaxy.numOfStartPos += 1 else: raise ige.GameException('Unknown element %s' % name) return Const.SUCC def update(self, tran, obj): # clean up negative build queues and fix missing demolishStruct keys loopAgain = True while loopAgain: deletedKey = False for key in range(0,len(obj.prodQueue)): item = obj.prodQueue[key] if not hasattr(item, "demolishStruct"): item.demolishStruct = Const.OID_NONE if item.quantity < 0: log.warning("Deleting negative item queue on", obj.oid,"for player",obj.owner) if item.isShip: tech = player.shipDesigns[item.techID] else: tech = Rules.techs[item.techID] player = tran.db[obj.owner] for sr in tech.buildSRes: player.stratRes[sr] = player.stratRes.get(sr, 0) + item.quantity #quantity negative, so subtracting strat resources # del the bad item. 
Since this changes indicies, start the check over again on remaining items deletedKey = True del obj.prodQueue[key] break # no more bad entries found; break the while loop if not deletedKey: loopAgain = False # change owner to Const.OID_NONE when owner is invalid if obj.owner != Const.OID_NONE: player = tran.db.get(obj.owner, None) if not player or player.type not in Const.PLAYER_TYPES or obj.oid not in player.planets: # TODO this can be a probem - this planet cannot be attacked! log.warning("Changing owner to Const.OID_NONE - invalid owner", obj) self.cmd(obj).changeOwner(tran, obj, Const.OID_NONE, force = 1) # kill all population obj.storPop = 0 return # check compOf if not tran.db.has_key(obj.compOf) or tran.db[obj.compOf].type != Const.T_SYSTEM: log.debug("CONSISTENCY invalid compOf for planet", obj.oid) # fix signature obj.signature = 75 @public(Const.AL_FULL) def changePlanetsGlobalQueue(self, tran, obj, newQueue): player = tran.db[obj.owner] if newQueue < 0 or newQueue >= len(player.prodQueues): raise ige.GameException("Invalid queue") obj.globalQueue = newQueue return obj.globalQueue def popGlobalQueue(self, tran, obj): player = tran.db[obj.owner] queue = obj.globalQueue task = None if len(player.prodQueues[queue]): task = copy.copy(player.prodQueues[queue][0]) if task.quantity > 1: player.prodQueues[queue][0].quantity -= 1 else: if task.reportFin: Utils.sendMessage(tran, obj, Const.MSG_QUEUE_TASK_ALLOTED, Const.OID_NONE, (queue, task.techID)) del player.prodQueues[queue][0] # add other demanded values, report finalization was used to report allot (to prevent reporting every unit) task.reportFin = 0 task.quantity = 1 task.isShip = task.techID < 1000 task.targetID = obj.oid task.currProd = 0 task.demolishStruct = Const.OID_NONE return task def deleteDesign(self, tran, obj, designID, keepWIP = 0): # TODO: handle stategic resources for task in obj.prodQueue[:]: if task.isShip and task.techID == designID: if task.currProd > 0 and keepWIP: 
self.cmd(obj).changeConstruction(tran, obj, obj.procQueue.index(task), 1) else: self.cmd(obj).abortConstruction(tran, obj, obj.prodQueue.index(task)) def changeShipDesign(self, tran, obj, oldDesignID, newDesignID): # TODO: handle strategic resources for task in obj.prodQueue[:]: if task.isShip and task.techID == oldDesignID: task.techID = newDesignID task.currProd = int(task.currProd / Rules.shipUpgradeMod) ## ## Combat related functions ## def getPreCombatData(self, tran, obj): # scan buildings and fire their weapons shots = {0: [], 1: [], 2: [], 3: []} if obj.owner == Const.OID_NONE: return shots, [0, 0, 0, 8], False player = tran.db[obj.owner] system = tran.db[obj.compOf] desCount = {} firing = False systemAtt = 0; systemDef = 0; for struct in obj.slots: structTechID = struct[Const.STRUCT_IDX_TECHID] opStatus = struct[Const.STRUCT_IDX_OPSTATUS] / 100.0 tech = Rules.techs[structTechID] desCount[structTechID] = desCount.get(structTechID, 0) + 1 wpnCount = {} if not tech.structWeapons: continue firing = True for cClass in range(0, 4): weaponID = player.planetWeapons[cClass] if weaponID is None: continue weapon = Rules.techs[weaponID] maxWeaponCount = int(tech.structWeapons[cClass] * opStatus) for weaponIdx in range(0, maxWeaponCount): #@log.debug(obj.oid, "FIRING PLANET WEAPON", weapon.name) wpnCount[weaponID] = wpnCount.get(weaponID, 0) + 1 # weaponEff = Rules.techImprEff[player.techs.get(weaponID, Rules.techBaseImprovement)] # base attack attack = tech.combatAtt + int(weapon.weaponAtt * weaponEff) # because ALL counters starts at 1, subtract 3 count = system.combatCounter + desCount[structTechID] + wpnCount[weaponID] - 2 # add to attacks #@log.debug('IPlanet', obj.oid, structTechID, "Count", count, 'Shots', weapon.name, ShipUtils.getRounds(weapon.weaponROF, count)) for round in xrange(0, ShipUtils.getRounds(weapon.weaponROF, count)): shots[weapon.weaponClass].append((attack, weaponID)) # hit limit obj.maxHits = len(obj.slots) obj.hitCounter = 0 obj.lastHitClass = 
3 obj.hitMod = 1.0 log.debug(obj.oid, "Combat settings", obj.maxHits) # +1 means population only hit return shots, [0, 0, 0, 8], firing def applyShot(self, tran, obj, defense, attack, weaponID, cClass, count): #@log.debug('IPlanet', 'Apply shot', weaponID, attack, cClass, count) # compute chance to hit weapon = Rules.techs[weaponID] #system defense bonus is dropped for planets...structures can't move; just calculate defense off structure defense defense = Rules.combatStructDefense destroyed = 0 dmg = 0 # limit number of shots if weapon.weaponClass < obj.lastHitClass: #@log.debug(obj.oid, "Different class", obj.lastHitClass, weapon.weaponClass, obj.maxHits) obj.maxHits = int(Rules.combatHitXferMod * obj.maxHits * (obj.lastHitClass - weapon.weaponClass)) obj.hitCounter = int(Rules.combatHitXferMod * obj.hitCounter * (obj.lastHitClass - weapon.weaponClass)) obj.lastHitClass = weapon.weaponClass if weapon.weaponROF > 1: #@log.debug(obj.oid, "Increasing counter PL", 1.0 / weapon.weaponROF) obj.hitCounter += 1.0 / weapon.weaponROF else: #@log.debug(obj.oid, "Increasing counter PL", 1) obj.hitCounter += 1 if obj.hitCounter > obj.maxHits: obj.hitCounter = 0 obj.hitMod *= Rules.combatStructureHitMod #@log.debug(obj.oid, "Increasing hit penalty", obj.hitMod, obj.maxHits) attackChance = obj.hitMod * attack / (attack + defense) #@log.debug(obj.oid, "Chance to attack", attackChance, obj.hitMod, obj.hitCounter, obj.maxHits, #@ "without penalty:", float(attack) / (attack + defense)) #@log.debug('IPlanet', obj.oid, 'HIT?', attack + defense + 1, defense) absorb = 0 #for when it doesn't hit if random.random() <= attackChance: # hit player = tran.db[obj.owner] weaponEff = Rules.techImprEff[player.techs.get(weaponID, Rules.techBaseImprovement)] dmg = ShipUtils.computeDamage(weapon.weaponClass, 3, weapon.weaponDmgMin, weapon.weaponDmgMax, weaponEff) #@log.debug(obj.oid, 'HIT! 
att=%d vs def=%d, dmg=%d '% (attack, defense, dmg)) #shield strike if obj.shield > 0: absorb = min(dmg,obj.shield) obj.shield -= absorb dmg -= absorb if dmg == 0: return 0+absorb, 0, 3 # select slot if count == 7 or not obj.slots: #@log.debug('IPlanet', 'Population hit') # population hit if obj.storPop == 0: dmg = 0 else: # free slot hit -> dmg population # OLD dmgPop = int(Rules.popPerSlot * float(dmg) / Rules.popSlotHP * Rules.popKillMod) dmgPop = int(dmg * Rules.popSlotKillMod) obj.storPop = max(obj.storPop - dmgPop, 0) obj.changePop -= dmgPop if obj.storPop > 0: obj.morale -= Rules.moraleModPlHit * float(dmgPop) / float(obj.storPop) #@log.debug('IPlanet', obj.oid, 'Morale penalty', dmg, maxHP, Rules.moraleModPlHit * float(dmg) / float(maxHP)) elif count < 0: # TODO can be count negative? log.warning('IPlanet', 'applyShot: count is negative') else: if count == 6: # random structure hit #@log.debug('IPlanet', 'Random structure hit') struct = obj.slots[Utils.rand(0, len(obj.slots))] else: # most damaged structure hit #@log.debug('IPlanet', 'Most damaged structure hit') struct = obj.slots[-1] for tmpStruct in obj.slots: if tmpStruct[Const.STRUCT_IDX_HP] <= struct[Const.STRUCT_IDX_HP]: struct = tmpStruct # compute sum hp of all buildings sumHP = 0 for tmpStruct in obj.slots: sumHP += tmpStruct[Const.STRUCT_IDX_HP] # damage building struct[Const.STRUCT_IDX_HP] -= dmg # "damage" population tech = Rules.techs[struct[Const.STRUCT_IDX_TECHID]] # compute struct effectivity techEff = Utils.getTechEff(tran, struct[Const.STRUCT_IDX_TECHID], obj.owner) maxHP = int(tech.maxHP * techEff) dmgPop = int(tech.operWorkers * float(dmg) / maxHP * Rules.popKillMod) obj.storPop = max(obj.storPop - dmgPop, 0) obj.changePop -= dmgPop # destroy building if struct[Const.STRUCT_IDX_HP] <= 0: destroyed = 1 dmg += struct[Const.STRUCT_IDX_HP] obj.slots.remove(struct) # compute morale penalty if dmg: obj.morale -= Rules.moraleModPlHit * float(dmg) / float(sumHP) #@log.debug('IPlanet', obj.oid, 
'Morale penalty', dmg, sumHP, Rules.moraleModPlHit * float(dmg) / float(sumHP)) #@log.debug('IPlanet', 'Shot applied', dmg, destroyed) # when destroyed, only class 3 (structure) i valid return dmg+absorb, destroyed, 3 def distributeExp(self, tran, obj): # TODO - will buildings have exp? Answ: NO if hasattr(obj, "maxHits"): del obj.maxHits del obj.hitCounter del obj.lastHitClass del obj.hitMod def surrenderTo(self, tran, obj, newOwnerID): # morale is lost when this is called obj.morale -= Rules.moraleLostWhenSurrender if obj.morale >= Rules.revoltThr: #@log.debug('IPlanet', 'Surrender - revolt thr not reached', obj.morale) return 0 chance = (Rules.revoltThr - obj.morale) * Rules.moralePerPointChance #@log.debug('IPlanet', 'Surrender? mor, mor trgt, reb thr, chance', obj.morale, obj.moraleTrgt, chance) if Utils.rand(0, 101) > chance: # do not surrender! #@log.debug('IPlanet', 'Surrender - pure luck', obj.morale, obj.revoltLen) return 0 # we've lost the battle - we have a new owner #@log.debug('IPlanet', 'Surrender - surrending to', newOwnerID) newOwner = tran.db[newOwnerID] if newOwner.type == Const.T_PIRPLAYER or newOwner.type == Const.T_AIPIRPLAYER: # special handling for pirates currentTurn = tran.db[Const.OID_UNIVERSE].turn # prevent abuse - require 8 turns between capturing the same planet and require the owner to control the planet at least 2 turns if you want to gain fame & tech (two turns prevents orbiting pirate fleet from immediately bombing) if (currentTurn - obj.lastPirCapture) > 8 and (currentTurn - obj.ownerSince) > 2: # gain/lose fame self.cmd(newOwner).capturePlanet(tran, newOwner, obj) # steal ship techs self.cmd(newOwner).stealTechs(tran, newOwner, obj.owner, obj.oid) else: log.debug(obj.oid, "Pirate captured planet too soon after previous capture or colonization to gain bonuses", obj.oid) obj.storPop = 0 obj.lastPirCapture = currentTurn self.cmd(obj).changeOwner(tran, obj, Const.OID_NONE, force = 1) else: # change owner 
self.cmd(obj).changeOwner(tran, obj, newOwnerID, force = 1) # blow up all military buildings for struct in obj.slots[:]: tech = Rules.techs[struct[Const.STRUCT_IDX_TECHID]] if tech.isMilitary: obj.slots.remove(struct) return 1
gpl-2.0
jmachuca77/ardupilot
Tools/autotest/rover.py
1
249268
#!/usr/bin/env python ''' Drive Rover in SITL AP_FLAKE8_CLEAN ''' from __future__ import print_function import copy import math import operator import os import shutil import sys import time from common import AutoTest from pysim import util from common import AutoTestTimeoutException from common import MsgRcvTimeoutException from common import NotAchievedException from common import PreconditionFailedException from pymavlink import mavextra from pymavlink import mavutil # get location of scripts testdir = os.path.dirname(os.path.realpath(__file__)) SITL_START_LOCATION = mavutil.location(40.071374969556928, -105.22978898137808, 1583.702759, 246) class AutoTestRover(AutoTest): @staticmethod def get_not_armable_mode_list(): return ["RTL", "SMART_RTL"] @staticmethod def get_not_disarmed_settable_modes_list(): return ["FOLLOW"] @staticmethod def get_no_position_not_settable_modes_list(): return [] @staticmethod def get_position_armable_modes_list(): return ["GUIDED", "LOITER", "STEERING", "AUTO"] @staticmethod def get_normal_armable_modes_list(): return ["ACRO", "HOLD", "MANUAL"] def log_name(self): return "Rover" def test_filepath(self): return os.path.realpath(__file__) def set_current_test_name(self, name): self.current_test_name_directory = "ArduRover_Tests/" + name + "/" def sitl_start_location(self): return SITL_START_LOCATION def default_frame(self): return "rover" def is_rover(self): return True def get_stick_arming_channel(self): return int(self.get_parameter("RCMAP_ROLL")) ########################################################## # TESTS DRIVE ########################################################## # Drive a square in manual mode def drive_square(self, side=50): """Drive a square, Driving N then E .""" self.context_push() ex = None try: self.progress("TEST SQUARE") self.set_parameter("RC7_OPTION", 7) self.set_parameter("RC9_OPTION", 58) self.change_mode('MANUAL') self.wait_ready_to_arm() self.arm_vehicle() self.clear_wp(9) # first aim north 
self.progress("\nTurn right towards north") self.reach_heading_manual(10) # save bottom left corner of box as home AND waypoint self.progress("Save HOME") self.save_wp() self.progress("Save WP") self.save_wp() # pitch forward to fly north self.progress("\nGoing north %u meters" % side) self.reach_distance_manual(side) # save top left corner of square as waypoint self.progress("Save WP") self.save_wp() # roll right to fly east self.progress("\nGoing east %u meters" % side) self.reach_heading_manual(100) self.reach_distance_manual(side) # save top right corner of square as waypoint self.progress("Save WP") self.save_wp() # pitch back to fly south self.progress("\nGoing south %u meters" % side) self.reach_heading_manual(190) self.reach_distance_manual(side) # save bottom right corner of square as waypoint self.progress("Save WP") self.save_wp() # roll left to fly west self.progress("\nGoing west %u meters" % side) self.reach_heading_manual(280) self.reach_distance_manual(side) # save bottom left corner of square (should be near home) as waypoint self.progress("Save WP") self.save_wp() self.progress("Checking number of saved waypoints") num_wp = self.save_mission_to_file( os.path.join(testdir, "ch7_mission.txt")) expected = 7 # home + 6 toggled in if num_wp != expected: raise NotAchievedException("Did not get %u waypoints; got %u" % (expected, num_wp)) # TODO: actually drive the mission self.clear_wp(9) except Exception as e: self.print_exception_caught(e) ex = e self.disarm_vehicle() self.context_pop() if ex: raise ex def drive_left_circuit(self): """Drive a left circuit, 50m on a side.""" self.change_mode('MANUAL') self.set_rc(3, 2000) self.progress("Driving left circuit") # do 4 turns for i in range(0, 4): # hard left self.progress("Starting turn %u" % i) self.set_rc(1, 1000) self.wait_heading(270 - (90*i), accuracy=10) self.set_rc(1, 1500) self.progress("Starting leg %u" % i) self.wait_distance(50, accuracy=7) self.set_rc(3, 1500) self.progress("Circuit complete") 
# def test_throttle_failsafe(self, home, distance_min=10, side=60, # timeout=300): # """Fly east, Failsafe, return, land.""" # # self.mavproxy.send('switch 6\n') # manual mode # self.wait_mode('MANUAL') # self.mavproxy.send("param set FS_ACTION 1\n") # # # first aim east # self.progress("turn east") # if not self.reach_heading_manual(135): # return False # # # fly east 60 meters # self.progress("# Going forward %u meters" % side) # if not self.reach_distance_manual(side): # return False # # # pull throttle low # self.progress("# Enter Failsafe") # self.mavproxy.send('rc 3 900\n') # # tstart = self.get_sim_time() # success = False # while self.get_sim_time() < tstart + timeout and not success: # m = self.mav.recv_match(type='VFR_HUD', blocking=True) # pos = self.mav.location() # home_distance = self.get_distance(home, pos) # self.progress("Alt: %u HomeDistance: %.0f" % # (m.alt, home_distance)) # # check if we've reached home # if home_distance <= distance_min: # self.progress("RTL Complete") # success = True # # # reduce throttle # self.mavproxy.send('rc 3 1500\n') # self.mavproxy.expect('APM: Failsafe ended') # self.mavproxy.send('switch 2\n') # manual mode # self.wait_heartbeat() # self.wait_mode('MANUAL') # # if success: # self.progress("Reached failsafe home OK") # return True # else: # self.progress("Failed to reach Home on failsafe RTL - " # "timed out after %u seconds" % timeout) # return False def test_sprayer(self): """Test sprayer functionality.""" self.context_push() ex = None try: rc_ch = 5 pump_ch = 5 spinner_ch = 6 pump_ch_min = 1050 pump_ch_trim = 1520 pump_ch_max = 1950 spinner_ch_min = 975 spinner_ch_trim = 1510 spinner_ch_max = 1975 self.set_parameters({ "SPRAY_ENABLE": 1, "SERVO%u_FUNCTION" % pump_ch: 22, "SERVO%u_MIN" % pump_ch: pump_ch_min, "SERVO%u_TRIM" % pump_ch: pump_ch_trim, "SERVO%u_MAX" % pump_ch: pump_ch_max, "SERVO%u_FUNCTION" % spinner_ch: 23, "SERVO%u_MIN" % spinner_ch: spinner_ch_min, "SERVO%u_TRIM" % spinner_ch: spinner_ch_trim, 
"SERVO%u_MAX" % spinner_ch: spinner_ch_max, "SIM_SPR_ENABLE": 1, "SIM_SPR_PUMP": pump_ch, "SIM_SPR_SPIN": spinner_ch, "RC%u_OPTION" % rc_ch: 15, "LOG_DISARMED": 1, }) self.reboot_sitl() self.wait_ready_to_arm() self.arm_vehicle() self.progress("test bootup state - it's zero-output!") self.wait_servo_channel_value(spinner_ch, 0) self.wait_servo_channel_value(pump_ch, 0) self.progress("Enable sprayer") self.set_rc(rc_ch, 2000) self.progress("Testing zero-speed state") self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.progress("Testing turning it off") self.set_rc(rc_ch, 1000) self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.progress("Testing turning it back on") self.set_rc(rc_ch, 2000) self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.progress("Testing speed-ramping") self.set_rc(3, 1700) # start driving forward # this is somewhat empirical... 
self.wait_servo_channel_value(pump_ch, 1695, timeout=60) self.progress("Turning it off again") self.set_rc(rc_ch, 1000) self.wait_servo_channel_value(spinner_ch, spinner_ch_min) self.wait_servo_channel_value(pump_ch, pump_ch_min) self.start_subtest("Sprayer Mission") self.load_mission("sprayer-mission.txt") self.change_mode("AUTO") # self.send_debug_trap() self.progress("Waiting for sprayer to start") self.wait_servo_channel_value(pump_ch, 1300, timeout=60, comparator=operator.gt) self.progress("Waiting for sprayer to stop") self.wait_servo_channel_value(pump_ch, pump_ch_min, timeout=120) self.start_subtest("Checking mavlink commands") self.change_mode("MANUAL") self.progress("Starting Sprayer") self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SPRAYER, 1, # p1 0, # p2 0, # p3 0, # p4 0, # p5 0, # p6 0) # p7 self.progress("Testing speed-ramping") self.set_rc(3, 1700) # start driving forward self.wait_servo_channel_value(pump_ch, 1690, timeout=60, comparator=operator.gt) self.start_subtest("Stopping Sprayer") self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SPRAYER, 0, # p1 0, # p2 0, # p3 0, # p4 0, # p5 0, # p6 0) # p7 self.wait_servo_channel_value(pump_ch, pump_ch_min) self.set_rc(3, 1000) # start driving forward self.progress("Sprayer OK") except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.disarm_vehicle(force=True) self.reboot_sitl() if ex: raise ex def drive_max_rcin(self, timeout=30): """Test max RC inputs""" self.context_push() ex = None try: self.progress("Testing max RC inputs") self.change_mode("MANUAL") self.wait_ready_to_arm() self.arm_vehicle() self.set_rc(3, 2000) self.set_rc(1, 1000) tstart = self.get_sim_time() while self.get_sim_time_cached() - tstart < timeout: m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=1) if m is not None: self.progress("Current speed: %f" % m.groundspeed) # reduce throttle self.set_rc(3, 1500) self.set_rc(1, 1500) except Exception as e: self.print_exception_caught(e) ex = e 
self.disarm_vehicle() self.context_pop() if ex: raise ex ################################################# # AUTOTEST ALL ################################################# def drive_mission(self, filename, strict=True): """Drive a mission from a file.""" self.progress("Driving mission %s" % filename) self.load_mission(filename, strict=strict) self.wait_ready_to_arm() self.arm_vehicle() self.change_mode('AUTO') self.wait_waypoint(1, 4, max_dist=5) self.wait_statustext("Mission Complete", timeout=600) self.disarm_vehicle() self.progress("Mission OK") def test_gripper_mission(self): self.load_mission("rover-gripper-mission.txt") self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.wait_statustext("Gripper Grabbed", timeout=60) self.wait_statustext("Gripper Released", timeout=60) self.wait_statustext("Mission Complete", timeout=60) self.disarm_vehicle() def do_get_banner(self): target_sysid = self.sysid_thismav() target_compid = 1 self.mav.mav.command_long_send( target_sysid, target_compid, mavutil.mavlink.MAV_CMD_DO_SEND_BANNER, 1, # confirmation 1, # send it 0, 0, 0, 0, 0, 0) start = time.time() while True: m = self.mav.recv_match(type='STATUSTEXT', blocking=True, timeout=1) if m is not None and "ArduRover" in m.text: self.progress("banner received: %s" % m.text) return if time.time() - start > 10: break raise MsgRcvTimeoutException("banner not received") def drive_brake_get_stopping_distance(self, speed): # measure our stopping distance: old_cruise_speed = self.get_parameter('CRUISE_SPEED') old_accel_max = self.get_parameter('ATC_ACCEL_MAX') # controller tends not to meet cruise speed (max of ~14 when 15 # set), thus *1.2 self.set_parameter('CRUISE_SPEED', speed*1.2) # at time of writing, the vehicle is only capable of 10m/s/s accel self.set_parameter('ATC_ACCEL_MAX', 15) self.change_mode("STEERING") self.set_rc(3, 2000) self.wait_groundspeed(15, 100) initial = self.mav.location() initial_time = time.time() while time.time() - initial_time < 2: 
            # (continuation of drive_brake_get_stopping_distance:
            # poll until the autopilot reports a position different
            # from `initial`, i.e. we have a fresh fix to use as the
            # braking start point)
            # wait for a position update from the autopilot
            start = self.mav.location()
            if start != initial:
                break

        # cut throttle to neutral and wait for the vehicle to stop:
        self.set_rc(3, 1500)
        self.wait_groundspeed(0, 0.2)  # why do we not stop?!
        initial = self.mav.location()
        initial_time = time.time()
        while time.time() - initial_time < 2:
            # wait for a position update from the autopilot
            stop = self.mav.location()
            if stop != initial:
                break
        delta = self.get_distance(start, stop)

        self.set_parameter('CRUISE_SPEED', old_cruise_speed)
        self.set_parameter('ATC_ACCEL_MAX', old_accel_max)

        return delta

    def drive_brake(self):
        '''Check that enabling ATC_BRAKE meaningfully shortens the
        stopping distance (at least 5% better than coasting).'''
        old_using_brake = self.get_parameter('ATC_BRAKE')
        old_cruise_speed = self.get_parameter('CRUISE_SPEED')

        self.set_parameter('CRUISE_SPEED', 15)
        self.set_parameter('ATC_BRAKE', 0)

        self.arm_vehicle()

        distance_without_brakes = self.drive_brake_get_stopping_distance(15)

        # brakes on:
        self.set_parameter('ATC_BRAKE', 1)

        distance_with_brakes = self.drive_brake_get_stopping_distance(15)

        # revert state:
        self.set_parameter('ATC_BRAKE', old_using_brake)
        self.set_parameter('CRUISE_SPEED', old_cruise_speed)

        delta = distance_without_brakes - distance_with_brakes
        if delta < distance_without_brakes * 0.05:  # 5% isn't asking for much
            self.disarm_vehicle()
            raise NotAchievedException("""
Brakes have negligible effect (with=%0.2fm without=%0.2fm delta=%0.2fm)
""" %
                                       (distance_with_brakes,
                                        distance_without_brakes,
                                        delta))
        self.disarm_vehicle()

        self.progress(
            "Brakes work (with=%0.2fm without=%0.2fm delta=%0.2fm)" %
            (distance_with_brakes, distance_without_brakes, delta))

    def drive_rtl_mission_max_distance_from_home(self):
        '''maximum distance allowed from home at end'''
        return 6.5

    def drive_rtl_mission(self, timeout=120):
        '''Fly the rtl.txt mission in AUTO and check the vehicle ends up
        close to (but, being Rover, somewhat past) the home point.'''
        self.wait_ready_to_arm()
        self.arm_vehicle()

        self.load_mission("rtl.txt")
        self.change_mode("AUTO")

        # wait until the mission reaches waypoint 3 (the RTL item):
        tstart = self.get_sim_time()
        while True:
            now = self.get_sim_time_cached()
            if now - tstart > timeout:
                raise AutoTestTimeoutException("Didn't see wp 3")
            m = self.mav.recv_match(type='MISSION_CURRENT',
                                    blocking=True,
                                    timeout=1)
            self.progress("MISSION_CURRENT: %s" % str(m))
            if m.seq == 3:
                break

        self.drain_mav()

        m = self.mav.recv_match(type='NAV_CONTROLLER_OUTPUT',
                                blocking=True,
                                timeout=1)
        if m is None:
            raise MsgRcvTimeoutException(
                "Did not receive NAV_CONTROLLER_OUTPUT message")

        # sanity-check the test setup: the RTL leg must start a
        # reasonable distance from its destination:
        wp_dist_min = 5
        if m.wp_dist < wp_dist_min:
            raise PreconditionFailedException(
                "Did not start at least %f metres from destination (is=%f)" %
                (wp_dist_min, m.wp_dist))

        self.progress("NAV_CONTROLLER_OUTPUT.wp_dist looks good (%u >= %u)" %
                      (m.wp_dist, wp_dist_min,))

        # wait for mission to complete
        self.wait_statustext("Mission Complete", timeout=60)

        # the EKF doesn't pull us down to 0 speed:
        self.wait_groundspeed(0, 0.5, timeout=600)

        # current Rover blows straight past the home position and ends
        # up ~6m past the home point.
        home_distance = self.distance_to_home()
        home_distance_min = 5.5
        home_distance_max = self.drive_rtl_mission_max_distance_from_home()
        if home_distance > home_distance_max:
            raise NotAchievedException(
                "Did not stop near home (%f metres distant (%f > want > %f))" %
                (home_distance, home_distance_min, home_distance_max))
        self.disarm_vehicle()
        self.progress("RTL Mission OK (%fm)" % home_distance)

    def drive_fence_ac_avoidance(self):
        '''Check proximity-based avoidance: with the avoidance switch off
        the vehicle can drive away from home; with it on the vehicle is
        stopped by the fence.'''
        self.context_push()
        ex = None
        try:
            self.load_fence("rover-fence-ac-avoid.txt")
            self.set_parameter("FENCE_ENABLE", 0)
            self.set_parameter("PRX_TYPE", 10)
            self.set_parameter("RC10_OPTION", 40)  # proximity-enable
            self.reboot_sitl()
            # start = self.mav.location()
            self.wait_ready_to_arm()
            self.arm_vehicle()
            # first make sure we can breach the fence:
            self.set_rc(10, 1000)
            self.change_mode("ACRO")
            self.set_rc(3, 1550)
            self.wait_distance_to_home(25, 100000, timeout=60)
            self.change_mode("RTL")
            self.wait_statustext("Reached destination", timeout=60)
            # now enable avoidance and make sure we can't:
            self.set_rc(10, 2000)
            self.change_mode("ACRO")
            self.wait_groundspeed(0, 0.7, timeout=60)
            # watch for speed zero
            self.wait_groundspeed(0, 0.2, timeout=120)
        except Exception as e:
            self.print_exception_caught(e)
            ex = e
        self.context_pop()
        self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        self.disarm_vehicle(force=True)
        self.reboot_sitl()
        if ex:
            raise ex

    def test_servorelayevents(self):
        '''Toggle relay 0 and confirm the simulated pin mask changes.'''
        self.do_set_relay(0, 0)
        off = self.get_parameter("SIM_PIN_MASK")
        self.do_set_relay(0, 1)
        on = self.get_parameter("SIM_PIN_MASK")
        if on == off:
            raise NotAchievedException(
                "Pin mask unchanged after relay cmd")
        self.progress("Pin mask changed after relay command")

    def test_setting_modes_via_mavproxy_switch(self):
        '''Check each MAVProxy "switch" position selects the expected mode.'''
        self.customise_SITL_commandline([
            "--rc-in-port", "5502",
        ])
        self.load_mission(self.arming_test_mission())
        self.wait_ready_to_arm()
        fnoo = [(1, 'MANUAL'),
                (2, 'MANUAL'),
                (3, 'RTL'),
                (4, 'AUTO'),
                (5, 'AUTO'),  # non-existent mode number; mode should remain unchanged (AUTO)
                (6, 'MANUAL')]
        for (num, expected) in fnoo:
            self.mavproxy.send('switch %u\n' % num)
            self.wait_mode(expected)

    def test_setting_modes_via_mavproxy_mode_command(self):
        '''Check MAVProxy "mode" accepts both mode numbers and mode names.'''
        fnoo = [(1, 'ACRO'),
                (3, 'STEERING'),
                (4, 'HOLD'),
                ]
        for (num, expected) in fnoo:
            # reset to a known mode between attempts:
            self.mavproxy.send('mode manual\n')
            self.wait_mode("MANUAL")
            self.mavproxy.send('mode %u\n' % num)
            self.wait_mode(expected)
            self.mavproxy.send('mode manual\n')
            self.wait_mode("MANUAL")
            self.mavproxy.send('mode %s\n' % expected)
            self.wait_mode(expected)

    def test_setting_modes_via_modeswitch(self):
        # test setting of modes through mode switch
        self.context_push()
        ex = None
        try:
            self.set_parameter("MODE_CH", 8)
            self.set_rc(8, 1000)
            # mavutil.mavlink.ROVER_MODE_HOLD:
            self.set_parameter("MODE6", 4)
            # mavutil.mavlink.ROVER_MODE_ACRO
            self.set_parameter("MODE5", 1)
            self.set_rc(8, 1800)  # PWM for mode6
            self.wait_mode("HOLD")
            self.set_rc(8, 1700)  # PWM for mode5
            self.wait_mode("ACRO")
            self.set_rc(8, 1800)  # PWM for mode6
            self.wait_mode("HOLD")
            self.set_rc(8, 1700)  # PWM for mode5
            self.wait_mode("ACRO")
        except Exception as e:
            self.print_exception_caught(e)
            ex = e
        self.context_pop()
        if ex is not None:
            raise ex

    def test_setting_modes_via_auxswitches(self):
        '''Check RCx_OPTION mode-change switches override the mode switch
        and that releasing them re-polls the mode switch.'''
        self.context_push()
        ex = None
        try:
            # from mavproxy_rc.py
            mapping = [0, 1165, 1295, 1425, 1555, 1685, 1815]
            self.set_parameter("MODE1", 1)  # acro
            self.set_rc(8, mapping[1])
            self.wait_mode('ACRO')

            self.set_rc(9, 1000)
            self.set_rc(10, 1000)
            self.set_parameter("RC9_OPTION", 53)  # steering
            self.set_parameter("RC10_OPTION", 54)  # hold
            self.set_rc(9, 1900)
            self.wait_mode("STEERING")
            self.set_rc(10, 1900)
            self.wait_mode("HOLD")

            # reset both switches - should go back to ACRO
            self.set_rc(9, 1000)
            self.set_rc(10, 1000)
            self.wait_mode("ACRO")

            self.set_rc(9, 1900)
            self.wait_mode("STEERING")
            self.set_rc(10, 1900)
            self.wait_mode("HOLD")

            self.set_rc(10, 1000)  # this re-polls the mode switch
            self.wait_mode("ACRO")
            self.set_rc(9, 1000)
        except Exception as e:
            self.print_exception_caught(e)
            ex = e
        self.context_pop()
        if ex is not None:
            raise ex

    def test_rc_override_cancel(self):
        '''Check an RC_CHANNELS_OVERRIDE of 0 promptly cancels an override
        and reverts the channel to real RC input.'''
        self.set_parameter("SYSID_MYGCS", self.mav.source_system)
        self.change_mode('MANUAL')
        self.wait_ready_to_arm()
        self.zero_throttle()
        self.arm_vehicle()
        # start moving forward a little:
        normal_rc_throttle = 1700
        throttle_override = 1900

        self.progress("Establishing baseline RC input")
        self.set_rc(3, normal_rc_throttle)
        self.drain_mav()
        tstart = self.get_sim_time()
        while True:
            if self.get_sim_time_cached() - tstart > 10:
                raise AutoTestTimeoutException("Did not get rc change")
            m = self.mav.recv_match(type='RC_CHANNELS', blocking=True)
            if m.chan3_raw == normal_rc_throttle:
                break

        self.progress("Set override with RC_CHANNELS_OVERRIDE")
        self.drain_mav()
        tstart = self.get_sim_time()
        while True:
            if self.get_sim_time_cached() - tstart > 10:
                raise AutoTestTimeoutException("Did not override")
            self.progress("Sending throttle of %u" % (throttle_override,))
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                65535,  # chan1_raw
                65535,  # chan2_raw
                throttle_override,  # chan3_raw
                65535,  # chan4_raw
                65535,  # chan5_raw
                65535,  # chan6_raw
                65535,  # chan7_raw
                65535)  # chan8_raw

            m = self.mav.recv_match(type='RC_CHANNELS', blocking=True)
            self.progress("chan3=%f want=%f" % (m.chan3_raw, throttle_override))
            if m.chan3_raw == throttle_override:
                break

        self.progress("disabling override and making sure we revert to RC input in good time")
        self.drain_mav()
        tstart = self.get_sim_time()
        while True:
            # note the tight 0.5s budget: cancellation must be prompt
            if self.get_sim_time_cached() - tstart > 0.5:
                raise AutoTestTimeoutException("Did not cancel override")
            self.progress("Sending cancel of throttle override")
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                65535,  # chan1_raw
                65535,  # chan2_raw
                0,      # chan3_raw (0 == cancel override on this channel)
                65535,  # chan4_raw
                65535,  # chan5_raw
                65535,  # chan6_raw
                65535,  # chan7_raw
                65535)  # chan8_raw
            self.do_timesync_roundtrip()
            m = self.mav.recv_match(type='RC_CHANNELS', blocking=True)
            self.progress("chan3=%f want=%f" % (m.chan3_raw, normal_rc_throttle))
            if m.chan3_raw == normal_rc_throttle:
                break
        self.disarm_vehicle()

    def test_rc_overrides(self):
        '''Exercise RC_CHANNELS_OVERRIDE semantics: the deny-overrides
        auxiliary switch (RC12_OPTION=46), RC_OVERRIDE_TIME of 0 / -1 /
        finite values, and the 0 / 65534 / 65535 special channel values.'''
        self.context_push()
        self.set_parameter("SYSID_MYGCS", self.mav.source_system)
        ex = None
        try:
            self.set_parameter("RC12_OPTION", 46)
            self.reboot_sitl()

            self.change_mode('MANUAL')
            self.wait_ready_to_arm()
            self.set_rc(3, 1500)  # throttle at zero
            self.arm_vehicle()
            # start moving forward a little:
            normal_rc_throttle = 1700
            self.set_rc(3, normal_rc_throttle)
            self.wait_groundspeed(5, 100)

            # allow overrides:
            self.set_rc(12, 2000)

            # now override to stop:
            throttle_override = 1500

            tstart = self.get_sim_time_cached()
            while True:
                if self.get_sim_time_cached() - tstart > 10:
                    raise AutoTestTimeoutException("Did not reach speed")
                self.progress("Sending throttle of %u" % (throttle_override,))
                self.mav.mav.rc_channels_override_send(
                    1,  # target system
                    1,  # target component
                    65535,  # chan1_raw
                    65535,  # chan2_raw
                    throttle_override,  # chan3_raw
                    65535,  # chan4_raw
                    65535,  # chan5_raw
                    65535,  # chan6_raw
                    65535,  # chan7_raw
                    65535)  # chan8_raw

                m = self.mav.recv_match(type='VFR_HUD', blocking=True)
                want_speed = 2.0
                self.progress("Speed=%f want=<%f" % (m.groundspeed, want_speed))
                if m.groundspeed < want_speed:
                    break

            # now override to stop - but set the switch on the RC
            # transmitter to deny overrides; this should send the
            # speed back up to 5 metres/second:
            self.set_rc(12, 1000)

            throttle_override = 1500
            tstart = self.get_sim_time_cached()
            while True:
                if self.get_sim_time_cached() - tstart > 10:
                    raise AutoTestTimeoutException("Did not speed back up")
                self.progress("Sending throttle of %u" % (throttle_override,))
                self.mav.mav.rc_channels_override_send(
                    1,  # target system
                    1,  # target component
                    65535,  # chan1_raw
                    65535,  # chan2_raw
                    throttle_override,  # chan3_raw
                    65535,  # chan4_raw
                    65535,  # chan5_raw
                    65535,  # chan6_raw
                    65535,  # chan7_raw
                    65535)  # chan8_raw

                m = self.mav.recv_match(type='VFR_HUD', blocking=True)
                want_speed = 5.0
                self.progress("Speed=%f want=>%f" % (m.groundspeed, want_speed))
                if m.groundspeed > want_speed:
                    break

            # re-enable RC overrides
            self.set_rc(12, 2000)

            # check we revert to normal RC inputs when gcs overrides cease:
            self.progress("Waiting for RC to revert to normal RC input")
            self.wait_rc_channel_value(3, normal_rc_throttle, timeout=10)

            self.start_subtest("Check override time of zero disables overrides")
            old = self.get_parameter("RC_OVERRIDE_TIME")
            ch = 2
            self.set_rc(ch, 1000)
            channels = [65535] * 18
            ch_override_value = 1700
            channels[ch-1] = ch_override_value
            channels[7] = 1234  # that's channel 8!
            self.progress("Sending override message %u" % ch_override_value)
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            # long timeout required here as we may have sent a lot of
            # things via MAVProxy...
            self.wait_rc_channel_value(ch, ch_override_value, timeout=30)
            self.set_parameter("RC_OVERRIDE_TIME", 0)
            self.wait_rc_channel_value(ch, 1000)
            self.set_parameter("RC_OVERRIDE_TIME", old)
            self.wait_rc_channel_value(ch, ch_override_value)

            ch_override_value = 1720
            channels[ch-1] = ch_override_value
            self.progress("Sending override message %u" % ch_override_value)
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            self.wait_rc_channel_value(ch, ch_override_value, timeout=10)
            self.set_parameter("RC_OVERRIDE_TIME", 0)
            self.wait_rc_channel_value(ch, 1000)
            self.set_parameter("RC_OVERRIDE_TIME", old)

            self.progress("Ensuring timeout works")
            self.wait_rc_channel_value(ch, 1000, timeout=5)
            self.delay_sim_time(10)

            self.set_parameter("RC_OVERRIDE_TIME", 10)
            self.progress("Sending override message")

            ch_override_value = 1730
            channels[ch-1] = ch_override_value
            self.progress("Sending override message %u" % ch_override_value)
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            self.wait_rc_channel_value(ch, ch_override_value, timeout=10)
            tstart = self.get_sim_time()
            self.progress("Waiting for channel to revert to 1000 in ~10s")
            self.wait_rc_channel_value(ch, 1000, timeout=15)
            delta = self.get_sim_time() - tstart
            # the 10s override should expire in roughly 10s - bracket it:
            if delta > 12:
                raise NotAchievedException("Took too long to revert RC channel value (delta=%f)" % delta)
            min_delta = 9
            if delta < min_delta:
                raise NotAchievedException("Didn't take long enough to revert RC channel value (delta=%f want>=%f)" %
                                           (delta, min_delta))
            self.progress("Disabling RC override timeout")
            self.set_parameter("RC_OVERRIDE_TIME", -1)
            ch_override_value = 1740
            channels[ch-1] = ch_override_value
            self.progress("Sending override message %u" % ch_override_value)
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            self.wait_rc_channel_value(ch, ch_override_value, timeout=10)
            tstart = self.get_sim_time()
            while True:
                # warning: this is get_sim_time() and can slurp messages on you!
                delta = self.get_sim_time() - tstart
                if delta > 20:
                    break
                m = self.mav.recv_match(type='RC_CHANNELS',
                                        blocking=True,
                                        timeout=1)
                if m is None:
                    raise NotAchievedException("Did not get RC_CHANNELS")
                channel_field = "chan%u_raw" % ch
                m_value = getattr(m, channel_field)
                if m_value != ch_override_value:
                    raise NotAchievedException("Value reverted after %f seconds when it should not have (got=%u) (want=%u)" % (delta, m_value, ch_override_value))  # noqa
            self.set_parameter("RC_OVERRIDE_TIME", old)
            self.delay_sim_time(10)

            self.start_subtest("Checking higher-channel semantics")
            self.context_push()
            self.set_parameter("RC_OVERRIDE_TIME", 30)

            ch = 11
            rc_value = 1010
            self.set_rc(ch, rc_value)

            channels = [65535] * 18
            ch_override_value = 1234
            channels[ch-1] = ch_override_value
            self.progress("Sending override message ch%u=%u" % (ch, ch_override_value))
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            self.progress("Wait for override value")
            self.wait_rc_channel_value(ch, ch_override_value, timeout=10)

            self.progress("Sending return-to-RC-input value")
            channels[ch-1] = 65534  # 65534 == return channel to RC input
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            self.wait_rc_channel_value(ch, rc_value, timeout=10)

            channels[ch-1] = ch_override_value
            self.progress("Sending override message ch%u=%u" % (ch, ch_override_value))
            self.mav.mav.rc_channels_override_send(
                1,  # target system
                1,  # target component
                *channels
            )
            self.progress("Wait for override value")
            self.wait_rc_channel_value(ch, ch_override_value, timeout=10)

            # make sure we keep the override value for at least 10 seconds:
            tstart = self.get_sim_time()
            while True:
                if self.get_sim_time_cached() - tstart > 10:
                    break
                # try both ignore values:
                ignore_value = 0
                if self.get_sim_time_cached() - tstart > 5:
                    ignore_value = 65535
                self.progress("Sending ignore value %u" % ignore_value)
                channels[ch-1] = ignore_value
                self.mav.mav.rc_channels_override_send(
                    1,  # target system
                    1,  # target component
                    *channels
                )
                if self.get_rc_channel_value(ch) != ch_override_value:
                    raise NotAchievedException("Did not maintain value")

            self.context_pop()

            self.end_subtest("Checking higher-channel semantics")

        except Exception as e:
            self.print_exception_caught(e)
            ex = e

        self.context_pop()
        self.disarm_vehicle()
        self.reboot_sitl()

        if ex is not None:
            raise ex

    def test_manual_control(self):
        '''Check MANUAL_CONTROL overrides behave like RC_CHANNELS_OVERRIDE,
        gated by the same deny-overrides auxiliary switch.'''
        self.context_push()
        self.set_parameter("SYSID_MYGCS", self.mav.source_system)
        ex = None
        try:
            self.set_parameter("RC12_OPTION", 46)  # enable/disable rc overrides
            self.reboot_sitl()

            self.change_mode("MANUAL")
            self.wait_ready_to_arm()
            self.zero_throttle()
            self.arm_vehicle()
            self.progress("start moving forward a little")
            normal_rc_throttle = 1700
            self.set_rc(3, normal_rc_throttle)
            self.wait_groundspeed(5, 100)

            self.progress("allow overrides")
            self.set_rc(12, 2000)

            self.progress("now override to stop")
            throttle_override_normalized = 0
            expected_throttle = 0  # in VFR_HUD

            tstart = self.get_sim_time_cached()
            while True:
                if self.get_sim_time_cached() - tstart > 10:
                    raise AutoTestTimeoutException("Did not reach speed")
                self.progress("Sending normalized throttle of %d" % (throttle_override_normalized,))
                self.mav.mav.manual_control_send(
                    1,  # target system
                    32767,  # x (pitch)
                    32767,  # y (roll)
                    throttle_override_normalized,  # z (thrust)
                    32767,  # r (yaw)
                    0)  # button mask

                m = self.mav.recv_match(type='VFR_HUD', blocking=True)
                want_speed = 2.0
                self.progress("Speed=%f want=<%f throttle=%u want=%u" %
                              (m.groundspeed, want_speed, m.throttle, expected_throttle))
                if m.groundspeed < want_speed and m.throttle == expected_throttle:
                    break

            self.progress("now override to stop - but set the switch on the RC transmitter to deny overrides; this should send the speed back up to 5 metres/second")  # noqa

            self.set_rc(12, 1000)
            throttle_override_normalized = 500
            expected_throttle = 36  # in VFR_HUD, corresponding to normal_rc_throttle adjusted for channel min/max

            tstart = self.get_sim_time_cached()
            while True:
                if self.get_sim_time_cached() - tstart > 10:
                    raise AutoTestTimeoutException("Did not stop")
                self.progress("Sending normalized throttle of %u" % (throttle_override_normalized,))
                self.mav.mav.manual_control_send(
                    1,  # target system
                    32767,  # x (pitch)
                    32767,  # y (roll)
                    throttle_override_normalized,  # z (thrust)
                    32767,  # r (yaw)
                    0)  # button mask

                m = self.mav.recv_match(type='VFR_HUD', blocking=True)
                want_speed = 5.0

                self.progress("Speed=%f want=>%f throttle=%u want=%u" %
                              (m.groundspeed, want_speed, m.throttle, expected_throttle))
                if m.groundspeed > want_speed and m.throttle == expected_throttle:
                    break

            # re-enable RC overrides
            self.set_rc(12, 2000)

            # check we revert to normal RC inputs when gcs overrides cease:
            self.progress("Waiting for RC to revert to normal RC input")
            self.wait_rc_channel_value(3, normal_rc_throttle, timeout=10)

        except Exception as e:
            self.print_exception_caught(e)
            ex = e

        self.context_pop()
        self.disarm_vehicle()
        self.reboot_sitl()

        if ex is not None:
            raise ex

    def test_camera_mission_items(self):
        '''Run a mission containing camera-trigger-by-distance items and
        verify CAMERA_FEEDBACK messages arrive at the expected spacings.'''
        self.context_push()
        ex = None
        try:
            self.load_mission("rover-camera-mission.txt")
            self.wait_ready_to_arm()
            self.change_mode("AUTO")
            self.wait_ready_to_arm()
            self.arm_vehicle()
            prev_cf = None
            while True:
                cf = self.mav.recv_match(type='CAMERA_FEEDBACK', blocking=True)
                if prev_cf is None:
                    prev_cf = cf
                    continue
                dist_travelled = self.get_distance_int(prev_cf, cf)
                prev_cf = cf
                mc = self.mav.messages.get("MISSION_CURRENT", None)
                if mc is None:
                    continue
                elif mc.seq == 2:
                    expected_distance = 2
                elif mc.seq == 4:
                    expected_distance = 5
                elif mc.seq == 5:
                    break
                else:
                    continue
                self.progress("Expected distance %f got %f" %
                              (expected_distance, dist_travelled))
                error = abs(expected_distance - dist_travelled)
                # Rover moves at ~5m/s; we appear to do something at
                # 5Hz, so we do see over a meter of error!
                max_error = 1.5
                if error > max_error:
                    raise NotAchievedException("Camera distance error: %f (%f)" %
                                               (error, max_error))

            self.disarm_vehicle()
        except Exception as e:
            self.print_exception_caught(e)
            ex = e
        self.context_pop()
        if ex is not None:
            raise ex

    def test_do_set_mode_via_command_long(self):
        '''Set modes via MAV_CMD_DO_SET_MODE in a COMMAND_LONG.'''
        self.do_set_mode_via_command_long("HOLD")
        self.do_set_mode_via_command_long("MANUAL")

    def test_mavproxy_do_set_mode_via_command_long(self):
        '''Set modes via MAV_CMD_DO_SET_MODE sent through MAVProxy.'''
        self.mavproxy_do_set_mode_via_command_long("HOLD")
        self.mavproxy_do_set_mode_via_command_long("MANUAL")

    def test_sysid_enforce(self):
        '''Run the same arming code with correct then incorrect SYSID'''

        if self.mav.source_system != self.mav.mav.srcSystem:
            raise PreconditionFailedException("Expected mav.source_system and mav.srcSystem to match")

        self.context_push()
        old_srcSystem = self.mav.mav.srcSystem
        ex = None
        try:
            self.set_parameter("SYSID_MYGCS", self.mav.source_system)
            self.set_parameter("SYSID_ENFORCE", 1, add_to_context=False)

            self.change_mode('MANUAL')

            self.progress("make sure I can arm ATM")
            self.wait_ready_to_arm()
            self.arm_vehicle(timeout=5)
            self.disarm_vehicle()

            self.do_timesync_roundtrip()

            # should not be able to arm from a system id which is not MY_SYSID
            self.progress("Attempting to arm vehicle from bad system-id")
            success = None
            try:
                # temporarily set a different system ID than normal:
                self.mav.mav.srcSystem = 72
                self.arm_vehicle(timeout=5)
                self.disarm_vehicle()
                success = False
            except AutoTestTimeoutException:
                # timing out means the vehicle ignored us - that's the pass case
                success = True
            self.mav.mav.srcSystem = old_srcSystem
            if not success:
                raise NotAchievedException("Managed to arm with SYSID_ENFORCE set")

            # should be able to arm from the vehicle's own components:
            self.progress("Attempting to arm vehicle from vehicle component")
            comp_arm_exception = None
            try:
                self.mav.mav.srcSystem = 1
                self.arm_vehicle(timeout=5)
                self.disarm_vehicle()
            except Exception as e:
                comp_arm_exception = e
            self.mav.mav.srcSystem = old_srcSystem
            if comp_arm_exception is not None:
                raise comp_arm_exception

        except Exception as e:
            self.print_exception_caught(e)
            ex = e
        self.mav.mav.srcSystem = old_srcSystem
        self.set_parameter("SYSID_ENFORCE", 0, add_to_context=False)
        self.context_pop()
        if ex is not None:
            raise ex

    def test_rally_points(self):
        '''Drive away from home, RTL, and check we arrive at the rally
        point rather than home.'''
        self.reboot_sitl()  # to ensure starting point is as expected

        self.load_rally("rover-test-rally.txt")
        accuracy = self.get_parameter("WP_RADIUS")

        self.wait_ready_to_arm()
        self.arm_vehicle()

        self.reach_heading_manual(10)
        self.reach_distance_manual(50)

        self.change_mode("RTL")
        # location copied in from rover-test-rally.txt:
        loc = mavutil.location(40.071553,
                               -105.229401,
                               0,
                               0)
        self.wait_location(loc, accuracy=accuracy)
        self.disarm_vehicle()

    def fence_with_bad_frame(self, target_system=1, target_component=1):
        '''Fence item list using a frame the fence upload must reject.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_zero_vertex_count(self, target_system=1, target_component=1):
        '''Polygon-vertex fence item whose vertex count (p1) is zero.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_wrong_vertex_count(self, target_system=1, target_component=1):
        '''Single polygon vertex claiming a vertex count (p1) of two.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                2,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_multiple_return_points(self, target_system=1, target_component=1):
        '''Fence item list containing two return points (only one is allowed).'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                1,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_invalid_latlon(self, target_system=1, target_component=1):
        '''Fence item whose latitude is out of the valid +/-90 degree range.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(100 * 1e7),  # bad latitude. bad.
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_multiple_return_points_with_bad_sequence_numbers(self, target_system=1, target_component=1):
        '''Two return points which additionally share sequence number 0.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(2.0 * 1e7),  # latitude
                int(2.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_which_exceeds_storage_space(self, target_system=1, target_component=1):
        '''60 circle-exclusion items - more than fence storage can hold.'''
        ret = []
        for i in range(0, 60):
            ret.append(self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                i,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION,
                0,  # current
                0,  # autocontinue
                10,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            )
        return ret

    def fences_which_should_not_upload(self, target_system=1, target_component=1):
        '''(name, fence-item-list) tuples the autopilot must reject.'''
        return [
            ("Bad Frame", self.fence_with_bad_frame(
                target_system=target_system,
                target_component=target_component)),
            ("Zero Vertex Count", self.fence_with_zero_vertex_count(
                target_system=target_system,
                target_component=target_component)),
            ("Wrong Vertex Count", self.fence_with_wrong_vertex_count(
                target_system=target_system,
                target_component=target_component)),
            ("Multiple return points", self.fence_with_multiple_return_points(
                target_system=target_system,
                target_component=target_component)),
            ("Invalid lat/lon", self.fence_with_invalid_latlon(
                target_system=target_system,
                target_component=target_component)),
            ("Multiple Return points with bad sequence numbers", self.fence_with_multiple_return_points_with_bad_sequence_numbers(  # noqa
                target_system=target_system,
                target_component=target_component)),
            ("Fence which exceeds storage space", self.fence_which_exceeds_storage_space(
                target_system=target_system,
                target_component=target_component)),
        ]

    def fence_with_single_return_point(self, target_system=1, target_component=1):
        '''Minimal valid fence: one return point.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_single_return_point_and_5_vertex_inclusion(self, target_system=1, target_component=1):
        '''Valid fence: one return point plus a 5-vertex inclusion polygon.'''
        return [
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0017 * 1e7),  # latitude
                int(1.0017 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                1,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                5,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0000 * 1e7),  # latitude
                int(1.0000 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                2,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                5,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0001 * 1e7),  # latitude
                int(1.0000 * 1e7),  # longitude
                32.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                3,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                5,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0001 * 1e7),  # latitude
                int(1.0001 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                4,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                5,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0002 * 1e7),  # latitude
                int(1.0002 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                5,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                5,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0002 * 1e7),  # latitude
                int(1.0003 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ]

    def fence_with_many_exclusion_circles(self, count=50, target_system=1, target_component=1):
        '''Valid fence: `count` circle-exclusion items.'''
        ret = []
        for i in range(0, count):
            # NOTE(review): these use `count`, not the loop variable `i`,
            # so every circle gets the same lat/lng - looks unintended;
            # confirm before relying on the circles being distinct.
            lat_deg = 1.0003 + count/10
            lng_deg = 1.0002 + count/10
            item = self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                i,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION,
                0,  # current
                0,  # autocontinue
                count,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(lat_deg * 1e7),  # latitude
                int(lng_deg * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
            ret.append(item)
        return ret

    def fence_with_many_exclusion_polyfences(self, target_system=1, target_component=1):
        '''Valid fence: four exclusion polygons of 6..9 vertices each.'''
        ret = []
        seq = 0
        for fencenum in range(0, 4):
            pointcount = fencenum + 6
            for p in range(0, pointcount):
                lat_deg = 1.0003 + p/10 + fencenum/100
                lng_deg = 1.0002 + p/10 + fencenum/100
                item = self.mav.mav.mission_item_int_encode(
                    target_system,
                    target_component,
                    seq,  # seq
                    mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                    mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_EXCLUSION,
                    0,  # current
                    0,  # autocontinue
                    pointcount,  # p1
                    0,  # p2
                    0,  # p3
                    0,  # p4
                    int(lat_deg * 1e7),  # latitude
                    int(lng_deg * 1e7),  # longitude
                    33.0000,  # altitude
                    mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
                ret.append(item)
                seq += 1
        return ret

    def fences_which_should_upload(self, target_system=1, target_component=1):
        '''(name, fence-item-list) tuples the autopilot must accept.'''
        return [
            ("Single Return Point",
             self.fence_with_single_return_point(
                 target_system=target_system,
                 target_component=target_component)),
            ("Return and 5-vertex-inclusion",
             self.fence_with_single_return_point_and_5_vertex_inclusion(
                 target_system=target_system,
                 target_component=target_component)),
            ("Many exclusion circles",
             self.fence_with_many_exclusion_circles(
                 target_system=target_system,
                 target_component=target_component)),
            ("Many exclusion polyfences",
             self.fence_with_many_exclusion_polyfences(
                 target_system=target_system,
                 target_component=target_component)),
            ("Empty fence", []),
        ]

    def assert_fence_does_not_upload(self, fence, target_system=1, target_component=1):
        '''Attempt to upload `fence` and raise unless the upload is refused.'''
        self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE,
                           target_system=target_system,
                           target_component=target_component)
        # upload single item using mission item protocol:
        upload_failed = False
        try:
            self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE,
                                               fence)
        except NotAchievedException:
            # TODO: make sure we failed for correct reason
            upload_failed = True
        if not upload_failed:
            raise NotAchievedException("Uploaded fence when should not be possible")
        self.progress("Fence rightfully bounced")

    def send_fencepoint_expect_statustext(self, offset, count, lat, lng, statustext_fragment, target_system=1, target_component=1, timeout=10):
        '''Send an old-style FENCE_POINT and wait for a STATUSTEXT
        containing `statustext_fragment`.'''
        self.mav.mav.fence_point_send(target_system,
                                      target_component,
                                      offset,
                                      count,
                                      lat,
                                      lng)
        tstart = self.get_sim_time_cached()
        while True:
            if self.get_sim_time_cached() - tstart > timeout:
                raise NotAchievedException("Did not get error message back")
            m = self.mav.recv_match(type='STATUSTEXT', blocking=True, timeout=1)
            self.progress("statustext: %s (want='%s')" %
                          (str(m), statustext_fragment))
            if m is None:
                continue
            if statustext_fragment in m.text:
                break

    def test_gcs_fence_centroid(self, target_system=1, target_component=1):
        '''Upload a polygon without a return point and check the autopilot
        reports the polygon centroid as fence point 0.'''
        self.start_subtest("Ensuring if we don't have a centroid it gets calculated")
        items = self.test_gcs_fence_need_centroid(
            target_system=target_system,
            target_component=target_component)
        self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE,
                                           items)
        centroid = self.get_fence_point(0)
        want_lat = 1.0001
        want_lng = 1.00005
        if abs(centroid.lat - want_lat) > 0.000001:
            raise NotAchievedException("Centroid lat not as expected (want=%f got=%f)" % (want_lat, centroid.lat))
        if abs(centroid.lng - want_lng) > 0.000001:
            raise NotAchievedException("Centroid lng not as expected (want=%f got=%f)" % (want_lng, centroid.lng))

    def test_gcs_fence_update_fencepoint(self, target_system=1, target_component=1):
        '''Move one vertex of an uploaded fence via a partial-list write
        and verify only that item changes.'''
        self.start_subtest("Ensuring we can move a fencepoint")
        items = self.test_gcs_fence_boring_triangle(
            target_system=target_system,
            target_component=target_component)
        self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE,
                                           items)
        # downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        item_seq = 2
        item = items[item_seq]
        print("item is (%s)" % str(item))
        self.progress("original x=%d" % item.x)
        item.x += int(0.1 * 1e7)
        self.progress("new x=%d" % item.x)
        self.progress("try to overwrite item %u" % item_seq)
        self.mav.mav.mission_write_partial_list_send(
            target_system,
            target_component,
            item_seq,
            item_seq,
            mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, item_seq)
        item.pack(self.mav.mav)
        self.mav.mav.send(item)
        self.progress("Answered request for fence point %u" % item_seq)
        self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        downloaded_items2 = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        if downloaded_items2[item_seq].x != item.x:
            raise NotAchievedException("Item did not update")
        self.check_fence_items_same([items[0], items[1], item, items[3]],
                                    downloaded_items2)

    def test_gcs_fence_boring_triangle(self, target_system=1, target_component=1):
        '''Fence item list: a 3-vertex inclusion polygon plus a return point.'''
        return copy.copy([
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                3,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0000 * 1e7),  # latitude
                int(1.0000 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                1,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                3,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0001 * 1e7),  # latitude
                int(1.0000 * 1e7),  # longitude
                32.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                2,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                3,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0001 * 1e7),  # latitude
                int(1.0001 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                3,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT,
                0,  # current
                0,  # autocontinue
                0,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.00015 * 1e7),  # latitude
                int(1.00015 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ])

    def test_gcs_fence_need_centroid(self, target_system=1, target_component=1):
        '''Fence item list: a 4-vertex inclusion polygon with no return
        point, forcing the autopilot to compute a centroid.'''
        return copy.copy([
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                0,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                4,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0000 * 1e7),  # latitude
                int(1.0000 * 1e7),  # longitude
                31.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                1,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                4,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0002 * 1e7),  # latitude
                int(1.0000 * 1e7),  # longitude
                32.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                2,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                4,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0002 * 1e7),  # latitude
                int(1.0001 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
            self.mav.mav.mission_item_int_encode(
                target_system,
                target_component,
                3,  # seq
                mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
                mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION,
                0,  # current
                0,  # autocontinue
                4,  # p1
                0,  # p2
                0,  # p3
                0,  # p4
                int(1.0000 * 1e7),  # latitude
                int(1.0001 * 1e7),  # longitude
                33.0000,  # altitude
                mavutil.mavlink.MAV_MISSION_TYPE_FENCE),
        ])

    def click_location_from_item(self, item):
        '''Point the MAVProxy map "click" location at a mission item's
        lat/lng (item.x/item.y are degrees * 1e7).'''
        self.mavproxy.send("click %f %f\n" % (item.x*1e-7, item.y*1e-7))

    def test_gcs_fence_via_mavproxy(self, target_system=1, target_component=1):
        '''Drive fence creation through MAVProxy's fence commands and check
        the resulting items via the mission protocol.'''
        self.start_subtest("Fence via MAVProxy")
        if not self.mavproxy_can_do_mision_item_protocols():
            return
        self.start_subsubtest("fence addcircle")
        self.mavproxy.send("fence clear\n")
        self.delay_sim_time(1)
        radius = 20
        item = self.mav.mav.mission_item_int_encode(
            target_system,
            target_component,
            0,  # seq
            mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
            mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_INCLUSION,
            0,  # current
            0,  # autocontinue
            radius,  # p1
            0,  # p2
            0,  # p3
            0,  # p4
            int(1.0017 * 1e7),  # latitude
            int(1.0017 * 1e7),  # longitude
            0.0000,  # altitude
            mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        print("item is (%s)" % str(item))
        self.click_location_from_item(item)
        self.mavproxy.send("fence addcircle inc %u\n" % radius)
        self.delay_sim_time(1)
        downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        print("downloaded items: %s" % str(downloaded_items))
        self.check_fence_items_same([item], downloaded_items)

        radius_exc = 57.3
        item2 = self.mav.mav.mission_item_int_encode(
            target_system,
            target_component,
            0,  # seq
            mavutil.mavlink.MAV_FRAME_GLOBAL_INT,
            mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION,
            0,  # current
            0,  # autocontinue
            radius_exc,  # p1
            0,  # p2
            0,  # p3
            0,  # p4
            int(1.0017 * 1e7),  # latitude
            int(1.0017 * 1e7),  # longitude
            0.0000,  # altitude
            mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        self.click_location_from_item(item2)
        self.mavproxy.send("fence addcircle exc %f\n" % radius_exc)
        self.delay_sim_time(1)
        downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        print("downloaded items: %s" % str(downloaded_items))
        self.check_fence_items_same([item, item2], downloaded_items)
        self.end_subsubtest("fence addcircle")

        self.start_subsubtest("fence addpoly")
        self.mavproxy.send("fence clear\n")
        self.delay_sim_time(1)
        pointcount = 7
        self.mavproxy.send("fence addpoly inc 20 %u 37.2\n" % pointcount)  # radius, pointcount, rotation
        self.delay_sim_time(5)
        downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE)
        if len(downloaded_items) != pointcount:
            raise NotAchievedException("Did not get expected number of points returned (want=%u got=%u)" %
                                       (pointcount, len(downloaded_items)))
        self.end_subsubtest("fence addpoly")

        self.start_subsubtest("fence movepolypoint")
        self.mavproxy.send("fence clear\n")
        self.delay_sim_time(1)
triangle = self.test_gcs_fence_boring_triangle( target_system=target_system, target_component=target_component) self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, triangle) self.mavproxy.send("fence list\n") self.delay_sim_time(1) triangle[2].x += 500 triangle[2].y += 700 self.click_location_from_item(triangle[2]) self.mavproxy.send("fence movepolypoint 0 2\n") self.delay_sim_time(10) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.check_fence_items_same(triangle, downloaded_items) self.end_subsubtest("fence movepolypoint") self.start_subsubtest("fence enable and disable") self.mavproxy.send("fence enable\n") self.mavproxy.expect("fence enabled") self.mavproxy.send("fence disable\n") self.mavproxy.expect("fence disabled") self.end_subsubtest("fence enable and disable") # MANUAL> usage: fence <addcircle|addpoly|changealt|clear|disable|draw|enable|list|load|move|movemulti|movepolypoint|param|remove|save|savecsv|savelocal|show|status|undo|update> # noqa def test_gcs_fence(self): target_system = 1 target_component = 1 self.progress("Testing FENCE_POINT protocol") self.start_subtest("FENCE_TOTAL manipulation") self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.assert_parameter_value("FENCE_TOTAL", 0) self.set_parameter("FENCE_TOTAL", 5) self.assert_parameter_value("FENCE_TOTAL", 5) self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.assert_parameter_value("FENCE_TOTAL", 0) self.progress("sending out-of-range fencepoint") self.send_fencepoint_expect_statustext(0, 0, 1.2345, 5.4321, "index past total", target_system=target_component, target_component=target_component) self.progress("sending another out-of-range fencepoint") self.send_fencepoint_expect_statustext(0, 1, 1.2345, 5.4321, "bad count", target_system=target_component, target_component=target_component) self.set_parameter("FENCE_TOTAL", 1) self.assert_parameter_value("FENCE_TOTAL", 1) 
self.send_fencepoint_expect_statustext(0, 1, 1.2345, 5.4321, "Invalid FENCE_TOTAL", target_system=target_component, target_component=target_component) self.set_parameter("FENCE_TOTAL", 5) self.progress("Checking default points") for i in range(5): m = self.get_fence_point(i) if m.count != 5: raise NotAchievedException("Unexpected count in fence point (want=%u got=%u" % (5, m.count)) if m.lat != 0 or m.lng != 0: raise NotAchievedException("Unexpected lat/lon in fencepoint") self.progress("Storing a return point") self.roundtrip_fencepoint_protocol(0, 5, 1.2345, 5.4321, target_system=target_system, target_component=target_component) lat = 2.345 lng = 4.321 self.roundtrip_fencepoint_protocol(0, 5, lat, lng, target_system=target_system, target_component=target_component) if not self.mavproxy_can_do_mision_item_protocols(): self.progress("MAVProxy too old to do fence point protocols") return self.progress("Download with new protocol") items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE) if len(items) != 1: raise NotAchievedException("Unexpected fencepoint count (want=%u got=%u)" % (1, len(items))) if items[0].command != mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT: raise NotAchievedException( "Fence return point not of correct type expected (%u) got %u" % (items[0].command, mavutil.mavlink.MAV_CMD_NAV_FENCE_RETURN_POINT)) if items[0].frame != mavutil.mavlink.MAV_FRAME_GLOBAL: raise NotAchievedException( "Unexpected frame want=%s got=%s," % (self.string_for_frame(mavutil.mavlink.MAV_FRAME_GLOBAL), self.string_for_frame(items[0].frame))) got_lat = items[0].x want_lat = lat * 1e7 if abs(got_lat - want_lat) > 1: raise NotAchievedException("Disagree in lat (got=%f want=%f)" % (got_lat, want_lat)) if abs(items[0].y - lng * 1e7) > 1: raise NotAchievedException("Disagree in lng") if items[0].seq != 0: raise NotAchievedException("Disagree in offset") self.progress("Downloaded with new protocol OK") # upload using mission protocol: items = 
self.test_gcs_fence_boring_triangle( target_system=target_system, target_component=target_component) self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, items) self.progress("Download with new protocol") downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE) if len(downloaded_items) != len(items): raise NotAchievedException("Did not download expected number of items (wanted=%u got=%u)" % (len(items), len(downloaded_items))) self.assert_parameter_value("FENCE_TOTAL", len(items) + 1) # +1 for closing self.progress("Ensuring fence items match what we sent up") self.check_fence_items_same(items, downloaded_items) # now check centroid self.progress("Requesting fence return point") self.mav.mav.fence_fetch_point_send(target_system, target_component, 0) m = self.mav.recv_match(type="FENCE_POINT", blocking=True, timeout=1) print("m: %s" % str(m)) self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.progress("Checking count post-nuke") self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.assert_mission_count_on_link(self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.start_subtest("Ensuring bad fences get bounced") for fence in self.fences_which_should_not_upload(target_system=target_system, target_component=target_component): (name, items) = fence self.progress("Ensuring (%s) gets bounced" % (name,)) self.assert_fence_does_not_upload(items) self.start_subtest("Ensuring good fences don't get bounced") for fence in self.fences_which_should_upload(target_system=target_system, target_component=target_component): (name, items) = fence self.progress("Ensuring (%s) gets uploaded" % (name,)) self.check_fence_upload_download(items) self.progress("(%s) uploaded just fine" % (name,)) 
self.test_gcs_fence_update_fencepoint(target_system=target_system, target_component=target_component) self.test_gcs_fence_centroid(target_system=target_system, target_component=target_component) self.test_gcs_fence_via_mavproxy(target_system=target_system, target_component=target_component) # explode the write_type_to_storage method # FIXME: test converting invalid fences / minimally valid fences / normal fences # FIXME: show that uploading smaller items take up less space # FIXME: add test for consecutive breaches within the manual recovery period # FIXME: ensure truncation does the right thing by fence_total # FIXME: test vehicle escape from outside inclusion zones to # inside inclusion zones (and inside exclusion zones to outside # exclusion zones) # FIXME: add test that a fence with edges that cross can't be uploaded # FIXME: add a test that fences enclose an area (e.g. all the points aren't the same value! def test_offboard(self, timeout=90): self.load_mission("rover-guided-mission.txt") self.wait_ready_to_arm(require_absolute=True) self.arm_vehicle() self.change_mode("AUTO") offboard_expected_duration = 10 # see mission file if self.mav.messages.get("SET_POSITION_TARGET_GLOBAL_INT", None): raise PreconditionFailedException("Already have SET_POSITION_TARGET_GLOBAL_INT") tstart = self.get_sim_time_cached() last_heartbeat_sent = 0 got_ptgi = False magic_waypoint_tstart = 0 magic_waypoint_tstop = 0 while True: now = self.get_sim_time_cached() if now - last_heartbeat_sent > 1: last_heartbeat_sent = now self.mav.mav.heartbeat_send(mavutil.mavlink.MAV_TYPE_ONBOARD_CONTROLLER, mavutil.mavlink.MAV_AUTOPILOT_INVALID, 0, 0, 0) if now - tstart > timeout: raise AutoTestTimeoutException("Didn't complete") magic_waypoint = 3 mc = self.mav.recv_match(type=["MISSION_CURRENT", "STATUSTEXT"], blocking=False) if mc is not None: print("%s" % str(mc)) if mc.get_type() == "STATUSTEXT": if "Mission Complete" in mc.text: break continue if mc.seq == magic_waypoint: print("At magic 
waypoint") if magic_waypoint_tstart == 0: magic_waypoint_tstart = self.get_sim_time_cached() ptgi = self.mav.messages.get("POSITION_TARGET_GLOBAL_INT", None) if ptgi is not None: got_ptgi = True elif mc.seq > magic_waypoint: if magic_waypoint_tstop == 0: magic_waypoint_tstop = self.get_sim_time_cached() self.disarm_vehicle() offboard_duration = magic_waypoint_tstop - magic_waypoint_tstart if abs(offboard_duration - offboard_expected_duration) > 1: raise NotAchievedException("Did not stay in offboard control for correct time (want=%f got=%f)" % (offboard_expected_duration, offboard_duration)) if not got_ptgi: raise NotAchievedException("Did not get ptgi message") print("pgti: %s" % str(ptgi)) def assert_mission_count_on_link(self, mav, expected_count, target_system, target_component, mission_type): self.drain_mav_unparsed(mav=mav, freshen_sim_time=True) self.progress("waiting for a message - any message....") m = mav.recv_match(blocking=True, timeout=1) self.progress("Received (%s)" % str(m)) if not mav.mavlink20(): raise NotAchievedException("Not doing mavlink2") mav.mav.mission_request_list_send(target_system, target_component, mission_type) self.assert_receive_mission_count_on_link(mav, expected_count, target_system, target_component, mission_type) def assert_receive_mission_count_on_link(self, mav, expected_count, target_system, target_component, mission_type, expected_target_system=None, expected_target_component=None, timeout=120): if expected_target_system is None: expected_target_system = mav.mav.srcSystem if expected_target_component is None: expected_target_component = mav.mav.srcComponent self.progress("Waiting for mission count of (%u) from (%u:%u) to (%u:%u)" % (expected_count, target_system, target_component, expected_target_system, expected_target_component)) tstart = self.get_sim_time_cached() while True: delta = self.get_sim_time_cached() - tstart if delta > timeout: raise NotAchievedException("Did not receive MISSION_COUNT on link after %fs" % 
delta) m = mav.recv_match(blocking=True, timeout=1) if m is None: self.progress("No messages") continue # self.progress("Received (%s)" % str(m)) if m.get_type() == "MISSION_ACK": if m.type != mavutil.mavlink.MAV_MISSION_ACCEPTED: raise NotAchievedException("Expected MAV_MISSION_ACCEPTED, got (%s)" % m) if m.get_type() == "MISSION_COUNT": break if m.target_system != expected_target_system: raise NotAchievedException("Incorrect target system in MISSION_COUNT (want=%u got=%u)" % (expected_target_system, m.target_system)) if m.target_component != expected_target_component: raise NotAchievedException("Incorrect target component in MISSION_COUNT") if m.mission_type != mission_type: raise NotAchievedException("Did not get expected mission type (want=%u got=%u)" % (mission_type, m.mission_type)) if m.count != expected_count: raise NotAchievedException("Bad count received (want=%u got=%u)" % (expected_count, m.count)) self.progress("Asserted mission count (type=%u) is %u after %fs" % ( (mission_type, m.count, delta))) def get_mission_item_int_on_link(self, item, mav, target_system, target_component, mission_type): self.drain_mav(mav=mav, unparsed=True) mav.mav.mission_request_int_send(target_system, target_component, item, mission_type) m = mav.recv_match(type='MISSION_ITEM_INT', blocking=True, timeout=60, condition='MISSION_ITEM_INT.mission_type==%u' % mission_type) if m is None: raise NotAchievedException("Did not receive MISSION_ITEM_INT") if m.mission_type != mission_type: raise NotAchievedException("Mission item of incorrect type") if m.target_system != mav.mav.srcSystem: raise NotAchievedException("Unexpected target system %u want=%u" % (m.target_system, mav.mav.srcSystem)) if m.seq != item: raise NotAchievedException( "Incorrect sequence number on received item got=%u want=%u" % (m.seq, item)) if m.mission_type != mission_type: raise NotAchievedException( "Mission type incorrect on received item (want=%u got=%u)" % (mission_type, m.mission_type)) if 
m.target_component != mav.mav.srcComponent: raise NotAchievedException( "Unexpected target component %u want=%u" % (m.target_component, mav.mav.srcComponent)) return m def get_mission_item_on_link(self, item, mav, target_system, target_component, mission_type): self.drain_mav(mav=mav, unparsed=True) mav.mav.mission_request_send(target_system, target_component, item, mission_type) m = mav.recv_match(type='MISSION_ITEM', blocking=True, timeout=60) if m is None: raise NotAchievedException("Did not receive MISSION_ITEM") if m.target_system != mav.mav.srcSystem: raise NotAchievedException("Unexpected target system %u want=%u" % (m.target_system, mav.mav.srcSystem)) if m.seq != item: raise NotAchievedException("Incorrect sequence number on received item_int got=%u want=%u" % (m.seq, item)) if m.mission_type != mission_type: raise NotAchievedException("Mission type incorrect on received item_int (want=%u got=%u)" % (mission_type, m.mission_type)) if m.target_component != mav.mav.srcComponent: raise NotAchievedException("Unexpected target component %u want=%u" % (m.target_component, mav.mav.srcComponent)) return m def assert_receive_mission_item_request(self, mission_type, seq): self.progress("Expecting request for item %u" % seq) m = self.mav.recv_match(type='MISSION_REQUEST', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not get MISSION_REQUEST") if m.mission_type != mission_type: raise NotAchievedException("Incorrect mission type (wanted=%u got=%u)" % (mission_type, m.mission_type)) if m.seq != seq: raise NotAchievedException("Unexpected sequence number (want=%u got=%u)" % (seq, m.seq)) self.progress("Received item request OK") def assert_receive_mission_ack(self, mission_type, want_type=mavutil.mavlink.MAV_MISSION_ACCEPTED, target_system=None, target_component=None, mav=None): if mav is None: mav = self.mav if target_system is None: target_system = mav.mav.srcSystem if target_component is None: target_component = mav.mav.srcComponent 
self.progress("Expecting mission ack") m = mav.recv_match(type='MISSION_ACK', blocking=True, timeout=5) self.progress("Received ACK (%s)" % str(m)) if m is None: raise NotAchievedException("Expected mission ACK") if m.target_system != target_system: raise NotAchievedException("ACK not targetted at correct system want=%u got=%u" % (target_system, m.target_system)) if m.target_component != target_component: raise NotAchievedException("ACK not targetted at correct component") if m.mission_type != mission_type: raise NotAchievedException("Unexpected mission type %u want=%u" % (m.mission_type, mission_type)) if m.type != want_type: raise NotAchievedException("Expected ack type got %u got %u" % (want_type, m.type)) def assert_filepath_content(self, filepath, want): with open(filepath) as f: got = f.read() if want != got: raise NotAchievedException("Did not get expected file content (want=%s) (got=%s)" % (want, got)) def mavproxy_can_do_mision_item_protocols(self): return False if not self.mavproxy_version_gt(1, 8, 12): self.progress("MAVProxy is too old; skipping tests") return False return True def check_rally_items_same(self, want, got, epsilon=None): check_atts = ['mission_type', 'command', 'x', 'y', 'z', 'seq', 'param1'] return self.check_mission_items_same(check_atts, want, got, epsilon=epsilon) def click_three_in(self, target_system=1, target_component=1): self.mavproxy.send('rally clear\n') self.drain_mav_unparsed() # there are race conditions in MAVProxy. Beware. 
self.mavproxy.send("click 1.0 1.0\n") self.mavproxy.send("rally add\n") self.delay_sim_time(1) self.mavproxy.send("click 2.0 2.0\n") self.mavproxy.send("rally add\n") self.delay_sim_time(1) self.mavproxy.send("click 3.0 3.0\n") self.mavproxy.send("rally add\n") self.delay_sim_time(10) self.assert_mission_count_on_link( self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) def test_gcs_rally_via_mavproxy(self, target_system=1, target_component=1): self.start_subtest("Testing mavproxy CLI for rally points") if not self.mavproxy_can_do_mision_item_protocols(): return self.start_subsubtest("rally add") self.mavproxy.send('rally clear\n') lat_s = "-5.6789" lng_s = "98.2341" lat = float(lat_s) lng = float(lng_s) self.mavproxy.send('click %s %s\n' % (lat_s, lng_s)) self.drain_mav_unparsed() self.mavproxy.send('rally add\n') self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=255, target_component=0) self.delay_sim_time(5) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded_items) != 1: raise NotAchievedException("Unexpected count (got=%u want=1)" % (len(downloaded_items), )) if (downloaded_items[0].x - int(lat * 1e7)) > 1: raise NotAchievedException("Bad rally lat. Want=%d got=%d" % (int(lat * 1e7), downloaded_items[0].x)) if (downloaded_items[0].y - int(lng * 1e7)) > 1: raise NotAchievedException("Bad rally lng. Want=%d got=%d" % (int(lng * 1e7), downloaded_items[0].y)) if (downloaded_items[0].z - int(90)) > 1: raise NotAchievedException("Bad rally alt. 
Want=90 got=%d" % (downloaded_items[0].y)) self.end_subsubtest("rally add") self.start_subsubtest("rally list") util.pexpect_drain(self.mavproxy) self.mavproxy.send('rally list\n') self.mavproxy.expect(r"Saved 1 rally items to ([^\s]*)\s") filename = self.mavproxy.match.group(1) self.assert_rally_filepath_content(filename, '''QGC WPL 110 0 0 3 5100 0.000000 0.000000 0.000000 0.000000 -5.678900 98.234100 90.000000 0 ''') self.end_subsubtest("rally list") self.start_subsubtest("rally save") util.pexpect_drain(self.mavproxy) save_tmppath = self.buildlogs_path("rally-testing-tmp.txt") self.mavproxy.send('rally save %s\n' % save_tmppath) self.mavproxy.expect(r"Saved 1 rally items to ([^\s]*)\s") filename = self.mavproxy.match.group(1) if filename != save_tmppath: raise NotAchievedException("Bad save filepath; want=%s got=%s" % (save_tmppath, filename)) self.assert_rally_filepath_content(filename, '''QGC WPL 110 0 0 3 5100 0.000000 0.000000 0.000000 0.000000 -5.678900 98.234100 90.000000 0 ''') self.end_subsubtest("rally save") self.start_subsubtest("rally savecsv") util.pexpect_drain(self.mavproxy) csvpath = self.buildlogs_path("rally-testing-tmp.csv") self.mavproxy.send('rally savecsv %s\n' % csvpath) self.mavproxy.expect('"Seq","Frame"') expected_content = '''"Seq","Frame","Cmd","P1","P2","P3","P4","X","Y","Z" "0","Rel","NAV_RALLY_POINT","0.0","0.0","0.0","0.0","-5.67890024185","98.2341003418","90.0" ''' if sys.version_info[0] >= 3: # greater precision output by default expected_content = '''"Seq","Frame","Cmd","P1","P2","P3","P4","X","Y","Z" "0","Rel","NAV_RALLY_POINT","0.0","0.0","0.0","0.0","-5.678900241851807","98.23410034179688","90.0" ''' self.assert_filepath_content(csvpath, expected_content) self.end_subsubtest("rally savecsv") self.start_subsubtest("rally load") self.drain_mav() self.mavproxy.send('rally clear\n') self.assert_mission_count_on_link(self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) # warning: uses file saved 
from previous test self.start_subtest("Check rally load from filepath") self.mavproxy.send('rally load %s\n' % save_tmppath) self.mavproxy.expect(r"Loaded 1 rally items from ([^\s]*)\s") self.mavproxy.expect("Sent all .* rally items") # notional race condition here downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded_items) != 1: raise NotAchievedException("Unexpected item count (%u)" % len(downloaded_items)) if abs(int(downloaded_items[0].x) - int(lat * 1e7)) > 3: raise NotAchievedException("Expected lat=%d got=%d" % (lat * 1e7, downloaded_items[0].x)) if abs(int(downloaded_items[0].y) - int(lng * 1e7)) > 10: raise NotAchievedException("Expected lng=%d got=%d" % (lng * 1e7, downloaded_items[0].y)) self.end_subsubtest("rally load") self.start_subsubtest("rally changealt") self.mavproxy.send('rally clear\n') self.mavproxy.send("click 1.0 1.0\n") self.mavproxy.send("rally add\n") self.mavproxy.send("click 2.0 2.0\n") self.mavproxy.send("rally add\n") self.delay_sim_time(10) self.assert_mission_count_on_link( self.mav, 2, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) self.drain_mav() self.mavproxy.send("rally changealt 1 17.6\n") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=255, target_component=0) self.delay_sim_time(10) self.mavproxy.send("rally changealt 2 19.1\n") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=255, target_component=0) self.delay_sim_time(10) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded_items) != 2: raise NotAchievedException("Unexpected item count (%u)" % len(downloaded_items)) if abs(int(downloaded_items[0].x) - int(1 * 1e7)) > 3: raise NotAchievedException("Expected lat=%d got=%d" % (1 * 1e7, downloaded_items[0].x)) if abs(int(downloaded_items[0].y) - int(1 * 1e7)) > 10: raise NotAchievedException("Expected 
lng=%d got=%d" % (1 * 1e7, downloaded_items[0].y)) # at some stage ArduPilot will stop rounding altitude. This # will break then. if abs(int(downloaded_items[0].z) - int(17.6)) > 0.0001: raise NotAchievedException("Expected alt=%f got=%f" % (17.6, downloaded_items[0].z)) if abs(int(downloaded_items[1].x) - int(2 * 1e7)) > 3: raise NotAchievedException("Expected lat=%d got=%d" % (2 * 1e7, downloaded_items[0].x)) if abs(int(downloaded_items[1].y) - int(2 * 1e7)) > 10: raise NotAchievedException("Expected lng=%d got=%d" % (2 * 1e7, downloaded_items[0].y)) # at some stage ArduPilot will stop rounding altitude. This # will break then. if abs(int(downloaded_items[1].z) - int(19.1)) > 0.0001: raise NotAchievedException("Expected alt=%f got=%f" % (19.1, downloaded_items[1].z)) self.progress("Now change two at once") self.mavproxy.send("rally changealt 1 17.3 2\n") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=255, target_component=0) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded_items) != 2: raise NotAchievedException("Unexpected item count (%u)" % len(downloaded_items)) if abs(int(downloaded_items[0].x) - int(1 * 1e7)) > 3: raise NotAchievedException("Expected lat=%d got=%d" % (1 * 1e7, downloaded_items[0].x)) if abs(int(downloaded_items[0].y) - int(1 * 1e7)) > 10: raise NotAchievedException("Expected lng=%d got=%d" % (1 * 1e7, downloaded_items[0].y)) # at some stage ArduPilot will stop rounding altitude. This # will break then. 
if abs(int(downloaded_items[0].z) - int(17.3)) > 0.0001: raise NotAchievedException("Expected alt=%f got=%f" % (17.3, downloaded_items[0].z)) if abs(int(downloaded_items[1].x) - int(2 * 1e7)) > 3: raise NotAchievedException("Expected lat=%d got=%d" % (2 * 1e7, downloaded_items[0].x)) if abs(int(downloaded_items[1].y) - int(2 * 1e7)) > 10: raise NotAchievedException("Expected lng=%d got=%d" % (2 * 1e7, downloaded_items[0].y)) # at some stage ArduPilot will stop rounding altitude. This # will break then. if abs(int(downloaded_items[1].z) - int(17.3)) > 0.0001: raise NotAchievedException("Expected alt=%f got=%f" % (17.3, downloaded_items[0].z)) self.end_subsubtest("rally changealt") self.start_subsubtest("rally move") self.mavproxy.send('rally clear\n') self.mavproxy.send("click 1.0 1.0\n") self.mavproxy.send("rally add\n") self.mavproxy.send("click 2.0 2.0\n") self.mavproxy.send("rally add\n") self.delay_sim_time(5) self.assert_mission_count_on_link( self.mav, 2, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) self.mavproxy.send("click 3.0 3.0\n") self.mavproxy.send("rally move 2\n") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=255, target_component=0) self.mavproxy.send("click 4.12345 4.987654\n") self.mavproxy.send("rally move 1\n") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=255, target_component=0) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded_items) != 2: raise NotAchievedException("Unexpected item count (%u)" % len(downloaded_items)) if downloaded_items[0].x != 41234500: raise NotAchievedException("Bad latitude") if downloaded_items[0].y != 49876540: raise NotAchievedException("Bad longitude") if downloaded_items[0].z != 90: raise NotAchievedException("Bad altitude (want=%u got=%u)" % (90, downloaded_items[0].z)) if downloaded_items[1].x != 30000000: raise NotAchievedException("Bad 
latitude") if downloaded_items[1].y != 30000000: raise NotAchievedException("Bad longitude") if downloaded_items[1].z != 90: raise NotAchievedException("Bad altitude (want=%u got=%u)" % (90, downloaded_items[1].z)) self.end_subsubtest("rally move") self.start_subsubtest("rally movemulti") self.drain_mav_unparsed() self.mavproxy.send('rally clear\n') self.drain_mav_unparsed() # there are race conditions in MAVProxy. Beware. self.mavproxy.send("click 1.0 1.0\n") self.mavproxy.send("rally add\n") self.mavproxy.send("click 2.0 2.0\n") self.mavproxy.send("rally add\n") self.mavproxy.send("click 3.0 3.0\n") self.mavproxy.send("rally add\n") self.delay_sim_time(10) self.assert_mission_count_on_link( self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) click_lat = 2.0 click_lon = 3.0 unmoved_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(unmoved_items) != 3: raise NotAchievedException("Unexpected item count") self.mavproxy.send("click %f %f\n" % (click_lat, click_lon)) self.mavproxy.send("rally movemulti 2 1 3\n") # MAVProxy currently sends three separate items up. That's # not great and I don't want to lock that behaviour in here. self.delay_sim_time(10) self.drain_mav_unparsed() expected_moved_items = copy.copy(unmoved_items) expected_moved_items[0].x = 1.0 * 1e7 expected_moved_items[0].y = 2.0 * 1e7 expected_moved_items[1].x = 2.0 * 1e7 expected_moved_items[1].y = 3.0 * 1e7 expected_moved_items[2].x = 3.0 * 1e7 expected_moved_items[2].y = 4.0 * 1e7 moved_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) # we're moving an entire degree in latitude; quite an epsilon required... self.check_rally_items_same(expected_moved_items, moved_items, epsilon=10000) self.progress("now move back and rotate through 90 degrees") self.mavproxy.send("click %f %f\n" % (2, 2)) self.mavproxy.send("rally movemulti 2 1 3 90\n") # MAVProxy currently sends three separate items up. 
That's # not great and I don't want to lock that behaviour in here. self.delay_sim_time(10) self.drain_mav_unparsed() expected_moved_items = copy.copy(unmoved_items) expected_moved_items[0].x = 3.0 * 1e7 expected_moved_items[0].y = 1.0 * 1e7 expected_moved_items[1].x = 2.0 * 1e7 expected_moved_items[1].y = 2.0 * 1e7 expected_moved_items[2].x = 1.0 * 1e7 expected_moved_items[2].y = 3.0 * 1e7 moved_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) # we're moving an entire degree in latitude; quite an epsilon required... self.check_rally_items_same(expected_moved_items, moved_items, epsilon=12000) self.end_subsubtest("rally movemulti") self.start_subsubtest("rally param") self.mavproxy.send("rally param 3 2 5\n") self.mavproxy.expect("Set param 2 for 3 to 5.000000") self.end_subsubtest("rally param") self.start_subsubtest("rally remove") self.click_three_in(target_system=target_system, target_component=target_component) pure_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Removing last in list") self.mavproxy.send("rally remove 3\n") self.delay_sim_time(10) self.assert_mission_count_on_link( self.mav, 2, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) fewer_downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(fewer_downloaded_items) != 2: raise NotAchievedException("Unexpected download list length") shorter_items = copy.copy(pure_items) shorter_items = shorter_items[0:2] self.check_rally_items_same(shorter_items, fewer_downloaded_items) self.progress("Removing first in list") self.mavproxy.send("rally remove 1\n") self.delay_sim_time(5) self.drain_mav_unparsed() self.assert_mission_count_on_link( self.mav, 1, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) fewer_downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if 
len(fewer_downloaded_items) != 1: raise NotAchievedException("Unexpected download list length") shorter_items = shorter_items[1:] self.check_rally_items_same(shorter_items, fewer_downloaded_items) self.progress("Removing remaining item") self.mavproxy.send("rally remove 1\n") self.delay_sim_time(5) self.drain_mav_unparsed() self.assert_mission_count_on_link( self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) self.end_subsubtest("rally remove") self.start_subsubtest("rally show") # what can we test here? self.mavproxy.send("rally show %s\n" % save_tmppath) self.end_subsubtest("rally show") # savelocal must be run immediately after show! self.start_subsubtest("rally savelocal") util.pexpect_drain(self.mavproxy) savelocal_path = self.buildlogs_path("rally-testing-tmp-local.txt") self.mavproxy.send('rally savelocal %s\n' % savelocal_path) self.delay_sim_time(5) self.assert_rally_filepath_content(savelocal_path, '''QGC WPL 110 0 0 3 5100 0.000000 0.000000 0.000000 0.000000 -5.678900 98.234100 90.000000 0 ''') self.end_subsubtest("rally savelocal") self.start_subsubtest("rally status") self.click_three_in(target_system=target_system, target_component=target_component) self.mavproxy.send("rally status\n") self.mavproxy.expect("Have 3 of 3 rally items") self.mavproxy.send("rally clear\n") self.mavproxy.send("rally status\n") self.mavproxy.expect("Have 0 of 0 rally items") self.end_subsubtest("rally status") self.start_subsubtest("rally undo") self.progress("Testing undo-remove") self.click_three_in(target_system=target_system, target_component=target_component) pure_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Removing first in list") self.mavproxy.send("rally remove 1\n") self.delay_sim_time(5) self.drain_mav_unparsed() self.assert_mission_count_on_link( self.mav, 2, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY, ) self.mavproxy.send("rally undo\n") 
self.delay_sim_time(5) undone_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.check_rally_items_same(pure_items, undone_items) self.progress("Testing undo-move") pure_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.mavproxy.send("click 4.12345 4.987654\n") self.mavproxy.send("rally move 1\n") # move has already been tested, assume it works... self.delay_sim_time(5) self.drain_mav_unparsed() self.mavproxy.send("rally undo\n") self.delay_sim_time(5) self.drain_mav_unparsed() undone_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.check_rally_items_same(pure_items, undone_items) self.end_subsubtest("rally undo") self.start_subsubtest("rally update") self.click_three_in(target_system=target_system, target_component=target_component) pure_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) rally_update_tmpfilepath = self.buildlogs_path("rally-tmp-update.txt") self.mavproxy.send("rally save %s\n" % rally_update_tmpfilepath) self.delay_sim_time(5) self.progress("Moving waypoint") self.mavproxy.send("click 13.0 13.0\n") self.mavproxy.send("rally move 1\n") self.delay_sim_time(5) self.progress("Reverting to original") self.mavproxy.send("rally update %s\n" % rally_update_tmpfilepath) self.delay_sim_time(5) reverted_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.check_rally_items_same(pure_items, reverted_items) self.progress("Making sure specifying a waypoint to be updated works") self.mavproxy.send("click 13.0 13.0\n") self.mavproxy.send("rally move 1\n") self.delay_sim_time(5) self.mavproxy.send("click 17.0 17.0\n") self.mavproxy.send("rally move 2\n") self.delay_sim_time(5) self.progress("Reverting to original item 2") self.mavproxy.send("rally update %s 2\n" % rally_update_tmpfilepath) self.delay_sim_time(5) reverted_items = 
self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if reverted_items[0].x != 130000000: raise NotAchievedException("Expected item1 x to stay changed (got=%u want=%u)" % (reverted_items[0].x, 130000000)) if reverted_items[1].x == 170000000: raise NotAchievedException("Expected item2 x to revert") self.end_subsubtest("rally update") # MANUAL> usage: rally <add|alt|changealt|clear|list|load|move|movemulti|param|remove|save|savecsv|savelocal|show|status|undo|update> # noqa def test_gcs_rally(self): target_system = 1 target_component = 1 self.test_gcs_rally_via_mavproxy(target_system=target_system, target_component=target_component) self.mavproxy.send('rally clear\n') self.delay_sim_time(1) if self.get_parameter("RALLY_TOTAL") != 0: raise NotAchievedException("Failed to clear rally points") old_srcSystem = self.mav.mav.srcSystem # stop MAVProxy poking the autopilot: self.mavproxy.send('module unload rally\n') self.mavproxy.expect("Unloaded module rally") self.mavproxy.send('module unload wp\n') self.mavproxy.expect("Unloaded module wp") self.drain_mav() try: item1_lat = int(2.0000 * 1e7) items = [ self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_RALLY_POINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 * 1e7), # latitude int(1.0000 * 1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_RALLY_POINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 item1_lat, # latitude int(2.0000 * 1e7), # longitude 32.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.mav.mav.mission_item_int_encode( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, 
mavutil.mavlink.MAV_CMD_NAV_RALLY_POINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(3.0000 * 1e7), # latitude int(3.0000 * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), ] self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, items) downloaded = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) print("Got items (%s)" % str(items)) if len(downloaded) != len(items): raise NotAchievedException( "Did not download correct number of items want=%u got=%u" % (len(downloaded), len(items))) rally_total = self.get_parameter("RALLY_TOTAL") if rally_total != len(downloaded): raise NotAchievedException( "Unexpected rally point count: want=%u got=%u" % (len(items), rally_total)) self.progress("Pruning count by setting parameter (urgh)") self.set_parameter("RALLY_TOTAL", 2) downloaded = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded) != 2: raise NotAchievedException( "Failed to prune rally points by setting parameter. want=%u got=%u" % (2, len(downloaded))) self.progress("Uploading a third item using old protocol") new_item2_lat = int(6.0 * 1e7) self.set_parameter("RALLY_TOTAL", 3) self.mav.mav.rally_point_send(target_system, target_component, 2, # sequence number 3, # total count new_item2_lat, int(7.0 * 1e7), 15, 0, # "break" alt?! 
0, # "land dir" 0) # flags downloaded = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if len(downloaded) != 3: raise NotAchievedException( "resetting rally point count didn't change items returned") if downloaded[2].x != new_item2_lat: raise NotAchievedException( "Bad lattitude in downloaded item: want=%u got=%u" % (new_item2_lat, downloaded[2].x)) self.progress("Grabbing original item 1 using original protocol") self.mav.mav.rally_fetch_point_send(target_system, target_component, 1) m = self.mav.recv_match(type="RALLY_POINT", blocking=True, timeout=1) if m.target_system != self.mav.source_system: raise NotAchievedException( "Bad target_system on received rally point (want=%u got=%u)" % (255, m.target_system)) if m.target_component != self.mav.source_component: # autotest's component ID raise NotAchievedException("Bad target_component on received rally point") if m.lat != item1_lat: raise NotAchievedException("Bad latitude on received rally point") self.start_subtest("Test upload lockout and timeout") self.progress("Starting upload from normal sysid") self.mav.mav.mission_count_send(target_system, target_component, len(items), mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() # throw away requests for items self.mav.mav.srcSystem = 243 self.progress("Attempting upload from sysid=%u" % (self.mav.mav.srcSystem,)) self.mav.mav.mission_count_send(target_system, target_component, len(items), mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_DENIED) self.progress("Attempting download from sysid=%u" % (self.mav.mav.srcSystem,)) self.mav.mav.mission_request_list_send(target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_DENIED) # wait for the upload from sysid=1 to time out: tstart = self.get_sim_time() 
got_statustext = False got_ack = False while True: if got_statustext and got_ack: self.progress("Got both ack and statustext") break if self.get_sim_time_cached() - tstart > 100: raise NotAchievedException("Did not get both ack and statustext") m = self.mav.recv_match(type=['STATUSTEXT', 'MISSION_ACK'], blocking=True, timeout=1) if m is None: continue self.progress("Got (%s)" % str(m)) if m.get_type() == 'STATUSTEXT': if "upload timeout" in m.text: got_statustext = True self.progress("Received desired statustext") continue if m.get_type() == 'MISSION_ACK': if m.target_system != old_srcSystem: raise NotAchievedException("Incorrect sourcesystem") if m.type != mavutil.mavlink.MAV_MISSION_OPERATION_CANCELLED: raise NotAchievedException("Incorrect result") if m.mission_type != mavutil.mavlink.MAV_MISSION_TYPE_RALLY: raise NotAchievedException("Incorrect mission_type") got_ack = True self.progress("Received desired ACK") continue raise NotAchievedException("Huh?") self.progress("Now trying to upload empty mission after timeout") self.mav.mav.mission_count_send(target_system, target_component, 0, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() self.start_subtest("Check rally upload/download across separate links") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, items) self.progress("ensure a mavlink1 connection can't do anything useful with new item types") self.set_parameter("SERIAL2_PROTOCOL", 1) self.reboot_sitl() mav2 = mavutil.mavlink_connection("tcp:localhost:5763", robust_parsing=True, source_system=7, source_component=7) mav2.mav.mission_request_list_send(target_system, target_component, mission_type=mavutil.mavlink.MAV_MISSION_TYPE_RALLY) # so this looks a bit odd; the other end isn't sending # mavlink2 so can't fill in the extension here. 
self.assert_receive_mission_ack( mavutil.mavlink.MAV_MISSION_TYPE_MISSION, want_type=mavutil.mavlink.MAV_MISSION_UNSUPPORTED, mav=mav2, ) # this relies on magic upgrade to serial2: self.set_parameter("SERIAL2_PROTOCOL", 2) expected_count = 3 self.progress("Assert mission count on new link") self.assert_mission_count_on_link( mav2, expected_count, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Assert mission count on original link") self.assert_mission_count_on_link( self.mav, expected_count, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Get first item on new link") m2 = self.get_mission_item_int_on_link( 2, mav2, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Get first item on original link") m = self.get_mission_item_int_on_link( 2, self.mav, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) if m2.x != m.x: raise NotAchievedException("mission items do not match (%d vs %d)" % (m2.x, m.x)) self.get_mission_item_on_link(2, self.mav, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) # ensure we get nacks for bad mission item requests: self.mav.mav.mission_request_send(target_system, target_component, 65, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack( mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_INVALID_SEQUENCE, ) self.mav.mav.mission_request_int_send(target_system, target_component, 65, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack( mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_INVALID_SEQUENCE, ) self.start_subtest("Should enforce items come from correct GCS") self.drain_mav(unparsed=True) self.mav.mav.mission_count_send(target_system, target_component, 1, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 0) 
self.progress("Attempting to upload from bad sysid") old_sysid = self.mav.mav.srcSystem self.mav.mav.srcSystem = 17 items[0].pack(self.mav.mav) self.drain_mav(unparsed=True) self.mav.mav.send(items[0]) self.mav.mav.srcSystem = old_sysid self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_DENIED, target_system=17) self.progress("Sending from correct sysid") items[0].pack(self.mav.mav) self.drain_mav(unparsed=True) self.mav.mav.send(items[0]) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() self.drain_all_pexpects() self.start_subtest("Attempt to send item on different link to that which we are sending requests on") self.progress("Sending count") self.drain_mav(unparsed=True) self.mav.mav.mission_count_send(target_system, target_component, 2, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 0) old_mav2_system = mav2.mav.srcSystem old_mav2_component = mav2.mav.srcComponent mav2.mav.srcSystem = self.mav.mav.srcSystem mav2.mav.srcComponent = self.mav.mav.srcComponent self.progress("Sending item on second link") # note that the routing table in ArduPilot now will say # this sysid/compid is on both links which may cause # weirdness... 
items[0].pack(mav2.mav) self.drain_mav(mav=self.mav, unparsed=True) mav2.mav.send(items[0]) mav2.mav.srcSystem = old_mav2_system mav2.mav.srcComponent = old_mav2_component # we continue to receive requests on the original link: m = self.mav.recv_match(type='MISSION_REQUEST', blocking=True, timeout=1) if m is None: raise NotAchievedException("Did not get mission request") if m.mission_type != mavutil.mavlink.MAV_MISSION_TYPE_RALLY: raise NotAchievedException("Mission request of incorrect type") if m.seq != 1: raise NotAchievedException("Unexpected sequence number (expected=%u got=%u)" % (1, m.seq)) items[1].pack(self.mav.mav) self.drain_mav(unparsed=True) self.mav.mav.send(items[1]) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.drain_mav() self.drain_all_pexpects() self.start_subtest("Upload mission and rally points at same time") self.progress("Sending rally count") self.drain_mav(unparsed=True) self.mav.mav.mission_count_send(target_system, target_component, 3, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 0) self.progress("Sending wp count") self.mav.mav.mission_count_send(target_system, target_component, 3, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, 0) self.progress("Answering request for mission item 0") self.drain_mav(mav=self.mav, unparsed=True) self.mav.mav.mission_item_int_send( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 * 1e7), # latitude int(1.2000 * 1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, 1) self.progress("Answering request for rally point 0") items[0].pack(self.mav.mav) 
self.drain_mav(unparsed=True) self.mav.mav.send(items[0]) self.progress("Expecting request for rally item 1") self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 1) self.progress("Answering request for rally point 1") items[1].pack(self.mav.mav) self.drain_mav(unparsed=True) self.mav.mav.send(items[1]) self.progress("Expecting request for rally item 2") self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 2) self.progress("Answering request for rally point 2") items[2].pack(self.mav.mav) self.drain_mav(unparsed=True) self.mav.mav.send(items[2]) self.progress("Expecting mission ack for rally") self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("Answering request for waypoints item 1") self.drain_mav(unparsed=True) self.mav.mav.mission_item_int_send( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 * 1e7), # latitude int(1.2000 * 1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, 2) self.progress("Answering request for waypoints item 2") self.drain_mav(unparsed=True) self.mav.mav.mission_item_int_send( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 * 1e7), # latitude int(1.2000 * 1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.start_subtest("Test write-partial-list") self.progress("Clearing rally points using count-send") self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, target_system=target_system, target_component=target_component) 
self.progress("Should not be able to set items completely past the waypoint count") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, items) self.drain_mav(unparsed=True) self.mav.mav.mission_write_partial_list_send( target_system, target_component, 17, 20, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_ERROR) self.progress("Should not be able to set items overlapping the waypoint count") self.drain_mav(unparsed=True) self.mav.mav.mission_write_partial_list_send( target_system, target_component, 0, 20, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_ERROR) self.progress("try to overwrite items 1 and 2") self.drain_mav(unparsed=True) self.mav.mav.mission_write_partial_list_send( target_system, target_component, 1, 2, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 1) self.progress("Try shoving up an incorrectly sequenced item") self.drain_mav(unparsed=True) self.mav.mav.mission_item_int_send( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_RALLY_POINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.1000 * 1e7), # latitude int(1.2000 * 1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_INVALID_SEQUENCE) self.progress("Try shoving up an incorrectly sequenced item (but within band)") self.drain_mav(unparsed=True) self.mav.mav.mission_item_int_send( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_RALLY_POINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 
int(1.1000 * 1e7), # latitude int(1.2000 * 1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_INVALID_SEQUENCE) self.progress("Now provide correct item") item1_latitude = int(1.2345 * 1e7) self.drain_mav(unparsed=True) self.mav.mav.mission_item_int_send( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_RALLY_POINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 item1_latitude, # latitude int(1.2000 * 1e7), # longitude 321.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_RALLY), self.assert_receive_mission_item_request(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, 2) self.progress("Answering request for rally point 2") items[2].pack(self.mav.mav) self.drain_mav(unparsed=True) self.mav.mav.send(items[2]) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.progress("TODO: ensure partial mission write was good") self.start_subtest("clear mission types") self.assert_mission_count_on_link( self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link( self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.drain_mav(unparsed=True) self.mav.mav.mission_clear_all_send(target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link( self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link( self.mav, 3, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.drain_mav(unparsed=True) self.mav.mav.mission_clear_all_send(target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) 
self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.assert_mission_count_on_link( self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_mission_count_on_link( self.mav, 0, target_system, target_component, mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.start_subtest("try sending out-of-range counts") self.drain_mav(unparsed=True) self.mav.mav.mission_count_send(target_system, target_component, 1, 230) self.assert_receive_mission_ack(230, want_type=mavutil.mavlink.MAV_MISSION_UNSUPPORTED) self.drain_mav(unparsed=True) self.mav.mav.mission_count_send(target_system, target_component, 16000, mavutil.mavlink.MAV_MISSION_TYPE_RALLY) self.assert_receive_mission_ack(mavutil.mavlink.MAV_MISSION_TYPE_RALLY, want_type=mavutil.mavlink.MAV_MISSION_NO_SPACE) except Exception as e: self.progress("Received exception (%s)" % self.get_exception_stacktrace(e)) self.mav.mav.srcSystem = old_srcSystem raise e self.mavproxy.send('module load rally\n') self.mavproxy.expect("Loaded module rally") self.mavproxy.send('module load wp\n') self.mavproxy.expect("Loaded module wp") self.reboot_sitl() def test_gcs_mission(self): '''check MAVProxy's waypoint handling of missions''' target_system = 1 target_component = 1 self.mavproxy.send('wp clear\n') self.delay_sim_time(1) if self.get_parameter("MIS_TOTAL") != 0: raise NotAchievedException("Failed to clear mission") self.drain_mav_unparsed() m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get expected MISSION_CURRENT") if m.seq != 0: raise NotAchievedException("Bad mission current") self.load_mission_using_mavproxy("rover-gripper-mission.txt") set_wp = 1 self.mavproxy.send('wp set %u\n' % set_wp) self.drain_mav() m = self.mav.recv_match(type='MISSION_CURRENT', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get expected MISSION_CURRENT") if m.seq != set_wp: raise 
NotAchievedException("Bad mission current. want=%u got=%u" % (set_wp, m.seq)) self.start_subsubtest("wp changealt") downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) changealt_item = 1 # oldalt = downloaded_items[changealt_item].z want_newalt = 37.2 self.mavproxy.send('wp changealt %u %f\n' % (changealt_item, want_newalt)) self.delay_sim_time(5) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) if abs(downloaded_items[changealt_item].z - want_newalt) > 0.0001: raise NotAchievedException( "changealt didn't (want=%f got=%f)" % (want_newalt, downloaded_items[changealt_item].z)) self.end_subsubtest("wp changealt") self.start_subsubtest("wp sethome") new_home_lat = 3.14159 new_home_lng = 2.71828 self.mavproxy.send('click %f %f\n' % (new_home_lat, new_home_lng)) self.mavproxy.send('wp sethome\n') self.delay_sim_time(5) # any way to close the loop on this one? # downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) # if abs(downloaded_items[0].x - new_home_lat) > 0.0001: # raise NotAchievedException("wp sethome didn't work") # if abs(downloaded_items[0].y - new_home_lng) > 0.0001: # raise NotAchievedException("wp sethome didn't work") self.end_subsubtest("wp sethome") self.start_subsubtest("wp slope") self.mavproxy.send('wp slope\n') self.mavproxy.expect("WP3: slope 0.1") self.delay_sim_time(5) self.end_subsubtest("wp slope") if not self.mavproxy_can_do_mision_item_protocols(): # adding based on click location yet to be merged into MAVProxy return self.start_subsubtest("wp split") self.mavproxy.send("wp clear\n") self.delay_sim_time(5) self.mavproxy.send("wp list\n") self.delay_sim_time(5) items = [ None, self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.0 * 
1e7), # latitude int(1.0 * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.mav.mav.mission_item_int_encode( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(2.0 * 1e7), # latitude int(2.0 * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), ] self.mavproxy.send("click 5 5\n") # space for home position self.mavproxy.send("wp add\n") self.delay_sim_time(5) self.click_location_from_item(items[1]) self.mavproxy.send("wp add\n") self.delay_sim_time(5) self.click_location_from_item(items[2]) self.mavproxy.send("wp add\n") self.delay_sim_time(5) downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.check_mission_waypoint_items_same(items, downloaded_items) self.mavproxy.send("wp split 2\n") self.delay_sim_time(5) items_with_split_in = [ items[0], items[1], self.mav.mav.mission_item_int_encode( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 0, # p1 0, # p2 0, # p3 0, # p4 int(1.5 * 1e7), # latitude int(1.5 * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), items[2], ] downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_MISSION) self.check_mission_waypoint_items_same(items_with_split_in, downloaded_items) # MANUAL> usage: wp <changealt|clear|draw|editor|list|load|loop|move|movemulti|noflyadd|param|remove|save|savecsv|savelocal|set|sethome|show|slope|split|status|undo|update> # noqa def wait_location_sending_target(self, loc, target_system=1, target_component=1, timeout=60, max_delta=2): tstart = self.get_sim_time() last_sent = 0 type_mask = (mavutil.mavlink.POSITION_TARGET_TYPEMASK_VX_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_VY_IGNORE + 
mavutil.mavlink.POSITION_TARGET_TYPEMASK_VZ_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AX_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AY_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AZ_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_RATE_IGNORE) self.change_mode('GUIDED') tstart = self.get_sim_time() while True: now = self.get_sim_time_cached() if now - tstart > timeout: raise AutoTestTimeoutException("Did not get to location") if now - last_sent > 1: last_sent = now self.mav.mav.set_position_target_global_int_send( 0, target_system, target_component, mavutil.mavlink.MAV_FRAME_GLOBAL_INT, type_mask, int(loc.lat * 1.0e7), int(loc.lng * 1.0e7), 0, # alt 0, # x-ve 0, # y-vel 0, # z-vel 0, # afx 0, # afy 0, # afz 0, # yaw, 0, # yaw-rate ) m = self.mav.recv_match(blocking=True, timeout=1) if m is None: continue t = m.get_type() if t == "POSITION_TARGET_GLOBAL_INT": self.progress("Target: (%s)" % str(m), send_statustext=False) elif t == "GLOBAL_POSITION_INT": self.progress("Position: (%s)" % str(m), send_statustext=False) delta = self.get_distance( mavutil.location(m.lat * 1e-7, m.lon * 1e-7, 0, 0), loc) self.progress("delta: %s" % str(delta), send_statustext=False) if delta < max_delta: self.progress("Reached destination") def drive_somewhere_breach_boundary_and_rtl(self, loc, target_system=1, target_component=1, timeout=60): tstart = self.get_sim_time() last_sent = 0 seen_fence_breach = False type_mask = (mavutil.mavlink.POSITION_TARGET_TYPEMASK_VX_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_VY_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_VZ_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AX_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AY_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AZ_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_RATE_IGNORE) self.change_mode('GUIDED') while True: now = 
self.get_sim_time_cached() if now - tstart > timeout: raise NotAchievedException("Did not breach boundary + RTL") if now - last_sent > 1: last_sent = now self.mav.mav.set_position_target_global_int_send( 0, target_system, target_component, mavutil.mavlink.MAV_FRAME_GLOBAL_INT, type_mask, int(loc.lat * 1.0e7), int(loc.lng * 1.0e7), 0, # alt 0, # x-ve 0, # y-vel 0, # z-vel 0, # afx 0, # afy 0, # afz 0, # yaw, 0, # yaw-rate ) m = self.mav.recv_match(blocking=True, timeout=1) if m is None: continue t = m.get_type() if t == "POSITION_TARGET_GLOBAL_INT": print("Target: (%s)" % str(m)) elif t == "GLOBAL_POSITION_INT": print("Position: (%s)" % str(m)) elif t == "FENCE_STATUS": print("Fence: %s" % str(m)) if m.breach_status != 0: seen_fence_breach = True self.progress("Fence breach detected!") if m.breach_type != mavutil.mavlink.FENCE_BREACH_BOUNDARY: raise NotAchievedException("Breach of unexpected type") if self.mode_is("RTL", cached=True) and seen_fence_breach: break self.wait_distance_to_home(3, 7, timeout=30) def drive_somewhere_stop_at_boundary(self, loc, expected_stopping_point, expected_distance_epsilon=1.0, target_system=1, target_component=1, timeout=120): tstart = self.get_sim_time() last_sent = 0 type_mask = (mavutil.mavlink.POSITION_TARGET_TYPEMASK_VX_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_VY_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_VZ_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AX_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AY_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_AZ_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_IGNORE + mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_RATE_IGNORE) self.change_mode('GUIDED') at_stopping_point = False while True: now = self.get_sim_time_cached() if now - tstart > timeout: raise NotAchievedException("Did not arrive and stop at boundary") if now - last_sent > 1: last_sent = now self.mav.mav.set_position_target_global_int_send( 0, target_system, target_component, 
mavutil.mavlink.MAV_FRAME_GLOBAL_INT, type_mask, int(loc.lat * 1.0e7), int(loc.lng * 1.0e7), 0, # alt 0, # x-ve 0, # y-vel 0, # z-vel 0, # afx 0, # afy 0, # afz 0, # yaw, 0, # yaw-rate ) m = self.mav.recv_match(blocking=True, timeout=1) if m is None: continue t = m.get_type() if t == "POSITION_TARGET_GLOBAL_INT": print("Target: (%s)" % str(m)) elif t == "GLOBAL_POSITION_INT": print("Position: (%s)" % str(m)) delta = self.get_distance( mavutil.location(m.lat * 1e-7, m.lon * 1e-7, 0, 0), mavutil.location(expected_stopping_point.lat, expected_stopping_point.lng, 0, 0)) print("delta: %s want_delta<%f" % (str(delta), expected_distance_epsilon)) at_stopping_point = delta < expected_distance_epsilon elif t == "VFR_HUD": print("groundspeed: %f" % m.groundspeed) if at_stopping_point: if m.groundspeed < 1: self.progress("Seemed to have stopped at stopping point") return def assert_fence_breached(self): m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=10) if m is None: raise NotAchievedException("Not receiving fence notifications?") if m.breach_status != 1: raise NotAchievedException("Expected to be breached") def wait_fence_not_breached(self, timeout=5): tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > timeout: raise AutoTestTimeoutException("Fence remains breached") m = self.mav.recv_match(type='FENCE_STATUS', blocking=True, timeout=1) if m is None: self.progress("No FENCE_STATUS received") continue self.progress("STATUS: %s" % str(m)) if m.breach_status == 0: break def test_poly_fence_noarms(self, target_system=1, target_component=1): '''various tests to ensure we can't arm when in breach of a polyfence''' self.start_subtest("Ensure PolyFence arming checks work") self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.delay_sim_time(5) # let breaches clear # FIXME: should we allow this? 
self.progress("Ensure we can arm with no poly in place") self.change_mode("GUIDED") self.wait_ready_to_arm() self.arm_vehicle() self.disarm_vehicle() self.test_poly_fence_noarms_exclusion_circle(target_system=target_system, target_component=target_component) self.test_poly_fence_noarms_inclusion_circle(target_system=target_system, target_component=target_component) self.test_poly_fence_noarms_exclusion_polyfence(target_system=target_system, target_component=target_component) self.test_poly_fence_noarms_inclusion_polyfence(target_system=target_system, target_component=target_component) def test_poly_fence_noarms_exclusion_circle(self, target_system=1, target_component=1): self.start_subtest("Ensure not armable when within an exclusion circle") here = self.mav.location() items = [ self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION, 0, # current 0, # autocontinue 5, # p1 - radius 0, # p2 0, # p3 0, # p4 int(here.lat * 1e7), # latitude int(here.lng * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_FENCE), self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION, 0, # current 0, # autocontinue 5, # p1 - radius 0, # p2 0, # p3 0, # p4 int(self.offset_location_ne(here, 100, 100).lat * 1e7), # latitude int(here.lng * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_FENCE), ] self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, items) self.delay_sim_time(5) # ArduPilot only checks for breaches @1Hz self.drain_mav() self.assert_fence_breached() if self.arm_motors_with_rc_input(): raise NotAchievedException( "Armed when within exclusion zone") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, []) self.wait_fence_not_breached() def 
test_poly_fence_noarms_inclusion_circle(self, target_system=1, target_component=1): self.start_subtest("Ensure not armable when outside an inclusion circle (but within another") here = self.mav.location() items = [ self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_INCLUSION, 0, # current 0, # autocontinue 5, # p1 - radius 0, # p2 0, # p3 0, # p4 int(here.lat * 1e7), # latitude int(here.lng * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_FENCE), self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_INCLUSION, 0, # current 0, # autocontinue 5, # p1 - radius 0, # p2 0, # p3 0, # p4 int(self.offset_location_ne(here, 100, 100).lat * 1e7), # latitude int(here.lng * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_FENCE), ] self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, items) self.delay_sim_time(5) # ArduPilot only checks for breaches @1Hz self.drain_mav() self.assert_fence_breached() if self.arm_motors_with_rc_input(): raise NotAchievedException( "Armed when outside an inclusion zone") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, []) self.wait_fence_not_breached() def test_poly_fence_noarms_exclusion_polyfence(self, target_system=1, target_component=1): self.start_subtest("Ensure not armable when inside an exclusion polyfence (but outside another") here = self.mav.location() self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_EXCLUSION, [ [ # east self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, ], [ # over the top of the vehicle self.offset_location_ne(here, -50, -50), # bl 
self.offset_location_ne(here, -50, 50), # br self.offset_location_ne(here, 50, 50), # tr self.offset_location_ne(here, 50, -50), # tl, ] ] ) self.delay_sim_time(5) # ArduPilot only checks for breaches @1Hz self.drain_mav() self.assert_fence_breached() if self.arm_motors_with_rc_input(): raise NotAchievedException( "Armed when within polygon exclusion zone") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, []) self.wait_fence_not_breached() def test_poly_fence_noarms_inclusion_polyfence(self, target_system=1, target_component=1): self.start_subtest("Ensure not armable when outside an inclusion polyfence (but within another") here = self.mav.location() self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION, [ [ # east self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, ], [ # over the top of the vehicle self.offset_location_ne(here, -50, -50), # bl self.offset_location_ne(here, -50, 50), # br self.offset_location_ne(here, 50, 50), # tr self.offset_location_ne(here, 50, -50), # tl, ] ] ) self.delay_sim_time(5) # ArduPilot only checks for breaches @1Hz self.drain_mav() self.assert_fence_breached() if self.arm_motors_with_rc_input(): raise NotAchievedException( "Armed when outside polygon inclusion zone") self.upload_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, []) self.wait_fence_not_breached() def test_fence_upload_timeouts_1(self, target_system=1, target_component=1): self.start_subtest("fence_upload timeouts 1") self.progress("Telling victim there will be one item coming") self.mav.mav.mission_count_send(target_system, target_component, 1, mavutil.mavlink.MAV_MISSION_TYPE_FENCE) m = self.mav.recv_match(type=['MISSION_REQUEST', 'MISSION_ACK'], blocking=True, timeout=1) self.progress("Got (%s)" % str(m)) if m is None: raise NotAchievedException("Did not get ACK 
or mission request") if m.get_type() == "MISSION_ACK": raise NotAchievedException("Expected MISSION_REQUEST") if m.seq != 0: raise NotAchievedException("Expected request for seq=0") if m.target_system != self.mav.mav.srcSystem: raise NotAchievedException("Incorrect target system in MISSION_REQUEST") if m.target_component != self.mav.mav.srcComponent: raise NotAchievedException("Incorrect target component in MISSION_REQUEST") tstart = self.get_sim_time() rerequest_count = 0 received_text = False received_ack = False while True: if received_ack and received_text: break if self.get_sim_time_cached() - tstart > 10: raise NotAchievedException("Did not get expected ack and statustext") m = self.mav.recv_match(type=['MISSION_REQUEST', 'MISSION_ACK', 'STATUSTEXT'], blocking=True, timeout=1) self.progress("Got (%s)" % str(m)) if m is None: self.progress("Did not receive any messages") continue if m.get_type() == "MISSION_REQUEST": if m.seq != 0: raise NotAchievedException("Received request for invalid seq") if m.target_system != self.mav.mav.srcSystem: raise NotAchievedException("Incorrect target system in MISSION_REQUEST") if m.target_component != self.mav.mav.srcComponent: raise NotAchievedException("Incorrect target component in MISSION_REQUEST") rerequest_count += 1 self.progress("Valid re-request received.") continue if m.get_type() == "MISSION_ACK": if m.mission_type != mavutil.mavlink.MAV_MISSION_TYPE_FENCE: raise NotAchievedException("Wrong mission type") if m.type != mavutil.mavlink.MAV_MISSION_OPERATION_CANCELLED: raise NotAchievedException("Wrong result") received_ack = True continue if m.get_type() == "STATUSTEXT": if "upload time" in m.text: received_text = True continue if rerequest_count < 3: raise NotAchievedException("Expected several re-requests of mission item") self.end_subtest("fence upload timeouts 1") def expect_request_for_item(self, item): m = self.mav.recv_match(type=['MISSION_REQUEST', 'MISSION_ACK'], blocking=True, timeout=1) self.progress("Got 
(%s)" % str(m)) if m is None: raise NotAchievedException("Did not get ACK or mission request") if m.get_type() == "MISSION_ACK": raise NotAchievedException("Expected MISSION_REQUEST") if m.seq != item.seq: raise NotAchievedException("Expected request for seq=%u" % item.seq) if m.target_system != self.mav.mav.srcSystem: raise NotAchievedException("Incorrect target system in MISSION_REQUEST") if m.target_component != self.mav.mav.srcComponent: raise NotAchievedException("Incorrect target component in MISSION_REQUEST") def test_fence_upload_timeouts_2(self, target_system=1, target_component=1): self.start_subtest("fence upload timeouts 2") self.progress("Telling victim there will be two items coming") # avoid a timeout race condition where ArduPilot re-requests a # fence point before we receive and respond to the first one. # Since ArduPilot has a 1s timeout on re-requesting, This only # requires a round-trip delay of 1/speedup seconds to trigger # - and that has been seen in practise on Travis old_speedup = self.get_parameter("SIM_SPEEDUP") self.set_parameter("SIM_SPEEDUP", 1) self.mav.mav.mission_count_send(target_system, target_component, 2, mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.progress("Sending item with seq=0") item = self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION, 0, # current 0, # autocontinue 1, # p1 radius 0, # p2 0, # p3 0, # p4 int(1.1 * 1e7), # latitude int(1.2 * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.expect_request_for_item(item) item.pack(self.mav.mav) self.mav.mav.send(item) self.progress("Sending item with seq=1") item = self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_EXCLUSION, 0, # current 0, # autocontinue 1, # p1 radius 0, # p2 0, # p3 0, # p4 int(1.1 * 1e7), # 
latitude int(1.2 * 1e7), # longitude 33.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_FENCE) self.expect_request_for_item(item) self.set_parameter("SIM_SPEEDUP", old_speedup) self.progress("Now waiting for a timeout") tstart = self.get_sim_time() rerequest_count = 0 received_text = False received_ack = False while True: if received_ack and received_text: break if self.get_sim_time_cached() - tstart > 10: raise NotAchievedException("Did not get expected ack and statustext") m = self.mav.recv_match(type=['MISSION_REQUEST', 'MISSION_ACK', 'STATUSTEXT'], blocking=True, timeout=0.1) self.progress("Got (%s)" % str(m)) if m is None: self.progress("Did not receive any messages") continue if m.get_type() == "MISSION_REQUEST": if m.seq != 1: raise NotAchievedException("Received request for invalid seq") if m.target_system != self.mav.mav.srcSystem: raise NotAchievedException("Incorrect target system in MISSION_REQUEST") if m.target_component != self.mav.mav.srcComponent: raise NotAchievedException("Incorrect target component in MISSION_REQUEST") rerequest_count += 1 self.progress("Valid re-request received.") continue if m.get_type() == "MISSION_ACK": if m.mission_type != mavutil.mavlink.MAV_MISSION_TYPE_FENCE: raise NotAchievedException("Wrong mission type") if m.type != mavutil.mavlink.MAV_MISSION_OPERATION_CANCELLED: raise NotAchievedException("Wrong result") received_ack = True continue if m.get_type() == "STATUSTEXT": if "upload time" in m.text: received_text = True continue if rerequest_count < 3: raise NotAchievedException("Expected several re-requests of mission item") self.end_subtest("fence upload timeouts 2") def test_fence_upload_timeouts(self, target_system=1, target_component=1): self.test_fence_upload_timeouts_1(target_system=target_system, target_component=target_component) self.test_fence_upload_timeouts_2(target_system=target_system, target_component=target_component) def test_poly_fence_compatability_ordering(self, target_system=1, target_component=1): 
self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) here = self.mav.location() self.progress("try uploading return point last") self.roundtrip_fence_using_fencepoint_protocol([ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ], ordering=[1, 2, 3, 4, 5, 0]) self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.progress("try uploading return point in middle") self.roundtrip_fence_using_fencepoint_protocol([ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ], ordering=[1, 2, 3, 0, 4, 5]) self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.progress("try closing point in middle") self.roundtrip_fence_using_fencepoint_protocol([ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ], ordering=[0, 1, 2, 5, 3, 4]) self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) # this is expected to fail as we don't return the closing # point correctly until the first is uploaded self.progress("try closing point first") failed = False try: 
self.roundtrip_fence_using_fencepoint_protocol([ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ], ordering=[5, 0, 1, 2, 3, 4]) except NotAchievedException as e: failed = "got=0.000000 want=" in str(e) if not failed: raise NotAchievedException("Expected failure, did not get it") self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.progress("try (almost) reverse order") self.roundtrip_fence_using_fencepoint_protocol([ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ], ordering=[4, 3, 2, 1, 0, 5]) self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) def test_poly_fence_compatability(self, target_system=1, target_component=1): self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_FENCE, target_system=target_system, target_component=target_component) self.test_poly_fence_compatability_ordering(target_system=target_system, target_component=target_component) here = self.mav.location() self.progress("Playing with changing point count") self.roundtrip_fence_using_fencepoint_protocol( [ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ]) self.roundtrip_fence_using_fencepoint_protocol( [ 
self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ]) self.roundtrip_fence_using_fencepoint_protocol( [ self.offset_location_ne(here, 0, 0), # bl // return point self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, self.offset_location_ne(here, -50, 20), # closing point ]) def test_poly_fence_reboot_survivability(self): here = self.mav.location() self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_EXCLUSION, [ [ # east self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, ], [ # over the top of the vehicle self.offset_location_ne(here, -50, -50), # bl self.offset_location_ne(here, -50, 50), # br self.offset_location_ne(here, 50, 50), # tr self.offset_location_ne(here, 50, -50), # tl, ] ] ) self.reboot_sitl() downloaded_items = self.download_using_mission_protocol(mavutil.mavlink.MAV_MISSION_TYPE_FENCE) downloaded_len = len(downloaded_items) if downloaded_len != 8: raise NotAchievedException("Items did not survive reboot (want=%u got=%u)" % (8, downloaded_len)) def test_poly_fence(self): '''test fence-related functions''' target_system = 1 target_component = 1 self.change_mode("LOITER") self.wait_ready_to_arm() here = self.mav.location() self.progress("here: %f %f" % (here.lat, here.lng)) self.set_parameter("FENCE_ENABLE", 1) self.set_parameter("AVOID_ENABLE", 0) # self.set_parameter("SIM_SPEEDUP", 1) self.test_poly_fence_compatability() self.test_fence_upload_timeouts() self.test_poly_fence_noarms(target_system=target_system, target_component=target_component) self.arm_vehicle() 
self.test_poly_fence_inclusion(here, target_system=target_system, target_component=target_component) self.test_poly_fence_exclusion(here, target_system=target_system, target_component=target_component) self.disarm_vehicle() self.test_poly_fence_reboot_survivability() def test_poly_fence_inclusion_overlapping_inclusion_circles(self, here, target_system=1, target_component=1): self.start_subtest("Overlapping circular inclusion") self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_CIRCLE_INCLUSION, [ { "radius": 30, "loc": self.offset_location_ne(here, -20, 0), }, { "radius": 30, "loc": self.offset_location_ne(here, 20, 0), }, ]) self.mavproxy.send("fence list\n") self.delay_sim_time(5) self.progress("Drive outside top circle") fence_middle = self.offset_location_ne(here, -150, 0) self.drive_somewhere_breach_boundary_and_rtl( fence_middle, target_system=target_system, target_component=target_component) self.delay_sim_time(5) self.progress("Drive outside bottom circle") fence_middle = self.offset_location_ne(here, 150, 0) self.drive_somewhere_breach_boundary_and_rtl( fence_middle, target_system=target_system, target_component=target_component) def test_poly_fence_inclusion(self, here, target_system=1, target_component=1): self.progress("Circle and Polygon inclusion") self.test_poly_fence_inclusion_overlapping_inclusion_circles( here, target_system=target_system, target_component=target_component) self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION, [ [ self.offset_location_ne(here, -40, -20), # tl self.offset_location_ne(here, 50, -20), # tr self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, -40, 20), # bl, ], { "radius": 30, "loc": self.offset_location_ne(here, -20, 0), }, ]) self.delay_sim_time(5) self.mavproxy.send("fence list\n") self.progress("Drive outside polygon") fence_middle = self.offset_location_ne(here, -150, 0) self.drive_somewhere_breach_boundary_and_rtl( fence_middle, 
target_system=target_system, target_component=target_component) self.delay_sim_time(5) self.progress("Drive outside circle") fence_middle = self.offset_location_ne(here, 150, 0) self.drive_somewhere_breach_boundary_and_rtl( fence_middle, target_system=target_system, target_component=target_component) self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_INCLUSION, [ [ self.offset_location_ne(here, -20, -25), # tl self.offset_location_ne(here, 50, -25), # tr self.offset_location_ne(here, 50, 15), # br self.offset_location_ne(here, -20, 15), # bl, ], [ self.offset_location_ne(here, 20, -20), # tl self.offset_location_ne(here, -50, -20), # tr self.offset_location_ne(here, -50, 20), # br self.offset_location_ne(here, 20, 20), # bl, ], ]) self.delay_sim_time(5) self.mavproxy.send("fence list\n") self.progress("Drive outside top polygon") fence_middle = self.offset_location_ne(here, -150, 0) self.drive_somewhere_breach_boundary_and_rtl( fence_middle, target_system=target_system, target_component=target_component) self.delay_sim_time(5) self.progress("Drive outside bottom polygon") fence_middle = self.offset_location_ne(here, 150, 0) self.drive_somewhere_breach_boundary_and_rtl( fence_middle, target_system=target_system, target_component=target_component) def test_poly_fence_exclusion(self, here, target_system=1, target_component=1): self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_EXCLUSION, [ [ # east self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, ], [ # west self.offset_location_ne(here, -50, -20), # tl self.offset_location_ne(here, 50, -20), # tr self.offset_location_ne(here, 50, -40), # br self.offset_location_ne(here, -50, -40), # bl, ], { "radius": 30, "loc": self.offset_location_ne(here, -60, 0), }, ]) self.delay_sim_time(5) self.mavproxy.send("fence list\n") 
self.progress("Breach eastern boundary") fence_middle = self.offset_location_ne(here, 0, 30) self.drive_somewhere_breach_boundary_and_rtl(fence_middle, target_system=target_system, target_component=target_component) self.progress("delaying - hack to work around manual recovery bug") self.delay_sim_time(5) self.progress("Breach western boundary") fence_middle = self.offset_location_ne(here, 0, -30) self.drive_somewhere_breach_boundary_and_rtl(fence_middle, target_system=target_system, target_component=target_component) self.progress("delaying - hack to work around manual recovery bug") self.delay_sim_time(5) self.progress("Breach southern circle") fence_middle = self.offset_location_ne(here, -150, 0) self.drive_somewhere_breach_boundary_and_rtl(fence_middle, target_system=target_system, target_component=target_component) def drive_smartrtl(self): self.change_mode("STEERING") self.wait_ready_to_arm() self.arm_vehicle() # drive two sides of a square, make sure we don't go back through # the middle of the square self.progress("Driving North") self.reach_heading_manual(0) self.set_rc(3, 2000) self.delay_sim_time(5) self.set_rc(3, 1000) self.wait_groundspeed(0, 1) loc = self.mav.location() self.progress("Driving East") self.set_rc(3, 2000) self.reach_heading_manual(90) self.set_rc(3, 2000) self.delay_sim_time(5) self.set_rc(3, 1000) self.progress("Entering smartrtl") self.change_mode("SMART_RTL") self.progress("Ensure we go via intermediate point") self.wait_distance_to_location(loc, 0, 5) self.progress("Ensure we get home") self.wait_distance_to_home(3, 7, timeout=30) self.disarm_vehicle() def test_motor_test(self): '''AKA run-rover-run''' magic_throttle_value = 1812 self.run_cmd( mavutil.mavlink.MAV_CMD_DO_MOTOR_TEST, 1, # p1 - motor instance mavutil.mavlink.MOTOR_TEST_THROTTLE_PWM, # p2 - throttle type magic_throttle_value, # p3 - throttle 5, # p4 - timeout 1, # p5 - motor count 0, # p6 - test order (see MOTOR_TEST_ORDER) 0, # p7 ) self.wait_armed() 
self.progress("Waiting for magic throttle value") self.wait_servo_channel_value(3, magic_throttle_value) self.wait_servo_channel_value(3, self.get_parameter("RC3_TRIM", 5), timeout=10) self.wait_disarmed() def test_poly_fence_object_avoidance_guided(self, target_system=1, target_component=1): if not self.mavproxy_can_do_mision_item_protocols(): return self.test_poly_fence_object_avoidance_guided_pathfinding( target_system=target_system, target_component=target_component) return # twosquares is currently disabled because of the requirement to have an inclusion fence (which it doesn't have ATM) # self.test_poly_fence_object_avoidance_guided_two_squares( # target_system=target_system, # target_component=target_component) def test_poly_fence_object_avoidance_auto(self, target_system=1, target_component=1): self.load_fence("rover-path-planning-fence.txt") self.load_mission("rover-path-planning-mission.txt") self.context_push() ex = None try: self.set_parameter("AVOID_ENABLE", 3) self.set_parameter("OA_TYPE", 2) self.set_parameter("FENCE_MARGIN", 0) # FIXME: https://github.com/ArduPilot/ardupilot/issues/11601 self.reboot_sitl() self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.set_parameter("FENCE_ENABLE", 1) self.mavproxy.send("fence list\n") # target_loc is copied from the mission file target_loc = mavutil.location(40.073799, -105.229156) self.wait_location(target_loc, timeout=300) # mission has RTL as last item self.wait_distance_to_home(3, 7, timeout=300) self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def send_guided_mission_item(self, loc, target_system=1, target_component=1): self.mav.mav.mission_item_send( target_system, target_component, 0, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 2, # current 0, # autocontinue 0, # param1 0, # param2 0, # param3 0, # param4 loc.lat, # x loc.lng, # y 0 # z ) def 
test_poly_fence_object_avoidance_guided_pathfinding(self, target_system=1, target_component=1): self.load_fence("rover-path-planning-fence.txt") self.context_push() ex = None try: self.set_parameter("AVOID_ENABLE", 3) self.set_parameter("OA_TYPE", 2) self.set_parameter("FENCE_MARGIN", 0) # FIXME: https://github.com/ArduPilot/ardupilot/issues/11601 self.reboot_sitl() self.change_mode('GUIDED') self.wait_ready_to_arm() self.arm_vehicle() self.set_parameter("FENCE_ENABLE", 1) self.mavproxy.send("fence list\n") target_loc = mavutil.location(40.073800, -105.229172) self.send_guided_mission_item(target_loc, target_system=target_system, target_component=target_component) self.wait_location(target_loc, timeout=300) self.do_RTL(timeout=300) self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def test_wheelencoders(self): '''make sure wheel encoders are generally working''' ex = None try: self.set_parameter("WENC_TYPE", 10) self.set_parameter("EK3_ENABLE", 1) self.set_parameter("AHRS_EKF_TYPE", 3) self.reboot_sitl() self.change_mode("LOITER") self.wait_ready_to_arm() self.change_mode("MANUAL") self.arm_vehicle() self.set_rc(3, 1600) m = self.mav.recv_match(type='WHEEL_DISTANCE', blocking=True, timeout=5) if m is None: raise NotAchievedException("Did not get WHEEL_DISTANCE") tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > 10: break dist_home = self.distance_to_home(use_cached_home=True) m = self.mav.messages.get("WHEEL_DISTANCE") delta = abs(m.distance[0] - dist_home) self.progress("dist-home=%f wheel-distance=%f delta=%f" % (dist_home, m.distance[0], delta)) if delta > 5: raise NotAchievedException("wheel distance incorrect") self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) self.disarm_vehicle() ex = e self.reboot_sitl() if ex is not None: raise ex def test_poly_fence_object_avoidance_guided_two_squares(self, 
target_system=1, target_component=1): self.start_subtest("Ensure we can steer around obstacles in guided mode") here = self.mav.location() self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_EXCLUSION, [ [ # east self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 10), # tl self.offset_location_ne(here, 50, 30), # tr self.offset_location_ne(here, -50, 40), # br, ], [ # further east (and south self.offset_location_ne(here, -60, 60), # bl self.offset_location_ne(here, 40, 70), # tl self.offset_location_ne(here, 40, 90), # tr self.offset_location_ne(here, -60, 80), # br, ], ]) self.mavproxy.send("fence list\n") self.context_push() ex = None try: self.set_parameter("AVOID_ENABLE", 3) self.set_parameter("OA_TYPE", 2) self.reboot_sitl() self.change_mode('GUIDED') self.wait_ready_to_arm() self.set_parameter("FENCE_ENABLE", 1) self.mavproxy.send("fence list\n") self.arm_vehicle() self.change_mode("GUIDED") target = mavutil.location(40.071382, -105.228340, 0, 0) self.send_guided_mission_item(target, target_system=target_system, target_component=target_component) self.wait_location(target, timeout=300) self.do_RTL() self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def test_poly_fence_avoidance_dont_breach_exclusion(self, target_system=1, target_component=1): self.start_subtest("Ensure we stop before breaching an exclusion fence") here = self.mav.location() self.upload_fences_from_locations( mavutil.mavlink.MAV_CMD_NAV_FENCE_POLYGON_VERTEX_EXCLUSION, [ [ # east self.offset_location_ne(here, -50, 20), # bl self.offset_location_ne(here, 50, 20), # br self.offset_location_ne(here, 50, 40), # tr self.offset_location_ne(here, -50, 40), # tl, ], [ # west self.offset_location_ne(here, -50, -20), # tl self.offset_location_ne(here, 50, -20), # tr self.offset_location_ne(here, 50, -40), # br self.offset_location_ne(here, -50, 
-40), # bl, ], { "radius": 30, "loc": self.offset_location_ne(here, -60, 0), }, ]) self.mavproxy.send("fence list\n") self.set_parameter("FENCE_ENABLE", 1) self.set_parameter("AVOID_ENABLE", 3) fence_middle = self.offset_location_ne(here, 0, 30) # FIXME: this might be nowhere near "here"! expected_stopping_point = mavutil.location(40.0713376, -105.2295738, 0, 0) self.drive_somewhere_stop_at_boundary( fence_middle, expected_stopping_point, target_system=target_system, target_component=target_component, expected_distance_epsilon=3) self.set_parameter("AVOID_ENABLE", 0) self.do_RTL() def do_RTL(self, distance_min=3, distance_max=7, timeout=60): self.change_mode("RTL") self.wait_distance_to_home(distance_min, distance_max, timeout=timeout) def test_poly_fence_avoidance(self, target_system=1, target_component=1): self.change_mode("LOITER") self.wait_ready_to_arm() self.arm_vehicle() self.change_mode("MANUAL") self.reach_heading_manual(180, turn_right=False) self.change_mode("GUIDED") self.test_poly_fence_avoidance_dont_breach_exclusion(target_system=target_system, target_component=target_component) self.disarm_vehicle() def test_poly_fence_object_avoidance_guided_bendy_ruler(self, target_system=1, target_component=1): if not self.mavproxy_can_do_mision_item_protocols(): return self.load_fence("rover-path-bendyruler-fence.txt") self.context_push() ex = None try: self.set_parameter("AVOID_ENABLE", 3) self.set_parameter("OA_TYPE", 1) self.set_parameter("OA_LOOKAHEAD", 50) self.reboot_sitl() self.change_mode('GUIDED') self.wait_ready_to_arm() self.arm_vehicle() self.set_parameter("FENCE_ENABLE", 1) self.set_parameter("WP_RADIUS", 5) self.mavproxy.send("fence list\n") target_loc = mavutil.location(40.071060, -105.227734, 0, 0) self.send_guided_mission_item(target_loc, target_system=target_system, target_component=target_component) # FIXME: we don't get within WP_RADIUS of our target?! 
self.wait_location(target_loc, timeout=300, accuracy=15) self.do_RTL(timeout=300) self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.disarm_vehicle() self.reboot_sitl() if ex is not None: raise ex def test_poly_fence_object_avoidance_bendy_ruler_easier(self, target_system=1, target_component=1): if not self.mavproxy_can_do_mision_item_protocols(): return self.test_poly_fence_object_avoidance_auto_bendy_ruler_easier( target_system=target_system, target_component=target_component) self.test_poly_fence_object_avoidance_guided_bendy_ruler_easier( target_system=target_system, target_component=target_component) def test_poly_fence_object_avoidance_guided_bendy_ruler_easier(self, target_system=1, target_component=1): '''finish-line issue means we can't complete the harder one. This test can go away once we've nailed that one. The only difference here is the target point. ''' if not self.mavproxy_can_do_mision_item_protocols(): return self.load_fence("rover-path-bendyruler-fence.txt") self.context_push() ex = None try: self.set_parameter("AVOID_ENABLE", 3) self.set_parameter("OA_TYPE", 1) self.set_parameter("OA_LOOKAHEAD", 50) self.reboot_sitl() self.change_mode('GUIDED') self.wait_ready_to_arm() self.arm_vehicle() self.set_parameter("FENCE_ENABLE", 1) self.set_parameter("WP_RADIUS", 5) self.mavproxy.send("fence list\n") target_loc = mavutil.location(40.071260, -105.227000, 0, 0) self.send_guided_mission_item(target_loc, target_system=target_system, target_component=target_component) # FIXME: we don't get within WP_RADIUS of our target?! 
self.wait_location(target_loc, timeout=300, accuracy=15) self.do_RTL(timeout=300) self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.disarm_vehicle() self.reboot_sitl() if ex is not None: raise ex def test_poly_fence_object_avoidance_auto_bendy_ruler_easier(self, target_system=1, target_component=1): '''finish-line issue means we can't complete the harder one. This test can go away once we've nailed that one. The only difference here is the target point. ''' if not self.mavproxy_can_do_mision_item_protocols(): return self.load_fence("rover-path-bendyruler-fence.txt") self.load_mission("rover-path-bendyruler-mission-easier.txt") self.context_push() ex = None try: self.set_parameter("AVOID_ENABLE", 3) self.set_parameter("OA_TYPE", 1) self.set_parameter("OA_LOOKAHEAD", 50) self.reboot_sitl() self.change_mode('AUTO') self.wait_ready_to_arm() self.arm_vehicle() self.set_parameter("FENCE_ENABLE", 1) self.set_parameter("WP_RADIUS", 5) self.mavproxy.send("fence list\n") target_loc = mavutil.location(40.071260, -105.227000, 0, 0) # target_loc is copied from the mission file self.wait_location(target_loc, timeout=300) # mission has RTL as last item self.wait_distance_to_home(3, 7, timeout=300) self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.disarm_vehicle() self.reboot_sitl() if ex is not None: raise ex def test_poly_fence_object_avoidance(self, target_system=1, target_component=1): if not self.mavproxy_can_do_mision_item_protocols(): return self.test_poly_fence_object_avoidance_auto( target_system=target_system, target_component=target_component) self.test_poly_fence_object_avoidance_guided( target_system=target_system, target_component=target_component) def test_poly_fence_object_avoidance_bendy_ruler(self, target_system=1, target_component=1): if not self.mavproxy_can_do_mision_item_protocols(): return # bendy Ruler isn't as flexible as Dijkstra for planning, 
so # it gets a simpler test: self.test_poly_fence_object_avoidance_guided_bendy_ruler( target_system=target_system, target_component=target_component, ) def script_example_source_path(self, scriptname): return os.path.join(self.rootdir(), "libraries", "AP_Scripting", "examples", scriptname) def script_test_source_path(self, scriptname): return os.path.join(self.rootdir(), "libraries", "AP_Scripting", "tests", scriptname) def installed_script_path(self, scriptname): return os.path.join("scripts", scriptname) def install_script(self, source, scriptname): dest = self.installed_script_path(scriptname) destdir = os.path.dirname(dest) if not os.path.exists(destdir): os.mkdir(destdir) self.progress("Copying (%s) to (%s)" % (source, dest)) shutil.copy(source, dest) def install_example_script(self, scriptname): source = self.script_example_source_path(scriptname) self.install_script(source, scriptname) def install_test_script(self, scriptname): source = self.script_test_source_path(scriptname) self.install_script(source, scriptname) def remove_example_script(self, scriptname): dest = self.installed_script_path(scriptname) try: os.unlink(dest) except IOError: pass except OSError: pass def test_scripting_simple_loop(self): self.start_subtest("Scripting simple loop") ex = None example_script = "simple_loop.lua" messages = [] def my_message_hook(mav, message): if message.get_type() != 'STATUSTEXT': return messages.append(message) self.install_message_hook(my_message_hook) try: self.set_parameter("SCR_ENABLE", 1) self.install_example_script(example_script) self.reboot_sitl() self.delay_sim_time(10) except Exception as e: self.print_exception_caught(e) ex = e self.remove_example_script(example_script) self.reboot_sitl() self.remove_message_hook(my_message_hook) if ex is not None: raise ex # check all messages to see if we got our message count = 0 for m in messages: if "hello, world" in m.text: count += 1 self.progress("Got %u hellos" % count) if count < 3: raise 
NotAchievedException("Expected at least three hellos") def test_scripting_internal_test(self): self.start_subtest("Scripting internal test") ex = None test_scripts = ["scripting_test.lua", "math.lua", "strings.lua"] success_text = ["Internal tests passed", "Math tests passed", "String tests passed"] messages = [] def my_message_hook(mav, message): if message.get_type() != 'STATUSTEXT': return messages.append(message) self.install_message_hook(my_message_hook) try: self.set_parameter("SCR_ENABLE", 1) self.set_parameter("SCR_HEAP_SIZE", 1024000) self.set_parameter("SCR_VM_I_COUNT", 1000000) for script in test_scripts: self.install_test_script(script) self.reboot_sitl() self.delay_sim_time(10) self.remove_example_script(script) except Exception as e: self.print_exception_caught(e) ex = e self.reboot_sitl() self.remove_message_hook(my_message_hook) if ex is not None: raise ex # check all messages to see if we got our message success = True for text in success_text: script_success = False for m in messages: if text in m.text: script_success = True success = script_success and success self.progress("Success") if not success: raise NotAchievedException("Scripting internal test failed") def test_scripting_hello_world(self): self.start_subtest("Scripting hello world") self.context_push() self.context_collect("STATUSTEXT") ex = None example_script = "hello_world.lua" try: self.set_parameter("SCR_ENABLE", 1) self.install_example_script(example_script) self.reboot_sitl() self.wait_statustext('hello, world', check_context=True, timeout=30) except Exception as e: self.print_exception_caught(e) ex = e self.remove_example_script(example_script) self.context_pop() self.reboot_sitl() if ex is not None: raise ex def test_scripting_steering_and_throttle(self): self.start_subtest("Scripting square") ex = None example_script = "rover-set-steering-and-throttle.lua" try: self.install_example_script(example_script) self.set_parameter("SCR_ENABLE", 1) self.reboot_sitl() 
self.wait_ready_to_arm() self.arm_vehicle() self.set_rc(6, 2000) tstart = self.get_sim_time() while not self.mode_is("HOLD"): if self.get_sim_time_cached() - tstart > 30: raise NotAchievedException("Did not move to hold") m = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=1) if m is not None: self.progress("Current speed: %f" % m.groundspeed) self.disarm_vehicle() self.reboot_sitl() except Exception as e: self.print_exception_caught(e) self.disarm_vehicle() ex = e self.remove_example_script(example_script) self.reboot_sitl() if ex is not None: raise ex def test_scripting(self): self.test_scripting_hello_world() self.test_scripting_simple_loop() self.test_scripting_internal_test() def test_mission_frame(self, frame, target_system=1, target_component=1): self.clear_mission(mavutil.mavlink.MAV_MISSION_TYPE_MISSION, target_system=target_system, target_component=target_component) items = [ # first item is ignored for missions self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 3, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 * 1e7), # latitude int(1.0000 * 1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq frame, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 3, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 * 1e7), # latitude int(1.0000 * 1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), ] self.check_mission_upload_download(items) def test_mission_frames(self, target_system=1, target_component=1): for frame in (mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT_INT, mavutil.mavlink.MAV_FRAME_GLOBAL_TERRAIN_ALT, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT_INT, mavutil.mavlink.MAV_FRAME_GLOBAL_RELATIVE_ALT, mavutil.mavlink.MAV_FRAME_GLOBAL_INT, 
mavutil.mavlink.MAV_FRAME_GLOBAL): self.test_mission_frame(frame, target_system=1, target_component=1) def mavlink_time_boot_ms(self): '''returns a time suitable for putting into the time_boot_ms entry in mavlink packets''' return int(time.time() * 1000000) def mavlink_time_boot_us(self): '''returns a time suitable for putting into the time_boot_ms entry in mavlink packets''' return int(time.time() * 1000000000) def ap_proximity_mav_obstacle_distance_send(self, data): increment = data.get("increment", 0) increment_f = data.get("increment_f", 0.0) max_distance = data["max_distance"] invalid_distance = max_distance + 1 # per spec distances = data["distances"][:] distances.extend([invalid_distance] * (72-len(distances))) self.mav.mav.obstacle_distance_send( self.mavlink_time_boot_us(), mavutil.mavlink.MAV_DISTANCE_SENSOR_LASER, distances, increment, data["min_distance"], data["max_distance"], increment_f, data["angle_offset"], mavutil.mavlink.MAV_FRAME_BODY_FRD ) def send_obstacle_distances_expect_distance_sensor_messages(self, obstacle_distances_in, expect_distance_sensor_messages): self.delay_sim_time(11) # allow obstacles to time out self.do_timesync_roundtrip() expect_distance_sensor_messages_copy = expect_distance_sensor_messages[:] last_sent = 0 while True: now = self.get_sim_time_cached() if now - last_sent > 1: self.progress("Sending") self.ap_proximity_mav_obstacle_distance_send(obstacle_distances_in) last_sent = now m = self.mav.recv_match(type='DISTANCE_SENSOR', blocking=True, timeout=1) self.progress("Got (%s)" % str(m)) if m is None: self.delay_sim_time(1) continue orientation = m.orientation found = False if m.current_distance == m.max_distance: # ignored continue for expected_distance_sensor_message in expect_distance_sensor_messages_copy: if expected_distance_sensor_message["orientation"] != orientation: continue found = True if not expected_distance_sensor_message.get("__found__", False): self.progress("Marking message as found") 
expected_distance_sensor_message["__found__"] = True if (m.current_distance - expected_distance_sensor_message["distance"] > 1): raise NotAchievedException( "Bad distance for orient=%u want=%u got=%u" % (orientation, expected_distance_sensor_message["distance"], m.current_distance)) break if not found: raise NotAchievedException("Got unexpected DISTANCE_SENSOR message") all_found = True for expected_distance_sensor_message in expect_distance_sensor_messages_copy: if not expected_distance_sensor_message.get("__found__", False): self.progress("message still not found (orient=%u" % expected_distance_sensor_message["orientation"]) all_found = False break if all_found: self.progress("Have now seen all expected messages") break def ap_proximity_mav(self): self.context_push() ex = None try: self.set_parameter("PRX_TYPE", 2) # AP_Proximity_MAV self.set_parameter("OA_TYPE", 2) # dijkstra self.set_parameter("OA_DB_OUTPUT", 3) # send all items self.reboot_sitl() # 1 laser pointing straight forward: self.send_obstacle_distances_expect_distance_sensor_messages( { "distances": [234], "increment_f": 10, "angle_offset": 0.0, "min_distance": 0, "max_distance": 1000, # cm }, [ {"orientation": 0, "distance": 234}, ]) # 5 lasers at front of vehicle, spread over 40 degrees: self.send_obstacle_distances_expect_distance_sensor_messages( { "distances": [111, 222, 333, 444, 555], "increment_f": 10, "angle_offset": -20.0, "min_distance": 0, "max_distance": 1000, # cm }, [ {"orientation": 0, "distance": 111}, ]) # lots of dense readings (e.g. 
vision camera: distances = [0] * 72 for i in range(0, 72): distances[i] = 1000 + 10*abs(36-i) self.send_obstacle_distances_expect_distance_sensor_messages( { "distances": distances, "increment_f": 90/72.0, "angle_offset": -45.0, "min_distance": 0, "max_distance": 2000, # cm }, [ {"orientation": 0, "distance": 1000}, {"orientation": 1, "distance": 1190}, {"orientation": 7, "distance": 1190}, ]) except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def test_send_to_components(self): self.progress("Introducing ourselves to the autopilot as a component") old_srcSystem = self.mav.mav.srcSystem self.mav.mav.srcSystem = 1 self.mav.mav.heartbeat_send( mavutil.mavlink.MAV_TYPE_ONBOARD_CONTROLLER, mavutil.mavlink.MAV_AUTOPILOT_INVALID, 0, 0, 0) self.progress("Sending control message") self.mav.mav.digicam_control_send( 1, # target_system 1, # target_component 1, # start or keep it up 1, # zoom_pos 0, # zoom_step 0, # focus_lock 1, # 1 shot or start filming 17, # command id (de-dupe field) 0, # extra_param 0.0, # extra_value ) self.mav.mav.srcSystem = old_srcSystem self.progress("Expecting a command long") tstart = self.get_sim_time_cached() while True: now = self.get_sim_time_cached() if now - tstart > 2: raise NotAchievedException("Did not receive digicam_control message") m = self.mav.recv_match(type='COMMAND_LONG', blocking=True, timeout=0.1) self.progress("Message: %s" % str(m)) if m is None: continue if m.command != mavutil.mavlink.MAV_CMD_DO_DIGICAM_CONTROL: raise NotAchievedException("Did not get correct command") if m.param6 != 17: raise NotAchievedException("Did not get correct command_id") break def test_skid_steer(self): model = "rover-skid" self.customise_SITL_commandline([], model=model, defaults_filepath=self.model_defaults_filepath("Rover", model)) self.change_mode("MANUAL") self.wait_ready_to_arm() self.arm_vehicle() self.progress("get a known heading to avoid worrying about wrap") # 
this is steering-type-two-paddles self.set_rc(1, 1400) self.set_rc(3, 1500) self.wait_heading(90) self.progress("straighten up") self.set_rc(1, 1500) self.set_rc(3, 1500) self.progress("steer one way") self.set_rc(1, 1600) self.set_rc(3, 1400) self.wait_heading(120) self.progress("steer the other") self.set_rc(1, 1400) self.set_rc(3, 1600) self.wait_heading(60) self.zero_throttle() self.disarm_vehicle() def test_slew_rate(self): """Test Motor Slew Rate feature.""" self.context_push() self.change_mode("MANUAL") self.wait_ready_to_arm() self.arm_vehicle() self.start_subtest("Test no slew behavior") throttle_channel = 3 throttle_max = 2000 self.set_parameter("MOT_SLEWRATE", 0) self.set_rc(throttle_channel, throttle_max) tstart = self.get_sim_time() self.wait_servo_channel_value(throttle_channel, throttle_max) tstop = self.get_sim_time() achieved_time = tstop - tstart self.progress("achieved_time: %0.1fs" % achieved_time) if achieved_time > 0.5: raise NotAchievedException("Output response should be instant, got %f" % achieved_time) self.zero_throttle() self.wait_groundspeed(0, 0.5) # why do we not stop?! self.start_subtest("Test 100% slew rate") self.set_parameter("MOT_SLEWRATE", 100) self.set_rc(throttle_channel, throttle_max) tstart = self.get_sim_time() self.wait_servo_channel_value(throttle_channel, throttle_max) tstop = self.get_sim_time() achieved_time = tstop - tstart self.progress("achieved_time: %0.1fs" % achieved_time) if achieved_time < 0.9 or achieved_time > 1.1: raise NotAchievedException("Output response should be 1s, got %f" % achieved_time) self.zero_throttle() self.wait_groundspeed(0, 0.5) # why do we not stop?! 
self.start_subtest("Test 50% slew rate") self.set_parameter("MOT_SLEWRATE", 50) self.set_rc(throttle_channel, throttle_max) tstart = self.get_sim_time() self.wait_servo_channel_value(throttle_channel, throttle_max, timeout=10) tstop = self.get_sim_time() achieved_time = tstop - tstart self.progress("achieved_time: %0.1fs" % achieved_time) if achieved_time < 1.8 or achieved_time > 2.2: raise NotAchievedException("Output response should be 2s, got %f" % achieved_time) self.zero_throttle() self.wait_groundspeed(0, 0.5) # why do we not stop?! self.start_subtest("Test 25% slew rate") self.set_parameter("MOT_SLEWRATE", 25) self.set_rc(throttle_channel, throttle_max) tstart = self.get_sim_time() self.wait_servo_channel_value(throttle_channel, throttle_max, timeout=10) tstop = self.get_sim_time() achieved_time = tstop - tstart self.progress("achieved_time: %0.1fs" % achieved_time) if achieved_time < 3.6 or achieved_time > 4.4: raise NotAchievedException("Output response should be 4s, got %f" % achieved_time) self.zero_throttle() self.wait_groundspeed(0, 0.5) # why do we not stop?! self.start_subtest("Test 10% slew rate") self.set_parameter("MOT_SLEWRATE", 10) self.set_rc(throttle_channel, throttle_max) tstart = self.get_sim_time() self.wait_servo_channel_value(throttle_channel, throttle_max, timeout=20) tstop = self.get_sim_time() achieved_time = tstop - tstart self.progress("achieved_time: %0.1fs" % achieved_time) if achieved_time < 9 or achieved_time > 11: raise NotAchievedException("Output response should be 10s, got %f" % achieved_time) self.zero_throttle() self.wait_groundspeed(0, 0.5) # why do we not stop?! 
self.disarm_vehicle() self.context_pop() def SET_ATTITUDE_TARGET(self, target_sysid=None, target_compid=1): if target_sysid is None: target_sysid = self.sysid_thismav() self.change_mode('GUIDED') self.wait_ready_to_arm() self.arm_vehicle() tstart = self.get_sim_time() while True: now = self.get_sim_time_cached() if now - tstart > 10: raise AutoTestTimeoutException("Didn't get to speed") self.mav.mav.set_attitude_target_send( 0, # time_boot_ms target_sysid, target_compid, mavutil.mavlink.ATTITUDE_TARGET_TYPEMASK_BODY_ROLL_RATE_IGNORE | mavutil.mavlink.ATTITUDE_TARGET_TYPEMASK_BODY_PITCH_RATE_IGNORE | mavutil.mavlink.ATTITUDE_TARGET_TYPEMASK_ATTITUDE_IGNORE, mavextra.euler_to_quat([0, math.radians(0), math.radians(0)]), # att 0, # yaw rate (rad/s) 0, # pitch rate 0, # yaw rate 1) # thrust msg = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=1) if msg is None: raise NotAchievedException("No VFR_HUD message") if msg.groundspeed > 5: break self.disarm_vehicle() def SET_POSITION_TARGET_LOCAL_NED(self, target_sysid=None, target_compid=1): if target_sysid is None: target_sysid = self.sysid_thismav() self.change_mode('GUIDED') self.wait_ready_to_arm() self.arm_vehicle() tstart = self.get_sim_time() while True: now = self.get_sim_time_cached() if now - tstart > 10: raise AutoTestTimeoutException("Didn't get to speed") self.mav.mav.set_position_target_local_ned_send( 0, # time_boot_ms target_sysid, target_compid, mavutil.mavlink.MAV_FRAME_LOCAL_NED, mavutil.mavlink.POSITION_TARGET_TYPEMASK_VX_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_VY_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_VZ_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_AX_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_AY_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_AZ_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_IGNORE | mavutil.mavlink.POSITION_TARGET_TYPEMASK_YAW_RATE_IGNORE, 30.0, # pos-x 30.0, # pos-y 0, # pos-z 0, # vel-x 0, # vel-y 0, # vel-z 0, # acc-x 0, # acc-y 
0, # acc-z 0, # yaw 0, # yaw rate ) msg = self.mav.recv_match(type='VFR_HUD', blocking=True, timeout=1) if msg is None: raise NotAchievedException("No VFR_HUD message") self.progress("speed=%f" % msg.groundspeed) if msg.groundspeed > 5: break self.disarm_vehicle() def test_end_mission_behavior(self, timeout=60): self.context_push() ex = None try: self.load_mission("end-mission.txt") self.wait_ready_to_arm() self.arm_vehicle() self.start_subtest("Test End Mission Behavior HOLD") self.context_collect("STATUSTEXT") self.change_mode("AUTO") self.wait_text("Mission Complete", check_context=True, wallclock_timeout=2) # On Hold we should just stop and don't update the navigation target anymore tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > 15: raise AutoTestTimeoutException("Still getting POSITION_TARGET_GLOBAL_INT") m = self.mav.recv_match(type="POSITION_TARGET_GLOBAL_INT", blocking=True, timeout=10) if m is None: self.progress("No POSITION_TARGET_GLOBAL_INT received, all good !") break self.context_clear_collection("STATUSTEXT") self.change_mode("GUIDED") self.context_collect("STATUSTEXT") self.start_subtest("Test End Mission Behavior LOITER") self.set_parameter("MIS_DONE_BEHAVE", 1) self.change_mode("AUTO") self.wait_text("Mission Complete", check_context=True, wallclock_timeout=2) # On LOITER we should update the navigation target tstart = self.get_sim_time() while True: if self.get_sim_time_cached() - tstart > 15: raise AutoTestTimeoutException("Not getting POSITION_TARGET_GLOBAL_INT") m = self.mav.recv_match(type="POSITION_TARGET_GLOBAL_INT", blocking=True, timeout=5) if m is None: self.progress("No POSITION_TARGET_GLOBAL_INT received") continue else: if self.get_sim_time_cached() - tstart > 15: self.progress("Got POSITION_TARGET_GLOBAL_INT, all good !") break self.change_mode("GUIDED") self.start_subtest("Test End Mission Behavior ACRO") self.set_parameter("MIS_DONE_BEHAVE", 2) self.change_mode("AUTO") self.wait_mode("ACRO") 
self.start_subtest("Test End Mission Behavior MANUAL") self.set_parameter("MIS_DONE_BEHAVE", 3) self.change_mode("AUTO") self.wait_mode("MANUAL") self.disarm_vehicle() except Exception as e: self.print_exception_caught(e) ex = e self.context_pop() self.reboot_sitl() if ex is not None: raise ex def test_mavproxy_param(self): self.mavproxy.send("param fetch\n") self.mavproxy.expect("Received [0-9]+ parameters") def MAV_CMD_DO_SET_MISSION_CURRENT_mission(self, target_system=1, target_component=1): return copy.copy([ self.mav.mav.mission_item_int_encode( target_system, target_component, 0, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 3, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 * 1e7), # latitude int(1.0000 * 1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.mav.mav.mission_item_int_encode( target_system, target_component, 1, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 3, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 * 1e7), # latitude int(1.0000 * 1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), self.mav.mav.mission_item_int_encode( target_system, target_component, 2, # seq mavutil.mavlink.MAV_FRAME_GLOBAL_INT, mavutil.mavlink.MAV_CMD_NAV_WAYPOINT, 0, # current 0, # autocontinue 3, # p1 0, # p2 0, # p3 0, # p4 int(1.0000 * 1e7), # latitude int(1.0000 * 1e7), # longitude 31.0000, # altitude mavutil.mavlink.MAV_MISSION_TYPE_MISSION), ]) def MAV_CMD_DO_SET_MISSION_CURRENT(self, target_sysid=None, target_compid=1): if target_sysid is None: target_sysid = self.sysid_thismav() self.check_mission_upload_download(self.MAV_CMD_DO_SET_MISSION_CURRENT_mission()) self.set_current_waypoint(2) self.set_current_waypoint_using_mav_cmd_do_set_mission_current(2) self.run_cmd(mavutil.mavlink.MAV_CMD_DO_SET_MISSION_CURRENT, 17, 0, 0, 0, 0, 0, 0, timeout=1, target_sysid=target_sysid, 
target_compid=target_compid, want_result=mavutil.mavlink.MAV_RESULT_FAILED) def tests(self): '''return list of all tests''' ret = super(AutoTestRover, self).tests() ret.extend([ ("MAVProxy_SetModeUsingSwitch", "Set modes via mavproxy switch", self.test_setting_modes_via_mavproxy_switch), ("MAVProxy_SetModeUsingMode", "Set modes via mavproxy mode command", self.test_setting_modes_via_mavproxy_mode_command), ("ModeSwitch", "Set modes via modeswitch", self.test_setting_modes_via_modeswitch), ("AuxModeSwitch", "Set modes via auxswitches", self.test_setting_modes_via_auxswitches), ("DriveRTL", "Drive an RTL Mission", self.drive_rtl_mission), ("SmartRTL", "Test SmartRTL", self.drive_smartrtl), ("DriveSquare", "Learn/Drive Square with Ch7 option", self.drive_square), ("DriveMaxRCIN", "Drive rover at max RC inputs", self.drive_max_rcin), ("DriveMission", "Drive Mission %s" % "rover1.txt", lambda: self.drive_mission("rover1.txt", strict=False)), # disabled due to frequent failures in travis. This test needs re-writing # ("Drive Brake", self.drive_brake), ("GetBanner", "Get Banner", self.do_get_banner), ("DO_SET_MODE", "Set mode via MAV_COMMAND_DO_SET_MODE", self.test_do_set_mode_via_command_long), ("MAVProxy_DO_SET_MODE", "Set mode via MAV_COMMAND_DO_SET_MODE with MAVProxy", self.test_mavproxy_do_set_mode_via_command_long), ("ServoRelayEvents", "Test ServoRelayEvents", self.test_servorelayevents), ("RCOverrides", "Test RC overrides", self.test_rc_overrides), ("RCOverridesCancel", "Test RC overrides Cancel", self.test_rc_override_cancel), ("MANUAL_CONTROL", "Test mavlink MANUAL_CONTROL", self.test_manual_control), ("Sprayer", "Test Sprayer", self.test_sprayer), ("AC_Avoidance", "Test AC Avoidance switch", self.drive_fence_ac_avoidance), ("CameraMission", "Test Camera Mission Items", self.test_camera_mission_items), # Gripper test ("Gripper", "Test gripper", self.test_gripper), ("GripperMission", "Test Gripper Mission Items", self.test_gripper_mission), 
("SET_MESSAGE_INTERVAL", "Test MAV_CMD_SET_MESSAGE_INTERVAL", self.test_set_message_interval), ("REQUEST_MESSAGE", "Test MAV_CMD_REQUEST_MESSAGE", self.test_request_message), ("SYSID_ENFORCE", "Test enforcement of SYSID_MYGCS", self.test_sysid_enforce), ("SET_ATTITUDE_TARGET", "Test handling of SET_ATTITUDE_TARGET", self.SET_ATTITUDE_TARGET), ("SET_POSITION_TARGET_LOCAL_NED", "Test handling of SET_POSITION_TARGET_LOCAL_NED", self.SET_POSITION_TARGET_LOCAL_NED), ("MAV_CMD_DO_SET_MISSION_CURRENT", "Test handling of CMD_DO_SET_MISSION_CURRENT", self.MAV_CMD_DO_SET_MISSION_CURRENT), ("Button", "Test Buttons", self.test_button), ("Rally", "Test Rally Points", self.test_rally_points), ("Offboard", "Test Offboard Control", self.test_offboard), ("MAVProxyParam", "Test MAVProxy parameter handling", self.test_mavproxy_param), ("GCSFence", "Upload and download of fence", self.test_gcs_fence), ("GCSRally", "Upload and download of rally", self.test_gcs_rally), ("GCSMission", "Upload and download of mission", self.test_gcs_mission), ("MotorTest", "Motor Test triggered via mavlink", self.test_motor_test), ("WheelEncoders", "Ensure SITL wheel encoders work", self.test_wheelencoders), ("DataFlashOverMAVLink", "Test DataFlash over MAVLink", self.test_dataflash_over_mavlink), ("DataFlashSITL", "Test DataFlash SITL backend", self.test_dataflash_sitl), ("SkidSteer", "Check skid-steering", self.test_skid_steer), ("PolyFence", "PolyFence tests", self.test_poly_fence), ("PolyFenceAvoidance", "PolyFence avoidance tests", self.test_poly_fence_avoidance), ("PolyFenceObjectAvoidance", "PolyFence object avoidance tests", self.test_poly_fence_object_avoidance), ("PolyFenceObjectAvoidanceBendyRuler", "PolyFence object avoidance tests - bendy ruler", self.test_poly_fence_object_avoidance_bendy_ruler), ("SendToComponents", "Test ArduPilot send_to_components function", self.test_send_to_components), ("PolyFenceObjectAvoidanceBendyRulerEasier", "PolyFence object avoidance tests - easier bendy ruler 
test", self.test_poly_fence_object_avoidance_bendy_ruler_easier), ("SlewRate", "Test output slew rate", self.test_slew_rate), ("Scripting", "Scripting test", self.test_scripting), ("ScriptingSteeringAndThrottle", "Scripting test - steering and throttle", self.test_scripting_steering_and_throttle), ("MissionFrames", "Upload/Download of items in different frames", self.test_mission_frames), ("SetpointGlobalPos", "Test setpoint global position", lambda: self.test_set_position_global_int()), ("SetpointGlobalVel", "Test setpoint gloabl velocity", lambda: self.test_set_velocity_global_int()), ("AccelCal", "Accelerometer Calibration testing", self.accelcal), ("AHRSTrim", "Accelerometer trim testing", self.ahrstrim), ("AP_Proximity_MAV", "Test MAV proximity backend", self.ap_proximity_mav), ("EndMissionBehavior", "Test end mission behavior", self.test_end_mission_behavior), ("LogUpload", "Upload logs", self.log_upload), ]) return ret def disabled_tests(self): return { "DriveMaxRCIN": "currently triggers Arithmetic Exception", "SlewRate": "got timing report failure on CI", } def rc_defaults(self): ret = super(AutoTestRover, self).rc_defaults() ret[3] = 1500 ret[8] = 1800 return ret def initial_mode_switch_mode(self): return "MANUAL" def default_mode(self): return 'MANUAL'
gpl-3.0
gdowding/pyvmomi
pyVmomi/Iso8601.py
14
12050
# VMware vSphere Python SDK # Copyright (c) 2008-2015 VMware, Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function # TODO (hartsocks): Introduce logging to remove the need for print function. """ This module is for ISO 8601 parsing """ __author__ = 'VMware, Inc.' from six import iteritems import time from datetime import datetime, timedelta, tzinfo import re """ Regular expression to parse a subset of ISO 8601 format """ _dtExpr = re.compile( # XMLSchema datetime. Mandatory to have - and : # See: http://www.w3.org/TR/xmlschema-2/#isoformats # Note: python datetime cannot handle the following: # - leap second, ie. 0-60 seconds (not 0-59) # - BC (negative years) # year [-]0000..9999 r'(?P<year>-?\d{4})' \ # month 01..12 r'(-(?P<month>(0[1-9]|1[0-2]))' \ # day 01..31 r'(-(?P<day>(0[1-9]|[1-2]\d|3[01])))?)?' \ # time separator 'T' r'(T' \ # hour 00..24 r'(?P<hour>([01]\d|2[0-4]))' \ # minute 00..59 r'((:(?P<minute>[0-5]\d))' \ # seconds 00..60 (leap second ok) r'(:(?P<second>([0-5]\d|60))' \ # microsecond. max 16 digits # - Should not allows trailing zeros. But python isoformat() put zeros # after microseconds. Oh well, allows trailing zeros, quite harmless r'(\.(?P<microsecond>\d{1,16}))?)?)?' \ # UTC 'Z', or... r'((?P<tzutc>Z)' \ # tz [+-]00..13:0..59|14:00 r'|((?P<tzhr>[+-](([0]\d)|(1[0-3])|(?P<tzlimit>)14))' \ r'(:(?P<tzmin>(?(tzlimit)00|([0-5]\d))))?))?' \ r')?$') """ Default date time val. 
Key should match the tags in _dtExpr """ _dtExprKeyDefValMap = {'year' : None, 'month' : 1, 'day' : 1, 'hour' : 0, 'minute' : 0, 'second' : 0, 'microsecond' : 0} class TZInfo(tzinfo): """ Timezone info class """ timedelta0 = timedelta(hours=0) timedelta1 = timedelta(hours=1) def __init__(self, tzname='UTC', utcOffset=None, dst=None): self._tzname = tzname if not utcOffset: utcOffset = self.timedelta0 self._utcOffset = utcOffset if not dst: dst = None self._dst = dst def utcoffset(self, dt): return self._utcOffset + self.dst(dt) def tzname(self, dt): return self._tzname def dst(self, dt): ret = self.timedelta0 if self._dst: if self._dst[0] <= dt.replace(tzinfo=None) < self._dst[1]: ret = self.timedelta1 return ret class TZManager: """ Time zone manager """ _tzInfos = {} @staticmethod def GetTZInfo(tzname='UTC', utcOffset=None, dst=None): """ Get / Add timezone info """ key = (tzname, utcOffset, dst) tzInfo = TZManager._tzInfos.get(key) if not tzInfo: tzInfo = TZInfo(tzname, utcOffset, dst) TZManager._tzInfos[key] = tzInfo return tzInfo def ParseISO8601(datetimeStr): """ Parse ISO 8601 date time from string. Returns datetime if ok, None otherwise Note: Allows YYYY / YYYY-MM, but truncate YYYY -> YYYY-01-01, YYYY-MM -> YYYY-MM-01 Truncate microsecond to most significant 6 digits """ datetimeVal = None match = _dtExpr.match(datetimeStr) if match: try: dt = {} for key, defaultVal in iteritems(_dtExprKeyDefValMap): val = match.group(key) if val: if key == 'microsecond': val = val[:6] + '0' * (6 - len(val)) dt[key] = int(val) elif defaultVal: dt[key] = defaultVal # Orig. XMLSchema don't allow all zeros year. But newer draft is ok #if dt['year'] == 0: # # Year cannot be all zeros # raise Exception('Year cannot be all zeros') # 24 is a special case. 
It is actually represented as next day 00:00 delta = None if dt.get('hour', 0) == 24: # Must be 24:00:00.0 if dt.get('minute', 0) == 0 and dt.get('second', 0) == 0 and \ dt.get('microsecond', 0) == 0: dt['hour'] = 23 delta = timedelta(hours=1) else: return None # Set tzinfo # TODO: dst tzInfo = None val = match.group('tzutc') if val: tzInfo = TZManager.GetTZInfo() else: val = match.group('tzhr') if val: # tz hours offset tzhr = int(val) utcsign = val[0] # tz minutes offset tzmin = 0 val = match.group('tzmin') if val: tzmin = tzhr >= 0 and int(val) or -int(val) # Better tzname (map UTC +-00:00 to UTC) tzname = 'UTC' if tzhr != 0 or tzmin != 0: tzname += ' %s%02d:%02d' % (utcsign, abs(tzhr), abs(tzmin)) tzInfo = TZManager.GetTZInfo(tzname=tzname, utcOffset=timedelta(hours=tzhr, minutes=tzmin)) if tzInfo: dt['tzinfo'] = tzInfo datetimeVal = datetime(**dt) if delta: datetimeVal += delta except Exception as e: pass return datetimeVal def ISO8601Format(dt): """ Python datetime isoformat() has the following problems: - leave trailing 0 at the end of microseconds (violates XMLSchema rule) - tz print +00:00 instead of Z - Missing timezone offset for datetime without tzinfo """ isoStr = dt.strftime('%Y-%m-%dT%H:%M:%S') if dt.microsecond: isoStr += ('.%06d' % dt.microsecond).rstrip('0') if dt.tzinfo: tz = dt.strftime('%z') else: if time.daylight and time.localtime().tm_isdst: utcOffset_minutes = -time.altzone / 60 else: utcOffset_minutes = -time.timezone / 60 tz = "%+.2d%.2d" % (utcOffset_minutes / 60, (abs(utcOffset_minutes) % 60)) if tz == '+0000': return isoStr + 'Z' elif tz: return isoStr + tz[:3] + ':' + tz[3:] else: # Local offset is unknown return isoStr + '-00:00' # Testing if __name__ == '__main__': # Valid entries for testStr in [ '1971', # 1971-01-01 '1971-11', # 1971-11-01 '1971-11-02', '1971-11-02T23', '1971-11-02T23Z', '1971-11-02T23:04', '1971-11-02T23:04Z', '1971-11-02T23:04:15', '1971-11-02T23:04:15Z', '1971-11-02T23:04:15.1', '1971-11-02T23:04:15.01', 
'1971-11-02T23:04:15.023456', '1971-11-02T23:04:15.103456Z', '1971-11-02T23:04:15.123456+11', '1971-11-02T23:04:15.123456-11', '1971-11-02T23:04:15.123456+11:30', '1971-11-02T23:04:15.123456-11:30', '1971-11-02T23:04:15.123456+00:00', # Same as Z '1971-11-02T23:04:15.123456-00:00', # Same as Z '1971-01-02T23:04:15+14', '1971-01-02T23:04:15+14:00', '1971-01-02T23:04:15-14', '1971-01-02T23:04:15-14:00', # Valid: Truncate microsec to 6 digits '1971-01-02T23:04:15.123456891+11', '1971-01-02T24', # 24 is valid. It should represent the 00:00 the # next day '1971-01-02T24:00', '1971-01-02T24:00:00', '1971-01-02T24:00:00.0', # Should NOT be valid but python isoformat adding trailing zeros '1971-01-02T23:04:15.123430', # Microseconds ends in zero '1971-01-02T23:04:15.0', # Microseconds ends in zero # Should be valid but python datetime don't support it #'2005-12-31T23:59:60Z', # Leap second #'-0001', # BC 1 ]: dt = ParseISO8601(testStr) if dt == None: print('Failed to parse ({0})'.format(testStr)) assert(False) # Make sure we can translate back isoformat = ISO8601Format(dt) dt1 = ParseISO8601(isoformat) if dt.tzinfo is None: dt = dt.replace(tzinfo=dt1.tzinfo) if dt1 != dt: print('ParseISO8601 -> ISO8601Format -> ParseISO8601 failed ({0})'.format(testStr)) assert(False) # Make sure we can parse python isoformat() dt2 = ParseISO8601(dt.isoformat()) if dt2 == None: print('ParseISO8601("{0}".isoformat()) failed'.format(testStr)) assert(False) print(testStr, '->', dt, isoformat) # Basic form for testStr in [ '197111', # 1971-11-01 '19711102', '19711102T23', '19711102T23Z', '19711102T2304', '19711102T2304Z', '19711102T230415', '19711102T230415Z', '19711102T230415.123456', '19711102T230415.123456Z', '19711102T230415.123456+11', '19711102T230415.123456-11', '19711102T230415.123456+1130', '19711102T230415.123456-1130', ]: # Reject for now dt = ParseISO8601(testStr) if dt != None: print('ParseISO8601 ({0}) should fail, but it did not'.format(testStr)) assert(False) #print testStr, 
'->', dt #assert(dt != None) # Invalid entries for testStr in [ # Xml schema reject year 0 '0000', # 0 years are not allowed '+0001', # Leading + is not allowed '', # Empty datetime str '09', # Years must be at least 4 digits '1971-01-02T', # T not follow by time '1971-01-02TZ', # T not follow by time '1971-01-02T+10', # T not follow by time '1971-01-02T-10', # T not follow by time '1971-01-02T23:', # extra : '1971-01-02T23:04:', # extra : '1971-01-02T23:0d', # 0d '1971-01-02T23:04:15.', # Dot not follows by microsec '1971-01-02+12', # time without T '1971Z', # Z without T '1971-01-02T23:04:15.123456Z+11', # Z follows by + '1971-01-02T23:04:15.123456Z-11', # Z follows by - '1971-01-02T23:04:15.123456+:30', # extra : '1971-01-02T23:04:15.123456+30:', # extra : '1971-01-02T23:04:15.01234567890123456789', # Too many microseconds digits # Python isoformat leave trailing zeros in microseconds # Relax regular expression to accept it #'1971-01-02T23:04:15.123430', # Microseconds ends in zero #'1971-01-02T23:04:15.0', # Microseconds ends in zero # Timezone must be between +14 / -14 '1971-01-02T23:04:15+15', '1971-01-02T23:04:15-15', '1971-01-02T23:04:15+14:01', '1971-01-02T23:04:15-14:01', # Mix basic form with extended format '197101-02T23:04:15.123456', '19710102T23:04:15.123456', '19710102T230415.123456+11:30', '1971-01-02T230415.123456', '1971-01-02T23:04:15.123456+1130', # Error captured by datetime class '1971-00-02', # Less than 1 month '1971-13-02', # Larger than 12 months '1971-01-00', # Less than 1 day '1971-11-32', # Larger than 30 days for Nov '1971-12-32', # Larger than 31 days '1971-01-02T24:01', # Larger than 23 hr '1971-01-02T23:61', # Larger than 60 min '1971-01-02T23:60:61', # Larger than 61 sec ]: dt = ParseISO8601(testStr) if dt != None: print('ParseISO8601 ({0}) should fail, but it did not'.format(testStr)) assert(False)
apache-2.0
fweik/espresso
testsuite/python/widom_insertion.py
2
3926
# # Copyright (C) 2013-2019 The ESPResSo project # # This file is part of ESPResSo. # # ESPResSo is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ESPResSo is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # """Testmodule for the Widom Insertion. """ import unittest as ut import unittest_decorators as utx import numpy as np import espressomd from espressomd import reaction_ensemble from tests_common import lj_potential @utx.skipIfMissingFeatures(["LENNARD_JONES"]) class WidomInsertionTest(ut.TestCase): """Test the implementation of the widom insertion. The excess chemical potential is calculated for identical particles in a 20 cubed box with a single particle, interacting via a LJ-potential (cut-off at 5 sigma).""" N0 = 1 TEMPERATURE = 0.5 TYPE_HA = 0 CHARGE_HA = 0 LJ_EPS = 1.0 LJ_SIG = 1.0 LJ_CUT = 5 BOX_L = 2 * LJ_CUT LJ_SHIFT = lj_potential(LJ_CUT, LJ_EPS, LJ_SIG, LJ_CUT + 1, 0.0) radius = np.linspace(1e-10, LJ_CUT, 1000) # numerical integration for radii smaller than the cut-off in spherical # coordinates integrateUpToCutOff = 4 * np.pi * np.trapz( radius**2 * np.exp(-lj_potential(radius, LJ_EPS, LJ_SIG, LJ_CUT, LJ_SHIFT) / TEMPERATURE), x=radius) # numerical solution for V_lj=0 => corresponds to the volume (as exp(0)=1) integreateRest = (BOX_L**3 - 4.0 / 3.0 * np.pi * LJ_CUT**3) # calculate excess chemical potential of the system, see Frenkel Smith, # p 174. 
Note: He uses scaled coordinates, which is why we need to divide # by the box volume target_mu_ex = -TEMPERATURE * \ np.log((integrateUpToCutOff + integreateRest) / BOX_L**3) system = espressomd.System(box_l=np.ones(3) * BOX_L) system.cell_system.set_n_square() np.random.seed(69) # make reaction code fully deterministic system.cell_system.skin = 0.4 volume = system.volume() Widom = reaction_ensemble.WidomInsertion( temperature=TEMPERATURE, seed=1) def setUp(self): self.system.part.add(pos=0.5 * self.system.box_l, type=self.TYPE_HA) self.system.non_bonded_inter[self.TYPE_HA, self.TYPE_HA].lennard_jones.set_params( epsilon=self.LJ_EPS, sigma=self.LJ_SIG, cutoff=self.LJ_CUT, shift="auto") self.Widom.add_reaction( reactant_types=[], reactant_coefficients=[], product_types=[self.TYPE_HA], product_coefficients=[1], default_charges={self.TYPE_HA: self.CHARGE_HA}) def test_widom_insertion(self): num_samples = 100000 for _ in range(num_samples): # 0 for insertion reaction self.Widom.measure_excess_chemical_potential(0) mu_ex = self.Widom.measure_excess_chemical_potential(0) deviation_mu_ex = abs(mu_ex[0] - self.target_mu_ex) self.assertLess( deviation_mu_ex, 1e-3, msg="\nExcess chemical potential for single LJ-particle computed via Widom insertion is wrong.\n" + f" average mu_ex: {mu_ex[0]:.4f}" + f" mu_ex_std_err: {mu_ex[1]:.5f}" + f" target_mu_ex: {self.target_mu_ex:.4f}" ) if __name__ == "__main__": ut.main()
gpl-3.0
cloudbase/neutron
neutron/services/timestamp/timestamp_db.py
3
4381
# Copyright 2015 HuaWei Technologies. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from neutron_lib import exceptions as n_exc from oslo_log import log from oslo_utils import timeutils from sqlalchemy import event from sqlalchemy import exc as sql_exc from sqlalchemy.orm import session as se from neutron._i18n import _LW from neutron.db import standard_attr LOG = log.getLogger(__name__) CHANGED_SINCE = 'changed_since' class TimeStamp_db_mixin(object): """Mixin class to add Time Stamp methods.""" ISO8601_TIME_FORMAT = '%Y-%m-%dT%H:%M:%S' def _change_since_result_filter_hook(self, query, filters): # this block is for change_since query # we get the changed_since string from filters. # And translate it from string to datetime type. # Then compare with the timestamp in db which has # datetime type. values = filters and filters.get(CHANGED_SINCE, []) if not values: return query data = filters[CHANGED_SINCE][0] try: changed_since_string = timeutils.parse_isotime(data) except Exception: msg = _LW("The input %s must be in the " "following format: YYYY-MM-DDTHH:MM:SSZ") % CHANGED_SINCE raise n_exc.InvalidInput(error_message=msg) changed_since = (timeutils. 
normalize_time(changed_since_string)) target_model_class = list(query._mapper_adapter_map.keys())[0] query = query.join(standard_attr.StandardAttribute, target_model_class.standard_attr_id == standard_attr.StandardAttribute.id).filter( standard_attr.StandardAttribute.updated_at >= changed_since) return query def update_timestamp(self, session, context, instances): objs_list = session.new.union(session.dirty) while objs_list: obj = objs_list.pop() if (isinstance(obj, standard_attr.HasStandardAttributes) and obj.standard_attr_id): obj.updated_at = timeutils.utcnow() def register_db_events(self): event.listen(standard_attr.StandardAttribute, 'before_insert', self._add_timestamp) event.listen(se.Session, 'before_flush', self.update_timestamp) def unregister_db_events(self): self._unregister_db_event(standard_attr.StandardAttribute, 'before_insert', self._add_timestamp) self._unregister_db_event(se.Session, 'before_flush', self.update_timestamp) def _unregister_db_event(self, listen_obj, listened_event, listen_hander): try: event.remove(listen_obj, listened_event, listen_hander) except sql_exc.InvalidRequestError: LOG.warning(_LW("No sqlalchemy event for resource %s found"), listen_obj) def _format_timestamp(self, resource_db, result): result['created_at'] = (resource_db.created_at. strftime(self.ISO8601_TIME_FORMAT)) + 'Z' result['updated_at'] = (resource_db.updated_at. strftime(self.ISO8601_TIME_FORMAT)) + 'Z' def extend_resource_dict_timestamp(self, plugin_obj, resource_res, resource_db): if (resource_db and resource_db.created_at and resource_db.updated_at): self._format_timestamp(resource_db, resource_res) def _add_timestamp(self, mapper, _conn, target): if not target.created_at and not target.updated_at: time = timeutils.utcnow() for field in ['created_at', 'updated_at']: setattr(target, field, time) return target
apache-2.0
bigswitch/nova
nova/virt/hyperv/hostops.py
1
6821
# Copyright 2012 Cloudbase Solutions Srl # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Management class for host operations. """ import datetime import os import platform import time from os_win import constants as os_win_const from os_win import utilsfactory from oslo_log import log as logging from oslo_serialization import jsonutils from oslo_utils import units from nova.compute import arch from nova.compute import hv_type from nova.compute import vm_mode import nova.conf from nova.i18n import _ from nova.virt.hyperv import constants from nova.virt.hyperv import pathutils CONF = nova.conf.CONF LOG = logging.getLogger(__name__) class HostOps(object): def __init__(self): self._hostutils = utilsfactory.get_hostutils() self._pathutils = pathutils.PathUtils() def _get_cpu_info(self): """Get the CPU information. :returns: A dictionary containing the main properties of the central processor in the hypervisor. 
""" cpu_info = dict() processors = self._hostutils.get_cpus_info() w32_arch_dict = constants.WMI_WIN32_PROCESSOR_ARCHITECTURE cpu_info['arch'] = w32_arch_dict.get(processors[0]['Architecture'], 'Unknown') cpu_info['model'] = processors[0]['Name'] cpu_info['vendor'] = processors[0]['Manufacturer'] topology = dict() topology['sockets'] = len(processors) topology['cores'] = processors[0]['NumberOfCores'] topology['threads'] = (processors[0]['NumberOfLogicalProcessors'] // processors[0]['NumberOfCores']) cpu_info['topology'] = topology features = list() for fkey, fname in os_win_const.PROCESSOR_FEATURE.items(): if self._hostutils.is_cpu_feature_present(fkey): features.append(fname) cpu_info['features'] = features return cpu_info def _get_memory_info(self): (total_mem_kb, free_mem_kb) = self._hostutils.get_memory_info() total_mem_mb = total_mem_kb // 1024 free_mem_mb = free_mem_kb // 1024 return (total_mem_mb, free_mem_mb, total_mem_mb - free_mem_mb) def _get_local_hdd_info_gb(self): drive = os.path.splitdrive(self._pathutils.get_instances_dir())[0] (size, free_space) = self._hostutils.get_volume_info(drive) total_gb = size // units.Gi free_gb = free_space // units.Gi used_gb = total_gb - free_gb return (total_gb, free_gb, used_gb) def _get_hypervisor_version(self): """Get hypervisor version. :returns: hypervisor version (ex. 6003) """ # NOTE(claudiub): The hypervisor_version will be stored in the database # as an Integer and it will be used by the scheduler, if required by # the image property 'hypervisor_version_requires'. # The hypervisor_version will then be converted back to a version # by splitting the int in groups of 3 digits. # E.g.: hypervisor_version 6003 is converted to '6.3'. version = self._hostutils.get_windows_version().split('.') version = int(version[0]) * 1000 + int(version[1]) LOG.debug('Windows version: %s ', version) return version def get_available_resource(self): """Retrieve resource info. 
This method is called when nova-compute launches, and as part of a periodic task. :returns: dictionary describing resources """ LOG.debug('get_available_resource called') (total_mem_mb, free_mem_mb, used_mem_mb) = self._get_memory_info() (total_hdd_gb, free_hdd_gb, used_hdd_gb) = self._get_local_hdd_info_gb() cpu_info = self._get_cpu_info() cpu_topology = cpu_info['topology'] vcpus = (cpu_topology['sockets'] * cpu_topology['cores'] * cpu_topology['threads']) dic = {'vcpus': vcpus, 'memory_mb': total_mem_mb, 'memory_mb_used': used_mem_mb, 'local_gb': total_hdd_gb, 'local_gb_used': used_hdd_gb, 'hypervisor_type': "hyperv", 'hypervisor_version': self._get_hypervisor_version(), 'hypervisor_hostname': platform.node(), 'vcpus_used': 0, 'cpu_info': jsonutils.dumps(cpu_info), 'supported_instances': [(arch.I686, hv_type.HYPERV, vm_mode.HVM), (arch.X86_64, hv_type.HYPERV, vm_mode.HVM)], 'numa_topology': None, } return dic def host_power_action(self, action): """Reboots, shuts down or powers up the host.""" if action in [constants.HOST_POWER_ACTION_SHUTDOWN, constants.HOST_POWER_ACTION_REBOOT]: self._hostutils.host_power_action(action) else: if action == constants.HOST_POWER_ACTION_STARTUP: raise NotImplementedError( _("Host PowerOn is not supported by the Hyper-V driver")) def get_host_ip_addr(self): host_ip = CONF.my_ip if not host_ip: # Return the first available address host_ip = self._hostutils.get_local_ips()[0] LOG.debug("Host IP address is: %s", host_ip) return host_ip def get_host_uptime(self): """Returns the host uptime.""" tick_count64 = self._hostutils.get_host_tick_count64() # format the string to match libvirt driver uptime # Libvirt uptime returns a combination of the following # - current host time # - time since host is up # - number of logged in users # - cpu load # Since the Windows function GetTickCount64 returns only # the time since the host is up, returning 0s for cpu load # and number of logged in users. 
# This is done to ensure the format of the returned # value is same as in libvirt return "%s up %s, 0 users, load average: 0, 0, 0" % ( str(time.strftime("%H:%M:%S")), str(datetime.timedelta(milliseconds=int(tick_count64))))
apache-2.0
wkeeling/ansible
lib/ansible/plugins/callback/json.py
34
2423
# (c) 2016, Matt Martz <matt@sivel.net> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type import json from ansible.plugins.callback import CallbackBase class CallbackModule(CallbackBase): CALLBACK_VERSION = 2.0 CALLBACK_TYPE = 'stdout' CALLBACK_NAME = 'json' def __init__(self, display=None): super(CallbackModule, self).__init__(display) self.results = [] def _new_play(self, play): return { 'play': { 'name': play.name, 'id': str(play._uuid) }, 'tasks': [] } def _new_task(self, task): return { 'task': { 'name': task.name, 'id': str(task._uuid) }, 'hosts': {} } def v2_playbook_on_play_start(self, play): self.results.append(self._new_play(play)) def v2_playbook_on_task_start(self, task, is_conditional): self.results[-1]['tasks'].append(self._new_task(task)) def v2_runner_on_ok(self, result, **kwargs): host = result._host self.results[-1]['tasks'][-1]['hosts'][host.name] = result._result def v2_playbook_on_stats(self, stats): """Display info about playbook statistics""" hosts = sorted(stats.processed.keys()) summary = {} for h in hosts: s = stats.summarize(h) summary[h] = s output = { 'plays': self.results, 'stats': summary } print(json.dumps(output, indent=4, sort_keys=True)) v2_runner_on_failed = v2_runner_on_ok v2_runner_on_unreachable = v2_runner_on_ok 
v2_runner_on_skipped = v2_runner_on_ok
gpl-3.0
OMS-NetZero/FAIR
versioneer.py
386
68611
# Version: 0.18 """The Versioneer - like a rocketeer, but for versions. The Versioneer ============== * like a rocketeer, but for versions! * https://github.com/warner/python-versioneer * Brian Warner * License: Public Domain * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy * [![Latest Version] (https://pypip.in/version/versioneer/badge.svg?style=flat) ](https://pypi.python.org/pypi/versioneer/) * [![Build Status] (https://travis-ci.org/warner/python-versioneer.png?branch=master) ](https://travis-ci.org/warner/python-versioneer) This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. ## Quick Install * `pip install versioneer` to somewhere to your $PATH * add a `[versioneer]` section to your setup.cfg (see below) * run `versioneer install` in your source tree, commit the results ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. 
Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes. The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. `_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation See [INSTALL.md](./INSTALL.md) for detailed installation instructions. 
## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the commit date in ISO 8601 format. This will be None if the date is not available. * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. 
The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See [details.md](details.md) in the Versioneer source tree for descriptions. ## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Known Limitations Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github [issues page](https://github.com/warner/python-versioneer/issues). ### Subprojects Versioneer has limited support for source trees in which `setup.py` is not in the root directory (e.g. `setup.py` and `.git/` are *not* siblings). 
There are two common reasons why `setup.py` might not be in the root:

* Source trees which contain multiple subprojects, such as
  [Buildbot](https://github.com/buildbot/buildbot), which contains both
  "master" and "slave" subprojects, each with their own `setup.py`,
  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
  distributions (and upload multiple independently-installable tarballs).
* Source trees whose main purpose is to contain a C library, but which also
  provide bindings to Python (and perhaps other languages) in subdirectories.

Versioneer will look for `.git` in parent directories, and most operations
should get the right version string. However `pip` and `setuptools` have bugs
and implementation details which frequently cause `pip install .` from a
subproject directory to fail to find a correct version string (so it usually
defaults to `0+unknown`).

`pip install --editable .` should work correctly. `setup.py install` might
work too.

Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
some later version. [Bug #38](https://github.com/warner/python-versioneer/issues/38)
is tracking this issue. The discussion in
[PR #61](https://github.com/warner/python-versioneer/pull/61) describes the
issue from the Versioneer side in more detail.
[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
pip to let Versioneer work correctly.

Versioneer-0.16 and earlier only looked for a `.git` directory next to the
`setup.cfg`, so subprojects were completely unsupported with those releases.

### Editable installs with setuptools <= 18.5

`setup.py develop` and `pip install --editable .` allow you to install a
project into a virtualenv once, then continue editing the source code (and
test) without re-installing after every change.
"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a convenient way to specify executable scripts that should be installed along with the python package. These both work as expected when using modern setuptools. When using setuptools-18.5 or earlier, however, certain operations will cause `pkg_resources.DistributionNotFound` errors when running the entrypoint script, which must be resolved by re-installing the package. This happens when the install happens with one version, then the egg_info data is regenerated while a different version is checked out. Many setup.py commands cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. ### Unicode version strings While Versioneer works (and is continually tested) with both Python 2 and Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. Newer releases probably generate unicode version strings on py2. It's not clear that this is wrong, but it may be surprising for applications when then write these strings to a network connection or include them in bytes-oriented APIs like cryptographic checksums. [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates this question. ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. 
* re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. Specifically, both are released under the Creative Commons "Public Domain Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . """ from __future__ import print_function try: import configparser except ImportError: import ConfigParser as configparser import errno import json import os import re import subprocess import sys class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_root(): """Get the project root directory. We require that all commands are run from the project root, i.e. the directory that contains setup.py, setup.cfg, and versioneer.py . 
""" root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. me = os.path.realpath(os.path.abspath(__file__)) me_dir = os.path.normcase(os.path.splitext(me)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(me), versioneer_py)) except NameError: pass return root def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" # This might raise EnvironmentError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . 
setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() with open(setup_cfg, "r") as f: parser.readfp(f) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() cfg.VCS = VCS cfg.style = get(parser, "style") or "" cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout 
was %s" % stdout) return None, p.returncode return stdout, p.returncode LONG_VERSION_PY['git'] = ''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if 
vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. 
However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%%s*" %% tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. 
Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. 
Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. 
By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? 
pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py for export-subst keyword substitution. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = os.path.splitext(me)[0] + ".py" versioneer_file = os.path.relpath(me) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: f = open(".gitattributes", "r") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() except EnvironmentError: pass if not present: f = open(".gitattributes", "a+") f.write("%s export-subst\n" % versionfile_source) f.close() files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.18) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. 
import json version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" def get_versions(verbose=False): """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. 
""" if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] root = get_root() cfg = get_config_from_root(root) assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose assert cfg.versionfile_source is not None, \ "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) # extract version from first of: _version.py, VCS command (e.g. 'git # describe'), parentdir. This is meant to work for developers using a # source checkout, for users of a tarball created by 'setup.py sdist', # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. get_keywords_f = handlers.get("get_keywords") from_keywords_f = handlers.get("keywords") if get_keywords_f and from_keywords_f: try: keywords = get_keywords_f(versionfile_abs) ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) if verbose: print("got version from expanded keyword %s" % ver) return ver except NotThisMethod: pass try: ver = versions_from_file(versionfile_abs) if verbose: print("got version from file %s %s" % (versionfile_abs, ver)) return ver except NotThisMethod: pass from_vcs_f = handlers.get("pieces_from_vcs") if from_vcs_f: try: pieces = from_vcs_f(cfg.tag_prefix, root, verbose) ver = render(pieces, cfg.style) if verbose: print("got version from VCS %s" % ver) return ver except NotThisMethod: pass try: if cfg.parentdir_prefix: ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) if verbose: print("got version from parentdir %s" % ver) return ver except NotThisMethod: pass if verbose: print("unable to compute version") return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute 
version", "date": None} def get_version(): """Get the short version string for this project.""" return get_versions()["version"] def get_cmdclass(): """Get the custom setuptools/distutils subclasses used by Versioneer.""" if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and # 'easy_install .'), in which subdependencies of the main project are # built (using setup.py bdist_egg) in the same python process. Assume # a main project A and a dependency B, which use different versions # of Versioneer. A's setup.py imports A's Versioneer, leaving it in # sys.modules by the time B's setup.py is executed, causing B to run # with the wrong versioneer. Setuptools wraps the sub-dep builds in a # sandbox that restores sys.modules to it's pre-build state, so the # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. # Also see https://github.com/warner/python-versioneer/issues/52 cmds = {} # we add "version" to both distutils and setuptools from distutils.core import Command class cmd_version(Command): description = "report generated version string" user_options = [] boolean_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) print(" dirty: %s" % vers.get("dirty")) print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py # distutils/install -> distutils/build ->.. # setuptools/bdist_wheel -> distutils/install ->.. 
# setuptools/bdist_egg -> distutils/install_lib -> build_py # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? # pip install: # copies source tree to a tempdir before running egg_info/etc # if .git isn't copied too, 'git describe' will fail # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? # we override different "build_py" commands for both environments if "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION # "product_version": versioneer.get_version(), # ... 
class cmd_build_exe(_build_exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] if 'py2exe' in sys.modules: # py2exe enabled? try: from py2exe.distutils_buildexe import py2exe as _py2exe # py3 except ImportError: from py2exe.build_exe import py2exe as _py2exe # py2 class cmd_py2exe(_py2exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _py2exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments if "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self): versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir, files): root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, 
files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds CONFIG_ERROR = """ setup.cfg is missing the necessary Versioneer configuration. You need a section like: [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), ...) Please read the docstring in ./versioneer.py for configuration instructions, edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. """ SAMPLE_CONFIG = """ # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. 
[versioneer] #VCS = git #style = pep440 #versionfile_source = #versionfile_build = #tag_prefix = #parentdir_prefix = """ INIT_PY_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ def do_setup(): """Main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) except (EnvironmentError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) return 1 print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() except EnvironmentError: old = "" if INIT_PY_SNIPPET not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: f.write(INIT_PY_SNIPPET) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) ipy = None # Make sure both the top-level "versioneer.py" and versionfile_source # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so # they'll be copied into source distributions. Pip won't be able to # install the package without this. 
manifest_in = os.path.join(root, "MANIFEST.in") simple_includes = set() try: with open(manifest_in, "r") as f: for line in f: if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) except EnvironmentError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so # it might give some false negatives. Appending redundant 'include' # lines is safe, though. if "versioneer.py" not in simple_includes: print(" appending 'versioneer.py' to MANIFEST.in") with open(manifest_in, "a") as f: f.write("include versioneer.py\n") else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: print(" versionfile_source already in MANIFEST.in") # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. do_vcs_install(manifest_in, cfg.versionfile_source, ipy) return 0 def scan_setup_py(): """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False errors = 0 with open("setup.py", "r") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") if "versioneer.get_cmdclass()" in line: found.add("cmdclass") if "versioneer.get_version()" in line: found.add("get_version") if "versioneer.VCS" in line: setters = True if "versioneer.versionfile_source" in line: setters = True if len(found) != 3: print("") print("Your setup.py appears to be missing some important items") print("(but I might be wrong). 
Please make sure it has something") print("roughly like the following:") print("") print(" import versioneer") print(" setup( version=versioneer.get_version(),") print(" cmdclass=versioneer.get_cmdclass(), ...)") print("") errors += 1 if setters: print("You should remove lines like 'versioneer.VCS = ' and") print("'versioneer.versionfile_source = ' . This configuration") print("now lives in setup.cfg, and should be removed from setup.py") print("") errors += 1 return errors if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": errors = do_setup() errors += scan_setup_py() if errors: sys.exit(1)
apache-2.0
nirmeshk/oh-mainline
vendor/packages/Pygments/pygments/lexers/web.py
42
135416
# -*- coding: utf-8 -*- """ pygments.lexers.web ~~~~~~~~~~~~~~~~~~~ Lexers for web-related languages and markup. :copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re import copy from pygments.lexer import RegexLexer, ExtendedRegexLexer, bygroups, using, \ include, this from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Other, Punctuation, Literal from pygments.util import get_bool_opt, get_list_opt, looks_like_xml, \ html_doctype_matches, unirange from pygments.lexers.agile import RubyLexer from pygments.lexers.compiled import ScalaLexer __all__ = ['HtmlLexer', 'XmlLexer', 'JavascriptLexer', 'JsonLexer', 'CssLexer', 'PhpLexer', 'ActionScriptLexer', 'XsltLexer', 'ActionScript3Lexer', 'MxmlLexer', 'HaxeLexer', 'HamlLexer', 'SassLexer', 'ScssLexer', 'ObjectiveJLexer', 'CoffeeScriptLexer', 'LiveScriptLexer', 'DuelLexer', 'ScamlLexer', 'JadeLexer', 'XQueryLexer', 'DtdLexer', 'DartLexer', 'LassoLexer', 'QmlLexer', 'TypeScriptLexer'] class JavascriptLexer(RegexLexer): """ For JavaScript source code. 
""" name = 'JavaScript' aliases = ['js', 'javascript'] filenames = ['*.js', ] mimetypes = ['application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript', ] flags = re.DOTALL tokens = { 'commentsandwhitespace': [ (r'\s+', Text), (r'<!--', Comment), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline) ], 'slashstartsregex': [ include('commentsandwhitespace'), (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/' r'([gim]+\b|\B)', String.Regex, '#pop'), (r'(?=/)', Text, ('#pop', 'badregex')), (r'', Text, '#pop') ], 'badregex': [ (r'\n', Text, '#pop') ], 'root': [ (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'), include('commentsandwhitespace'), (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|' r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator, 'slashstartsregex'), (r'[{(\[;,]', Punctuation, 'slashstartsregex'), (r'[})\].]', Punctuation), (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|' r'throw|try|catch|finally|new|delete|typeof|instanceof|void|' r'this)\b', Keyword, 'slashstartsregex'), (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'), (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|' r'extends|final|float|goto|implements|import|int|interface|long|native|' r'package|private|protected|public|short|static|super|synchronized|throws|' r'transient|volatile)\b', Keyword.Reserved), (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant), (r'(Array|Boolean|Date|Error|Function|Math|netscape|' r'Number|Object|Packages|RegExp|String|sun|decodeURI|' r'decodeURIComponent|encodeURI|encodeURIComponent|' r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|' r'window)\b', Name.Builtin), (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-fA-F]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), ] } class JsonLexer(RegexLexer): """ For JSON data 
structures. *New in Pygments 1.5.* """ name = 'JSON' aliases = ['json'] filenames = ['*.json'] mimetypes = [ 'application/json', ] # integer part of a number int_part = r'-?(0|[1-9]\d*)' # fractional part of a number frac_part = r'\.\d+' # exponential part of a number exp_part = r'[eE](\+|-)?\d+' flags = re.DOTALL tokens = { 'whitespace': [ (r'\s+', Text), ], # represents a simple terminal value 'simplevalue': [ (r'(true|false|null)\b', Keyword.Constant), (('%(int_part)s(%(frac_part)s%(exp_part)s|' '%(exp_part)s|%(frac_part)s)') % vars(), Number.Float), (int_part, Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), ], # the right hand side of an object, after the attribute name 'objectattribute': [ include('value'), (r':', Punctuation), # comma terminates the attribute but expects more (r',', Punctuation, '#pop'), # a closing bracket terminates the entire object, so pop twice (r'}', Punctuation, ('#pop', '#pop')), ], # a json object - { attr, attr, ... } 'objectvalue': [ include('whitespace'), (r'"(\\\\|\\"|[^"])*"', Name.Tag, 'objectattribute'), (r'}', Punctuation, '#pop'), ], # json array - [ value, value, ... } 'arrayvalue': [ include('whitespace'), include('value'), (r',', Punctuation), (r']', Punctuation, '#pop'), ], # a json value - either a simple value or a complex value (object or array) 'value': [ include('whitespace'), include('simplevalue'), (r'{', Punctuation, 'objectvalue'), (r'\[', Punctuation, 'arrayvalue'), ], # the root of a json document whould be a value 'root': [ include('value'), ], } JSONLexer = JsonLexer # for backwards compatibility with Pygments 1.5 class ActionScriptLexer(RegexLexer): """ For ActionScript source code. 
*New in Pygments 0.9.* """ name = 'ActionScript' aliases = ['as', 'actionscript'] filenames = ['*.as'] mimetypes = ['application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3'] flags = re.DOTALL tokens = { 'root': [ (r'\s+', Text), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex), (r'[~\^\*!%&<>\|+=:;,/?\\-]+', Operator), (r'[{}\[\]();.]+', Punctuation), (r'(case|default|for|each|in|while|do|break|return|continue|if|else|' r'throw|try|catch|var|with|new|typeof|arguments|instanceof|this|' r'switch)\b', Keyword), (r'(class|public|final|internal|native|override|private|protected|' r'static|import|extends|implements|interface|intrinsic|return|super|' r'dynamic|function|const|get|namespace|package|set)\b', Keyword.Declaration), (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b', Keyword.Constant), (r'(Accessibility|AccessibilityProperties|ActionScriptVersion|' r'ActivityEvent|AntiAliasType|ApplicationDomain|AsBroadcaster|Array|' r'AsyncErrorEvent|AVM1Movie|BevelFilter|Bitmap|BitmapData|' r'BitmapDataChannel|BitmapFilter|BitmapFilterQuality|BitmapFilterType|' r'BlendMode|BlurFilter|Boolean|ByteArray|Camera|Capabilities|CapsStyle|' r'Class|Color|ColorMatrixFilter|ColorTransform|ContextMenu|' r'ContextMenuBuiltInItems|ContextMenuEvent|ContextMenuItem|' r'ConvultionFilter|CSMSettings|DataEvent|Date|DefinitionError|' r'DeleteObjectSample|Dictionary|DisplacmentMapFilter|DisplayObject|' r'DisplacmentMapFilterMode|DisplayObjectContainer|DropShadowFilter|' r'Endian|EOFError|Error|ErrorEvent|EvalError|Event|EventDispatcher|' r'EventPhase|ExternalInterface|FileFilter|FileReference|' r'FileReferenceList|FocusDirection|FocusEvent|Font|FontStyle|FontType|' r'FrameLabel|FullScreenEvent|Function|GlowFilter|GradientBevelFilter|' r'GradientGlowFilter|GradientType|Graphics|GridFitType|HTTPStatusEvent|' r'IBitmapDrawable|ID3Info|IDataInput|IDataOutput|IDynamicPropertyOutput' 
r'IDynamicPropertyWriter|IEventDispatcher|IExternalizable|' r'IllegalOperationError|IME|IMEConversionMode|IMEEvent|int|' r'InteractiveObject|InterpolationMethod|InvalidSWFError|InvokeEvent|' r'IOError|IOErrorEvent|JointStyle|Key|Keyboard|KeyboardEvent|KeyLocation|' r'LineScaleMode|Loader|LoaderContext|LoaderInfo|LoadVars|LocalConnection|' r'Locale|Math|Matrix|MemoryError|Microphone|MorphShape|Mouse|MouseEvent|' r'MovieClip|MovieClipLoader|Namespace|NetConnection|NetStatusEvent|' r'NetStream|NewObjectSample|Number|Object|ObjectEncoding|PixelSnapping|' r'Point|PrintJob|PrintJobOptions|PrintJobOrientation|ProgressEvent|Proxy|' r'QName|RangeError|Rectangle|ReferenceError|RegExp|Responder|Sample|Scene|' r'ScriptTimeoutError|Security|SecurityDomain|SecurityError|' r'SecurityErrorEvent|SecurityPanel|Selection|Shape|SharedObject|' r'SharedObjectFlushStatus|SimpleButton|Socket|Sound|SoundChannel|' r'SoundLoaderContext|SoundMixer|SoundTransform|SpreadMethod|Sprite|' r'StackFrame|StackOverflowError|Stage|StageAlign|StageDisplayState|' r'StageQuality|StageScaleMode|StaticText|StatusEvent|String|StyleSheet|' r'SWFVersion|SyncEvent|SyntaxError|System|TextColorType|TextField|' r'TextFieldAutoSize|TextFieldType|TextFormat|TextFormatAlign|' r'TextLineMetrics|TextRenderer|TextSnapshot|Timer|TimerEvent|Transform|' r'TypeError|uint|URIError|URLLoader|URLLoaderDataFormat|URLRequest|' r'URLRequestHeader|URLRequestMethod|URLStream|URLVariabeles|VerifyError|' r'Video|XML|XMLDocument|XMLList|XMLNode|XMLNodeType|XMLSocket|XMLUI)\b', Name.Builtin), (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|' r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|' r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|' r'unescape)\b',Name.Function), (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), 
(r"'(\\\\|\\'|[^'])*'", String.Single), ] } class ActionScript3Lexer(RegexLexer): """ For ActionScript 3 source code. *New in Pygments 0.11.* """ name = 'ActionScript 3' aliases = ['as3', 'actionscript3'] filenames = ['*.as'] mimetypes = ['application/x-actionscript', 'text/x-actionscript', 'text/actionscript'] identifier = r'[$a-zA-Z_][a-zA-Z0-9_]*' typeidentifier = identifier + '(?:\.<\w+>)?' flags = re.DOTALL | re.MULTILINE tokens = { 'root': [ (r'\s+', Text), (r'(function\s+)(' + identifier + r')(\s*)(\()', bygroups(Keyword.Declaration, Name.Function, Text, Operator), 'funcparams'), (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' + typeidentifier + r')', bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text, Keyword.Type)), (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)', bygroups(Keyword, Text, Name.Namespace, Text)), (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()', bygroups(Keyword, Text, Keyword.Type, Text, Operator)), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex), (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)), (r'(case|default|for|each|in|while|do|break|return|continue|if|else|' r'throw|try|catch|with|new|typeof|arguments|instanceof|this|' r'switch|import|include|as|is)\b', Keyword), (r'(class|public|final|internal|native|override|private|protected|' r'static|import|extends|implements|interface|intrinsic|return|super|' r'dynamic|function|const|get|namespace|package|set)\b', Keyword.Declaration), (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b', Keyword.Constant), (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|' r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|' r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|' r'unescape)\b', Name.Function), (identifier, Name), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', 
Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), (r'[~\^\*!%&<>\|+=:;,/?\\{}\[\]().-]+', Operator), ], 'funcparams': [ (r'\s+', Text), (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' + typeidentifier + r'|\*)(\s*)', bygroups(Text, Punctuation, Name, Text, Operator, Text, Keyword.Type, Text), 'defval'), (r'\)', Operator, 'type') ], 'type': [ (r'(\s*)(:)(\s*)(' + typeidentifier + r'|\*)', bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'), (r'\s*', Text, '#pop:2') ], 'defval': [ (r'(=)(\s*)([^(),]+)(\s*)(,?)', bygroups(Operator, Text, using(this), Text, Operator), '#pop'), (r',?', Operator, '#pop') ] } def analyse_text(text): if re.match(r'\w+\s*:\s*\w', text): return 0.3 return 0 class CssLexer(RegexLexer): """ For CSS (Cascading Style Sheets). """ name = 'CSS' aliases = ['css'] filenames = ['*.css'] mimetypes = ['text/css'] tokens = { 'root': [ include('basics'), ], 'basics': [ (r'\s+', Text), (r'/\*(?:.|\n)*?\*/', Comment), (r'{', Punctuation, 'content'), (r'\:[a-zA-Z0-9_-]+', Name.Decorator), (r'\.[a-zA-Z0-9_-]+', Name.Class), (r'\#[a-zA-Z0-9_-]+', Name.Function), (r'@[a-zA-Z0-9_-]+', Keyword, 'atrule'), (r'[a-zA-Z0-9_-]+', Name.Tag), (r'[~\^\*!%&\[\]\(\)<>\|+=@:;,./?-]', Operator), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single) ], 'atrule': [ (r'{', Punctuation, 'atcontent'), (r';', Punctuation, '#pop'), include('basics'), ], 'atcontent': [ include('basics'), (r'}', Punctuation, '#pop:2'), ], 'content': [ (r'\s+', Text), (r'}', Punctuation, '#pop'), (r'url\(.*?\)', String.Other), (r'^@.*?$', Comment.Preproc), (r'(azimuth|background-attachment|background-color|' r'background-image|background-position|background-repeat|' r'background|border-bottom-color|border-bottom-style|' r'border-bottom-width|border-left-color|border-left-style|' r'border-left-width|border-right|border-right-color|' r'border-right-style|border-right-width|border-top-color|' 
             r'border-top-style|border-top-width|border-bottom|'
             r'border-collapse|border-left|border-width|border-color|'
             r'border-spacing|border-style|border-top|border|caption-side|'
             r'clear|clip|color|content|counter-increment|counter-reset|'
             r'cue-after|cue-before|cue|cursor|direction|display|'
             r'elevation|empty-cells|float|font-family|font-size|'
             r'font-size-adjust|font-stretch|font-style|font-variant|'
             r'font-weight|font|height|letter-spacing|line-height|'
             r'list-style-type|list-style-image|list-style-position|'
             r'list-style|margin-bottom|margin-left|margin-right|'
             r'margin-top|margin|marker-offset|marks|max-height|max-width|'
             r'min-height|min-width|opacity|orphans|outline|outline-color|'
             r'outline-style|outline-width|overflow(?:-x|-y)?|padding-bottom|'
             r'padding-left|padding-right|padding-top|padding|page|'
             r'page-break-after|page-break-before|page-break-inside|'
             r'pause-after|pause-before|pause|pitch|pitch-range|'
             r'play-during|position|quotes|richness|right|size|'
             r'speak-header|speak-numeral|speak-punctuation|speak|'
             r'speech-rate|stress|table-layout|text-align|text-decoration|'
             r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
             r'vertical-align|visibility|voice-family|volume|white-space|'
             r'widows|width|word-spacing|z-index|bottom|left|'
             # keyword values start here (longer alternatives are listed
             # before their prefixes, e.g. 'lower-alpha' before 'lower')
             r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
             r'behind|below|bidi-override|blink|block|bold|bolder|both|'
             r'capitalize|center-left|center-right|center|circle|'
             r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
             r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
             r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
             r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
             r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
             r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
             r'inherit|inline-table|inline|inset|inside|invert|italic|'
             r'justify|katakana-iroha|katakana|landscape|larger|large|'
             r'left-side|leftwards|level|lighter|line-through|list-item|'
             r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
             r'lower|low|medium|message-box|middle|mix|monospace|'
             r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
             r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
             r'open-quote|outset|outside|overline|pointer|portrait|px|'
             r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
             r'rightwards|s-resize|sans-serif|scroll|se-resize|'
             r'semi-condensed|semi-expanded|separate|serif|show|silent|'
             r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
             r'spell-out|square|static|status-bar|super|sw-resize|'
             r'table-caption|table-cell|table-column|table-column-group|'
             r'table-footer-group|table-header-group|table-row|'
             r'table-row-group|text|text-bottom|text-top|thick|thin|'
             r'transparent|ultra-condensed|ultra-expanded|underline|'
             r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
             r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
             r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Keyword),
            # extended (X11-style) color keywords
            (r'(indigo|gold|firebrick|indianred|yellow|darkolivegreen|'
             r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
             r'mediumslateblue|black|springgreen|crimson|lightsalmon|brown|'
             r'turquoise|olivedrab|cyan|silver|skyblue|gray|darkturquoise|'
             r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|teal|'
             r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
             r'violet|navy|orchid|blue|ghostwhite|honeydew|cornflowerblue|'
             r'darkblue|darkkhaki|mediumpurple|cornsilk|red|bisque|slategray|'
             r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
             r'gainsboro|mediumturquoise|floralwhite|coral|purple|lightgrey|'
             r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
             r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
             r'lightcoral|orangered|navajowhite|lime|palegreen|burlywood|'
             r'seashell|mediumspringgreen|fuchsia|papayawhip|blanchedalmond|'
             r'peru|aquamarine|white|darkslategray|ivory|dodgerblue|'
             r'lemonchiffon|chocolate|orange|forestgreen|slateblue|olive|'
             r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
             r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
             r'plum|aqua|darkgoldenrod|maroon|sandybrown|magenta|tan|'
             r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
             r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
             r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
             r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
             r'lightyellow|lavenderblush|linen|mediumaquamarine|green|'
             r'blueviolet|peachpuff)\b', Name.Builtin),
            (r'\!important', Comment.Preproc),
            (r'/\*(?:.|\n)*?\*/', Comment),
            # hex color literal
            (r'\#[a-zA-Z0-9]{1,6}', Number),
            # number with a unit suffix
            (r'[\.-]?[0-9]*[\.]?[0-9]+(em|px|\%|pt|pc|in|mm|cm|ex|s)\b', Number),
            (r'-?[0-9]+', Number),
            (r'[~\^\*!%&<>\|+=@:,./?-]+', Operator),
            (r'[\[\]();]+', Punctuation),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name)
        ]
    }


class ObjectiveJLexer(RegexLexer):
    """
    For Objective-J source code with preprocessor directives.

    *New in Pygments 1.3.*
    """

    name = 'Objective-J'
    aliases = ['objective-j', 'objectivej', 'obj-j', 'objj']
    filenames = ['*.j']
    mimetypes = ['text/x-objective-j']

    #: optional Comment or Whitespace
    _ws = r'(?:\s|//.*?\n|/[*].*?[*]/)*'

    flags = re.DOTALL | re.MULTILINE

    tokens = {
        'root': [
            include('whitespace'),

            # function definition
            (r'^(' + _ws + r'[\+-]' + _ws + r')([\(a-zA-Z_].*?[^\(])(' +
             _ws + '{)',
             bygroups(using(this), using(this, state='function_signature'),
                      using(this))),

            # class definition
            (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
             'classname'),
            (r'(@class|@protocol)(\s*)', bygroups(Keyword, Text),
             'forward_classname'),
            (r'(\s*)(@end)(\s*)', bygroups(Text, Keyword, Text)),

            include('statements'),
            ('[{\(\)}]', Punctuation),
            (';', Punctuation),
        ],
        'whitespace': [
            # @import with a quoted or angle-bracketed path
            (r'(@import)(\s+)("(?:\\\\|\\"|[^"])*")',
             bygroups(Comment.Preproc, Text, String.Double)),
            (r'(@import)(\s+)(<(?:\\\\|\\>|[^>])*>)',
             bygroups(Comment.Preproc, Text, String.Double)),
            (r'(#(?:include|import))(\s+)("(?:\\\\|\\"|[^"])*")',
             bygroups(Comment.Preproc, Text, String.Double)),
            (r'(#(?:include|import))(\s+)(<(?:\\\\|\\>|[^>])*>)',
             bygroups(Comment.Preproc, Text, String.Double)),

            (r'#if\s+0', Comment.Preproc, 'if0'),
            (r'#', Comment.Preproc, 'macro'),

            (r'\n', Text),
            (r'\s+', Text),
            (r'\\\n', Text),  # line continuation
            (r'//(\n|(.|\n)*?[^\\]\n)', Comment.Single),
            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
            (r'<!--', Comment),
        ],
        # entered where a '/' would start a regex literal rather than division
        'slashstartsregex': [
            include('whitespace'),
            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'(?=/)', Text, ('#pop', 'badregex')),
            (r'', Text, '#pop'),
        ],
        'badregex': [
            (r'\n', Text, '#pop'),
        ],
        'statements': [
            (r'(L|@)?"', String, 'string'),
            (r"(L|@)?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
             String.Char),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
            (r'0x[0-9a-fA-F]+[Ll]?', Number.Hex),
            (r'0[0-7]+[Ll]?', Number.Oct),
            (r'\d+[Ll]?', Number.Integer),

            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),

            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?',
             Operator, 'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),

            (r'(for|in|while|do|break|return|continue|switch|case|default|if|'
             r'else|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
             r'prototype|__proto__)\b', Keyword, 'slashstartsregex'),

            (r'(var|with|function)\b', Keyword.Declaration, 'slashstartsregex'),

            (r'(@selector|@private|@protected|@public|@encode|'
             r'@synchronized|@try|@throw|@catch|@finally|@end|@property|'
             r'@synthesize|@dynamic|@for|@accessors|new)\b', Keyword),

            (r'(int|long|float|short|double|char|unsigned|signed|void|'
             r'id|BOOL|bool|boolean|IBOutlet|IBAction|SEL|@outlet|@action)\b',
             Keyword.Type),

            (r'(self|super)\b', Name.Builtin),

            (r'(TRUE|YES|FALSE|NO|Nil|nil|NULL)\b', Keyword.Constant),
            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
            (r'(ABS|ASIN|ACOS|ATAN|ATAN2|SIN|COS|TAN|EXP|POW|CEIL|FLOOR|ROUND|'
             r'MIN|MAX|RAND|SQRT|E|LN2|LN10|LOG2E|LOG10E|PI|PI2|PI_2|SQRT1_2|'
             r'SQRT2)\b', Keyword.Constant),

            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
             r'window)\b', Name.Builtin),

            # a plain function call: name followed by '('
            (r'([$a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r')(?=\()',
             bygroups(Name.Function, using(this))),

            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name),
        ],
        'classname' : [
            # interface definition that inherits
            (r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws + r':' + _ws +
             r')([a-zA-Z_][a-zA-Z0-9_]*)?',
             bygroups(Name.Class, using(this), Name.Class), '#pop'),
            # interface definition for a category
            (r'([a-zA-Z_][a-zA-Z0-9_]*)(' + _ws +
             r'\()([a-zA-Z_][a-zA-Z0-9_]*)(\))',
             bygroups(Name.Class, using(this), Name.Label, Text), '#pop'),
            # simple interface / implementation
            (r'([a-zA-Z_][a-zA-Z0-9_]*)', Name.Class, '#pop'),
        ],
        'forward_classname' : [
            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*,\s*)',
             bygroups(Name.Class, Text), '#push'),
            (r'([a-zA-Z_][a-zA-Z0-9_]*)(\s*;?)',
             bygroups(Name.Class, Text), '#pop'),
        ],
        'function_signature': [
            include('whitespace'),

            # start of a selector w/ parameters
            (r'(\(' + _ws + r')'                # open paren
             r'([a-zA-Z_][a-zA-Z0-9_]+)'        # return type
             r'(' + _ws + r'\)' + _ws + r')'    # close paren
             r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)',  # function name
             bygroups(using(this), Keyword.Type, using(this),
                      Name.Function), 'function_parameters'),

            # no-param function
            (r'(\(' + _ws + r')'                # open paren
             r'([a-zA-Z_][a-zA-Z0-9_]+)'        # return type
             r'(' + _ws + r'\)' + _ws + r')'    # close paren
             r'([$a-zA-Z_][a-zA-Z0-9_]+)',      # function name
             bygroups(using(this), Keyword.Type, using(this), Name.Function),
             "#pop"),

            # no return type given, start of a selector w/ parameters
            (r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)',  # function name
             bygroups (Name.Function), 'function_parameters'),

            # no return type given, no-param function
            (r'([$a-zA-Z_][a-zA-Z0-9_]+)',      # function name
             bygroups(Name.Function), "#pop"),

            ('', Text, '#pop'),
        ],
        'function_parameters': [
            include('whitespace'),

            # parameters
            (r'(\(' + _ws + ')'                 # open paren
             r'([^\)]+)'                        # type
             r'(' + _ws + r'\)' + _ws + r')'    # close paren
             r'([$a-zA-Z_][a-zA-Z0-9_]+)',      # param name
             bygroups(using(this), Keyword.Type, using(this), Text)),

            # one piece of a selector name
            (r'([$a-zA-Z_][a-zA-Z0-9_]+' + _ws + r':)',  # function name
             Name.Function),

            # smallest possible selector piece
            (r'(:)', Name.Function),

            # var args
            (r'(,' + _ws + r'\.\.\.)', using(this)),

            # param name
            (r'([$a-zA-Z_][a-zA-Z0-9_]+)', Text),
        ],
        'expression' : [
            (r'([$a-zA-Z_][a-zA-Z0-9_]*)(\()', bygroups(Name.Function,
                                                        Punctuation)),
            (r'(\))', Punctuation, "#pop"),
        ],
        'string': [
            (r'"', String, '#pop'),
            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
            (r'[^\\"\n]+', String),  # all other characters
            (r'\\\n', String),  # line continuation
            (r'\\', String),  # stray backslash
        ],
        'macro': [
            (r'[^/\n]+', Comment.Preproc),
            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
            (r'//.*?\n', Comment.Single, '#pop'),
            (r'/', Comment.Preproc),
            (r'(?<=\\)\n', Comment.Preproc),
            (r'\n', Comment.Preproc, '#pop'),
        ],
        # skip everything inside an "#if 0" block; nested #if pushes again
        'if0': [
            (r'^\s*#if.*?(?<!\\)\n', Comment.Preproc, '#push'),
            (r'^\s*#endif.*?(?<!\\)\n', Comment.Preproc, '#pop'),
            (r'.*?\n', Comment),
        ]
    }

    def analyse_text(text):
        if re.search('^\s*@import\s+[<"]', text, re.MULTILINE):
            # special directive found in most Objective-J files
            return True
        return False


class HtmlLexer(RegexLexer):
    """
    For HTML 4 and XHTML 1 markup. Nested JavaScript and CSS is
    highlighted by the appropriate lexer.
    """

    name = 'HTML'
    aliases = ['html']
    filenames = ['*.html', '*.htm', '*.xhtml', '*.xslt']
    mimetypes = ['text/html', 'application/xhtml+xml']

    flags = re.IGNORECASE | re.DOTALL
    tokens = {
        'root': [
            ('[^<&]+', Text),
            (r'&\S*?;', Name.Entity),
            (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc),
            ('<!--', Comment, 'comment'),
            (r'<\?.*?\?>', Comment.Preproc),
            ('<![^>]*>', Comment.Preproc),
            # <script> and <style> bodies are delegated to nested lexers
            (r'<\s*script\s*', Name.Tag, ('script-content', 'tag')),
            (r'<\s*style\s*', Name.Tag, ('style-content', 'tag')),
            (r'<\s*[a-zA-Z0-9:]+', Name.Tag, 'tag'),
            (r'<\s*/\s*[a-zA-Z0-9:]+\s*>', Name.Tag),
        ],
        'comment': [
            ('[^-]+', Comment),
            ('-->', Comment, '#pop'),
            ('-', Comment),
        ],
        'tag': [
            (r'\s+', Text),
            (r'[a-zA-Z0-9_:-]+\s*=', Name.Attribute, 'attr'),
            (r'[a-zA-Z0-9_:-]+', Name.Attribute),
            (r'/?\s*>', Name.Tag, '#pop'),
        ],
        'script-content': [
            (r'<\s*/\s*script\s*>', Name.Tag, '#pop'),
            (r'.+?(?=<\s*/\s*script\s*>)', using(JavascriptLexer)),
        ],
        'style-content': [
            (r'<\s*/\s*style\s*>', Name.Tag, '#pop'),
            (r'.+?(?=<\s*/\s*style\s*>)', using(CssLexer)),
        ],
        'attr': [
            ('".*?"', String, '#pop'),
            ("'.*?'", String, '#pop'),
            (r'[^\s>]+', String, '#pop'),
        ],
    }

    def analyse_text(text):
        # implicit None (no hint) when no HTML doctype is found
        if html_doctype_matches(text):
            return 0.5


class PhpLexer(RegexLexer):
    """
    For `PHP <http://www.php.net/>`_ source code.
    For PHP embedded in HTML, use the `HtmlPhpLexer`.

    Additional options accepted:

    `startinline`
        If given and ``True`` the lexer starts highlighting with
        php code (i.e.: no starting ``<?php`` required).  The default
        is ``False``.
    `funcnamehighlighting`
        If given and ``True``, highlight builtin function names
        (default: ``True``).
    `disabledmodules`
        If given, must be a list of module names whose function names
        should not be highlighted. By default all modules are highlighted
        except the special ``'unknown'`` module that includes functions
        that are known to php but are undocumented.

        To get a list of allowed modules have a look into the
        `_phpbuiltins` module:

        .. sourcecode:: pycon

            >>> from pygments.lexers._phpbuiltins import MODULES
            >>> MODULES.keys()
            ['PHP Options/Info', 'Zip', 'dba', ...]

        In fact the names of those modules match the module names from
        the php documentation.
    """

    name = 'PHP'
    aliases = ['php', 'php3', 'php4', 'php5']
    filenames = ['*.php', '*.php[345]', '*.inc']
    mimetypes = ['text/x-php']

    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
    tokens = {
        'root': [
            (r'<\?(php)?', Comment.Preproc, 'php'),
            (r'[^<]+', Other),
            (r'<', Other)
        ],
        'php': [
            (r'\?>', Comment.Preproc, '#pop'),
            # heredoc / nowdoc
            (r'<<<(\'?)([a-zA-Z_][a-zA-Z0-9_]*)\1\n.*?\n\2\;?\n', String),
            (r'\s+', Text),
            (r'#.*?\n', Comment.Single),
            (r'//.*?\n', Comment.Single),
            # put the empty comment here, it is otherwise seen as
            # the start of a docstring
            (r'/\*\*/', Comment.Multiline),
            (r'/\*\*.*?\*/', String.Doc),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'(->|::)(\s*)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Operator, Text, Name.Attribute)),
            (r'[~!%^&*+=|:.<>/?@-]+', Operator),
            (r'[\[\]{}();,]+', Punctuation),
            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
            # anonymous function: keyword immediately followed by '('
            (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
            (r'(function)(\s+)(&?)(\s*)',
             bygroups(Keyword, Text, Operator, Text), 'functionname'),
            (r'(const)(\s+)([a-zA-Z_][a-zA-Z0-9_]*)',
             bygroups(Keyword, Text, Name.Constant)),
            (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
             r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
             r'FALSE|print|for|require|continue|foreach|require_once|'
             r'declare|return|default|static|do|switch|die|stdClass|'
             r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
             r'virtual|endfor|include_once|while|endforeach|global|__FILE__|'
             r'endif|list|__LINE__|endswitch|new|__sleep|endwhile|not|'
             r'array|__wakeup|E_ALL|NULL|final|php_user_filter|interface|'
             r'implements|public|private|protected|abstract|clone|try|'
             r'catch|throw|this|use|namespace|trait)\b', Keyword),
            (r'(true|false|null)\b', Keyword.Constant),
            # variable variable: ${$name}
            (r'\$\{\$+[a-zA-Z_][a-zA-Z0-9_]*\}', Name.Variable),
            (r'\$+[a-zA-Z_][a-zA-Z0-9_]*', Name.Variable),
            (r'[\\a-zA-Z_][\\a-zA-Z0-9_]*', Name.Other),
            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
            (r'\d+[eE][+-]?[0-9]+', Number.Float),
            (r'0[0-7]+', Number.Oct),
            (r'0[xX][a-fA-F0-9]+', Number.Hex),
            (r'\d+', Number.Integer),
            (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
            (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
            (r'"', String.Double, 'string'),
        ],
        'classname': [
            (r'[a-zA-Z_][\\a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'functionname': [
            (r'[a-zA-Z_][a-zA-Z0-9_]*', Name.Function, '#pop')
        ],
        # double-quoted string with interpolation
        'string': [
            (r'"', String.Double, '#pop'),
            (r'[^{$"\\]+', String.Double),
            (r'\\([nrt\"$\\]|[0-7]{1,3}|x[0-9A-Fa-f]{1,2})', String.Escape),
            (r'\$[a-zA-Z_][a-zA-Z0-9_]*(\[\S+\]|->[a-zA-Z_][a-zA-Z0-9_]*)?',
             String.Interpol),
            # {${expr}} and {$expr} are re-lexed as inline PHP
            (r'(\{\$\{)(.*?)(\}\})',
             bygroups(String.Interpol, using(this, _startinline=True),
                      String.Interpol)),
            (r'(\{)(\$.*?)(\})',
             bygroups(String.Interpol, using(this, _startinline=True),
                      String.Interpol)),
            (r'(\$\{)(\S+)(\})',
             bygroups(String.Interpol, Name.Variable, String.Interpol)),
            (r'[${\\]+', String.Double)
        ],
    }

    def __init__(self, **options):
        self.funcnamehighlighting = get_bool_opt(
            options, 'funcnamehighlighting', True)
        self.disabledmodules = get_list_opt(
            options, 'disabledmodules', ['unknown'])
        self.startinline = get_bool_opt(options, 'startinline', False)

        # private option argument for the lexer itself
        if '_startinline' in options:
            self.startinline = options.pop('_startinline')

        # collect activated functions in a set
        self._functions = set()
        if self.funcnamehighlighting:
            from pygments.lexers._phpbuiltins import MODULES
            for key, value in MODULES.iteritems():
                if key not in self.disabledmodules:
                    self._functions.update(value)
        RegexLexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        stack = ['root']
        if self.startinline:
            stack.append('php')
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text, stack):
            if token is Name.Other:
                # promote known builtin function names
                if value in self._functions:
                    yield index, \
Name.Builtin, value continue yield index, token, value def analyse_text(text): rv = 0.0 if re.search(r'<\?(?!xml)', text): rv += 0.3 if '?>' in text: rv += 0.1 return rv class DtdLexer(RegexLexer): """ A lexer for DTDs (Document Type Definitions). *New in Pygments 1.5.* """ flags = re.MULTILINE | re.DOTALL name = 'DTD' aliases = ['dtd'] filenames = ['*.dtd'] mimetypes = ['application/xml-dtd'] tokens = { 'root': [ include('common'), (r'(<!ELEMENT)(\s+)(\S+)', bygroups(Keyword, Text, Name.Tag), 'element'), (r'(<!ATTLIST)(\s+)(\S+)', bygroups(Keyword, Text, Name.Tag), 'attlist'), (r'(<!ENTITY)(\s+)(\S+)', bygroups(Keyword, Text, Name.Entity), 'entity'), (r'(<!NOTATION)(\s+)(\S+)', bygroups(Keyword, Text, Name.Tag), 'notation'), (r'(<!\[)([^\[\s]+)(\s*)(\[)', # conditional sections bygroups(Keyword, Name.Entity, Text, Keyword)), (r'(<!DOCTYPE)(\s+)([^>\s]+)', bygroups(Keyword, Text, Name.Tag)), (r'PUBLIC|SYSTEM', Keyword.Constant), (r'[\[\]>]', Keyword), ], 'common': [ (r'\s+', Text), (r'(%|&)[^;]*;', Name.Entity), ('<!--', Comment, 'comment'), (r'[(|)*,?+]', Operator), (r'"[^"]*"', String.Double), (r'\'[^\']*\'', String.Single), ], 'comment': [ ('[^-]+', Comment), ('-->', Comment, '#pop'), ('-', Comment), ], 'element': [ include('common'), (r'EMPTY|ANY|#PCDATA', Keyword.Constant), (r'[^>\s\|()?+*,]+', Name.Tag), (r'>', Keyword, '#pop'), ], 'attlist': [ include('common'), (r'CDATA|IDREFS|IDREF|ID|NMTOKENS|NMTOKEN|ENTITIES|ENTITY|NOTATION', Keyword.Constant), (r'#REQUIRED|#IMPLIED|#FIXED', Keyword.Constant), (r'xml:space|xml:lang', Keyword.Reserved), (r'[^>\s\|()?+*,]+', Name.Attribute), (r'>', Keyword, '#pop'), ], 'entity': [ include('common'), (r'SYSTEM|PUBLIC|NDATA', Keyword.Constant), (r'[^>\s\|()?+*,]+', Name.Entity), (r'>', Keyword, '#pop'), ], 'notation': [ include('common'), (r'SYSTEM|PUBLIC', Keyword.Constant), (r'[^>\s\|()?+*,]+', Name.Attribute), (r'>', Keyword, '#pop'), ], } def analyse_text(text): if not looks_like_xml(text) and \ ('<!ELEMENT' in text or 
'<!ATTLIST' in text or '<!ENTITY' in text): return 0.8 class XmlLexer(RegexLexer): """ Generic lexer for XML (eXtensible Markup Language). """ flags = re.MULTILINE | re.DOTALL | re.UNICODE name = 'XML' aliases = ['xml'] filenames = ['*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl'] mimetypes = ['text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml'] tokens = { 'root': [ ('[^<&]+', Text), (r'&\S*?;', Name.Entity), (r'\<\!\[CDATA\[.*?\]\]\>', Comment.Preproc), ('<!--', Comment, 'comment'), (r'<\?.*?\?>', Comment.Preproc), ('<![^>]*>', Comment.Preproc), (r'<\s*[\w:.-]+', Name.Tag, 'tag'), (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag), ], 'comment': [ ('[^-]+', Comment), ('-->', Comment, '#pop'), ('-', Comment), ], 'tag': [ (r'\s+', Text), (r'[\w.:-]+\s*=', Name.Attribute, 'attr'), (r'/?\s*>', Name.Tag, '#pop'), ], 'attr': [ ('\s+', Text), ('".*?"', String, '#pop'), ("'.*?'", String, '#pop'), (r'[^\s>]+', String, '#pop'), ], } def analyse_text(text): if looks_like_xml(text): return 0.5 class XsltLexer(XmlLexer): ''' A lexer for XSLT. 
*New in Pygments 0.10.* ''' name = 'XSLT' aliases = ['xslt'] filenames = ['*.xsl', '*.xslt', '*.xpl'] # xpl is XProc mimetypes = ['application/xsl+xml', 'application/xslt+xml'] EXTRA_KEYWORDS = set([ 'apply-imports', 'apply-templates', 'attribute', 'attribute-set', 'call-template', 'choose', 'comment', 'copy', 'copy-of', 'decimal-format', 'element', 'fallback', 'for-each', 'if', 'import', 'include', 'key', 'message', 'namespace-alias', 'number', 'otherwise', 'output', 'param', 'preserve-space', 'processing-instruction', 'sort', 'strip-space', 'stylesheet', 'template', 'text', 'transform', 'value-of', 'variable', 'when', 'with-param' ]) def get_tokens_unprocessed(self, text): for index, token, value in XmlLexer.get_tokens_unprocessed(self, text): m = re.match('</?xsl:([^>]*)/?>?', value) if token is Name.Tag and m and m.group(1) in self.EXTRA_KEYWORDS: yield index, Keyword, value else: yield index, token, value def analyse_text(text): if looks_like_xml(text) and '<xsl' in text: return 0.8 class MxmlLexer(RegexLexer): """ For MXML markup. Nested AS3 in <script> tags is highlighted by the appropriate lexer. *New in Pygments 1.1.* """ flags = re.MULTILINE | re.DOTALL name = 'MXML' aliases = ['mxml'] filenames = ['*.mxml'] mimetimes = ['text/xml', 'application/xml'] tokens = { 'root': [ ('[^<&]+', Text), (r'&\S*?;', Name.Entity), (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)', bygroups(String, using(ActionScript3Lexer), String)), ('<!--', Comment, 'comment'), (r'<\?.*?\?>', Comment.Preproc), ('<![^>]*>', Comment.Preproc), (r'<\s*[a-zA-Z0-9:._-]+', Name.Tag, 'tag'), (r'<\s*/\s*[a-zA-Z0-9:._-]+\s*>', Name.Tag), ], 'comment': [ ('[^-]+', Comment), ('-->', Comment, '#pop'), ('-', Comment), ], 'tag': [ (r'\s+', Text), (r'[a-zA-Z0-9_.:-]+\s*=', Name.Attribute, 'attr'), (r'/?\s*>', Name.Tag, '#pop'), ], 'attr': [ ('\s+', Text), ('".*?"', String, '#pop'), ("'.*?'", String, '#pop'), (r'[^\s>]+', String, '#pop'), ], } class HaxeLexer(RegexLexer): """ For haXe source code (http://haxe.org/). 
    *New in Pygments 1.3.*
    """

    name = 'haXe'
    aliases = ['hx', 'haXe']
    filenames = ['*.hx']
    mimetypes = ['text/haxe']

    # reusable regex fragments
    ident = r'(?:[a-zA-Z_][a-zA-Z0-9_]*)'
    typeid = r'(?:(?:[a-z0-9_\.])*[A-Z_][A-Za-z0-9_]*)'
    key_prop = r'(?:default|null|never)'
    key_decl_mod = r'(?:public|private|override|static|inline|extern|dynamic)'

    flags = re.DOTALL | re.MULTILINE

    tokens = {
        'root': [
            include('whitespace'),
            include('comments'),
            (key_decl_mod, Keyword.Declaration),
            include('enumdef'),
            include('typedef'),
            include('classdef'),
            include('imports'),
        ],

        # General constructs
        'comments': [
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'#[^\n]*', Comment.Preproc),
        ],
        'whitespace': [
            include('comments'),
            (r'\s+', Text),
        ],
        'codekeywords': [
            (r'\b(if|else|while|do|for|in|break|continue|'
             r'return|switch|case|try|catch|throw|null|trace|'
             r'new|this|super|untyped|cast|callback|here)\b',
             Keyword.Reserved),
        ],
        'literals': [
            (r'0[xX][0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r'~/([^\n])*?/[gisx]*', String.Regex),
            (r'\b(true|false|null)\b', Keyword.Constant),
        ],
        # a '{ ... }' statement block; nested braces push/pop this state
        'codeblock': [
          include('whitespace'),
          include('new'),
          include('case'),
          include('anonfundef'),
          include('literals'),
          include('vardef'),
          include('codekeywords'),
          (r'[();,\[\]]', Punctuation),
          (r'(?:=|\+=|-=|\*=|/=|%=|&=|\|=|\^=|<<=|>>=|>>>=|\|\||&&|'
           r'\.\.\.|==|!=|>|<|>=|<=|\||&|\^|<<|>>>|>>|\+|\-|\*|/|%|'
           r'!|\+\+|\-\-|~|\.|\?|\:)',
           Operator),
          (ident, Name),

          (r'}', Punctuation,'#pop'),
          (r'{', Punctuation,'#push'),
        ],

        # Instance/Block level constructs
        'propertydef': [
            # property access specifiers: (default, null), (get, set) etc.
            (r'(\()(' + key_prop + ')(,)(' + key_prop + ')(\))',
             bygroups(Punctuation, Keyword.Reserved, Punctuation,
                      Keyword.Reserved, Punctuation)),
        ],
        'new': [
            (r'\bnew\b', Keyword, 'typedecl'),
        ],
        'case': [
            # enum constructor pattern in a switch case
            (r'\b(case)(\s+)(' + ident + ')(\s*)(\()',
             bygroups(Keyword.Reserved, Text, Name, Text, Punctuation),
             'funargdecl'),
        ],
        'vardef': [
            (r'\b(var)(\s+)(' + ident + ')',
             bygroups(Keyword.Declaration, Text, Name.Variable), 'vardecl'),
        ],
        'vardecl': [
            include('whitespace'),
            include('typelabel'),
            (r'=', Operator,'#pop'),
            (r';', Punctuation,'#pop'),
        ],
        'instancevardef': [
            (key_decl_mod,Keyword.Declaration),
            (r'\b(var)(\s+)(' + ident + ')',
             bygroups(Keyword.Declaration, Text, Name.Variable.Instance),
             'instancevardecl'),
        ],
        'instancevardecl': [
            include('vardecl'),
            include('propertydef'),
        ],

        'anonfundef': [
            (r'\bfunction\b', Keyword.Declaration, 'fundecl'),
        ],
        'instancefundef': [
            (key_decl_mod, Keyword.Declaration),
            (r'\b(function)(\s+)(' + ident + ')',
             bygroups(Keyword.Declaration, Text, Name.Function), 'fundecl'),
        ],
        'fundecl': [
            include('whitespace'),
            include('typelabel'),
            include('generictypedecl'),
            (r'\(',Punctuation,'funargdecl'),
            (r'(?=[a-zA-Z0-9_])',Text,'#pop'),
            (r'{',Punctuation,('#pop','codeblock')),
            (r';',Punctuation,'#pop'),
        ],
        'funargdecl': [
            include('whitespace'),
            (ident, Name.Variable),
            include('typelabel'),
            include('literals'),
            (r'=', Operator),
            (r',', Punctuation),
            (r'\?', Punctuation),
            (r'\)', Punctuation, '#pop'),
        ],

        # ':' introduces a type annotation
        'typelabel': [
            (r':', Punctuation, 'type'),
        ],
        'typedecl': [
            include('whitespace'),
            (typeid, Name.Class),
            (r'<', Punctuation, 'generictypedecl'),
            (r'(?=[{}()=,a-z])', Text,'#pop'),
        ],
        'type': [
            include('whitespace'),
            (typeid, Name.Class),
            (r'<', Punctuation, 'generictypedecl'),
            (r'->', Keyword.Type),
            (r'(?=[{}(),;=])', Text, '#pop'),
        ],
        # <...> type parameters; nested '<' pushes this state again
        'generictypedecl': [
            include('whitespace'),
            (typeid, Name.Class),
            (r'<', Punctuation, '#push'),
            (r'>', Punctuation, '#pop'),
            (r',', Punctuation),
        ],

        # Top level constructs
        'imports': [
            (r'(package|import|using)(\s+)([^;]+)(;)',
             bygroups(Keyword.Namespace, Text, Name.Namespace,Punctuation)),
        ],
        'typedef': [
            (r'typedef', Keyword.Declaration, ('typedefprebody', 'typedecl')),
        ],
        'typedefprebody': [
            include('whitespace'),
            (r'(=)(\s*)({)', bygroups(Punctuation, Text, Punctuation),
             ('#pop', 'typedefbody')),
        ],
        'enumdef': [
            (r'enum', Keyword.Declaration, ('enumdefprebody', 'typedecl')),
        ],
        'enumdefprebody': [
            include('whitespace'),
            (r'{', Punctuation, ('#pop','enumdefbody')),
        ],
        'classdef': [
            (r'class', Keyword.Declaration, ('classdefprebody', 'typedecl')),
        ],
        'classdefprebody': [
            include('whitespace'),
            (r'(extends|implements)', Keyword.Declaration,'typedecl'),
            (r'{', Punctuation, ('#pop', 'classdefbody')),
        ],
        'interfacedef': [
            (r'interface', Keyword.Declaration,
             ('interfacedefprebody', 'typedecl')),
        ],
        'interfacedefprebody': [
            include('whitespace'),
            (r'(extends)', Keyword.Declaration, 'typedecl'),
            (r'{', Punctuation, ('#pop', 'classdefbody')),
        ],

        'typedefbody': [
            include('whitespace'),
            include('instancevardef'),
            include('instancefundef'),
            (r'>', Punctuation, 'typedecl'),
            (r',', Punctuation),
            (r'}', Punctuation, '#pop'),
        ],
        'enumdefbody': [
            include('whitespace'),
            (ident, Name.Variable.Instance),
            (r'\(', Punctuation, 'funargdecl'),
            (r';', Punctuation),
            (r'}', Punctuation, '#pop'),
        ],
        'classdefbody': [
            include('whitespace'),
            include('instancevardef'),
            include('instancefundef'),
            (r'}', Punctuation, '#pop'),
            include('codeblock'),
        ],
    }

    def analyse_text(text):
        # weak "ident : ident" hint, same heuristic as ActionScript 3
        if re.match(r'\w+\s*:\s*\w', text): return 0.3


def _indentation(lexer, match, ctx):
    # Callback used by indentation-sensitive lexers (e.g. Haml below):
    # records the current indentation on the lexer context and decides
    # whether we are still inside an indented filter/comment block.
    indentation = match.group(0)
    yield match.start(), Text, indentation
    ctx.last_indentation = indentation
    ctx.pos = match.end()

    if hasattr(ctx, 'block_state') and ctx.block_state and \
            indentation.startswith(ctx.block_indentation) and \
            indentation != ctx.block_indentation:
        # deeper indent than the block opener: stay in the block's state
        ctx.stack.append(ctx.block_state)
    else:
        # dedented out of the block: reset and lex as normal content
        ctx.block_state = None
        ctx.block_indentation = None
        ctx.stack.append('content')

def _starts_block(token, state):
    # Returns a callback that yields `token` for the match and arms the
    # context so that subsequently deeper-indented lines enter `state`.
    def callback(lexer, match, ctx):
        yield match.start(), token, match.group(0)

        if hasattr(ctx, 'last_indentation'):
            ctx.block_indentation = ctx.last_indentation
        else:
            ctx.block_indentation = ''

        ctx.block_state = state
        ctx.pos = match.end()

    return callback


class HamlLexer(ExtendedRegexLexer):
    """
    For Haml markup.
*New in Pygments 1.3.* """ name = 'Haml' aliases = ['haml', 'HAML'] filenames = ['*.haml'] mimetypes = ['text/x-haml'] flags = re.IGNORECASE # Haml can include " |\n" anywhere, # which is ignored and used to wrap long lines. # To accomodate this, use this custom faux dot instead. _dot = r'(?: \|\n(?=.* \|)|.)' # In certain places, a comma at the end of the line # allows line wrapping as well. _comma_dot = r'(?:,\s*\n|' + _dot + ')' tokens = { 'root': [ (r'[ \t]*\n', Text), (r'[ \t]*', _indentation), ], 'css': [ (r'\.[a-z0-9_:-]+', Name.Class, 'tag'), (r'\#[a-z0-9_:-]+', Name.Function, 'tag'), ], 'eval-or-plain': [ (r'[&!]?==', Punctuation, 'plain'), (r'([&!]?[=~])(' + _comma_dot + r'*\n)', bygroups(Punctuation, using(RubyLexer)), 'root'), (r'', Text, 'plain'), ], 'content': [ include('css'), (r'%[a-z0-9_:-]+', Name.Tag, 'tag'), (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'), (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)', bygroups(Comment, Comment.Special, Comment), '#pop'), (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'), '#pop'), (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc, 'haml-comment-block'), '#pop'), (r'(-)(' + _comma_dot + r'*\n)', bygroups(Punctuation, using(RubyLexer)), '#pop'), (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'), '#pop'), include('eval-or-plain'), ], 'tag': [ include('css'), (r'\{(,\n|' + _dot + ')*?\}', using(RubyLexer)), (r'\[' + _dot + '*?\]', using(RubyLexer)), (r'\(', Text, 'html-attributes'), (r'/[ \t]*\n', Punctuation, '#pop:2'), (r'[<>]{1,2}(?=[ \t=])', Punctuation), include('eval-or-plain'), ], 'plain': [ (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text), (r'(#\{)(' + _dot + '*?)(\})', bygroups(String.Interpol, using(RubyLexer), String.Interpol)), (r'\n', Text, 'root'), ], 'html-attributes': [ (r'\s+', Text), (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'), (r'[a-z0-9_:-]+', Name.Attribute), (r'\)', Text, '#pop'), ], 'html-attribute-value': [ (r'[ \t]+', Text), 
            # (continuation of HamlLexer 'html-attribute-value') -- a value is
            # a bare identifier, an @instance or $global Ruby variable, or a
            # quoted string; every alternative pops back to 'html-attributes'.
            (r'[a-z0-9_]+', Name.Variable, '#pop'),
            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
        ],
        # Body of an HTML comment opened with "/"; runs until end of line.
        'html-comment-block': [
            (_dot + '+', Comment),
            (r'\n', Text, 'root'),
        ],
        # Body of a Haml-only comment opened with "-#"; never rendered.
        'haml-comment-block': [
            (_dot + '+', Comment.Preproc),
            (r'\n', Text, 'root'),
        ],
        # Body of a ":filter" block: plain text except #{...} Ruby
        # interpolation.
        'filter-block': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(RubyLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],
    }


# Token states shared by SassLexer and ScssLexer below.  Each lexer takes a
# shallow copy of every state list (safe: the rule tuples are immutable) and
# then appends/extends its own dialect-specific line/block terminator rules.
common_sass_tokens = {
    # Everything on the right-hand side of a property or variable assignment.
    'value': [
        (r'[ \t]+', Text),
        (r'[!$][\w-]+', Name.Variable),
        (r'url\(', String.Other, 'string-url'),
        # a name immediately followed by "(" is a function call
        (r'[a-z_-][\w-]*(?=\()', Name.Function),
        # known CSS property names and keyword values; longer alternatives
        # are deliberately listed before their prefixes (e.g. "border-bottom"
        # before "border") so the regex alternation matches greedily
        (r'(azimuth|background-attachment|background-color|'
         r'background-image|background-position|background-repeat|'
         r'background|border-bottom-color|border-bottom-style|'
         r'border-bottom-width|border-left-color|border-left-style|'
         r'border-left-width|border-right|border-right-color|'
         r'border-right-style|border-right-width|border-top-color|'
         r'border-top-style|border-top-width|border-bottom|'
         r'border-collapse|border-left|border-width|border-color|'
         r'border-spacing|border-style|border-top|border|caption-side|'
         r'clear|clip|color|content|counter-increment|counter-reset|'
         r'cue-after|cue-before|cue|cursor|direction|display|'
         r'elevation|empty-cells|float|font-family|font-size|'
         r'font-size-adjust|font-stretch|font-style|font-variant|'
         r'font-weight|font|height|letter-spacing|line-height|'
         r'list-style-type|list-style-image|list-style-position|'
         r'list-style|margin-bottom|margin-left|margin-right|'
         r'margin-top|margin|marker-offset|marks|max-height|max-width|'
         r'min-height|min-width|opacity|orphans|outline|outline-color|'
         r'outline-style|outline-width|overflow|padding-bottom|'
         r'padding-left|padding-right|padding-top|padding|page|'
         r'page-break-after|page-break-before|page-break-inside|'
         r'pause-after|pause-before|pause|pitch|pitch-range|'
         r'play-during|position|quotes|richness|right|size|'
         r'speak-header|speak-numeral|speak-punctuation|speak|'
         r'speech-rate|stress|table-layout|text-align|text-decoration|'
         r'text-indent|text-shadow|text-transform|top|unicode-bidi|'
         r'vertical-align|visibility|voice-family|volume|white-space|'
         r'widows|width|word-spacing|z-index|bottom|left|'
         r'above|absolute|always|armenian|aural|auto|avoid|baseline|'
         r'behind|below|bidi-override|blink|block|bold|bolder|both|'
         r'capitalize|center-left|center-right|center|circle|'
         r'cjk-ideographic|close-quote|collapse|condensed|continuous|'
         r'crop|crosshair|cross|cursive|dashed|decimal-leading-zero|'
         r'decimal|default|digits|disc|dotted|double|e-resize|embed|'
         r'extra-condensed|extra-expanded|expanded|fantasy|far-left|'
         r'far-right|faster|fast|fixed|georgian|groove|hebrew|help|'
         r'hidden|hide|higher|high|hiragana-iroha|hiragana|icon|'
         r'inherit|inline-table|inline|inset|inside|invert|italic|'
         r'justify|katakana-iroha|katakana|landscape|larger|large|'
         r'left-side|leftwards|level|lighter|line-through|list-item|'
         r'loud|lower-alpha|lower-greek|lower-roman|lowercase|ltr|'
         r'lower|low|medium|message-box|middle|mix|monospace|'
         r'n-resize|narrower|ne-resize|no-close-quote|no-open-quote|'
         r'no-repeat|none|normal|nowrap|nw-resize|oblique|once|'
         r'open-quote|outset|outside|overline|pointer|portrait|px|'
         r'relative|repeat-x|repeat-y|repeat|rgb|ridge|right-side|'
         r'rightwards|s-resize|sans-serif|scroll|se-resize|'
         r'semi-condensed|semi-expanded|separate|serif|show|silent|'
         r'slow|slower|small-caps|small-caption|smaller|soft|solid|'
         r'spell-out|square|static|status-bar|super|sw-resize|'
         r'table-caption|table-cell|table-column|table-column-group|'
         r'table-footer-group|table-header-group|table-row|'
         r'table-row-group|text|text-bottom|text-top|thick|thin|'
         r'transparent|ultra-condensed|ultra-expanded|underline|'
         r'upper-alpha|upper-latin|upper-roman|uppercase|url|'
         r'visible|w-resize|wait|wider|x-fast|x-high|x-large|x-loud|'
         r'x-low|x-small|x-soft|xx-large|xx-small|yes)\b', Name.Constant),
        # extended (X11/SVG) color keywords
        (r'(indigo|gold|firebrick|indianred|darkolivegreen|'
         r'darkseagreen|mediumvioletred|mediumorchid|chartreuse|'
         r'mediumslateblue|springgreen|crimson|lightsalmon|brown|'
         r'turquoise|olivedrab|cyan|skyblue|darkturquoise|'
         r'goldenrod|darkgreen|darkviolet|darkgray|lightpink|'
         r'darkmagenta|lightgoldenrodyellow|lavender|yellowgreen|thistle|'
         r'violet|orchid|ghostwhite|honeydew|cornflowerblue|'
         r'darkblue|darkkhaki|mediumpurple|cornsilk|bisque|slategray|'
         r'darkcyan|khaki|wheat|deepskyblue|darkred|steelblue|aliceblue|'
         r'gainsboro|mediumturquoise|floralwhite|coral|lightgrey|'
         r'lightcyan|darksalmon|beige|azure|lightsteelblue|oldlace|'
         r'greenyellow|royalblue|lightseagreen|mistyrose|sienna|'
         r'lightcoral|orangered|navajowhite|palegreen|burlywood|'
         r'seashell|mediumspringgreen|papayawhip|blanchedalmond|'
         r'peru|aquamarine|darkslategray|ivory|dodgerblue|'
         r'lemonchiffon|chocolate|orange|forestgreen|slateblue|'
         r'mintcream|antiquewhite|darkorange|cadetblue|moccasin|'
         r'limegreen|saddlebrown|darkslateblue|lightskyblue|deeppink|'
         r'plum|darkgoldenrod|sandybrown|magenta|tan|'
         r'rosybrown|pink|lightblue|palevioletred|mediumseagreen|'
         r'dimgray|powderblue|seagreen|snow|mediumblue|midnightblue|'
         r'paleturquoise|palegoldenrod|whitesmoke|darkorchid|salmon|'
         r'lightslategray|lawngreen|lightgreen|tomato|hotpink|'
         r'lightyellow|lavenderblush|linen|mediumaquamarine|'
         r'blueviolet|peachpuff)\b', Name.Entity),
        # the 16 basic CSS color keywords
        (r'(black|silver|gray|white|maroon|red|purple|fuchsia|green|'
         r'lime|olive|yellow|navy|blue|teal|aqua)\b', Name.Builtin),
        (r'\!(important|default)', Name.Exception),
        (r'(true|false)', Name.Pseudo),
        (r'(and|or|not)', Operator.Word),
        (r'/\*', Comment.Multiline, 'inline-comment'),
        (r'//[^\n]*', Comment.Single),
        # hex color literal
        (r'\#[a-z0-9]{1,6}', Number.Hex),
        # numbers with an optional unit (e.g. 10px, 50%)
        (r'(-?\d+)(\%|[a-z]+)?', bygroups(Number.Integer, Keyword.Type)),
        (r'(-?\d*\.\d+)(\%|[a-z]+)?', bygroups(Number.Float, Keyword.Type)),
        (r'#{', String.Interpol, 'interpolation'),
        (r'[~\^\*!&%<>\|+=@:,./?-]+', Operator),
        (r'[\[\]()]+', Punctuation),
        (r'"', String.Double, 'string-double'),
        (r"'", String.Single, 'string-single'),
        (r'[a-z_-][\w-]*', Name),
    ],
    # Inside #{...}: any value expression, "}" closes the interpolation.
    'interpolation': [
        (r'\}', String.Interpol, '#pop'),
        include('value'),
    ],
    'selector': [
        (r'[ \t]+', Text),
        (r'\:', Name.Decorator, 'pseudo-class'),
        (r'\.', Name.Class, 'class'),
        (r'\#', Name.Namespace, 'id'),
        (r'[a-zA-Z0-9_-]+', Name.Tag),
        (r'#\{', String.Interpol, 'interpolation'),
        # parent-selector reference
        (r'&', Keyword),
        (r'[~\^\*!&\[\]\(\)<>\|+=@:;,./?-]', Operator),
        (r'"', String.Double, 'string-double'),
        (r"'", String.Single, 'string-single'),
    ],
    'string-double': [
        (r'(\\.|#(?=[^\n{])|[^\n"#])+', String.Double),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'"', String.Double, '#pop'),
    ],
    # NOTE(review): this state emits String.Double for single-quoted content;
    # looks like a copy-paste of 'string-double' -- confirm upstream intent
    # before "fixing" the token type.
    'string-single': [
        (r"(\\.|#(?=[^\n{])|[^\n'#])+", String.Double),
        (r'#\{', String.Interpol, 'interpolation'),
        (r"'", String.Double, '#pop'),
    ],
    # Inside url(...): raw text plus interpolation, ")" closes.
    'string-url': [
        (r'(\\#|#(?=[^\n{])|[^\n#)])+', String.Other),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'\)', String.Other, '#pop'),
    ],
    # The empty-match fallbacks below pop back to 'selector' once the
    # name/interpolation has been consumed.
    'pseudo-class': [
        (r'[\w-]+', Name.Decorator),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],
    'class': [
        (r'[\w-]+', Name.Class),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],
    'id': [
        (r'[\w-]+', Name.Namespace),
        (r'#\{', String.Interpol, 'interpolation'),
        (r'', Text, '#pop'),
    ],
    # Arguments of an @for directive.
    'for': [
        (r'(from|to|through)', Operator.Word),
        include('value'),
    ],
}


class SassLexer(ExtendedRegexLexer):
    """
    For Sass stylesheets.
    *New in Pygments 1.3.*
    """

    name = 'Sass'
    aliases = ['sass', 'SASS']
    filenames = ['*.sass']
    mimetypes = ['text/x-sass']

    flags = re.IGNORECASE
    tokens = {
        'root': [
            (r'[ \t]*\n', Text),
            # indentation drives block structure; handled by the shared
            # _indentation callback
            (r'[ \t]*', _indentation),
        ],

        # Dispatched to at the start of every logical line.
        'content': [
            (r'//[^\n]*', _starts_block(Comment.Single, 'single-comment'),
             'root'),
            (r'/\*[^\n]*', _starts_block(Comment.Multiline, 'multi-comment'),
             'root'),
            (r'@import', Keyword, 'import'),
            (r'@for', Keyword, 'for'),
            (r'@(debug|warn|if|while)', Keyword, 'value'),
            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator),
             'value'),
            (r'@extend', Keyword, 'selector'),
            (r'@[a-z0-9_-]+', Keyword, 'selector'),
            # "=name" / "+name": old-style mixin definition / inclusion
            (r'=[\w-]+', Name.Function, 'value'),
            (r'\+[\w-]+', Name.Decorator, 'value'),
            # variable assignment, e.g. "$width: 10px" or "!width ||= 10px"
            (r'([!$][\w-]\w*)([ \t]*(?:(?:\|\|)?=|:))',
             bygroups(Name.Variable, Operator), 'value'),
            (r':', Name.Attribute, 'old-style-attr'),
            # lookahead only: a "prop: value" / "prop= value" line
            (r'(?=.+?[=:]([^a-z]|$))', Name.Attribute, 'new-style-attr'),
            # fallback: the line is a selector
            (r'', Text, 'selector'),
        ],
        'single-comment': [
            (r'.+', Comment.Single),
            (r'\n', Text, 'root'),
        ],
        'multi-comment': [
            (r'.+', Comment.Multiline),
            (r'\n', Text, 'root'),
        ],
        'import': [
            (r'[ \t]+', Text),
            (r'\S+', String),
            (r'\n', Text, 'root'),
        ],
        # ":prop value" syntax
        'old-style-attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*=', Operator, 'value'),
            (r'', Text, 'value'),
        ],
        # "prop: value" syntax
        'new-style-attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*[=:]', Operator, 'value'),
        ],
        'inline-comment': [
            (r"(\\#|#(?=[^\n{])|\*(?=[^\n/])|[^\n#*])+", Comment.Multiline),
            (r'#\{', String.Interpol, 'interpolation'),
            (r"\*/", Comment, '#pop'),
        ],
    }
    # Pull in the shared Sass/SCSS states and add the indentation-based
    # (newline-terminated) dialect rules.
    for group, common in common_sass_tokens.iteritems():
        tokens[group] = copy.copy(common)
    tokens['value'].append((r'\n', Text, 'root'))
    tokens['selector'].append((r'\n', Text, 'root'))


class ScssLexer(RegexLexer):
    """
    For SCSS stylesheets.
    """

    name = 'SCSS'
    aliases = ['scss']
    filenames = ['*.scss']
    mimetypes = ['text/x-scss']

    flags = re.IGNORECASE | re.DOTALL
    tokens = {
        'root': [
            (r'\s+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@import', Keyword, 'value'),
            (r'@for', Keyword, 'for'),
            (r'@(debug|warn|if|while)', Keyword, 'value'),
            (r'(@mixin)( [\w-]+)', bygroups(Keyword, Name.Function), 'value'),
            (r'(@include)( [\w-]+)', bygroups(Keyword, Name.Decorator),
             'value'),
            (r'@extend', Keyword, 'selector'),
            (r'@[a-z0-9_-]+', Keyword, 'selector'),
            # "$name:" variable assignment
            (r'(\$[\w-]*\w)([ \t]*:)', bygroups(Name.Variable, Operator),
             'value'),
            # lookaheads distinguishing "prop: value;" from a selector
            (r'(?=[^;{}][;}])', Name.Attribute, 'attr'),
            (r'(?=[^;{}:]+:[^a-z])', Name.Attribute, 'attr'),
            (r'', Text, 'selector'),
        ],
        'attr': [
            (r'[^\s:="\[]+', Name.Attribute),
            (r'#{', String.Interpol, 'interpolation'),
            (r'[ \t]*:', Operator, 'value'),
        ],
        'inline-comment': [
            (r"(\\#|#(?=[^{])|\*(?=[^/])|[^#*])+", Comment.Multiline),
            (r'#\{', String.Interpol, 'interpolation'),
            (r"\*/", Comment, '#pop'),
        ],
    }
    # Pull in the shared Sass/SCSS states and add the brace/semicolon-based
    # dialect rules.
    for group, common in common_sass_tokens.iteritems():
        tokens[group] = copy.copy(common)
    tokens['value'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])
    tokens['selector'].extend([(r'\n', Text), (r'[;{}]', Punctuation, 'root')])


class CoffeeScriptLexer(RegexLexer):
    """
    For `CoffeeScript`_ source code.

    .. _CoffeeScript: http://coffeescript.org

    *New in Pygments 1.3.*
    """

    name = 'CoffeeScript'
    aliases = ['coffee-script', 'coffeescript']
    filenames = ['*.coffee']
    mimetypes = ['text/coffeescript']

    flags = re.DOTALL
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            # "###...###" block comment; the [^#] keeps "####" out of it
            (r'###[^#].*?###', Comment.Multiline),
            (r'#(?!##[^#]).*?\n', Comment.Single),
        ],
        # Inside a "///.../// " heregex.
        'multilineregex': [
            (r'[^/#]+', String.Regex),
            (r'///([gim]+\b|\B)', String.Regex, '#pop'),
            (r'#{', String.Interpol, 'interpoling_string'),
            (r'[/#]', String.Regex),
        ],
        # Entered after tokens that may be followed by a regex literal;
        # disambiguates "/" as regex-start vs. division.
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'///', String.Regex, ('#pop', 'multilineregex')),
            (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'', Text, '#pop'),
        ],
        'root': [
            # this next expr leads to infinite loops root -> slashstartsregex
            #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            (r'\+\+|~|&&|\band\b|\bor\b|\bis\b|\bisnt\b|\bnot\b|\?|:|'
             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
             r'=(?!>)|-(?!>)|[<>+*`%&\|\^/])=?',
             Operator, 'slashstartsregex'),
            # "(args) ->" / "=>" function literal
            (r'(?:\([^()]+\))?\s*[=-]>', Name.Function),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'(?<![\.\$])(for|own|in|of|while|until|'
             r'loop|break|return|continue|'
             r'switch|when|then|if|unless|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
             r'extends|this|class|by)\b', Keyword, 'slashstartsregex'),
            (r'(?<![\.\$])(true|false|yes|no|on|off|null|'
             r'NaN|Infinity|undefined)\b',
             Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
             Name.Builtin),
            # assignment targets: "name =", "name:", "@name ="
            (r'[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable,
             'slashstartsregex'),
            (r'@[$a-zA-Z_][a-zA-Z0-9_\.:\$]*\s*[:=]\s', Name.Variable.Instance,
             'slashstartsregex'),
            (r'@', Name.Other, 'slashstartsregex'),
            (r'@?[$a-zA-Z_][a-zA-Z0-9_\$]*', Name.Other, 'slashstartsregex'),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            ('"""', String, 'tdqs'),
            ("'''", String, 'tsqs'),
            ('"', String, 'dqs'),
            ("'", String, 'sqs'),
        ],
        'strings': [
            (r'[^#\\\'"]+', String),
            # note that all coffee script strings are multi-line.
            # hashmarks, quotes and backslashes must be parsed one at a time
        ],
        # Inside "#{...}": full expression syntax, "}" closes.
        'interpoling_string' : [
            (r'}', String.Interpol, "#pop"),
            include('root')
        ],
        'dqs': [
            (r'"', String, '#pop'),
            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
            (r'#{', String.Interpol, "interpoling_string"),
            include('strings')
        ],
        'sqs': [
            (r"'", String, '#pop'),
            (r'#|\\.|"', String), # single-quoted strings don't need " escapes
            include('strings')
        ],
        'tdqs': [
            (r'"""', String, '#pop'),
            (r'\\.|\'|"', String), # no need to escape quotes in triple-string
            (r'#{', String.Interpol, "interpoling_string"),
            include('strings'),
        ],
        'tsqs': [
            (r"'''", String, '#pop'),
            (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
            include('strings')
        ],
    }


class LiveScriptLexer(RegexLexer):
    """
    For `LiveScript`_ source code.

    .. _LiveScript: http://gkz.github.com/LiveScript/

    New in Pygments 1.6.
    """

    name = 'LiveScript'
    aliases = ['live-script', 'livescript']
    filenames = ['*.ls']
    mimetypes = ['text/livescript']

    flags = re.DOTALL
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'#.*?\n', Comment.Single),
        ],
        # Inside a "//...//" heregex.
        'multilineregex': [
            include('commentsandwhitespace'),
            (r'//([gim]+\b|\B)', String.Regex, '#pop'),
            (r'/', String.Regex),
            (r'[^/#]+', String.Regex)
        ],
        # Same regex-vs-division disambiguation as in CoffeeScriptLexer.
        'slashstartsregex': [
            include('commentsandwhitespace'),
            (r'//', String.Regex, ('#pop', 'multilineregex')),
            (r'/(?! )(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'', Text, '#pop'),
        ],
        'root': [
            # this next expr leads to infinite loops root -> slashstartsregex
            #(r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            # arrow function literals: "->", "~>", "<-", "<~" variants
            (r'(?:\([^()]+\))?[ ]*[~-]{1,2}>|'
             r'(?:\(?[^()\n]+\)?)?[ ]*<[~-]{1,2}', Name.Function),
            (r'\+\+|&&|(?<![\.\$])\b(?:and|x?or|is|isnt|not)\b|\?|:|=|'
             r'\|\||\\(?=\n)|(<<|>>>?|==?|!=?|'
             r'~(?!\~?>)|-(?!\-?>)|<(?!\[)|(?<!\])>|'
             r'[+*`%&\|\^/])=?',
             Operator, 'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'(?<![\.\$])(for|own|in|of|while|until|loop|break|'
             r'return|continue|switch|when|then|if|unless|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|super|'
             r'extends|this|class|by|const|var|to|til)\b', Keyword,
             'slashstartsregex'),
            (r'(?<![\.\$])(true|false|yes|no|on|off|'
             r'null|NaN|Infinity|undefined|void)\b',
             Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'eval|isFinite|isNaN|parseFloat|parseInt|document|window)\b',
             Name.Builtin),
            (r'[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable,
             'slashstartsregex'),
            (r'@[$a-zA-Z_][a-zA-Z0-9_\.\-:\$]*\s*[:=]\s', Name.Variable.Instance,
             'slashstartsregex'),
            (r'@', Name.Other, 'slashstartsregex'),
            (r'@?[$a-zA-Z_][a-zA-Z0-9_\-]*', Name.Other, 'slashstartsregex'),
            # numbers may carry a unit-like suffix, e.g. "10px"; "~" is the
            # LiveScript radix separator, e.g. "16~ff"
            (r'[0-9]+\.[0-9]+([eE][0-9]+)?[fd]?(?:[a-zA-Z_]+)?', Number.Float),
            (r'[0-9]+(~[0-9a-z]+)?(?:[a-zA-Z_]+)?', Number.Integer),
            ('"""', String, 'tdqs'),
            ("'''", String, 'tsqs'),
            ('"', String, 'dqs'),
            ("'", String, 'sqs'),
            # backslash word-string and <[ word list ]>
            (r'\\[\w$-]+', String),
            (r'<\[.*\]>', String),
        ],
        'strings': [
            (r'[^#\\\'"]+', String),
            # note that all coffee script strings are multi-line.
            # hashmarks, quotes and backslashes must be parsed one at a time
        ],
        'interpoling_string' : [
            (r'}', String.Interpol, "#pop"),
            include('root')
        ],
        'dqs': [
            (r'"', String, '#pop'),
            (r'\\.|\'', String), # double-quoted strings don't need ' escapes
            (r'#{', String.Interpol, "interpoling_string"),
            (r'#', String),
            include('strings')
        ],
        'sqs': [
            (r"'", String, '#pop'),
            (r'#|\\.|"', String), # single-quoted strings don't need " escapes
            include('strings')
        ],
        'tdqs': [
            (r'"""', String, '#pop'),
            (r'\\.|\'|"', String), # no need to escape quotes in triple-string
            (r'#{', String.Interpol, "interpoling_string"),
            (r'#', String),
            include('strings'),
        ],
        'tsqs': [
            (r"'''", String, '#pop'),
            (r'#|\\.|\'|"', String), # no need to escape quotes in triple-strings
            include('strings')
        ],
    }


class DuelLexer(RegexLexer):
    """
    Lexer for Duel Views Engine (formerly JBST) markup with JavaScript code blocks.
    See http://duelengine.org/.
    See http://jsonml.org/jbst/.

    *New in Pygments 1.4.*
    """

    name = 'Duel'
    aliases = ['duel', 'Duel Engine', 'Duel View', 'JBST', 'jbst',
               'JsonML+BST']
    filenames = ['*.duel','*.jbst']
    mimetypes = ['text/x-duel','text/x-jbst']

    flags = re.DOTALL

    tokens = {
        'root': [
            # <% ... %> code block (with optional @=#!: modifier)
            (r'(<%[@=#!:]?)(.*?)(%>)',
             bygroups(Name.Tag, using(JavascriptLexer), Name.Tag)),
            # <%$ resource : key %> expression
            (r'(<%\$)(.*?)(:)(.*?)(%>)',
             bygroups(Name.Tag, Name.Function, Punctuation, String, Name.Tag)),
            # <%-- server-side comment --%>
            (r'(<%--)(.*?)(--%>)',
             bygroups(Name.Tag, Comment.Multiline, Name.Tag)),
            (r'(<script.*?>)(.*?)(</script>)',
             bygroups(using(HtmlLexer),
                      using(JavascriptLexer), using(HtmlLexer))),
            # everything else is plain HTML
            (r'(.+?)(?=<)', using(HtmlLexer)),
            (r'.+', using(HtmlLexer)),
        ],
    }


class ScamlLexer(ExtendedRegexLexer):
    """
    For `Scaml markup <http://scalate.fusesource.org/>`_.
    Scaml is Haml for Scala.

    *New in Pygments 1.4.*
    """

    name = 'Scaml'
    aliases = ['scaml', 'SCAML']
    filenames = ['*.scaml']
    mimetypes = ['text/x-scaml']

    flags = re.IGNORECASE
    # Scaml does not yet support the " |\n" notation to
    # wrap long lines.  Once it does, use the custom faux
    # dot instead.
    # _dot = r'(?: \|\n(?=.* \|)|.)'
    _dot = r'.'

    tokens = {
        'root': [
            (r'[ \t]*\n', Text),
            # indentation drives block structure
            (r'[ \t]*', _indentation),
        ],

        # "." and "#" shorthands for class/id on an implicit tag.
        'css': [
            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
        ],

        # After a tag: either "= expr" / "~ expr" (Scala code) or plain text.
        'eval-or-plain': [
            (r'[&!]?==', Punctuation, 'plain'),
            (r'([&!]?[=~])(' + _dot + r'*\n)',
             bygroups(Punctuation, using(ScalaLexer)),
             'root'),
            (r'', Text, 'plain'),
        ],

        # Dispatched to at the start of every logical line.
        'content': [
            include('css'),
            (r'%[a-z0-9_:-]+', Name.Tag, 'tag'),
            (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
            # conditional HTML comment: /[if IE]
            (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
             bygroups(Comment, Comment.Special, Comment),
             '#pop'),
            (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
             '#pop'),
            (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
                                                 'scaml-comment-block'), '#pop'),
            # "-@ import ..." attribute/import declaration (Scaml-specific)
            (r'(-@\s*)(import)?(' + _dot + r'*\n)',
             bygroups(Punctuation, Keyword, using(ScalaLexer)),
             '#pop'),
            (r'(-)(' + _dot + r'*\n)',
             bygroups(Punctuation, using(ScalaLexer)),
             '#pop'),
            (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
             '#pop'),
            include('eval-or-plain'),
        ],

        # After "%tag": attribute hashes, (), whitespace modifiers, self-close.
        'tag': [
            include('css'),
            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
            (r'\(', Text, 'html-attributes'),
            (r'/[ \t]*\n', Punctuation, '#pop:2'),
            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
            include('eval-or-plain'),
        ],

        # Literal text with #{...} Scala interpolation.
        'plain': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],

        'html-attributes': [
            (r'\s+', Text),
            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
            (r'[a-z0-9_:-]+', Name.Attribute),
            (r'\)', Text, '#pop'),
        ],

        'html-attribute-value': [
            (r'[ \t]+', Text),
            (r'[a-z0-9_]+', Name.Variable, '#pop'),
            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
        ],

        'html-comment-block': [
            (_dot + '+', Comment),
            (r'\n', Text, 'root'),
        ],

        'scaml-comment-block': [
            (_dot + '+', Comment.Preproc),
            (r'\n', Text, 'root'),
        ],

        'filter-block': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],
    }


class JadeLexer(ExtendedRegexLexer):
    """
    For Jade markup.
    Jade is a variant of Scaml, see:
    http://scalate.fusesource.org/documentation/scaml-reference.html

    *New in Pygments 1.4.*
    """

    name = 'Jade'
    aliases = ['jade', 'JADE']
    filenames = ['*.jade']
    mimetypes = ['text/x-jade']

    flags = re.IGNORECASE
    _dot = r'.'

    tokens = {
        'root': [
            (r'[ \t]*\n', Text),
            (r'[ \t]*', _indentation),
        ],

        'css': [
            (r'\.[a-z0-9_:-]+', Name.Class, 'tag'),
            (r'\#[a-z0-9_:-]+', Name.Function, 'tag'),
        ],

        'eval-or-plain': [
            (r'[&!]?==', Punctuation, 'plain'),
            (r'([&!]?[=~])(' + _dot + r'*\n)',
             bygroups(Punctuation, using(ScalaLexer)),
             'root'),
            (r'', Text, 'plain'),
        ],

        # Like Scaml 'content', but tags are bare names (no "%" sigil) and
        # "|" introduces a plain-text line.
        'content': [
            include('css'),
            (r'!!!' + _dot + r'*\n', Name.Namespace, '#pop'),
            (r'(/)(\[' + _dot + '*?\])(' + _dot + r'*\n)',
             bygroups(Comment, Comment.Special, Comment),
             '#pop'),
            (r'/' + _dot + r'*\n', _starts_block(Comment, 'html-comment-block'),
             '#pop'),
            (r'-#' + _dot + r'*\n', _starts_block(Comment.Preproc,
                                                 'scaml-comment-block'), '#pop'),
            (r'(-@\s*)(import)?(' + _dot + r'*\n)',
             bygroups(Punctuation, Keyword, using(ScalaLexer)),
             '#pop'),
            (r'(-)(' + _dot + r'*\n)',
             bygroups(Punctuation, using(ScalaLexer)),
             '#pop'),
            (r':' + _dot + r'*\n', _starts_block(Name.Decorator, 'filter-block'),
             '#pop'),
            (r'[a-z0-9_:-]+', Name.Tag, 'tag'),
            (r'\|', Text, 'eval-or-plain'),
        ],

        'tag': [
            include('css'),
            (r'\{(,\n|' + _dot + ')*?\}', using(ScalaLexer)),
            (r'\[' + _dot + '*?\]', using(ScalaLexer)),
            (r'\(', Text, 'html-attributes'),
            (r'/[ \t]*\n', Punctuation, '#pop:2'),
            (r'[<>]{1,2}(?=[ \t=])', Punctuation),
            include('eval-or-plain'),
        ],

        'plain': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Text),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],

        'html-attributes': [
            (r'\s+', Text),
            (r'[a-z0-9_:-]+[ \t]*=', Name.Attribute, 'html-attribute-value'),
            (r'[a-z0-9_:-]+', Name.Attribute),
            (r'\)', Text, '#pop'),
        ],

        'html-attribute-value': [
            (r'[ \t]+', Text),
            (r'[a-z0-9_]+', Name.Variable, '#pop'),
            (r'@[a-z0-9_]+', Name.Variable.Instance, '#pop'),
            (r'\$[a-z0-9_]+', Name.Variable.Global, '#pop'),
            (r"'(\\\\|\\'|[^'\n])*'", String, '#pop'),
            (r'"(\\\\|\\"|[^"\n])*"', String, '#pop'),
        ],

        'html-comment-block': [
            (_dot + '+', Comment),
            (r'\n', Text, 'root'),
        ],

        'scaml-comment-block': [
            (_dot + '+', Comment.Preproc),
            (r'\n', Text, 'root'),
        ],

        'filter-block': [
            (r'([^#\n]|#[^{\n]|(\\\\)*\\#\{)+', Name.Decorator),
            (r'(#\{)(' + _dot + '*?)(\})',
             bygroups(String.Interpol, using(ScalaLexer), String.Interpol)),
            (r'\n', Text, 'root'),
        ],
    }


class XQueryLexer(ExtendedRegexLexer):
    """
    An XQuery lexer, parsing a stream and outputting the tokens needed to
    highlight xquery code.

    *New in Pygments 1.4.*
    """

    name = 'XQuery'
    aliases = ['xquery', 'xqy', 'xq', 'xql', 'xqm']
    filenames = ['*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm']
    mimetypes = ['text/xquery', 'application/xquery']

    # Side stack used by the callback methods below to remember which state
    # to return to after nested constructors/expressions.
    xquery_parse_state = []

    # FIX UNICODE LATER
    #ncnamestartchar = (
    #    ur"[A-Z]|_|[a-z]|[\u00C0-\u00D6]|[\u00D8-\u00F6]|[\u00F8-\u02FF]|"
    #    ur"[\u0370-\u037D]|[\u037F-\u1FFF]|[\u200C-\u200D]|[\u2070-\u218F]|"
    #    ur"[\u2C00-\u2FEF]|[\u3001-\uD7FF]|[\uF900-\uFDCF]|[\uFDF0-\uFFFD]|"
    #    ur"[\u10000-\uEFFFF]"
    #)
    # ASCII-only approximations of the XML NCName productions.
    ncnamestartchar = r"(?:[A-Z]|_|[a-z])"
    # FIX UNICODE LATER
    #ncnamechar = ncnamestartchar + (ur"|-|\.|[0-9]|\u00B7|[\u0300-\u036F]|"
    #    ur"[\u203F-\u2040]")
    ncnamechar = r"(?:" + ncnamestartchar + r"|-|\.|[0-9])"
    ncname = "(?:%s+%s*)" % (ncnamestartchar, ncnamechar)
    # processing-instruction targets may not start with [Xx][Mm][Ll]
    pitarget_namestartchar = r"(?:[A-KN-WY-Z]|_|:|[a-kn-wy-z])"
    pitarget_namechar = r"(?:" + pitarget_namestartchar + r"|-|\.|[0-9])"
    pitarget = "%s+%s*" % (pitarget_namestartchar, pitarget_namechar)
    prefixedname = "%s:%s" % (ncname, ncname)
    unprefixedname = ncname
qname = "(?:%s|%s)" % (prefixedname, unprefixedname) entityref = r'(?:&(?:lt|gt|amp|quot|apos|nbsp);)' charref = r'(?:&#[0-9]+;|&#x[0-9a-fA-F]+;)' stringdouble = r'(?:"(?:' + entityref + r'|' + charref + r'|""|[^&"])*")' stringsingle = r"(?:'(?:" + entityref + r"|" + charref + r"|''|[^&'])*')" # FIX UNICODE LATER #elementcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') elementcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]' #quotattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0021]|[\u0023-\u0025]|' # ur'[\u0027-\u003b]|[\u003d-\u007a]|\u007c|[\u007e-\u007F]') quotattrcontentchar = r'[A-Za-z]|\s|\d|[!#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_\'`\|~]' #aposattrcontentchar = (ur'\t|\r|\n|[\u0020-\u0025]|[\u0028-\u003b]|' # ur'[\u003d-\u007a]|\u007c|[\u007e-\u007F]') aposattrcontentchar = r'[A-Za-z]|\s|\d|[!"#$%\(\)\*\+,\-\./\:;=\?\@\[\\\]^_`\|~]' # CHAR elements - fix the above elementcontentchar, quotattrcontentchar, # aposattrcontentchar #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF] flags = re.DOTALL | re.MULTILINE | re.UNICODE def punctuation_root_callback(lexer, match, ctx): yield match.start(), Punctuation, match.group(1) # transition to root always - don't pop off stack ctx.stack = ['root'] ctx.pos = match.end() def operator_root_callback(lexer, match, ctx): yield match.start(), Operator, match.group(1) # transition to root always - don't pop off stack ctx.stack = ['root'] ctx.pos = match.end() def popstate_tag_callback(lexer, match, ctx): yield match.start(), Name.Tag, match.group(1) ctx.stack.append(lexer.xquery_parse_state.pop()) ctx.pos = match.end() def popstate_xmlcomment_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append(lexer.xquery_parse_state.pop()) ctx.pos = match.end() def popstate_kindtest_callback(lexer, match, ctx): yield match.start(), Punctuation, match.group(1) next_state = lexer.xquery_parse_state.pop() if 
next_state == 'occurrenceindicator': if re.match("[?*+]+", match.group(2)): yield match.start(), Punctuation, match.group(2) ctx.stack.append('operator') ctx.pos = match.end() else: ctx.stack.append('operator') ctx.pos = match.end(1) else: ctx.stack.append(next_state) ctx.pos = match.end(1) def popstate_callback(lexer, match, ctx): yield match.start(), Punctuation, match.group(1) # if we have run out of our state stack, pop whatever is on the pygments # state stack if len(lexer.xquery_parse_state) == 0: ctx.stack.pop() elif len(ctx.stack) > 1: ctx.stack.append(lexer.xquery_parse_state.pop()) else: # i don't know if i'll need this, but in case, default back to root ctx.stack = ['root'] ctx.pos = match.end() def pushstate_element_content_starttag_callback(lexer, match, ctx): yield match.start(), Name.Tag, match.group(1) lexer.xquery_parse_state.append('element_content') ctx.stack.append('start_tag') ctx.pos = match.end() def pushstate_cdata_section_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('cdata_section') lexer.xquery_parse_state.append(ctx.state.pop) ctx.pos = match.end() def pushstate_starttag_callback(lexer, match, ctx): yield match.start(), Name.Tag, match.group(1) lexer.xquery_parse_state.append(ctx.state.pop) ctx.stack.append('start_tag') ctx.pos = match.end() def pushstate_operator_order_callback(lexer, match, ctx): yield match.start(), Keyword, match.group(1) yield match.start(), Text, match.group(2) yield match.start(), Punctuation, match.group(3) ctx.stack = ['root'] lexer.xquery_parse_state.append('operator') ctx.pos = match.end() def pushstate_operator_root_validate(lexer, match, ctx): yield match.start(), Keyword, match.group(1) yield match.start(), Text, match.group(2) yield match.start(), Punctuation, match.group(3) ctx.stack = ['root'] lexer.xquery_parse_state.append('operator') ctx.pos = match.end() def pushstate_operator_root_validate_withmode(lexer, match, ctx): yield match.start(), Keyword, 
match.group(1) yield match.start(), Text, match.group(2) yield match.start(), Keyword, match.group(3) ctx.stack = ['root'] lexer.xquery_parse_state.append('operator') ctx.pos = match.end() def pushstate_operator_processing_instruction_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('processing_instruction') lexer.xquery_parse_state.append('operator') ctx.pos = match.end() def pushstate_element_content_processing_instruction_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('processing_instruction') lexer.xquery_parse_state.append('element_content') ctx.pos = match.end() def pushstate_element_content_cdata_section_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('cdata_section') lexer.xquery_parse_state.append('element_content') ctx.pos = match.end() def pushstate_operator_cdata_section_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('cdata_section') lexer.xquery_parse_state.append('operator') ctx.pos = match.end() def pushstate_element_content_xmlcomment_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('xml_comment') lexer.xquery_parse_state.append('element_content') ctx.pos = match.end() def pushstate_operator_xmlcomment_callback(lexer, match, ctx): yield match.start(), String.Doc, match.group(1) ctx.stack.append('xml_comment') lexer.xquery_parse_state.append('operator') ctx.pos = match.end() def pushstate_kindtest_callback(lexer, match, ctx): yield match.start(), Keyword, match.group(1) yield match.start(), Text, match.group(2) yield match.start(), Punctuation, match.group(3) lexer.xquery_parse_state.append('kindtest') ctx.stack.append('kindtest') ctx.pos = match.end() def pushstate_operator_kindtestforpi_callback(lexer, match, ctx): yield match.start(), Keyword, match.group(1) yield match.start(), Text, match.group(2) yield match.start(), 
                Punctuation, match.group(3)
        lexer.xquery_parse_state.append('operator')
        ctx.stack.append('kindtestforpi')
        ctx.pos = match.end()

    # --- State-transition callbacks ----------------------------------------
    # Each callback yields tokens for the matched groups, then records the
    # lexer-level "return" state on lexer.xquery_parse_state and pushes the
    # next context state on ctx.stack.  popstate_* callbacks (defined above
    # this view) pop xquery_parse_state to get back.

    def pushstate_operator_kindtest_callback(lexer, match, ctx):
        # keyword, whitespace, '(' -> enter 'kindtest', return to 'operator'
        yield match.start(), Keyword, match.group(1)
        yield match.start(), Text, match.group(2)
        yield match.start(), Punctuation, match.group(3)
        lexer.xquery_parse_state.append('operator')
        ctx.stack.append('kindtest')
        ctx.pos = match.end()

    def pushstate_occurrenceindicator_kindtest_callback(lexer, match, ctx):
        # kindtest that, once closed, may carry an occurrence indicator (*+?)
        yield match.start(), Name.Tag, match.group(1)
        yield match.start(), Text, match.group(2)
        yield match.start(), Punctuation, match.group(3)
        lexer.xquery_parse_state.append('occurrenceindicator')
        ctx.stack.append('kindtest')
        ctx.pos = match.end()

    def pushstate_operator_starttag_callback(lexer, match, ctx):
        # '<' begins a direct XML element constructor
        yield match.start(), Name.Tag, match.group(1)
        lexer.xquery_parse_state.append('operator')
        ctx.stack.append('start_tag')
        ctx.pos = match.end()

    def pushstate_operator_root_callback(lexer, match, ctx):
        # '{' opens an enclosed expression; restart parsing from 'root'
        yield match.start(), Punctuation, match.group(1)
        lexer.xquery_parse_state.append('operator')
        ctx.stack = ['root']#.append('root')
        ctx.pos = match.end()

    def pushstate_operator_root_construct_callback(lexer, match, ctx):
        # computed constructor keyword + '{' -> expression from 'root'
        yield match.start(), Keyword, match.group(1)
        yield match.start(), Text, match.group(2)
        yield match.start(), Punctuation, match.group(3)
        lexer.xquery_parse_state.append('operator')
        ctx.stack = ['root']
        ctx.pos = match.end()

    def pushstate_root_callback(lexer, match, ctx):
        # '{' seen in an arbitrary state: remember the current context state
        # (not a fixed one) so the matching '}' can restore it.
        yield match.start(), Punctuation, match.group(1)
        cur_state = ctx.stack.pop()
        lexer.xquery_parse_state.append(cur_state)
        ctx.stack = ['root']#.append('root')
        ctx.pos = match.end()

    def pushstate_operator_attribute_callback(lexer, match, ctx):
        yield match.start(), Name.Attribute, match.group(1)
        ctx.stack.append('operator')
        ctx.pos = match.end()

    def pushstate_operator_callback(lexer, match, ctx):
        yield match.start(), Keyword, match.group(1)
        yield match.start(), Text, match.group(2)
        yield match.start(), Punctuation, match.group(3)
        lexer.xquery_parse_state.append('operator')
        ctx.pos = match.end()

    # Rule table.  NOTE: rule order within each state is significant — the
    # first matching regex wins — so entries must not be reordered.
    tokens = {
        'comment': [
            # xquery comments
            (r'(:\))', Comment, '#pop'),
            (r'(\(:)', Comment, '#push'),
            (r'[^:)]', Comment),
            # NOTE(review): this rule is shadowed by the previous three and
            # appears unreachable — kept as-is.
            (r'([^:)]|:|\))', Comment),
        ],
        'whitespace': [
            (r'\s+', Text),
        ],
        'operator': [
            include('whitespace'),
            (r'(\})', popstate_callback),
            (r'\(:', Comment, 'comment'),
            (r'(\{)', pushstate_root_callback),
            (r'then|else|external|at|div|except', Keyword, 'root'),
            (r'order by', Keyword, 'root'),
            (r'is|mod|order\s+by|stable\s+order\s+by', Keyword, 'root'),
            (r'and|or', Operator.Word, 'root'),
            (r'(eq|ge|gt|le|lt|ne|idiv|intersect|in)(?=\b)', Operator.Word,
             'root'),
            (r'return|satisfies|to|union|where|preserve\s+strip', Keyword,
             'root'),
            (r'(>=|>>|>|<=|<<|<|-|\*|!=|\+|\||:=|=)', operator_root_callback),
            (r'(::|;|\[|//|/|,)', punctuation_root_callback),
            (r'(castable|cast)(\s+)(as)\b',
             bygroups(Keyword, Text, Keyword), 'singletype'),
            (r'(instance)(\s+)(of)\b',
             bygroups(Keyword, Text, Keyword), 'itemtype'),
            (r'(treat)(\s+)(as)\b',
             bygroups(Keyword, Text, Keyword), 'itemtype'),
            (r'(case|as)\b', Keyword, 'itemtype'),
            (r'(\))(\s*)(as)', bygroups(Punctuation, Text, Keyword),
             'itemtype'),
            (r'\$', Name.Variable, 'varname'),
            (r'(for|let)(\s+)(\$)',
             bygroups(Keyword, Text, Name.Variable), 'varname'),
            #(r'\)|\?|\]', Punctuation, '#push'),
            (r'\)|\?|\]', Punctuation),
            (r'(empty)(\s+)(greatest|least)', bygroups(Keyword, Text, Keyword)),
            (r'ascending|descending|default', Keyword, '#push'),
            (r'external', Keyword),
            (r'collation', Keyword, 'uritooperator'),
            # finally catch all string literals and stay in operator state
            (stringdouble, String.Double),
            (stringsingle, String.Single),
            (r'(catch)(\s*)', bygroups(Keyword, Text), 'root'),
        ],
        'uritooperator': [
            (stringdouble, String.Double, '#pop'),
            (stringsingle, String.Single, '#pop'),
        ],
        'namespacedecl': [
            include('whitespace'),
            (r'\(:', Comment, 'comment'),
            (r'(at)(\s+)('+stringdouble+')',
             bygroups(Keyword, Text, String.Double)),
            (r"(at)(\s+)("+stringsingle+')',
             bygroups(Keyword, Text, String.Single)),
            (stringdouble, String.Double),
            (stringsingle, String.Single),
            (r',', Punctuation),
            (r'=', Operator),
            (r';', Punctuation, 'root'),
            (ncname, Name.Namespace),
        ],
        'namespacekeyword': [
            include('whitespace'),
            (r'\(:', Comment, 'comment'),
            (stringdouble, String.Double, 'namespacedecl'),
            (stringsingle, String.Single, 'namespacedecl'),
            (r'inherit|no-inherit', Keyword, 'root'),
            (r'namespace', Keyword, 'namespacedecl'),
            (r'(default)(\s+)(element)', bygroups(Keyword, Text, Keyword)),
            (r'preserve|no-preserve', Keyword),
            (r',', Punctuation),
        ],
        'varname': [
            (r'\(:', Comment, 'comment'),
            (qname, Name.Variable, 'operator'),
        ],
        'singletype': [
            (r'\(:', Comment, 'comment'),
            (ncname + r'(:\*)', Name.Variable, 'operator'),
            (qname, Name.Variable, 'operator'),
        ],
        'itemtype': [
            include('whitespace'),
            (r'\(:', Comment, 'comment'),
            (r'\$', Punctuation, 'varname'),
            (r'(void)(\s*)(\()(\s*)(\))',
             bygroups(Keyword, Text, Punctuation, Text, Punctuation),
             'operator'),
            (r'(element|attribute|schema-element|schema-attribute|comment|text|'
             r'node|binary|document-node|empty-sequence)(\s*)(\()',
             pushstate_occurrenceindicator_kindtest_callback),
            # Marklogic specific type?
            (r'(processing-instruction)(\s*)(\()',
             bygroups(Keyword, Text, Punctuation),
             ('occurrenceindicator', 'kindtestforpi')),
            (r'(item)(\s*)(\()(\s*)(\))(?=[*+?])',
             bygroups(Keyword, Text, Punctuation, Text, Punctuation),
             'occurrenceindicator'),
            (r'\(\#', Punctuation, 'pragma'),
            (r';', Punctuation, '#pop'),
            (r'then|else', Keyword, '#pop'),
            (r'(at)(\s+)(' + stringdouble + ')',
             bygroups(Keyword, Text, String.Double), 'namespacedecl'),
            (r'(at)(\s+)(' + stringsingle + ')',
             bygroups(Keyword, Text, String.Single), 'namespacedecl'),
            (r'except|intersect|in|is|return|satisfies|to|union|where',
             Keyword, 'root'),
            (r'and|div|eq|ge|gt|le|lt|ne|idiv|mod|or', Operator.Word, 'root'),
            (r':=|=|,|>=|>>|>|\[|\(|<=|<<|<|-|!=|\|', Operator, 'root'),
            (r'external|at', Keyword, 'root'),
            (r'(stable)(\s+)(order)(\s+)(by)',
             bygroups(Keyword, Text, Keyword, Text, Keyword), 'root'),
            (r'(castable|cast)(\s+)(as)',
             bygroups(Keyword, Text, Keyword), 'singletype'),
            (r'(treat)(\s+)(as)', bygroups(Keyword, Text, Keyword)),
            (r'(instance)(\s+)(of)', bygroups(Keyword, Text, Keyword)),
            (r'case|as', Keyword, 'itemtype'),
            (r'(\))(\s*)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),
            (ncname + r':\*', Keyword.Type, 'operator'),
            (qname, Keyword.Type, 'occurrenceindicator'),
        ],
        'kindtest': [
            (r'\(:', Comment, 'comment'),
            (r'{', Punctuation, 'root'),
            (r'(\))([*+?]?)', popstate_kindtest_callback),
            (r'\*', Name, 'closekindtest'),
            (qname, Name, 'closekindtest'),
            (r'(element|schema-element)(\s*)(\()', pushstate_kindtest_callback),
        ],
        'kindtestforpi': [
            (r'\(:', Comment, 'comment'),
            (r'\)', Punctuation, '#pop'),
            (ncname, Name.Variable),
            (stringdouble, String.Double),
            (stringsingle, String.Single),
        ],
        'closekindtest': [
            (r'\(:', Comment, 'comment'),
            (r'(\))', popstate_callback),
            (r',', Punctuation),
            (r'(\{)', pushstate_operator_root_callback),
            (r'\?', Punctuation),
        ],
        'xml_comment': [
            (r'(-->)', popstate_xmlcomment_callback),
            (r'[^-]{1,2}', Literal),
            # any XML Char production character (BMP + astral planes)
            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
             unirange(0x10000, 0x10ffff), Literal),
        ],
        'processing_instruction': [
            (r'\s+', Text, 'processing_instruction_content'),
            (r'\?>', String.Doc, '#pop'),
            (pitarget, Name),
        ],
        'processing_instruction_content': [
            (r'\?>', String.Doc, '#pop'),
            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
             unirange(0x10000, 0x10ffff), Literal),
        ],
        'cdata_section': [
            (r']]>', String.Doc, '#pop'),
            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
             unirange(0x10000, 0x10ffff), Literal),
        ],
        'start_tag': [
            include('whitespace'),
            (r'(/>)', popstate_tag_callback),
            (r'>', Name.Tag, 'element_content'),
            (r'"', Punctuation, 'quot_attribute_content'),
            (r"'", Punctuation, 'apos_attribute_content'),
            (r'=', Operator),
            (qname, Name.Tag),
        ],
        'quot_attribute_content': [
            (r'"', Punctuation, 'start_tag'),
            (r'(\{)', pushstate_root_callback),
            (r'""', Name.Attribute),
            (quotattrcontentchar, Name.Attribute),
            (entityref, Name.Attribute),
            (charref, Name.Attribute),
            (r'\{\{|\}\}', Name.Attribute),
        ],
        'apos_attribute_content': [
            (r"'", Punctuation, 'start_tag'),
            (r'\{', Punctuation, 'root'),
            (r"''", Name.Attribute),
            (aposattrcontentchar, Name.Attribute),
            (entityref, Name.Attribute),
            (charref, Name.Attribute),
            (r'\{\{|\}\}', Name.Attribute),
        ],
        'element_content': [
            (r'</', Name.Tag, 'end_tag'),
            (r'(\{)', pushstate_root_callback),
            (r'(<!--)', pushstate_element_content_xmlcomment_callback),
            (r'(<\?)',
             pushstate_element_content_processing_instruction_callback),
            (r'(<!\[CDATA\[)', pushstate_element_content_cdata_section_callback),
            (r'(<)', pushstate_element_content_starttag_callback),
            (elementcontentchar, Literal),
            (entityref, Literal),
            (charref, Literal),
            (r'\{\{|\}\}', Literal),
        ],
        'end_tag': [
            include('whitespace'),
            (r'(>)', popstate_tag_callback),
            (qname, Name.Tag),
        ],
        'xmlspace_decl': [
            (r'\(:', Comment, 'comment'),
            (r'preserve|strip', Keyword, '#pop'),
        ],
        'declareordering': [
            (r'\(:', Comment, 'comment'),
            include('whitespace'),
            (r'ordered|unordered', Keyword, '#pop'),
        ],
        'xqueryversion': [
            include('whitespace'),
            (r'\(:', Comment, 'comment'),
            (stringdouble, String.Double),
            (stringsingle, String.Single),
            (r'encoding', Keyword),
            (r';', Punctuation, '#pop'),
        ],
        'pragma': [
            (qname, Name.Variable, 'pragmacontents'),
        ],
        'pragmacontents': [
            (r'#\)', Punctuation, 'operator'),
            (ur'\t|\r|\n|[\u0020-\uD7FF]|[\uE000-\uFFFD]|' +
             unirange(0x10000, 0x10ffff), Literal),
            (r'(\s+)', Text),
        ],
        'occurrenceindicator': [
            include('whitespace'),
            (r'\(:', Comment, 'comment'),
            (r'\*|\?|\+', Operator, 'operator'),
            (r':=', Operator, 'root'),
            # empty match: fall through to 'operator' without consuming input
            (r'', Text, 'operator'),
        ],
        'option': [
            include('whitespace'),
            (qname, Name.Variable, '#pop'),
        ],
        'qname_braren': [
            include('whitespace'),
            (r'(\{)', pushstate_operator_root_callback),
            (r'(\()', Punctuation, 'root'),
        ],
        'element_qname': [
            (qname, Name.Variable, 'root'),
        ],
        'attribute_qname': [
            (qname, Name.Variable, 'root'),
        ],
        'root': [
            include('whitespace'),
            (r'\(:', Comment, 'comment'),

            # handle operator state
            # order on numbers matters - handle most complex first
            (r'\d+(\.\d*)?[eE][\+\-]?\d+', Number.Double, 'operator'),
            (r'(\.\d+)[eE][\+\-]?\d+', Number.Double, 'operator'),
            (r'(\.\d+|\d+\.\d*)', Number, 'operator'),
            (r'(\d+)', Number.Integer, 'operator'),
            (r'(\.\.|\.|\))', Punctuation, 'operator'),
            (r'(declare)(\s+)(construction)',
             bygroups(Keyword, Text, Keyword), 'operator'),
            (r'(declare)(\s+)(default)(\s+)(order)',
             bygroups(Keyword, Text, Keyword, Text, Keyword), 'operator'),
            (ncname + ':\*', Name, 'operator'),
            ('\*:'+ncname, Name.Tag, 'operator'),
            ('\*', Name.Tag, 'operator'),
            (stringdouble, String.Double, 'operator'),
            (stringsingle, String.Single, 'operator'),

            (r'(\})', popstate_callback),

            #NAMESPACE DECL
            (r'(declare)(\s+)(default)(\s+)(collation)',
             bygroups(Keyword, Text, Keyword, Text, Keyword)),
            (r'(module|declare)(\s+)(namespace)',
             bygroups(Keyword, Text, Keyword), 'namespacedecl'),
            (r'(declare)(\s+)(base-uri)',
             bygroups(Keyword, Text, Keyword), 'namespacedecl'),

            #NAMESPACE KEYWORD
            (r'(declare)(\s+)(default)(\s+)(element|function)',
             bygroups(Keyword, Text, Keyword, Text, Keyword),
             'namespacekeyword'),
            (r'(import)(\s+)(schema|module)',
             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo),
             'namespacekeyword'),
            (r'(declare)(\s+)(copy-namespaces)',
             bygroups(Keyword, Text, Keyword), 'namespacekeyword'),

            #VARNAMEs
            (r'(for|let|some|every)(\s+)(\$)',
             bygroups(Keyword, Text, Name.Variable), 'varname'),
            (r'\$', Name.Variable, 'varname'),
            (r'(declare)(\s+)(variable)(\s+)(\$)',
             bygroups(Keyword, Text, Keyword, Text, Name.Variable), 'varname'),

            #ITEMTYPE
            (r'(\))(\s+)(as)', bygroups(Operator, Text, Keyword), 'itemtype'),

            (r'(element|attribute|schema-element|schema-attribute|comment|'
             r'text|node|document-node|empty-sequence)(\s+)(\()',
             pushstate_operator_kindtest_callback),

            (r'(processing-instruction)(\s+)(\()',
             pushstate_operator_kindtestforpi_callback),

            (r'(<!--)', pushstate_operator_xmlcomment_callback),

            (r'(<\?)', pushstate_operator_processing_instruction_callback),

            (r'(<!\[CDATA\[)', pushstate_operator_cdata_section_callback),

            # (r'</', Name.Tag, 'end_tag'),
            (r'(<)', pushstate_operator_starttag_callback),

            (r'(declare)(\s+)(boundary-space)',
             bygroups(Keyword, Text, Keyword), 'xmlspace_decl'),

            (r'(validate)(\s+)(lax|strict)',
             pushstate_operator_root_validate_withmode),
            (r'(validate)(\s*)(\{)', pushstate_operator_root_validate),
            (r'(typeswitch)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),
            (r'(element|attribute)(\s*)(\{)',
             pushstate_operator_root_construct_callback),

            (r'(document|text|processing-instruction|comment)(\s*)(\{)',
             pushstate_operator_root_construct_callback),
            #ATTRIBUTE
            (r'(attribute)(\s+)(?=' + qname + r')',
             bygroups(Keyword, Text), 'attribute_qname'),
            #ELEMENT
            (r'(element)(\s+)(?=' +qname+ r')',
             bygroups(Keyword, Text), 'element_qname'),
            #PROCESSING_INSTRUCTION
            (r'(processing-instruction)(\s+)(' + ncname + r')(\s*)(\{)',
             bygroups(Keyword, Text, Name.Variable, Text, Punctuation),
             'operator'),

            (r'(declare|define)(\s+)(function)',
             bygroups(Keyword, Text, Keyword)),

            (r'(\{)', pushstate_operator_root_callback),

            (r'(unordered|ordered)(\s*)(\{)',
             pushstate_operator_order_callback),

            (r'(declare)(\s+)(ordering)',
             bygroups(Keyword, Text, Keyword), 'declareordering'),

            (r'(xquery)(\s+)(version)',
             bygroups(Keyword.Pseudo, Text, Keyword.Pseudo), 'xqueryversion'),

            (r'(\(#)', Punctuation, 'pragma'),

            # sometimes return can occur in root state
            (r'return', Keyword),

            (r'(declare)(\s+)(option)', bygroups(Keyword, Text, Keyword),
             'option'),

            #URI LITERALS - single and double quoted
            # NOTE(review): these use a plain token instead of bygroups even
            # though the regex has three groups — kept as-is.
            (r'(at)(\s+)('+stringdouble+')', String.Double, 'namespacedecl'),
            (r'(at)(\s+)('+stringsingle+')', String.Single, 'namespacedecl'),

            (r'(ancestor-or-self|ancestor|attribute|child|descendant-or-self)'
             r'(::)', bygroups(Keyword, Punctuation)),
            (r'(descendant|following-sibling|following|parent|preceding-sibling'
             r'|preceding|self)(::)', bygroups(Keyword, Punctuation)),

            (r'(if)(\s*)(\()', bygroups(Keyword, Text, Punctuation)),

            (r'then|else', Keyword),

            # ML specific
            (r'(try)(\s*)', bygroups(Keyword, Text), 'root'),
            (r'(catch)(\s*)(\()(\$)',
             bygroups(Keyword, Text, Punctuation, Name.Variable), 'varname'),

            (r'(@'+qname+')', Name.Attribute),
            (r'(@'+ncname+')', Name.Attribute),
            (r'@\*:'+ncname, Name.Attribute),
            (r'(@)', Name.Attribute),

            (r'//|/|\+|-|;|,|\(|\)', Punctuation),

            # STANDALONE QNAMES
            (qname + r'(?=\s*{)', Name.Tag, 'qname_braren'),
            (qname + r'(?=\s*\([^:])', Name.Function, 'qname_braren'),
            (qname, Name.Tag, 'operator'),
        ]
    }


class DartLexer(RegexLexer):
    """
    For `Dart <http://dartlang.org/>`_ source code.
    *New in Pygments 1.5.*
    """

    name = 'Dart'
    aliases = ['dart']
    filenames = ['*.dart']
    mimetypes = ['text/x-dart']

    flags = re.MULTILINE | re.DOTALL

    # Rule order within each state is significant (first match wins).
    tokens = {
        'root': [
            include('string_literal'),
            (r'#!(.*?)$', Comment.Preproc),
            (r'\b(import|export)\b', Keyword, 'import_decl'),
            (r'\b(library|source|part of|part)\b', Keyword),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'\b(class)\b(\s+)',
             bygroups(Keyword.Declaration, Text), 'class'),
            (r'\b(assert|break|case|catch|continue|default|do|else|finally|for|'
             r'if|in|is|new|return|super|switch|this|throw|try|while)\b',
             Keyword),
            (r'\b(abstract|const|extends|factory|final|get|implements|'
             r'native|operator|set|static|typedef|var)\b', Keyword.Declaration),
            (r'\b(bool|double|Dynamic|int|num|Object|String|void)\b',
             Keyword.Type),
            (r'\b(false|null|true)\b', Keyword.Constant),
            (r'[~!%^&*+=|?:<>/-]|as', Operator),
            (r'[a-zA-Z_$][a-zA-Z0-9_]*:', Name.Label),
            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
            (r'[(){}\[\],.;]', Punctuation),
            (r'0[xX][0-9a-fA-F]+', Number.Hex),
            # DIGIT+ (‘.’ DIGIT*)? EXPONENT?
            (r'\d+(\.\d*)?([eE][+-]?\d+)?', Number),
            (r'\.\d+([eE][+-]?\d+)?', Number), # ‘.’ DIGIT+ EXPONENT?
            (r'\n', Text)
            # pseudo-keyword negate intentionally left out
        ],
        'class': [
            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name.Class, '#pop')
        ],
        'import_decl': [
            include('string_literal'),
            (r'\s+', Text),
            (r'\b(as|show|hide)\b', Keyword),
            (r'[a-zA-Z_$][a-zA-Z0-9_]*', Name),
            (r'\,', Punctuation),
            (r'\;', Punctuation, '#pop')
        ],
        'string_literal': [
            # Raw strings.
            (r'r"""([\s|\S]*?)"""', String.Double),
            (r"r'''([\s|\S]*?)'''", String.Single),
            (r'r"(.*?)"', String.Double),
            (r"r'(.*?)'", String.Single),
            # Normal Strings.
            (r'"""', String.Double, 'string_double_multiline'),
            (r"'''", String.Single, 'string_single_multiline'),
            (r'"', String.Double, 'string_double'),
            (r"'", String.Single, 'string_single')
        ],
        'string_common': [
            # escape sequences, then $identifier and ${...} interpolation
            (r"\\(x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|u\{[0-9A-Fa-f]*\}|[a-z\'\"$\\])",
             String.Escape),
            (r'(\$)([a-zA-Z_][a-zA-Z0-9_]*)', bygroups(String.Interpol, Name)),
            (r'(\$\{)(.*?)(\})',
             bygroups(String.Interpol, using(this), String.Interpol))
        ],
        'string_double': [
            (r'"', String.Double, '#pop'),
            (r'[^\"$\\\n]+', String.Double),
            include('string_common'),
            (r'\$+', String.Double)
        ],
        'string_double_multiline': [
            (r'"""', String.Double, '#pop'),
            (r'[^\"$\\]+', String.Double),
            include('string_common'),
            (r'(\$|\")+', String.Double)
        ],
        'string_single': [
            (r"'", String.Single, '#pop'),
            (r"[^\'$\\\n]+", String.Single),
            include('string_common'),
            (r'\$+', String.Single)
        ],
        'string_single_multiline': [
            (r"'''", String.Single, '#pop'),
            (r'[^\'$\\]+', String.Single),
            include('string_common'),
            (r'(\$|\')+', String.Single)
        ]
    }


class TypeScriptLexer(RegexLexer):
    """
    For `TypeScript <http://www.typescriptlang.org/>`_ source code.
    *New in Pygments 1.6.*
    """

    name = 'TypeScript'
    aliases = ['ts']
    filenames = ['*.ts']
    mimetypes = ['text/x-typescript']

    flags = re.DOTALL

    # Based on the JavaScript lexer rules, with TypeScript additions.
    # Rule order within each state is significant (first match wins).
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            (r'<!--', Comment),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline)
        ],
        'slashstartsregex': [
            # a '/' in this state starts a regex literal, not division
            include('commentsandwhitespace'),
            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'(?=/)', Text, ('#pop', 'badregex')),
            (r'', Text, '#pop')
        ],
        'badregex': [
            (r'\n', Text, '#pop')
        ],
        'root': [
            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator,
             'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),
            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
             r'this)\b', Keyword, 'slashstartsregex'),
            (r'(var|let|with|function)\b', Keyword.Declaration,
             'slashstartsregex'),
            (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
             r'extends|final|float|goto|implements|import|int|interface|long|native|'
             r'package|private|protected|public|short|static|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Reserved),
            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
             r'window)\b', Name.Builtin),
            # Match stuff like: module name {...}
            (r'\b(module)(\s*)(\s*[a-zA-Z0-9_?.$][\w?.$]*)(\s*)',
             bygroups(Keyword.Reserved, Text, Name.Other, Text),
             'slashstartsregex'),
            # Match variable type keywords
            (r'\b(string|bool|number)\b', Keyword.Type),
            # Match stuff like: constructor
            (r'\b(constructor|declare|interface|as|AS)\b', Keyword.Reserved),
            # Match stuff like: super(argument, list)
            (r'(super)(\s*)(\([a-zA-Z0-9,_?.$\s]+\s*\))',
             bygroups(Keyword.Reserved, Text), 'slashstartsregex'),
            # Match stuff like: function() {...}
            (r'([a-zA-Z_?.$][\w?.$]*)\(\) \{', Name.Other, 'slashstartsregex'),
            # Match stuff like: (function: return type)
            (r'([a-zA-Z0-9_?.$][\w?.$]*)(\s*:\s*)([a-zA-Z0-9_?.$][\w?.$]*)',
             bygroups(Name.Other, Text, Keyword.Type)),
            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
        ]
    }


class LassoLexer(RegexLexer):
    """
    For `Lasso <http://www.lassosoft.com/>`_ source code, covering both Lasso 9
    syntax and LassoScript for Lasso 8.6 and earlier. For Lasso embedded in
    HTML, use the `LassoHtmlLexer`.

    Additional options accepted:

    `builtinshighlighting`
        If given and ``True``, highlight builtin tags, types, traits, and
        methods (default: ``True``).
    `requiredelimiters`
        If given and ``True``, only highlight code between delimiters as Lasso
        (default: ``False``).
    *New in Pygments 1.6.*
    """

    name = 'Lasso'
    aliases = ['lasso', 'lassoscript']
    filenames = ['*.lasso', '*.lasso[89]']
    alias_filenames = ['*.incl', '*.inc', '*.las']
    mimetypes = ['text/x-lasso']
    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE

    # Rule order within each state is significant (first match wins).
    tokens = {
        'root': [
            (r'^#!.+lasso9\b', Comment.Preproc, 'lasso'),
            (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
            (r'\[noprocess\]', Comment.Preproc, ('delimiters', 'noprocess')),
            (r'\[', Comment.Preproc, ('delimiters', 'squarebrackets')),
            (r'<\?(LassoScript|lasso|=)', Comment.Preproc,
             ('delimiters', 'anglebrackets')),
            (r'<', Other, 'delimiters'),
            (r'\s+', Other),
            # empty match: anything else means a plain Lasso source file
            (r'', Other, ('delimiters', 'lassofile')),
        ],
        'delimiters': [
            (r'\[no_square_brackets\]', Comment.Preproc, 'nosquarebrackets'),
            (r'\[noprocess\]', Comment.Preproc, 'noprocess'),
            (r'\[', Comment.Preproc, 'squarebrackets'),
            (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
            (r'<', Other),
            (r'[^[<]+', Other),
        ],
        'nosquarebrackets': [
            (r'<\?(LassoScript|lasso|=)', Comment.Preproc, 'anglebrackets'),
            (r'<', Other),
            (r'[^<]+', Other),
        ],
        'noprocess': [
            (r'\[/noprocess\]', Comment.Preproc, '#pop'),
            (r'\[', Other),
            (r'[^[]', Other),
        ],
        'squarebrackets': [
            (r'\]', Comment.Preproc, '#pop'),
            include('lasso'),
        ],
        'anglebrackets': [
            (r'\?>', Comment.Preproc, '#pop'),
            include('lasso'),
        ],
        'lassofile': [
            (r'\]', Comment.Preproc, '#pop'),
            (r'\?>', Comment.Preproc, '#pop'),
            include('lasso'),
        ],
        'whitespacecomments': [
            (r'\s+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*\*!.*?\*/', String.Doc),
            (r'/\*.*?\*/', Comment.Multiline),
        ],
        'lasso': [
            # whitespace/comments
            include('whitespacecomments'),

            # literals
            (r'\d*\.\d+(e[+-]?\d+)?', Number.Float),
            (r'0x[\da-f]+', Number.Hex),
            (r'\d+', Number.Integer),
            (r'([+-]?)(infinity|NaN)\b', bygroups(Operator, Number)),
            (r"'", String.Single, 'singlestring'),
            (r'"', String.Double, 'doublestring'),
            (r'`[^`]*`', String.Backtick),

            # names
            (r'\$[a-z_][\w.]*', Name.Variable),
            (r'#[a-z_][\w.]*|#\d+', Name.Variable.Instance),
            (r"(\.)('[a-z_][\w.]*')",
             bygroups(Name.Builtin.Pseudo, Name.Variable.Class)),
            (r"(self)(->)('[a-z_][\w.]*')",
             bygroups(Name.Builtin.Pseudo, Operator, Name.Variable.Class)),
            (r'(\.\.?)([a-z_][\w.]*)',
             bygroups(Name.Builtin.Pseudo, Name.Other)),
            (r'(self|inherited|global|void)\b', Name.Builtin.Pseudo),
            (r'-[a-z_][\w.]*', Name.Attribute),
            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
            (r'(error_(code|msg)_\w+|Error_AddError|Error_ColumnRestriction|'
             r'Error_DatabaseConnectionUnavailable|Error_DatabaseTimeout|'
             r'Error_DeleteError|Error_FieldRestriction|Error_FileNotFound|'
             r'Error_InvalidDatabase|Error_InvalidPassword|'
             r'Error_InvalidUsername|Error_ModuleNotFound|'
             r'Error_NoError|Error_NoPermission|Error_OutOfMemory|'
             r'Error_ReqColumnMissing|Error_ReqFieldMissing|'
             r'Error_RequiredColumnMissing|Error_RequiredFieldMissing|'
             r'Error_UpdateError)\b', Name.Exception),

            # definitions
            (r'(define)(\s+)([a-z_][\w.]*)(\s*)(=>)(\s*)(type|trait|thread)\b',
             bygroups(Keyword.Declaration, Text, Name.Class, Text, Operator,
                      Text, Keyword)),
            (r'(define)(\s+)([a-z_][\w.]*)(->)([a-z_][\w.]*=?|[-+*/%<>]|==)',
             bygroups(Keyword.Declaration, Text, Name.Class, Operator,
                      Name.Function), 'signature'),
            (r'(define)(\s+)([a-z_][\w.]*)',
             bygroups(Keyword.Declaration, Text, Name.Function), 'signature'),
            (r'(public|protected|private|provide)(\s+)(([a-z_][\w.]*=?|'
             r'[-+*/%<>]|==)(?=\s*\())',
             bygroups(Keyword, Text, Name.Function), 'signature'),
            (r'(public|protected|private)(\s+)([a-z_][\w.]*)',
             bygroups(Keyword, Text, Name.Function)),

            # keywords
            (r'(true|false|none|minimal|full|all)\b', Keyword.Constant),
            (r'(local|var|variable|data)\b', Keyword.Declaration),
            (r'(array|date|decimal|duration|integer|map|pair|string|tag|xml|'
             r'null)\b', Keyword.Type),
            (r'([a-z_][\w.]*)(\s+)(in)\b', bygroups(Name, Text, Keyword)),
            (r'(let|into)(\s+)([a-z_][\w.]*)', bygroups(Keyword, Text, Name)),
            (r'require\b', Keyword, 'requiresection'),
            (r'(/?)(Namespace_Using)\b', bygroups(Punctuation,
             Keyword.Namespace)),
            (r'(/?)(Cache|Database_Names|Database_SchemaNames|'
             r'Database_TableNames|Define_Tag|Define_Type|Email_Batch|'
             r'Encode_Set|HTML_Comment|Handle|Handle_Error|Header|If|Inline|'
             r'Iterate|LJAX_Target|Link|Link_CurrentAction|Link_CurrentGroup|'
             r'Link_CurrentRecord|Link_Detail|Link_FirstGroup|'
             r'Link_FirstRecord|Link_LastGroup|Link_LastRecord|Link_NextGroup|'
             r'Link_NextRecord|Link_PrevGroup|Link_PrevRecord|Log|Loop|'
             r'NoProcess|Output_None|Portal|Private|Protect|Records|Referer|'
             r'Referrer|Repeating|ResultSet|Rows|Search_Args|Search_Arguments|'
             r'Select|Sort_Args|Sort_Arguments|Thread_Atomic|Value_List|While|'
             r'Abort|Case|Else|If_Empty|If_False|If_Null|If_True|Loop_Abort|'
             r'Loop_Continue|Loop_Count|Params|Params_Up|Return|Return_Value|'
             r'Run_Children|SOAP_DefineTag|SOAP_LastRequest|SOAP_LastResponse|'
             r'Tag_Name|ascending|average|by|define|descending|do|equals|'
             r'frozen|group|handle_failure|import|in|into|join|let|match|max|'
             r'min|on|order|parent|protected|provide|public|require|skip|'
             r'split_thread|sum|take|thread|to|trait|type|where|with|yield)\b',
             bygroups(Punctuation, Keyword)),

            # other
            (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=\s*\([^)]*\)\s*=>))',
             Name.Function, 'signature'),
            (r'(and|or|not)\b', Operator.Word),
            (r'([a-z_][\w.]*)(\s*)(::\s*)([a-z_][\w.]*)(\s*)(=)',
             bygroups(Name, Text, Punctuation, Name.Label, Text, Operator)),
            (r'((?<!->)[a-z_][\w.]*)(\s*)(=(?!=))',
             bygroups(Name, Text, Operator)),
            (r'(/?)([\w.]+)', bygroups(Punctuation, Name.Other)),
            (r'(=)(bw|ew|cn|lte?|gte?|n?eq|ft|n?rx)\b',
             bygroups(Operator, Operator.Word)),
            (r':=|[-+*/%=<>&|!?\\]+', Operator),
            (r'[{}():;,@^]', Punctuation),
        ],
        'singlestring': [
            (r"'", String.Single, '#pop'),
            (r"[^'\\]+", String.Single),
            include('escape'),
            (r"\\+", String.Single),
        ],
        'doublestring': [
            (r'"', String.Double, '#pop'),
            (r'[^"\\]+', String.Double),
            include('escape'),
            (r'\\+', String.Double),
        ],
        'escape': [
            (r'\\(U[\da-f]{8}|u[\da-f]{4}|x[\da-f]{1,2}|[0-7]{1,3}|:[^:]+:|'
             r'[abefnrtv?\"\'\\]|$)', String.Escape),
        ],
        'signature': [
            (r'=>', Operator, '#pop'),
            (r'\)', Punctuation, '#pop'),
            (r'[(,]', Punctuation, 'parameter'),
            include('lasso'),
        ],
        'parameter': [
            (r'\)', Punctuation, '#pop'),
            (r'-?[a-z_][\w.]*', Name.Attribute, '#pop'),
            (r'\.\.\.', Name.Builtin.Pseudo),
            include('lasso'),
        ],
        'requiresection': [
            (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=\s*\())', Name,
             'requiresignature'),
            (r'(([a-z_][\w.]*=?|[-+*/%<>]|==)(?=(\s*::\s*[\w.]+)?\s*,))',
             Name),
            (r'[a-z_][\w.]*=?|[-+*/%<>]|==', Name, '#pop'),
            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
            (r',', Punctuation),
            include('whitespacecomments'),
        ],
        'requiresignature': [
            (r'(\)(?=(\s*::\s*[\w.]+)?\s*,))', Punctuation, '#pop'),
            (r'\)', Punctuation, '#pop:2'),
            (r'-?[a-z_][\w.]*', Name.Attribute),
            (r'(::\s*)([a-z_][\w.]*)', bygroups(Punctuation, Name.Label)),
            (r'\.\.\.', Name.Builtin.Pseudo),
            (r'[(,]', Punctuation),
            include('whitespacecomments'),
        ],
    }

    def __init__(self, **options):
        # Honour the two lexer-specific options documented in the class
        # docstring, then pre-load the lowercase builtin-name set used by
        # get_tokens_unprocessed below.
        self.builtinshighlighting = get_bool_opt(
            options, 'builtinshighlighting', True)
        self.requiredelimiters = get_bool_opt(
            options, 'requiredelimiters', False)
        self._builtins = set()
        if self.builtinshighlighting:
            from pygments.lexers._lassobuiltins import BUILTINS
            # BUILTINS maps category -> names; iteritems() => Python 2 file
            for key, value in BUILTINS.iteritems():
                self._builtins.update(value)
        RegexLexer.__init__(self, **options)

    def get_tokens_unprocessed(self, text):
        # Post-process the base lexer's stream: promote Name.Other tokens
        # that are known builtins to Name.Builtin.
        stack = ['root']
        if self.requiredelimiters:
            stack.append('delimiters')
        for index, token, value in \
            RegexLexer.get_tokens_unprocessed(self, text, stack):
            if token is Name.Other:
                if value.lower() in self._builtins:
                    yield index, Name.Builtin, value
                    continue
            yield index, token, value

    # No 'self': pygments calls analyse_text as an unbound heuristic hook.
    def analyse_text(text):
        # Confidence score in [0, 1]-ish based on Lasso-specific markers.
        rv = 0.0
        if 'bin/lasso9' in text:
            rv += 0.8
        if re.search(r'<\?(=|lasso)', text, re.I):
            rv += 0.4
        if re.search(r'local\(', text, re.I):
            rv += 0.4
        if re.search(r'\[\n|\?>', text):
            rv += 0.4
        return rv


class QmlLexer(RegexLexer):
    """
    For QML files.
    See http://doc.qt.digia.com/4.7/qdeclarativeintroduction.html.

    *New in Pygments 1.6.*
    """

    # QML is based on javascript, so much of this is taken from the
    # JavascriptLexer above.

    name = 'QML'
    aliases = ['qml', 'Qt Meta Language', 'Qt modeling Language']
    filenames = ['*.qml',]
    mimetypes = [ 'application/x-qml',]


    # pasted from JavascriptLexer, with some additions
    flags = re.DOTALL
    tokens = {
        'commentsandwhitespace': [
            (r'\s+', Text),
            (r'<!--', Comment),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline)
        ],
        'slashstartsregex': [
            # a '/' in this state starts a regex literal, not division
            include('commentsandwhitespace'),
            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
             r'([gim]+\b|\B)', String.Regex, '#pop'),
            (r'(?=/)', Text, ('#pop', 'badregex')),
            (r'', Text, '#pop')
        ],
        'badregex': [
            (r'\n', Text, '#pop')
        ],
        'root' : [
            (r'^(?=\s|/|<!--)', Text, 'slashstartsregex'),
            include('commentsandwhitespace'),
            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
             r'(<<|>>>?|==?|!=?|[-<>+*%&\|\^/])=?', Operator,
             'slashstartsregex'),
            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
            (r'[})\].]', Punctuation),

            # QML insertions
            (r'\bid\s*:\s*[A-Za-z][_A-Za-z.0-9]*',Keyword.Declaration,
             'slashstartsregex'),
            (r'\b[A-Za-z][_A-Za-z.0-9]*\s*:',Keyword, 'slashstartsregex'),

            # the rest from JavascriptLexer
            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|'
             r'throw|try|catch|finally|new|delete|typeof|instanceof|void|'
             r'this)\b', Keyword, 'slashstartsregex'),
            (r'(var|let|with|function)\b', Keyword.Declaration,
             'slashstartsregex'),
            (r'(abstract|boolean|byte|char|class|const|debugger|double|enum|export|'
             r'extends|final|float|goto|implements|import|int|interface|long|native|'
             r'package|private|protected|public|short|static|super|synchronized|throws|'
             r'transient|volatile)\b', Keyword.Reserved),
            (r'(true|false|null|NaN|Infinity|undefined)\b', Keyword.Constant),
            (r'(Array|Boolean|Date|Error|Function|Math|netscape|'
             r'Number|Object|Packages|RegExp|String|sun|decodeURI|'
             r'decodeURIComponent|encodeURI|encodeURIComponent|'
             r'Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|'
             r'window)\b', Name.Builtin),
            (r'[$a-zA-Z_][a-zA-Z0-9_]*', Name.Other),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-fA-F]+', Number.Hex),
            (r'[0-9]+', Number.Integer),
            (r'"(\\\\|\\"|[^"])*"', String.Double),
            (r"'(\\\\|\\'|[^'])*'", String.Single),
        ]
    }
agpl-3.0