repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
tchernomax/ansible
lib/ansible/modules/network/nxos/nxos_vrrp.py
68
12591
#!/usr/bin/python # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = ''' --- module: nxos_vrrp extends_documentation_fragment: nxos version_added: "2.1" short_description: Manages VRRP configuration on NX-OS switches. description: - Manages VRRP configuration on NX-OS switches. author: - Jason Edelman (@jedelman8) - Gabriele Gerbino (@GGabriele) notes: - Tested against NXOSv 7.3.(0)D1(1) on VIRL - VRRP feature needs to be enabled first on the system. - SVIs must exist before using this module. - Interface must be a L3 port before using this module. - C(state=absent) removes the VRRP group if it exists on the device. - VRRP cannot be configured on loopback interfaces. options: group: description: - VRRP group number. required: true interface: description: - Full name of interface that is being managed for VRRP. required: true interval: description: - Time interval between advertisement or 'default' keyword required: false default: 1 version_added: 2.6 priority: description: - VRRP priority or 'default' keyword default: 100 preempt: description: - Enable/Disable preempt. 
type: bool default: 'yes' vip: description: - VRRP virtual IP address or 'default' keyword authentication: description: - Clear text authentication string or 'default' keyword admin_state: description: - Used to enable or disable the VRRP process. choices: ['shutdown', 'no shutdown', 'default'] default: shutdown state: description: - Specify desired state of the resource. default: present choices: ['present','absent'] ''' EXAMPLES = ''' - name: Ensure vrrp group 100 and vip 10.1.100.1 is on vlan10 nxos_vrrp: interface: vlan10 group: 100 vip: 10.1.100.1 - name: Ensure removal of the vrrp group config # vip is required to ensure the user knows what they are removing nxos_vrrp: interface: vlan10 group: 100 vip: 10.1.100.1 state: absent - name: Re-config with more params nxos_vrrp: interface: vlan10 group: 100 vip: 10.1.100.1 preempt: false priority: 130 authentication: AUTHKEY ''' RETURN = ''' commands: description: commands sent to the device returned: always type: list sample: ["interface vlan10", "vrrp 150", "address 10.1.15.1", "authentication text testing", "no shutdown"] ''' from ansible.module_utils.network.nxos.nxos import load_config, run_commands from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec from ansible.module_utils.network.nxos.nxos import get_interface_type from ansible.module_utils.basic import AnsibleModule PARAM_TO_DEFAULT_KEYMAP = { 'priority': '100', 'interval': '1', 'vip': '0.0.0.0', 'admin_state': 'shutdown', } def execute_show_command(command, module): if 'show run' not in command: output = 'json' else: output = 'text' commands = [{ 'command': command, 'output': output, }] return run_commands(module, commands)[0] def apply_key_map(key_map, table): new_dict = {} for key, value in table.items(): new_key = key_map.get(key) if new_key: if value: new_dict[new_key] = str(value) else: new_dict[new_key] = value return new_dict def is_default(interface, module): command = 'show run interface {0}'.format(interface) 
try: body = execute_show_command(command, module) if 'invalid' in body.lower(): return 'DNE' else: raw_list = body.split('\n') if raw_list[-1].startswith('interface'): return True else: return False except (KeyError): return 'DNE' def get_interface_mode(interface, intf_type, module): command = 'show interface {0}'.format(interface) interface = {} mode = 'unknown' body = execute_show_command(command, module) interface_table = body['TABLE_interface']['ROW_interface'] name = interface_table.get('interface') if intf_type in ['ethernet', 'portchannel']: mode = str(interface_table.get('eth_mode', 'layer3')) if mode == 'access' or mode == 'trunk': mode = 'layer2' elif intf_type == 'svi': mode = 'layer3' return mode, name def get_vrr_status(group, module, interface): command = 'show run all | section interface.{0}$'.format(interface) body = execute_show_command(command, module) vrf_index = None admin_state = 'shutdown' if body: splitted_body = body.splitlines() for index in range(0, len(splitted_body) - 1): if splitted_body[index].strip() == 'vrrp {0}'.format(group): vrf_index = index vrf_section = splitted_body[vrf_index::] for line in vrf_section: if line.strip() == 'no shutdown': admin_state = 'no shutdown' break return admin_state def get_existing_vrrp(interface, group, module, name): command = 'show vrrp detail interface {0}'.format(interface) body = execute_show_command(command, module) vrrp = {} vrrp_key = { 'sh_group_id': 'group', 'sh_vip_addr': 'vip', 'sh_priority': 'priority', 'sh_group_preempt': 'preempt', 'sh_auth_text': 'authentication', 'sh_adv_interval': 'interval' } try: vrrp_table = body['TABLE_vrrp_group'] except (AttributeError, IndexError, TypeError): return {} if isinstance(vrrp_table, dict): vrrp_table = [vrrp_table] for each_vrrp in vrrp_table: vrrp_row = each_vrrp['ROW_vrrp_group'] parsed_vrrp = apply_key_map(vrrp_key, vrrp_row) if parsed_vrrp['preempt'] == 'Disable': parsed_vrrp['preempt'] = False elif parsed_vrrp['preempt'] == 'Enable': 
parsed_vrrp['preempt'] = True if parsed_vrrp['group'] == group: parsed_vrrp['admin_state'] = get_vrr_status(group, module, name) return parsed_vrrp return vrrp def get_commands_config_vrrp(delta, existing, group): commands = [] CMDS = { 'priority': 'priority {0}', 'preempt': 'preempt', 'vip': 'address {0}', 'interval': 'advertisement-interval {0}', 'auth': 'authentication text {0}', 'admin_state': '{0}', } for arg in ['vip', 'priority', 'interval', 'admin_state']: val = delta.get(arg) if val == 'default': val = PARAM_TO_DEFAULT_KEYMAP.get(arg) if val != existing.get(arg): commands.append((CMDS.get(arg)).format(val)) elif val: commands.append((CMDS.get(arg)).format(val)) preempt = delta.get('preempt') auth = delta.get('authentication') if preempt: commands.append(CMDS.get('preempt')) elif preempt is False: commands.append('no ' + CMDS.get('preempt')) if auth: if auth != 'default': commands.append((CMDS.get('auth')).format(auth)) elif existing.get('authentication'): commands.append('no authentication') if commands: commands.insert(0, 'vrrp {0}'.format(group)) return commands def flatten_list(command_lists): flat_command_list = [] for command in command_lists: if isinstance(command, list): flat_command_list.extend(command) else: flat_command_list.append(command) return flat_command_list def validate_params(param, module): value = module.params[param] if param == 'group': try: if (int(value) < 1 or int(value) > 255): raise ValueError except ValueError: module.fail_json(msg="Warning! 'group' must be an integer between" " 1 and 255", group=value) elif param == 'priority': try: if (int(value) < 1 or int(value) > 254): raise ValueError except ValueError: module.fail_json(msg="Warning! 
'priority' must be an integer " "between 1 and 254", priority=value) def main(): argument_spec = dict( group=dict(required=True, type='str'), interface=dict(required=True), interval=dict(required=False, type='str'), priority=dict(required=False, type='str'), preempt=dict(required=False, type='bool'), vip=dict(required=False, type='str'), admin_state=dict(required=False, type='str', choices=['shutdown', 'no shutdown', 'default'], default='shutdown'), authentication=dict(required=False, type='str'), state=dict(choices=['absent', 'present'], required=False, default='present') ) argument_spec.update(nxos_argument_spec) module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True) warnings = list() results = {'changed': False, 'commands': [], 'warnings': warnings} state = module.params['state'] interface = module.params['interface'].lower() group = module.params['group'] priority = module.params['priority'] interval = module.params['interval'] preempt = module.params['preempt'] vip = module.params['vip'] authentication = module.params['authentication'] admin_state = module.params['admin_state'] device_info = get_capabilities(module) network_api = device_info.get('network_api', 'nxapi') if state == 'present' and not vip: module.fail_json(msg='the "vip" param is required when state=present') intf_type = get_interface_type(interface) if (intf_type != 'ethernet' and network_api == 'cliconf'): if is_default(interface, module) == 'DNE': module.fail_json(msg='That interface does not exist yet. 
Create ' 'it first.', interface=interface) if intf_type == 'loopback': module.fail_json(msg="Loopback interfaces don't support VRRP.", interface=interface) mode, name = get_interface_mode(interface, intf_type, module) if mode == 'layer2': module.fail_json(msg='That interface is a layer2 port.\nMake it ' 'a layer 3 port first.', interface=interface) args = dict(group=group, priority=priority, preempt=preempt, vip=vip, authentication=authentication, interval=interval, admin_state=admin_state) proposed = dict((k, v) for k, v in args.items() if v is not None) existing = get_existing_vrrp(interface, group, module, name) changed = False end_state = existing commands = [] if state == 'present': delta = dict( set(proposed.items()).difference(existing.items())) if delta: command = get_commands_config_vrrp(delta, existing, group) if command: commands.append(command) elif state == 'absent': if existing: commands.append(['no vrrp {0}'.format(group)]) if commands: commands.insert(0, ['interface {0}'.format(interface)]) commands = flatten_list(commands) results['commands'] = commands results['changed'] = True if not module.check_mode: load_config(module, commands) if 'configure' in commands: commands.pop(0) module.exit_json(**results) if __name__ == '__main__': main()
gpl-3.0
StuartLittlefair/astropy
astropy/samp/hub_script.py
12
6571
# Licensed under a 3-clause BSD style license - see LICENSE.rst import copy import time import sys import argparse from astropy import log, __version__ from .hub import SAMPHubServer __all__ = ['hub_script'] def hub_script(timeout=0): """ This main function is executed by the ``samp_hub`` command line tool. """ parser = argparse.ArgumentParser(prog="samp_hub " + __version__) parser.add_argument("-k", "--secret", dest="secret", metavar="CODE", help="custom secret code.") parser.add_argument("-d", "--addr", dest="addr", metavar="ADDR", help="listening address (or IP).") parser.add_argument("-p", "--port", dest="port", metavar="PORT", type=int, help="listening port number.") parser.add_argument("-f", "--lockfile", dest="lockfile", metavar="FILE", help="custom lockfile.") parser.add_argument("-w", "--no-web-profile", dest="web_profile", action="store_false", help="run the Hub disabling the Web Profile.", default=True) parser.add_argument("-P", "--pool-size", dest="pool_size", metavar="SIZE", type=int, help="the socket connections pool size.", default=20) timeout_group = parser.add_argument_group("Timeout group", "Special options to setup hub and client timeouts." "It contains a set of special options that allows to set up the Hub and " "clients inactivity timeouts, that is the Hub or client inactivity time " "interval after which the Hub shuts down or unregisters the client. " "Notification of samp.hub.disconnect MType is sent to the clients " "forcibly unregistered for timeout expiration.") timeout_group.add_argument("-t", "--timeout", dest="timeout", metavar="SECONDS", help="set the Hub inactivity timeout in SECONDS. By default it " "is set to 0, that is the Hub never expires.", type=int, default=0) timeout_group.add_argument("-c", "--client-timeout", dest="client_timeout", metavar="SECONDS", help="set the client inactivity timeout in SECONDS. 
By default it " "is set to 0, that is the client never expires.", type=int, default=0) parser.add_argument_group(timeout_group) log_group = parser.add_argument_group("Logging options", "Additional options which allow to customize the logging output. By " "default the SAMP Hub uses the standard output and standard error " "devices to print out INFO level logging messages. Using the options " "here below it is possible to modify the logging level and also " "specify the output files where redirect the logging messages.") log_group.add_argument("-L", "--log-level", dest="loglevel", metavar="LEVEL", help="set the Hub instance log level (OFF, ERROR, WARNING, INFO, DEBUG).", type=str, choices=["OFF", "ERROR", "WARNING", "INFO", "DEBUG"], default='INFO') log_group.add_argument("-O", "--log-output", dest="logout", metavar="FILE", help="set the output file for the log messages.", default="") parser.add_argument_group(log_group) adv_group = parser.add_argument_group("Advanced group", "Advanced options addressed to facilitate administrative tasks and " "allow new non-standard Hub behaviors. In particular the --label " "options is used to assign a value to hub.label token and is used to " "assign a name to the Hub instance. " "The very special --multi option allows to start a Hub in multi-instance mode. " "Multi-instance mode is a non-standard Hub behavior that enables " "multiple contemporaneous running Hubs. 
Multi-instance hubs place " "their non-standard lock-files within the <home directory>/.samp-1 " "directory naming them making use of the format: " "samp-hub-<PID>-<ID>, where PID is the Hub process ID while ID is an " "internal ID (integer).") adv_group.add_argument("-l", "--label", dest="label", metavar="LABEL", help="assign a LABEL to the Hub.", default="") adv_group.add_argument("-m", "--multi", dest="mode", help="run the Hub in multi-instance mode generating a custom " "lockfile with a random name.", action="store_const", const='multiple', default='single') parser.add_argument_group(adv_group) options = parser.parse_args() try: if options.loglevel in ("OFF", "ERROR", "WARNING", "DEBUG", "INFO"): log.setLevel(options.loglevel) if options.logout != "": context = log.log_to_file(options.logout) else: class dummy_context: def __enter__(self): pass def __exit__(self, exc_type, exc_value, traceback): pass context = dummy_context() with context: args = copy.deepcopy(options.__dict__) del(args["loglevel"]) del(args["logout"]) hub = SAMPHubServer(**args) hub.start(False) if not timeout: while hub.is_running: time.sleep(0.01) else: time.sleep(timeout) hub.stop() except KeyboardInterrupt: try: hub.stop() except NameError: pass except OSError as e: print(f"[SAMP] Error: I/O error({e.errno}): {e.strerror}") sys.exit(1) except SystemExit: pass
bsd-3-clause
kartikkumar/sda
populationPruner.py
1
4223
''' Copyright (c) 2014, K. Kumar (me@kartikkumar.com) All rights reserved. ''' ################################################################################################### # Set up input deck ################################################################################################### # Set path to TLE catalog file. tleCatalogFilePath = "141202_3le_test_catalog.txt" # Set number of lines per entry in TLE catalog (2 or 3). tleEntryNumberOfLines = 3 # Set path to output directory. outputPath = "." # Set minimum and maximum inclinations [deg]. inclinationMinimum = inclinationMaximum = # Set minimum and maximum semi-major axes [km]. altitudeMinimum = altitudeMaximum = # Set minimum and maximum eccentricities [-]. eccentricityMinimum = eccentricityMaximum = # Set output file name. outputFileName = "test_catalog.txt" ################################################################################################### ''' DO NOT EDIT PARAMETERS BEYOND THIS POINT!!! ''' ################################################################################################### # Set up modules and packages ################################################################################################### import numpy as np from sgp4.earth_gravity import wgs72 from sgp4.io import twoline2rv from sgp4.propagation import getgravconst from twoBodyMethods import convertSemiMajorAxisToMeanMotion ################################################################################################### ################################################################################################### # Read and store TLE catalog ################################################################################################### # Read in catalog and store lines in list. fileHandle = open(tleCatalogFilePath) catalogLines = fileHandle.readlines() fileHandle.close() # Strip newline and return carriage characters. 
for i in xrange(len(catalogLines)): catalogLines[i] = catalogLines[i].strip('\r\n') # Parse TLE entries and store debris objects. debrisObjects = [] for tleEntry in xrange(0, len(catalogLines), tleEntryNumberOfLines): debrisObjects.append(twoline2rv(catalogLines[tleEntry+1], catalogLines[tleEntry+2], wgs72)) ################################################################################################### ################################################################################################### # Compute pruned list and write to file. ################################################################################################### # Convert altitude bounds [km] to mean motion [rad/min]. meanMotionMaximum = convertSemiMajorAxisToMeanMotion( \ altitudeMinimum + getgravconst('wgs72')[2], getgravconst('wgs72')[1]) * 60.0 meanMotionMinimum = convertSemiMajorAxisToMeanMotion( \ altitudeMaximum + getgravconst('wgs72')[2], getgravconst('wgs72')[1]) * 60.0 # Create pruned list based on bounds specified by user. prunedList = [(objectIndex) for objectIndex, debrisObject in enumerate(debrisObjects) \ if (debrisObject.inclo > np.deg2rad(inclinationMinimum)) \ & (debrisObject.inclo < np.deg2rad(inclinationMaximum)) \ & (debrisObject.no > meanMotionMinimum) \ & (debrisObject.no < meanMotionMaximum) \ & (debrisObject.ecco > eccentricityMinimum) \ & (debrisObject.ecco < eccentricityMaximum)] # Write pruned catalog in bounded list to text file. outputFile = open(outputFileName, 'w') for debrisObjectIndex in prunedList: outputFile.write(catalogLines[tleEntryNumberOfLines*debrisObjectIndex]) outputFile.write('\n') outputFile.write(catalogLines[tleEntryNumberOfLines*debrisObjectIndex+1]) outputFile.write('\n') if tleEntryNumberOfLines == 3: outputFile.write(catalogLines[tleEntryNumberOfLines*debrisObjectIndex+2]) outputFile.write('\n') outputFile.close() ###################################################################################################
bsd-3-clause
kkellerlbl/transform
t/py/script_checking_framework.py
6
6855
''' Created on Jan 30, 2015 @author: gaprice@lbl.gov ''' import os from biokbase.Transform import script_utils, drivers from bzrlib.config import ConfigObj import random import sys from biokbase.Transform.drivers import TransformTaskRunnerDriver import inspect KEEP_VENV = 'KB_KEEP_TEST_VENV' #CLIENT_SHORTCUTS = {drivers.WS_CLIENT: 'ws', # drivers.HANDLE_CLIENT: 'handle', # drivers.UJS_CLIENT: 'ujs'} #URL_SHORTCUTS = {drivers.WS_URL: 'ws_url', # drivers.UJS_URL: 'ujs_url', # drivers.SHOCK_URL: 'shock_url'} TEST_CFG_FILE = 'test.cfg' FILE_LOC = os.path.split(__file__)[0] sys.path.append(os.path.join(FILE_LOC, '../')) # to import demo/setup # this import is both resolved and used from demo.setup import TransformVirtualEnv # @UnresolvedImport @UnusedImport TRANSFORM_LOC = os.path.join(FILE_LOC, '../../') # maybe this should be configurable...? PLUGIN_CFG_LOC = os.path.join(TRANSFORM_LOC, 'plugins/configs') TEST_CFG_LOC = os.path.join(FILE_LOC, TEST_CFG_FILE) class ScriptCheckFramework(object): _keep_venv = False @classmethod def setup_class(cls): ''' Sets up the token, service urls, and service clients for use in tests. ''' cls.token = script_utils.get_token() cfg = ConfigObj(TEST_CFG_LOC) cls.runner = TransformTaskRunnerDriver(cfg, PLUGIN_CFG_LOC) mapping = cls.runner.get_service_mapping() # TODO discuss why we would need different names here than what is used # by the transform service, client code, and all scripts, why is this necessary at all? 
cls.ws_url = mapping["workspace"]["url"] cls.ujs_url = mapping["ujs"]["url"] cls.shock_url = mapping["shock"]["url"] cls.ws = mapping["workspace"]["client"] cls.handle = mapping["handle"]["client"] cls.ujs = mapping["ujs"]["client"] keep_venv = cls._keep_venv if os.environ.get(KEEP_VENV): keep_venv = True tve = TransformVirtualEnv(FILE_LOC, 'venv', TRANSFORM_LOC, keep_current_venv=keep_venv) tve.activate_for_current_py_process() cls.staged = {} cls.stage_data() @classmethod def keep_current_venv(cls, keep=True): ''' Call *prior* to calling setup_class() to keep the previously built virtual environment. ''' cls._keep_venv = keep @classmethod def stage_data(cls): '''Override to stage data for all tests''' pass @classmethod def upload_file_to_shock_and_get_handle(cls, test_file): ''' Uploads the file in test_file to shock and returns the node and a handle to the node. ''' node_id = script_utils.upload_file_to_shock( shock_service_url=cls.shock_url, filePath=test_file, ssl_verify=False, token=cls.token)['id'] handle_id = cls.handle.persist_handle({'id': node_id, 'type': 'shock', 'url': cls.shock_url }) return node_id, handle_id @classmethod def create_random_workspace(cls, prefix): ''' Creates a workspace with a name consisting of prefix appended with a random number and returns the new name. ''' ws_name = prefix + '_' + str(random.random())[2:] wsinfo = cls.ws.create_workspace({'workspace': ws_name}) return wsinfo[1] @classmethod def run_taskrunner(cls, method, args): ''' Runs a task runner of type method with arguments args. Method is one of 'convert', 'upload', or 'download'. Returns a tuple with the standard output as a string, the standard error as a string, and the script return code. 
''' _, results = cls.runner.run_job(method, args) return results['stdout'], results['stderr'], results['exit_code'] @classmethod def run_and_check(cls, method, args, expect_out, expect_err, not_expect_out=None, not_expect_err=None, ret_code=0): ''' Runs a task runner of type method with arguments args. Method is one of 'convert', 'upload', or 'download'. If expect_out or expect_err is None, the respective io stream is expected to be empty; otherwise a test error will result. If they are not None, the string provided must be in the respective io stream. If not_expect_out or not_expect_err is provided, the string must not be in the respective io stream. ret_code specifies the expected return code of the script, defaulting to 0. ''' stdo, stde, code = cls.run_taskrunner(method, args) if not expect_out and stdo: raise TestException('Got unexpected data in standard out:\n' + stdo) if stdo and expect_out not in stdo: raise TestException('Did not get expected data in stdout:\n' + stdo) if stdo and not_expect_out and not_expect_out in stdo: raise TestException('Got unexpected data in standard out:\n' + stdo) if not expect_err and stde: raise TestException('Got unexpected data in standard err:\n' + stde) if stde and expect_err not in stde: raise TestException('Did not get expected data in stderr:\n' + stde) if stde and not_expect_err and not_expect_err in stde: raise TestException('Got unexpected data in standard out:\n' + stdo) if ret_code != code: raise TestException('Got unexpected return code from script:' + str(code)) def get_runner_class(modulename): classes = inspect.getmembers( sys.modules[modulename], lambda member: inspect.isclass(member) and member.__module__ == modulename) for c in classes: if c[0].startswith('Test'): return c[1] raise TestException('No class starting with Test found') def run_methods(modulename, keep_venv=False): testclass = get_runner_class(modulename) if keep_venv: testclass.keep_current_venv() # for testing testclass.setup_class() test = 
testclass() methods = inspect.getmembers(test, predicate=inspect.ismethod) for meth in methods: if meth[0].startswith('test_'): print("\nRunning " + meth[0]) meth[1]() class TestException(Exception): pass
mit
hwroitzsch/BikersLifeSaver
lib/python3.5/site-packages/pip/_vendor/html5lib/filters/lint.py
500
4208
from __future__ import absolute_import, division, unicode_literals from . import _base from ..constants import cdataElements, rcdataElements, voidElements from ..constants import spaceCharacters spaceCharacters = "".join(spaceCharacters) class LintError(Exception): pass class Filter(_base.Filter): def __iter__(self): open_elements = [] contentModelFlag = "PCDATA" for token in _base.Filter.__iter__(self): type = token["type"] if type in ("StartTag", "EmptyTag"): name = token["name"] if contentModelFlag != "PCDATA": raise LintError("StartTag not in PCDATA content model flag: %(tag)s" % {"tag": name}) if not isinstance(name, str): raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) if not name: raise LintError("Empty tag name") if type == "StartTag" and name in voidElements: raise LintError("Void element reported as StartTag token: %(tag)s" % {"tag": name}) elif type == "EmptyTag" and name not in voidElements: raise LintError("Non-void element reported as EmptyTag token: %(tag)s" % {"tag": token["name"]}) if type == "StartTag": open_elements.append(name) for name, value in token["data"]: if not isinstance(name, str): raise LintError("Attribute name is not a string: %(name)r" % {"name": name}) if not name: raise LintError("Empty attribute name") if not isinstance(value, str): raise LintError("Attribute value is not a string: %(value)r" % {"value": value}) if name in cdataElements: contentModelFlag = "CDATA" elif name in rcdataElements: contentModelFlag = "RCDATA" elif name == "plaintext": contentModelFlag = "PLAINTEXT" elif type == "EndTag": name = token["name"] if not isinstance(name, str): raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) if not name: raise LintError("Empty tag name") if name in voidElements: raise LintError("Void element reported as EndTag token: %(tag)s" % {"tag": name}) start_name = open_elements.pop() if start_name != name: raise LintError("EndTag (%(end)s) does not match StartTag (%(start)s)" % {"end": name, 
"start": start_name}) contentModelFlag = "PCDATA" elif type == "Comment": if contentModelFlag != "PCDATA": raise LintError("Comment not in PCDATA content model flag") elif type in ("Characters", "SpaceCharacters"): data = token["data"] if not isinstance(data, str): raise LintError("Attribute name is not a string: %(name)r" % {"name": data}) if not data: raise LintError("%(type)s token with empty data" % {"type": type}) if type == "SpaceCharacters": data = data.strip(spaceCharacters) if data: raise LintError("Non-space character(s) found in SpaceCharacters token: %(token)r" % {"token": data}) elif type == "Doctype": name = token["name"] if contentModelFlag != "PCDATA": raise LintError("Doctype not in PCDATA content model flag: %(name)s" % {"name": name}) if not isinstance(name, str): raise LintError("Tag name is not a string: %(tag)r" % {"tag": name}) # XXX: what to do with token["data"] ? elif type in ("ParseError", "SerializeError"): pass else: raise LintError("Unknown token type: %(type)s" % {"type": type}) yield token
mit
khaotik/DaNet-Tensorflow
app/modules.py
1
21520
from math import sqrt
from functools import partial

import numpy as np
import tensorflow as tf

from app.hparams import hparams
import app.ops as ops


class ModelModule(object):
    '''
    abstract class for a sub-module of model

    Args:
        model: Model instance
    '''
    def __init__(self, model, name):
        if hparams.DEBUG:
            # populated by subclasses with tensors useful for debugging
            self.debug_fetches = {}
        self.name = name
        self.model = model

    def __call__(self, s_dropout_keep=1.):
        raise NotImplementedError()


class Encoder(ModelModule):
    '''
    maps log-magnitude-spectra to embedding
    '''
    def __init__(self, model, name):
        super(Encoder, self).__init__(model, name)

    def __call__(self, s_mixture, s_dropout_keep=1.):
        '''
        Args:
            s_mixture: tensor variable
                3d tensor of shape [batch_size, length, feature_size]

            s_dropout_keep: scalar const or variable
                keep probability for dropout layer

        Returns:
            [batch_size, length, feature_size, embedding_size]

        Notes:
            `length` is a not constant
        '''
        raise NotImplementedError()


class Estimator(ModelModule):
    '''
    Estimates attractor location, either from TF-embedding, or true source
    '''
    USE_TRUTH = True  # set this to true if it uses ground truth

    def __init__(self, model, name):
        super(Estimator, self).__init__(model, name)

    def __call__(self, s_embed, **kwargs):
        '''
        Args:
            s_embed: tensor of shape
                [batch_size, length, feature_size, embedding_size]

        Returns:
            s_attractors: tensor of shape
                [batch_size, num_signals, embedding_size]
        '''
        raise NotImplementedError()


class Separator(ModelModule):
    '''
    Given mixture power spectra, attractors, and embedding,
    produce power spectra of separated signals
    '''
    def __init__(self, model, name):
        super(Separator, self).__init__(model, name)

    def __call__(self, s_mixed_signals_pwr, s_attractors, s_embed_flat):
        '''
        Args:
            s_mixed_signals_pwr: tensor of shape
                [batch_size, length, feature_size]

            s_attractors: tensor of shape [num_attractor, embed_dims]

            s_embed_flat: tensor of shape
                [batch_size, num_signals, length, feature_size]
        '''
        raise NotImplementedError()


@hparams.register_encoder('toy')
class ToyEncoder(Encoder):
    '''
    This encoder is a 3 layer MLP for debugging purposes
    '''
    def __init__(self, model, name):
        super(ToyEncoder, self).__init__(model, name)

    def __call__(self, s_signals, s_dropout_keep=1.):
        with tf.variable_scope(self.name):
            s_mid = ops.lyr_linear(
                'linear0', s_signals, hparams.FFT_SIZE*2, axis=-1)
            s_mid = ops.relu(s_mid, hparams.RELU_LEAKAGE)
            s_out = ops.lyr_linear(
                'linear1', s_mid,
                hparams.FEATURE_SIZE * hparams.EMBED_SIZE, axis=-1)
            s_out = tf.reshape(
                s_out,
                [hparams.BATCH_SIZE, -1,
                 hparams.FEATURE_SIZE, hparams.EMBED_SIZE])
            return s_out


# TODO move this into ops.py
def _lyr_bilstm(
        name_, model_, s_input_, hdim_,
        t_axis_, axis_, w_init_, b_init_, s_dropout_keep_):
    '''
    Bidirectional LSTM layer: runs a forward and a time-reversed LSTM
    over `s_input_`, concatenates the two outputs along `axis_`,
    then applies dropout.
    '''
    ndim = len(s_input_.get_shape().as_list())
    t_axis_ %= ndim
    # slice that reverses the tensor along the time axis
    rev_signal = (slice(None),)*t_axis_ + (slice(None, None, -1),)
    s_output_fwd = model_.lyr_lstm(
        name_+'_fwd', s_input_, hdim_,
        t_axis=t_axis_, w_init=w_init_, b_init=b_init_)
    s_output_bwd = model_.lyr_lstm(
        name_+'_bwd', s_input_[rev_signal], hdim_,
        t_axis=t_axis_, w_init=w_init_, b_init=b_init_)
    s_output = tf.concat(
        [s_output_fwd, s_output_bwd[rev_signal]], axis=axis_)
    return tf.nn.dropout(s_output, keep_prob=s_dropout_keep_)


@hparams.register_encoder('lstm-orig')
class LstmEncoder(Encoder):
    '''
    LSTM network as in original paper
    '''
    def __init__(self, model, name):
        super(LstmEncoder, self).__init__(model, name)

    def __call__(self, s_signals, s_dropout_keep=1.):
        with tf.variable_scope(self.name):
            # zero-center the input per batch
            s_signals = s_signals - tf.reduce_mean(
                s_signals, axis=(1, 2), keep_dims=True)
            hdim = 600
            init_range = 1.15 / sqrt(hdim)
            w_initer = tf.random_uniform_initializer(
                -init_range, init_range, dtype=hparams.FLOATX)
            # gate-bias initialization; layout assumed to be
            # [cell, input, forget, output] -- TODO confirm against lyr_lstm
            b_init_value = np.zeros([hdim*4], dtype=hparams.FLOATX)
            b_init_value[hdim*1:hdim*2] = 1.5  # input gate
            b_init_value[hdim*2:hdim*3] = -1.  # forget gate
            b_init_value[hdim*3:hdim*4] = 1.   # output gate
            b_initer = tf.constant_initializer(
                b_init_value, dtype=hparams.FLOATX)
            s_mid0 = self.model.lyr_lstm(
                'lstm0', s_signals, hdim, t_axis=-2,
                w_init=w_initer, b_init=b_initer)
            s_mid1 = self.model.lyr_lstm(
                'lstm1', s_mid0, hdim, t_axis=-2,
                w_init=w_initer, b_init=b_initer)
            s_mid2 = self.model.lyr_lstm(
                'lstm2', s_mid1, hdim, t_axis=-2,
                w_init=w_initer, b_init=b_initer)
            s_out = self.model.lyr_lstm(
                'lstm3', s_mid2, hdim, t_axis=-2,
                w_init=w_initer, b_init=b_initer)
            s_out = s_out - tf.reduce_mean(
                s_out, axis=(1, 2), keep_dims=True)
            init_range = 1.85
            s_out = ops.lyr_linear(
                'output', s_out,
                hparams.FEATURE_SIZE * hparams.EMBED_SIZE,
                w_init=tf.random_uniform_initializer(
                    -init_range, init_range, dtype=hparams.FLOATX),
                bias=None)
            s_out = tf.reshape(
                s_out, [
                    hparams.BATCH_SIZE, -1,
                    hparams.FEATURE_SIZE, hparams.EMBED_SIZE])
            return s_out


@hparams.register_encoder('bilstm-orig')
class BiLstmEncoder(LstmEncoder):
    '''
    Bi-LSTM network as in original paper
    '''
    def __init__(self, model, name):
        # Bug fix: was super(LstmEncoder, self), which names the wrong
        # class and skips LstmEncoder.__init__ in the MRO.
        super(BiLstmEncoder, self).__init__(model, name)

    def __call__(self, s_signals, s_dropout_keep=1.):
        with tf.variable_scope(self.name):
            s_signals = s_signals - tf.reduce_mean(
                s_signals, axis=(1, 2), keep_dims=True)
            # half the unidirectional width; fwd+bwd concat restores 600
            hdim = 300
            init_range = .75 / sqrt(hdim)
            w_initer = tf.random_uniform_initializer(
                -init_range, init_range, dtype=hparams.FLOATX)
            b_init_value = np.zeros([hdim*4], dtype=hparams.FLOATX)
            b_init_value[hdim*1:hdim*2] = 1.5  # input gate
            b_init_value[hdim*2:hdim*3] = -1.  # forget gate
            b_init_value[hdim*3:hdim*4] = 1.   # output gate
            b_initer = tf.constant_initializer(
                b_init_value, dtype=hparams.FLOATX)
            s_mid0 = _lyr_bilstm(
                'lstm0', self.model, s_signals, hdim,
                -2, -1, w_initer, b_initer, s_dropout_keep)
            s_mid1 = _lyr_bilstm(
                'lstm1', self.model, s_mid0, hdim,
                -2, -1, w_initer, b_initer, s_dropout_keep)
            s_mid2 = _lyr_bilstm(
                'lstm2', self.model, s_mid1, hdim,
                -2, -1, w_initer, b_initer, s_dropout_keep)
            s_out = _lyr_bilstm(
                'lstm3', self.model, s_mid2, hdim,
                -2, -1, w_initer, b_initer, s_dropout_keep)
            s_out = s_out - tf.reduce_mean(
                s_out, axis=(1, 2), keep_dims=True)
            # init_range = 2. / sqrt(300)
            init_range = 1.85
            s_out = ops.lyr_linear(
                'output', s_out,
                hparams.FEATURE_SIZE * hparams.EMBED_SIZE,
                w_init=tf.random_uniform_initializer(
                    -init_range, init_range, dtype=hparams.FLOATX),
                bias=None)
            s_out = tf.reshape(
                s_out, [
                    hparams.BATCH_SIZE, -1,
                    hparams.FEATURE_SIZE, hparams.EMBED_SIZE])
            return s_out


@hparams.register_encoder('conv-bilstm-v1')
class ConvBiLstmEncoder(Encoder):
    '''
    Experimental CNN-LSTM hybrid network
    '''
    def __init__(self, model, name):
        super(ConvBiLstmEncoder, self).__init__(model, name)

    def __call__(self, s_signals, s_dropout_keep=1.):
        with tf.variable_scope(self.name):
            # add a channel axis: [B, 1, T, F]
            s_signals = tf.expand_dims(s_signals, 1)
            nb = hparams.BATCH_SIZE
            nfft = hparams.FFT_SIZE
            init_range = 2. / sqrt(nfft)
            w_initer = tf.random_uniform_initializer(
                -init_range, init_range, dtype=hparams.FLOATX)
            b_init_value = np.zeros([nfft*4], dtype=hparams.FLOATX)
            b_init_value[nfft*1:nfft*2] = 1.   # input gate
            b_init_value[nfft*2:nfft*3] = -1.  # forget gate
            b_init_value[nfft*3:nfft*4] = 1.   # output gate
            b_initer = tf.constant_initializer(
                b_init_value, dtype=hparams.FLOATX)
            s_mid0 = tf.layers.conv2d(
                s_signals, 8, 5,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same')
            s_mid0 = tf.layers.conv2d(
                s_mid0, 16, 5,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same')
            # [B, 16, T/2, FFT_SIZE/4]
            s_mid0 = tf.layers.max_pooling2d(
                s_mid0, (2, 2), (2, 2), data_format='channels_first')
            s_mid1 = tf.layers.conv2d(
                s_mid0, 32, 3,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same')
            s_mid1 = tf.layers.conv2d(
                s_mid1, 16, 3,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same')
            # [B, 16, T/4, FFT_SIZE/8]
            s_mid1 = tf.layers.max_pooling2d(
                s_mid1, (2, 2), (2, 2), data_format='channels_first')
            s_mid1 -= tf.reduce_mean(s_mid1, axis=(1, 2, 3), keep_dims=True)
            # [B, T/4, FFT_SIZE*2]
            s_mid2 = tf.reshape(
                tf.transpose(s_mid1, [0, 2, 1, 3]), [nb, -1, nfft*2])
            s_mid2 = _lyr_bilstm(
                'lstm0', self.model, s_mid2, nfft,
                -2, -1, w_initer, b_initer, s_dropout_keep)
            s_mid3 = _lyr_bilstm(
                'lstm1', self.model, s_mid2, nfft,
                -2, -1, w_initer, b_initer, s_dropout_keep)
            s_mid3 = tf.transpose(tf.reshape(
                s_mid3, [nb, -1, 16, nfft//8]), (0, 2, 1, 3))
            # residual connection around the recurrent stack
            s_mid3 += s_mid1
            s_mid3 -= tf.reduce_mean(s_mid3, axis=(1, 2, 3), keep_dims=True)
            conv_init_range = 3e-1
            conv_w_initer = tf.random_uniform_initializer(
                -conv_init_range, conv_init_range, dtype=hparams.FLOATX)
            # [B, 16, T/2, FFT_SIZE/4]
            s_mid4 = tf.layers.conv2d(
                s_mid3, 32, 3,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same',
                kernel_initializer=conv_w_initer)
            s_mid4 = tf.layers.conv2d(
                s_mid4, 64, 3,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same',
                kernel_initializer=conv_w_initer)
            # sub-pixel style upsampling: fold the 2x2 factor from channels
            # back into the time/frequency axes
            s_mid4 = tf.reshape(s_mid4, [
                nb, 16, 2, 2, -1, nfft//8])
            s_mid4 = tf.transpose(s_mid4, [0, 1, 4, 2, 5, 3])
            s_mid4 = tf.reshape(s_mid4, [nb, 16, -1, nfft//4])
            s_mid5 = tf.layers.conv2d(
                s_mid4, 16, 5,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same')
            # [B, 8, T/2, FFT_SIZE/4]
            s_mid5 = tf.layers.conv2d(
                s_mid5, 8, 5,
                activation=partial(ops.relu, alpha=hparams.RELU_LEAKAGE),
                data_format='channels_first', padding='same')
            # [B, T, FFT_SIZE]
            s_mid5 = tf.reshape(
                tf.transpose(s_mid5, [0, 2, 1, 3]), [nb, -1, nfft])
            s_out = tf.layers.dense(
                s_mid5,
                hparams.FEATURE_SIZE * hparams.EMBED_SIZE,
                use_bias=False)
            s_out = tf.reshape(s_out, [
                nb, -1, hparams.FEATURE_SIZE, hparams.EMBED_SIZE])
            if hparams.DEBUG:
                self.debug_fetches = dict(
                    conv_act=s_mid1,
                    lstm_act=s_mid3,
                    mid4=s_mid4)
            return s_out


@hparams.register_estimator('truth')
class AverageEstimator(Estimator):
    '''
    Estimate attractor from simple average of true assignment
    '''
    USE_TRUTH = True

    def __init__(self, model, name):
        super(AverageEstimator, self).__init__(model, name)

    def __call__(self, s_embed, s_src_pwr, s_mix_pwr, s_embed_flat=None):
        if s_embed_flat is None:
            s_embed_flat = tf.reshape(
                s_embed,
                [hparams.BATCH_SIZE, -1, hparams.EMBED_SIZE])
        with tf.variable_scope(self.name):
            # hard assignment: the dominant source per TF-bin
            s_src_assignment = tf.argmax(s_src_pwr, axis=1)
            s_indices = tf.reshape(
                s_src_assignment,
                [hparams.BATCH_SIZE, -1])
            fn_segmean = lambda _: tf.unsorted_segment_sum(
                _[0], _[1], hparams.MAX_N_SIGNAL)
            s_attractors = tf.map_fn(
                fn_segmean, (s_embed_flat, s_indices), hparams.FLOATX)
            s_attractors_wgt = tf.map_fn(
                fn_segmean, (tf.ones_like(s_embed_flat), s_indices),
                hparams.FLOATX)
            # NOTE(review): siblings use hparams.EPS here; the "+ 1."
            # smoothing may be deliberate -- confirm before unifying.
            s_attractors /= (s_attractors_wgt + 1.)
        if hparams.DEBUG:
            self.debug_fetches = dict()
        # float[B, C, E]
        return s_attractors


@hparams.register_estimator('truth-threshold')
class ThreshouldedAverageEstimator(Estimator):
    '''
    Estimate attractor from true assignment,
    cutting spectra with magnitude less than a certain threshold.
    '''
    USE_TRUTH = True

    def __init__(self, model, name):
        super(ThreshouldedAverageEstimator, self).__init__(model, name)

    def __call__(self, s_embed, s_src_pwr, s_mix_pwr, s_embed_flat=None):
        if s_embed_flat is None:
            s_embed_flat = tf.reshape(
                s_embed,
                [hparams.BATCH_SIZE, -1, hparams.EMBED_SIZE])
        with tf.variable_scope(self.name):
            # binary mask: keep only bins with mixture power above 5.
            s_wgt = tf.reshape(
                s_mix_pwr,
                [hparams.BATCH_SIZE, -1, 1])
            s_wgt = tf.cast(
                tf.less(5., s_wgt), hparams.FLOATX)
            s_src_assignment = tf.argmax(s_src_pwr, axis=1)
            s_indices = tf.reshape(
                s_src_assignment,
                [hparams.BATCH_SIZE, -1])
            fn_segmean = lambda _: tf.unsorted_segment_sum(
                _[0], _[1], hparams.MAX_N_SIGNAL)
            s_attractors = tf.map_fn(fn_segmean, (
                s_embed_flat * s_wgt, s_indices), hparams.FLOATX)
            s_attractors_wgt = tf.map_fn(fn_segmean, (
                s_wgt, s_indices), hparams.FLOATX)
            s_attractors /= (s_attractors_wgt + hparams.EPS)
        # float[B, C, E]
        return s_attractors


@hparams.register_estimator('truth-weighted')
class WeightedAverageEstimator(Estimator):
    '''
    Estimate attractor from weighted average of true assignment
    '''
    USE_TRUTH = True

    def __init__(self, model, name):
        super(WeightedAverageEstimator, self).__init__(model, name)

    def __call__(self, s_embed, s_src_pwr, s_mix_pwr, s_embed_flat=None):
        if s_embed_flat is None:
            s_embed_flat = tf.reshape(
                s_embed,
                [hparams.BATCH_SIZE, -1, hparams.EMBED_SIZE])
        with tf.variable_scope(self.name):
            # weight each bin by its mixture power
            s_wgt = tf.reshape(
                s_mix_pwr,
                [hparams.BATCH_SIZE, -1, 1])
            s_src_assignment = tf.argmax(s_src_pwr, axis=1)
            s_indices = tf.reshape(
                s_src_assignment,
                [hparams.BATCH_SIZE, -1])
            fn_segmean = lambda _: tf.unsorted_segment_sum(
                _[0], _[1], hparams.MAX_N_SIGNAL)
            s_attractors = tf.map_fn(fn_segmean, (
                s_embed_flat * s_wgt, s_indices), hparams.FLOATX)
            s_attractors_wgt = tf.map_fn(fn_segmean, (
                s_wgt, s_indices), hparams.FLOATX)
            s_attractors /= (s_attractors_wgt + hparams.EPS)
        if hparams.DEBUG:
            self.debug_fetches = dict()
        # float[B, C, E]
        return s_attractors


@hparams.register_estimator('anchor')
class AnchoredEstimator(Estimator):
    '''
    Estimate attractor from best combination from
    anchors, then perform 1-step EM
    '''
    USE_TRUTH = False

    def __init__(self, model, name):
        super(AnchoredEstimator, self).__init__(model, name)
        self.name = name

    def __call__(self, s_embed, s_src_pwr=None, s_mix_pwr=None, s_embed_flat=None):
        with tf.variable_scope(self.name):
            v_anchors = tf.get_variable(
                'anchors', [hparams.NUM_ANCHOR, hparams.EMBED_SIZE],
                initializer=tf.random_normal_initializer(
                    stddev=1.))
            # all combinations of anchors
            s_anchor_sets = ops.combinations(
                v_anchors, hparams.MAX_N_SIGNAL)
            # equation (6)
            s_anchor_assignment = tf.einsum(
                'btfe,pce->bptfc', s_embed, s_anchor_sets)
            s_anchor_assignment = tf.nn.softmax(s_anchor_assignment)
            # equation (7)
            s_attractor_sets = tf.einsum(
                'bptfc,btfe->bpce', s_anchor_assignment, s_embed)
            s_attractor_sets /= tf.expand_dims(
                tf.reduce_sum(s_anchor_assignment, axis=(2, 3)), -1)
            # equation (8)
            s_in_set_similarities = tf.reduce_max(
                tf.matmul(
                    s_attractor_sets,
                    tf.transpose(s_attractor_sets, [0, 1, 3, 2])),
                axis=(-1, -2))
            # equation (9)
            s_subset_choice = tf.argmin(s_in_set_similarities, axis=1)
            s_subset_choice = tf.transpose(tf.stack([
                tf.range(hparams.BATCH_SIZE, dtype=tf.int64),
                s_subset_choice]))
            s_attractors = tf.gather_nd(s_attractor_sets, s_subset_choice)
            if hparams.DEBUG:
                self.debug_fetches = dict(
                    asets=s_attractor_sets,
                    anchors=v_anchors,
                    subset_choice=s_subset_choice)
            return s_attractors


@hparams.register_separator('dot-sigmoid-orig')
class DotSeparatorSigmoid(Separator):
    '''
    Use dot product as similarity measure, same as original paper
    '''
    def __init__(self, model, name):
        super(DotSeparatorSigmoid, self).__init__(model, name)

    def __call__(self, s_mixed_signals_pwr, s_attractors, s_embed_flat):
        with tf.variable_scope(self.name):
            s_logits = tf.matmul(
                s_embed_flat,
                tf.transpose(s_attractors, [0, 2, 1]))
            s_logits = tf.reshape(
                s_logits, [
                    hparams.BATCH_SIZE, -1,
                    hparams.FEATURE_SIZE, hparams.MAX_N_SIGNAL])
            s_masks = tf.nn.sigmoid(s_logits)
            s_separated_signals_pwr = tf.expand_dims(
                s_mixed_signals_pwr, -1) * s_masks
            if hparams.DEBUG:
                self.debug_fetches['masks'] = s_masks
            # move the signal axis up front: [B, C, T, F]
            return tf.transpose(
                s_separated_signals_pwr, [0, 3, 1, 2])


@hparams.register_separator('dot-softmax-orig')
class DotSeparatorSoftmax(Separator):
    '''
    Use dot product as similarity measure, same as original paper
    '''
    def __init__(self, model, name):
        super(DotSeparatorSoftmax, self).__init__(model, name)

    def __call__(self, s_mixed_signals_pwr, s_attractors, s_embed_flat):
        with tf.variable_scope(self.name):
            s_logits = tf.matmul(
                s_embed_flat,
                tf.transpose(s_attractors, [0, 2, 1]))
            s_logits = tf.reshape(
                s_logits, [
                    hparams.BATCH_SIZE, -1,
                    hparams.FEATURE_SIZE, hparams.MAX_N_SIGNAL])
            # softmax across signals -> masks sum to 1 per TF-bin
            s_masks = tf.nn.softmax(s_logits)
            s_separated_signals_pwr = tf.expand_dims(
                s_mixed_signals_pwr, -1) * s_masks
            if hparams.DEBUG:
                self.debug_fetches['masks'] = s_masks
            return tf.transpose(
                s_separated_signals_pwr, [0, 3, 1, 2])
mit
ericbaze/continuum_code_2012
pydata/moin/pythonenv/lib/python2.7/encodings/iso8859_7.py
593
13100
""" Python Character Mapping Codec iso8859_7 generated from 'MAPPINGS/ISO8859/8859-7.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='iso8859-7', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( u'\x00' # 0x00 -> NULL u'\x01' # 0x01 -> START OF HEADING u'\x02' # 0x02 -> START OF TEXT u'\x03' # 0x03 -> END OF TEXT u'\x04' # 0x04 -> END OF TRANSMISSION u'\x05' # 0x05 -> ENQUIRY u'\x06' # 0x06 -> ACKNOWLEDGE u'\x07' # 0x07 -> BELL u'\x08' # 0x08 -> BACKSPACE u'\t' # 0x09 -> HORIZONTAL TABULATION u'\n' # 0x0A -> LINE FEED u'\x0b' # 0x0B -> VERTICAL TABULATION u'\x0c' # 0x0C -> FORM FEED u'\r' # 0x0D -> CARRIAGE RETURN u'\x0e' # 0x0E -> SHIFT OUT u'\x0f' # 0x0F -> SHIFT IN u'\x10' # 0x10 -> DATA LINK ESCAPE u'\x11' # 0x11 -> DEVICE CONTROL ONE u'\x12' # 0x12 -> DEVICE CONTROL TWO u'\x13' # 0x13 -> DEVICE CONTROL THREE u'\x14' # 0x14 -> DEVICE CONTROL FOUR u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE u'\x16' # 0x16 -> SYNCHRONOUS IDLE u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK u'\x18' # 0x18 -> CANCEL u'\x19' # 0x19 -> END OF MEDIUM u'\x1a' # 0x1A -> SUBSTITUTE u'\x1b' # 0x1B -> ESCAPE u'\x1c' # 
0x1C -> FILE SEPARATOR u'\x1d' # 0x1D -> GROUP SEPARATOR u'\x1e' # 0x1E -> RECORD SEPARATOR u'\x1f' # 0x1F -> UNIT SEPARATOR u' ' # 0x20 -> SPACE u'!' # 0x21 -> EXCLAMATION MARK u'"' # 0x22 -> QUOTATION MARK u'#' # 0x23 -> NUMBER SIGN u'$' # 0x24 -> DOLLAR SIGN u'%' # 0x25 -> PERCENT SIGN u'&' # 0x26 -> AMPERSAND u"'" # 0x27 -> APOSTROPHE u'(' # 0x28 -> LEFT PARENTHESIS u')' # 0x29 -> RIGHT PARENTHESIS u'*' # 0x2A -> ASTERISK u'+' # 0x2B -> PLUS SIGN u',' # 0x2C -> COMMA u'-' # 0x2D -> HYPHEN-MINUS u'.' # 0x2E -> FULL STOP u'/' # 0x2F -> SOLIDUS u'0' # 0x30 -> DIGIT ZERO u'1' # 0x31 -> DIGIT ONE u'2' # 0x32 -> DIGIT TWO u'3' # 0x33 -> DIGIT THREE u'4' # 0x34 -> DIGIT FOUR u'5' # 0x35 -> DIGIT FIVE u'6' # 0x36 -> DIGIT SIX u'7' # 0x37 -> DIGIT SEVEN u'8' # 0x38 -> DIGIT EIGHT u'9' # 0x39 -> DIGIT NINE u':' # 0x3A -> COLON u';' # 0x3B -> SEMICOLON u'<' # 0x3C -> LESS-THAN SIGN u'=' # 0x3D -> EQUALS SIGN u'>' # 0x3E -> GREATER-THAN SIGN u'?' # 0x3F -> QUESTION MARK u'@' # 0x40 -> COMMERCIAL AT u'A' # 0x41 -> LATIN CAPITAL LETTER A u'B' # 0x42 -> LATIN CAPITAL LETTER B u'C' # 0x43 -> LATIN CAPITAL LETTER C u'D' # 0x44 -> LATIN CAPITAL LETTER D u'E' # 0x45 -> LATIN CAPITAL LETTER E u'F' # 0x46 -> LATIN CAPITAL LETTER F u'G' # 0x47 -> LATIN CAPITAL LETTER G u'H' # 0x48 -> LATIN CAPITAL LETTER H u'I' # 0x49 -> LATIN CAPITAL LETTER I u'J' # 0x4A -> LATIN CAPITAL LETTER J u'K' # 0x4B -> LATIN CAPITAL LETTER K u'L' # 0x4C -> LATIN CAPITAL LETTER L u'M' # 0x4D -> LATIN CAPITAL LETTER M u'N' # 0x4E -> LATIN CAPITAL LETTER N u'O' # 0x4F -> LATIN CAPITAL LETTER O u'P' # 0x50 -> LATIN CAPITAL LETTER P u'Q' # 0x51 -> LATIN CAPITAL LETTER Q u'R' # 0x52 -> LATIN CAPITAL LETTER R u'S' # 0x53 -> LATIN CAPITAL LETTER S u'T' # 0x54 -> LATIN CAPITAL LETTER T u'U' # 0x55 -> LATIN CAPITAL LETTER U u'V' # 0x56 -> LATIN CAPITAL LETTER V u'W' # 0x57 -> LATIN CAPITAL LETTER W u'X' # 0x58 -> LATIN CAPITAL LETTER X u'Y' # 0x59 -> LATIN CAPITAL LETTER Y u'Z' # 0x5A -> LATIN CAPITAL LETTER Z u'[' 
# 0x5B -> LEFT SQUARE BRACKET u'\\' # 0x5C -> REVERSE SOLIDUS u']' # 0x5D -> RIGHT SQUARE BRACKET u'^' # 0x5E -> CIRCUMFLEX ACCENT u'_' # 0x5F -> LOW LINE u'`' # 0x60 -> GRAVE ACCENT u'a' # 0x61 -> LATIN SMALL LETTER A u'b' # 0x62 -> LATIN SMALL LETTER B u'c' # 0x63 -> LATIN SMALL LETTER C u'd' # 0x64 -> LATIN SMALL LETTER D u'e' # 0x65 -> LATIN SMALL LETTER E u'f' # 0x66 -> LATIN SMALL LETTER F u'g' # 0x67 -> LATIN SMALL LETTER G u'h' # 0x68 -> LATIN SMALL LETTER H u'i' # 0x69 -> LATIN SMALL LETTER I u'j' # 0x6A -> LATIN SMALL LETTER J u'k' # 0x6B -> LATIN SMALL LETTER K u'l' # 0x6C -> LATIN SMALL LETTER L u'm' # 0x6D -> LATIN SMALL LETTER M u'n' # 0x6E -> LATIN SMALL LETTER N u'o' # 0x6F -> LATIN SMALL LETTER O u'p' # 0x70 -> LATIN SMALL LETTER P u'q' # 0x71 -> LATIN SMALL LETTER Q u'r' # 0x72 -> LATIN SMALL LETTER R u's' # 0x73 -> LATIN SMALL LETTER S u't' # 0x74 -> LATIN SMALL LETTER T u'u' # 0x75 -> LATIN SMALL LETTER U u'v' # 0x76 -> LATIN SMALL LETTER V u'w' # 0x77 -> LATIN SMALL LETTER W u'x' # 0x78 -> LATIN SMALL LETTER X u'y' # 0x79 -> LATIN SMALL LETTER Y u'z' # 0x7A -> LATIN SMALL LETTER Z u'{' # 0x7B -> LEFT CURLY BRACKET u'|' # 0x7C -> VERTICAL LINE u'}' # 0x7D -> RIGHT CURLY BRACKET u'~' # 0x7E -> TILDE u'\x7f' # 0x7F -> DELETE u'\x80' # 0x80 -> <control> u'\x81' # 0x81 -> <control> u'\x82' # 0x82 -> <control> u'\x83' # 0x83 -> <control> u'\x84' # 0x84 -> <control> u'\x85' # 0x85 -> <control> u'\x86' # 0x86 -> <control> u'\x87' # 0x87 -> <control> u'\x88' # 0x88 -> <control> u'\x89' # 0x89 -> <control> u'\x8a' # 0x8A -> <control> u'\x8b' # 0x8B -> <control> u'\x8c' # 0x8C -> <control> u'\x8d' # 0x8D -> <control> u'\x8e' # 0x8E -> <control> u'\x8f' # 0x8F -> <control> u'\x90' # 0x90 -> <control> u'\x91' # 0x91 -> <control> u'\x92' # 0x92 -> <control> u'\x93' # 0x93 -> <control> u'\x94' # 0x94 -> <control> u'\x95' # 0x95 -> <control> u'\x96' # 0x96 -> <control> u'\x97' # 0x97 -> <control> u'\x98' # 0x98 -> <control> u'\x99' # 0x99 -> <control> u'\x9a' 
# 0x9A -> <control> u'\x9b' # 0x9B -> <control> u'\x9c' # 0x9C -> <control> u'\x9d' # 0x9D -> <control> u'\x9e' # 0x9E -> <control> u'\x9f' # 0x9F -> <control> u'\xa0' # 0xA0 -> NO-BREAK SPACE u'\u2018' # 0xA1 -> LEFT SINGLE QUOTATION MARK u'\u2019' # 0xA2 -> RIGHT SINGLE QUOTATION MARK u'\xa3' # 0xA3 -> POUND SIGN u'\u20ac' # 0xA4 -> EURO SIGN u'\u20af' # 0xA5 -> DRACHMA SIGN u'\xa6' # 0xA6 -> BROKEN BAR u'\xa7' # 0xA7 -> SECTION SIGN u'\xa8' # 0xA8 -> DIAERESIS u'\xa9' # 0xA9 -> COPYRIGHT SIGN u'\u037a' # 0xAA -> GREEK YPOGEGRAMMENI u'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK u'\xac' # 0xAC -> NOT SIGN u'\xad' # 0xAD -> SOFT HYPHEN u'\ufffe' u'\u2015' # 0xAF -> HORIZONTAL BAR u'\xb0' # 0xB0 -> DEGREE SIGN u'\xb1' # 0xB1 -> PLUS-MINUS SIGN u'\xb2' # 0xB2 -> SUPERSCRIPT TWO u'\xb3' # 0xB3 -> SUPERSCRIPT THREE u'\u0384' # 0xB4 -> GREEK TONOS u'\u0385' # 0xB5 -> GREEK DIALYTIKA TONOS u'\u0386' # 0xB6 -> GREEK CAPITAL LETTER ALPHA WITH TONOS u'\xb7' # 0xB7 -> MIDDLE DOT u'\u0388' # 0xB8 -> GREEK CAPITAL LETTER EPSILON WITH TONOS u'\u0389' # 0xB9 -> GREEK CAPITAL LETTER ETA WITH TONOS u'\u038a' # 0xBA -> GREEK CAPITAL LETTER IOTA WITH TONOS u'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK u'\u038c' # 0xBC -> GREEK CAPITAL LETTER OMICRON WITH TONOS u'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF u'\u038e' # 0xBE -> GREEK CAPITAL LETTER UPSILON WITH TONOS u'\u038f' # 0xBF -> GREEK CAPITAL LETTER OMEGA WITH TONOS u'\u0390' # 0xC0 -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS u'\u0391' # 0xC1 -> GREEK CAPITAL LETTER ALPHA u'\u0392' # 0xC2 -> GREEK CAPITAL LETTER BETA u'\u0393' # 0xC3 -> GREEK CAPITAL LETTER GAMMA u'\u0394' # 0xC4 -> GREEK CAPITAL LETTER DELTA u'\u0395' # 0xC5 -> GREEK CAPITAL LETTER EPSILON u'\u0396' # 0xC6 -> GREEK CAPITAL LETTER ZETA u'\u0397' # 0xC7 -> GREEK CAPITAL LETTER ETA u'\u0398' # 0xC8 -> GREEK CAPITAL LETTER THETA u'\u0399' # 0xC9 -> GREEK CAPITAL LETTER IOTA u'\u039a' # 0xCA -> GREEK CAPITAL LETTER KAPPA u'\u039b' 
# 0xCB -> GREEK CAPITAL LETTER LAMDA u'\u039c' # 0xCC -> GREEK CAPITAL LETTER MU u'\u039d' # 0xCD -> GREEK CAPITAL LETTER NU u'\u039e' # 0xCE -> GREEK CAPITAL LETTER XI u'\u039f' # 0xCF -> GREEK CAPITAL LETTER OMICRON u'\u03a0' # 0xD0 -> GREEK CAPITAL LETTER PI u'\u03a1' # 0xD1 -> GREEK CAPITAL LETTER RHO u'\ufffe' u'\u03a3' # 0xD3 -> GREEK CAPITAL LETTER SIGMA u'\u03a4' # 0xD4 -> GREEK CAPITAL LETTER TAU u'\u03a5' # 0xD5 -> GREEK CAPITAL LETTER UPSILON u'\u03a6' # 0xD6 -> GREEK CAPITAL LETTER PHI u'\u03a7' # 0xD7 -> GREEK CAPITAL LETTER CHI u'\u03a8' # 0xD8 -> GREEK CAPITAL LETTER PSI u'\u03a9' # 0xD9 -> GREEK CAPITAL LETTER OMEGA u'\u03aa' # 0xDA -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA u'\u03ab' # 0xDB -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA u'\u03ac' # 0xDC -> GREEK SMALL LETTER ALPHA WITH TONOS u'\u03ad' # 0xDD -> GREEK SMALL LETTER EPSILON WITH TONOS u'\u03ae' # 0xDE -> GREEK SMALL LETTER ETA WITH TONOS u'\u03af' # 0xDF -> GREEK SMALL LETTER IOTA WITH TONOS u'\u03b0' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS u'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA u'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA u'\u03b3' # 0xE3 -> GREEK SMALL LETTER GAMMA u'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA u'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON u'\u03b6' # 0xE6 -> GREEK SMALL LETTER ZETA u'\u03b7' # 0xE7 -> GREEK SMALL LETTER ETA u'\u03b8' # 0xE8 -> GREEK SMALL LETTER THETA u'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA u'\u03ba' # 0xEA -> GREEK SMALL LETTER KAPPA u'\u03bb' # 0xEB -> GREEK SMALL LETTER LAMDA u'\u03bc' # 0xEC -> GREEK SMALL LETTER MU u'\u03bd' # 0xED -> GREEK SMALL LETTER NU u'\u03be' # 0xEE -> GREEK SMALL LETTER XI u'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON u'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI u'\u03c1' # 0xF1 -> GREEK SMALL LETTER RHO u'\u03c2' # 0xF2 -> GREEK SMALL LETTER FINAL SIGMA u'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA u'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU u'\u03c5' # 0xF5 -> GREEK SMALL LETTER UPSILON 
u'\u03c6' # 0xF6 -> GREEK SMALL LETTER PHI u'\u03c7' # 0xF7 -> GREEK SMALL LETTER CHI u'\u03c8' # 0xF8 -> GREEK SMALL LETTER PSI u'\u03c9' # 0xF9 -> GREEK SMALL LETTER OMEGA u'\u03ca' # 0xFA -> GREEK SMALL LETTER IOTA WITH DIALYTIKA u'\u03cb' # 0xFB -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA u'\u03cc' # 0xFC -> GREEK SMALL LETTER OMICRON WITH TONOS u'\u03cd' # 0xFD -> GREEK SMALL LETTER UPSILON WITH TONOS u'\u03ce' # 0xFE -> GREEK SMALL LETTER OMEGA WITH TONOS u'\ufffe' ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
gpl-2.0
pcdocker/pcdocker
tests/engine.py
2
5080
from subprocess import call
from os import path
import hitchpostgres
import hitchselenium
import hitchpython
import hitchserve
import hitchredis
import hitchtest
import hitchsmtp


# Get directory above this file
PROJECT_DIRECTORY = path.abspath(path.join(path.dirname(__file__), '..'))


class ExecutionEngine(hitchtest.ExecutionEngine):
    """Engine for orchestating and interacting with the app."""

    def set_up(self):
        """Ensure virtualenv present, then run all services."""
        python_package = hitchpython.PythonPackage(
            python_version=self.preconditions['python_version']
        )
        python_package.build()
        python_package.verify()

        call([
            python_package.pip, "install", "-r",
            path.join(PROJECT_DIRECTORY, "requirements/local.txt")
        ])

        postgres_package = hitchpostgres.PostgresPackage(
            version=self.settings["postgres_version"],
        )
        postgres_package.build()
        postgres_package.verify()

        redis_package = hitchredis.RedisPackage(version="2.8.4")
        redis_package.build()
        redis_package.verify()

        self.services = hitchserve.ServiceBundle(
            project_directory=PROJECT_DIRECTORY,
            startup_timeout=float(self.settings["startup_timeout"]),
            shutdown_timeout=5.0,
        )

        postgres_user = hitchpostgres.PostgresUser("pcdocker", "password")

        self.services['Postgres'] = hitchpostgres.PostgresService(
            postgres_package=postgres_package,
            users=[postgres_user, ],
            databases=[hitchpostgres.PostgresDatabase("pcdocker", postgres_user), ]
        )

        self.services['HitchSMTP'] = hitchsmtp.HitchSMTPService(port=1025)

        self.services['Django'] = hitchpython.DjangoService(
            python=python_package.python,
            port=8000,
            version=str(self.settings.get("django_version")),
            settings="config.settings.local",
            needs=[self.services['Postgres'], ],
            env_vars=self.settings['environment_variables'],
        )

        self.services['Redis'] = hitchredis.RedisService(
            redis_package=redis_package,
            port=16379,
        )

        self.services['Firefox'] = hitchselenium.SeleniumService(
            xvfb=self.settings.get("quiet", False),
            no_libfaketime=True,
        )

        # import hitchcron
        # self.services['Cron'] = hitchcron.CronService(
        #     run=self.services['Django'].manage("trigger").command,
        #     every=1,
        #     needs=[ self.services['Django'], ],
        # )

        self.services.startup(interactive=False)

        # Configure selenium driver
        self.driver = self.services['Firefox'].driver
        # Bug fix: selenium's set_window_size signature is
        # (width, height); the arguments were previously swapped.
        self.driver.set_window_size(
            self.settings['window_size']['width'],
            self.settings['window_size']['height'])
        self.driver.set_window_position(0, 0)
        self.driver.implicitly_wait(2.0)
        self.driver.accept_next_alert = True

    def pause(self, message=None):
        """Stop. IPython time."""
        if hasattr(self, 'services'):
            self.services.start_interactive_mode()
        self.ipython(message)
        if hasattr(self, 'services'):
            self.services.stop_interactive_mode()

    def load_website(self):
        """Navigate to website in Firefox."""
        self.driver.get(self.services['Django'].url())

    def click(self, on):
        """Click on HTML id."""
        self.driver.find_element_by_id(on).click()

    def fill_form(self, **kwargs):
        """Fill in a form with id=value."""
        for element, text in kwargs.items():
            self.driver.find_element_by_id(element).send_keys(text)

    def click_submit(self):
        """Click on a submit button if it exists."""
        self.driver.find_element_by_css_selector("button[type=\"submit\"]").click()

    def confirm_emails_sent(self, number):
        """Count number of emails sent by app."""
        assert len(self.services['HitchSMTP'].logs.json()) == int(number)

    def wait_for_email(self, containing=None):
        """Wait for, and return email."""
        self.services['HitchSMTP'].logs.out.tail.until_json(
            lambda email: containing in email['payload'] or containing in email['subject'],
            timeout=25,
            lines_back=1,
        )

    def time_travel(self, days=""):
        """Make all services think that time has skipped forward."""
        self.services.time_travel(days=int(days))

    def on_failure(self):
        """Stop and IPython."""
        if not self.settings['quiet']:
            if self.settings.get("pause_on_failure", False):
                self.pause(message=self.stacktrace.to_template())

    def on_success(self):
        """Pause on success if enabled."""
        if self.settings.get("pause_on_success", False):
            self.pause(message="SUCCESS")

    def tear_down(self):
        """Shut down services required to run your test."""
        if hasattr(self, 'services'):
            self.services.shutdown()
bsd-3-clause
chubbymaggie/miasm
test/arch/mips32/unit/asm_test.py
2
1672
import sys
import os
from miasm2.core.cpu import ParseAst
from miasm2.arch.mips32.arch import mn_mips32, base_expr, variable
from miasm2.core import parse_asm
from miasm2.expression.expression import *
from miasm2.core import asmblock
from elfesteem.strpatchwork import StrPatchwork
from miasm2.analysis.machine import Machine
from miasm2.jitter.csts import *

# Register-name -> register-id map, handy for expression parsing in subclasses.
reg_and_id = dict(mn_mips32.regs.all_regs_ids_byname)


class Asm_Test(object):
    """Base harness: assemble self.TXT, emulate it, then verify CPU state.

    Subclasses provide a TXT attribute (mips32 assembly with a 'main'
    label) and override check() with their assertions.
    """

    def __init__(self, jitter):
        self.myjit = Machine("mips32l").jitter(jitter)
        self.myjit.init_stack()
        self.myjit.jit.log_regs = False
        self.myjit.jit.log_mn = False

    def __call__(self):
        # Full pipeline: assemble, execute, verify.
        self.asm()
        self.run()
        self.check()

    def asm(self):
        """Assemble self.TXT anchored at offset 0 and keep the raw bytes."""
        blocks, symbol_pool = parse_asm.parse_txt(
            mn_mips32, 'l', self.TXT,
            symbol_pool=self.myjit.ir_arch.symbol_pool)
        # fix shellcode addr
        symbol_pool.set_offset(symbol_pool.getby_name("main"), 0x0)
        patchwork = StrPatchwork()
        patches = asmblock.asm_resolve_final(mn_mips32, blocks, symbol_pool)
        for offset, raw in patches.items():
            patchwork[offset] = raw
        self.assembly = str(patchwork)

    def run(self):
        """Map the shellcode at 0 and emulate until it returns to RA."""
        run_addr = 0
        self.myjit.vm.add_memory_page(
            run_addr, PAGE_READ | PAGE_WRITE, self.assembly)
        # Sentinel return address: reaching it stops the emulation.
        self.myjit.cpu.RA = 0x1337beef
        self.myjit.add_breakpoint(0x1337beef, lambda x: False)
        self.myjit.init_run(run_addr)
        self.myjit.continue_run()
        assert self.myjit.pc == 0x1337beef

    def check(self):
        raise NotImplementedError('abstract method')
gpl-2.0
signed/intellij-community
plugins/hg4idea/testData/bin/hgext/convert/common.py
90
14555
# common.py - common code for the convert extension
#
# Copyright 2005-2009 Matt Mackall <mpm@selenic.com> and others
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.

import base64, errno, subprocess, os, datetime
import cPickle as pickle
from mercurial import util
from mercurial.i18n import _

propertycache = util.propertycache


def encodeargs(args):
    """Pickle *args* and fold the base64 form onto a single line."""
    def encodearg(s):
        lines = base64.encodestring(s)
        lines = [l.splitlines()[0] for l in lines]
        return ''.join(lines)

    s = pickle.dumps(args)
    return encodearg(s)


def decodeargs(s):
    """Inverse of encodeargs(): base64-decode, then unpickle."""
    s = base64.decodestring(s)
    return pickle.loads(s)


class MissingTool(Exception):
    pass


def checktool(exe, name=None, abort=True):
    """Verify external tool *exe* is on PATH; raise Abort/MissingTool if not."""
    name = name or exe
    if not util.findexe(exe):
        exc = abort and util.Abort or MissingTool
        raise exc(_('cannot find required "%s" tool') % name)


class NoRepo(Exception):
    pass

SKIPREV = 'SKIP'


class commit(object):
    # Plain record describing one source-repository changeset.
    def __init__(self, author, date, desc, parents, branch=None, rev=None,
                 extra={}, sortkey=None):
        self.author = author or 'unknown'
        self.date = date or '0 0'
        self.desc = desc
        self.parents = parents
        self.branch = branch
        self.rev = rev
        self.extra = extra
        self.sortkey = sortkey


class converter_source(object):
    """Conversion source interface"""

    def __init__(self, ui, path=None, rev=None):
        """Initialize conversion source (or raise NoRepo("message")
        exception if path is not a valid repository)"""
        self.ui = ui
        self.path = path
        self.rev = rev
        self.encoding = 'utf-8'

    def before(self):
        pass

    def after(self):
        pass

    def setrevmap(self, revmap):
        """set the map of already-converted revisions"""
        pass

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def getfile(self, name, rev):
        """Return a pair (data, mode) where data is the file content
        as a string and mode one of '', 'x' or 'l'. rev is the
        identifier returned by a previous call to getchanges().
        Raise IOError to indicate that name was deleted in rev.
        """
        raise NotImplementedError

    def getchanges(self, version):
        """Returns a tuple of (files, copies).

        files is a sorted list of (filename, id) tuples for all files
        changed between version and its first parent returned by
        getcommit(). id is the source revision id of the file.

        copies is a dictionary of dest: source
        """
        raise NotImplementedError

    def getcommit(self, version):
        """Return the commit object for version"""
        raise NotImplementedError

    def gettags(self):
        """Return the tags as a dictionary of name: revision

        Tag names must be UTF-8 strings.
        """
        raise NotImplementedError

    def recode(self, s, encoding=None):
        # Best-effort re-encode of *s* to UTF-8 bytes, falling back to
        # latin-1 and finally to lossy replacement decoding.
        if not encoding:
            encoding = self.encoding or 'utf-8'

        if isinstance(s, unicode):
            return s.encode("utf-8")
        try:
            return s.decode(encoding).encode("utf-8")
        except UnicodeError:
            try:
                return s.decode("latin-1").encode("utf-8")
            except UnicodeError:
                return s.decode(encoding, "replace").encode("utf-8")

    def getchangedfiles(self, rev, i):
        """Return the files changed by rev compared to parent[i].

        i is an index selecting one of the parents of rev.  The return
        value should be the list of files that are different in rev and
        this parent.

        If rev has no parents, i is None.

        This function is only needed to support --filemap
        """
        raise NotImplementedError

    def converted(self, rev, sinkrev):
        '''Notify the source that a revision has been converted.'''
        pass

    def hasnativeorder(self):
        """Return true if this source has a meaningful, native revision
        order. For instance, Mercurial revisions are store sequentially
        while there is no such global ordering with Darcs.
        """
        return False

    def hasnativeclose(self):
        """Return true if this source has ability to close branch.
        """
        return False

    def lookuprev(self, rev):
        """If rev is a meaningful revision reference in source, return
        the referenced identifier in the same format used by getcommit().
        return None otherwise.
        """
        return None

    def getbookmarks(self):
        """Return the bookmarks as a dictionary of name: revision

        Bookmark names are to be UTF-8 strings.
        """
        return {}


class converter_sink(object):
    """Conversion sink (target) interface"""

    def __init__(self, ui, path):
        """Initialize conversion sink (or raise NoRepo("message")
        exception if path is not a valid repository)

        created is a list of paths to remove if a fatal error occurs
        later"""
        self.ui = ui
        self.path = path
        self.created = []

    def getheads(self):
        """Return a list of this repository's heads"""
        raise NotImplementedError

    def revmapfile(self):
        """Path to a file that will contain lines
        source_rev_id sink_rev_id
        mapping equivalent revision identifiers for each system."""
        raise NotImplementedError

    def authorfile(self):
        """Path to a file that will contain lines
        srcauthor=dstauthor
        mapping equivalent authors identifiers for each system."""
        return None

    def putcommit(self, files, copies, parents, commit, source, revmap):
        """Create a revision with all changed files listed in 'files'
        and having listed parents. 'commit' is a commit object
        containing at a minimum the author, date, and message for this
        changeset.  'files' is a list of (path, version) tuples,
        'copies' is a dictionary mapping destinations to sources,
        'source' is the source repository, and 'revmap' is a mapfile
        of source revisions to converted revisions. Only getfile() and
        lookuprev() should be called on 'source'.

        Note that the sink repository is not told to update itself to
        a particular revision (or even what that revision would be)
        before it receives the file data.
        """
        raise NotImplementedError

    def puttags(self, tags):
        """Put tags into sink.

        tags: {tagname: sink_rev_id, ...} where tagname is an UTF-8 string.
        Return a pair (tag_revision, tag_parent_revision), or (None, None)
        if nothing was changed.
        """
        raise NotImplementedError

    def setbranch(self, branch, pbranches):
        """Set the current branch name. Called before the first putcommit
        on the branch.

        branch: branch name for subsequent commits
        pbranches: (converted parent revision, parent branch) tuples"""
        pass

    def setfilemapmode(self, active):
        """Tell the destination that we're using a filemap

        Some converter_sources (svn in particular) can claim that a file
        was changed in a revision, even if there was no change.  This method
        tells the destination that we're using a filemap and that it should
        filter empty revisions.
        """
        pass

    def before(self):
        pass

    def after(self):
        pass

    def putbookmarks(self, bookmarks):
        """Put bookmarks into sink.

        bookmarks: {bookmarkname: sink_rev_id, ...}
        where bookmarkname is an UTF-8 string.
        """
        pass

    def hascommit(self, rev):
        """Return True if the sink contains rev"""
        raise NotImplementedError


class commandline(object):
    """Helper for drivers that shell out to an external VCS binary."""

    def __init__(self, ui, command):
        self.ui = ui
        self.command = command

    def prerun(self):
        pass

    def postrun(self):
        pass

    def _cmdline(self, cmd, *args, **kwargs):
        # Build a shell-quoted command string; single-letter kwargs become
        # '-x value', longer ones become '--long-option=value'. A value of
        # None (TypeError on concatenation) yields a bare flag.
        cmdline = [self.command, cmd] + list(args)
        for k, v in kwargs.iteritems():
            if len(k) == 1:
                cmdline.append('-' + k)
            else:
                cmdline.append('--' + k.replace('_', '-'))
            try:
                if len(k) == 1:
                    cmdline.append('' + v)
                else:
                    cmdline[-1] += '=' + v
            except TypeError:
                pass
        cmdline = [util.shellquote(arg) for arg in cmdline]
        if not self.ui.debugflag:
            cmdline += ['2>', os.devnull]
        cmdline = ' '.join(cmdline)
        return cmdline

    def _run(self, cmd, *args, **kwargs):
        def popen(cmdline):
            p = subprocess.Popen(cmdline, shell=True, bufsize=-1,
                                 close_fds=util.closefds,
                                 stdout=subprocess.PIPE)
            return p
        return self._dorun(popen, cmd, *args, **kwargs)

    def _run2(self, cmd, *args, **kwargs):
        return self._dorun(util.popen2, cmd, *args, **kwargs)

    def _dorun(self, openfunc, cmd, *args, **kwargs):
        cmdline = self._cmdline(cmd, *args, **kwargs)
        self.ui.debug('running: %s\n' % (cmdline,))
        self.prerun()
        try:
            return openfunc(cmdline)
        finally:
            self.postrun()

    def run(self, cmd, *args, **kwargs):
        p = self._run(cmd, *args, **kwargs)
        output = p.communicate()[0]
        self.ui.debug(output)
        return output, p.returncode

    def runlines(self, cmd, *args, **kwargs):
        p = self._run(cmd, *args, **kwargs)
        output = p.stdout.readlines()
        p.wait()
        self.ui.debug(''.join(output))
        return output, p.returncode

    def checkexit(self, status, output=''):
        # Translate a non-zero exit status into util.Abort.
        if status:
            if output:
                self.ui.warn(_('%s error:\n') % self.command)
                self.ui.warn(output)
            msg = util.explainexit(status)[0]
            raise util.Abort('%s %s' % (self.command, msg))

    def run0(self, cmd, *args, **kwargs):
        output, status = self.run(cmd, *args, **kwargs)
        self.checkexit(status, output)
        return output

    def runlines0(self, cmd, *args, **kwargs):
        output, status = self.runlines(cmd, *args, **kwargs)
        self.checkexit(status, ''.join(output))
        return output

    @propertycache
    def argmax(self):
        # POSIX requires at least 4096 bytes for ARG_MAX
        argmax = 4096
        try:
            argmax = os.sysconf("SC_ARG_MAX")
        except (AttributeError, ValueError):
            pass

        # Windows shells impose their own limits on command line length,
        # down to 2047 bytes for cmd.exe under Windows NT/2k and 2500 bytes
        # for older 4nt.exe. See http://support.microsoft.com/kb/830473 for
        # details about cmd.exe limitations.

        # Since ARG_MAX is for command line _and_ environment, lower our limit
        # (and make happy Windows shells while doing this).
        return argmax // 2 - 1

    def _limit_arglist(self, arglist, cmd, *args, **kwargs):
        # Yield chunks of arglist that keep each command under argmax.
        cmdlen = len(self._cmdline(cmd, *args, **kwargs))
        limit = self.argmax - cmdlen
        bytes = 0
        fl = []
        for fn in arglist:
            b = len(fn) + 3
            if bytes + b < limit or len(fl) == 0:
                fl.append(fn)
                bytes += b
            else:
                yield fl
                fl = [fn]
                bytes = b
        if fl:
            yield fl

    def xargs(self, arglist, cmd, *args, **kwargs):
        for l in self._limit_arglist(arglist, cmd, *args, **kwargs):
            self.run0(cmd, *(list(args) + l), **kwargs)


class mapfile(dict):
    """dict persisted as 'key value' lines; writes are appended eagerly."""

    def __init__(self, ui, path):
        super(mapfile, self).__init__()
        self.ui = ui
        self.path = path
        self.fp = None
        self.order = []     # keys in first-seen order
        self._read()

    def _read(self):
        if not self.path:
            return
        try:
            fp = open(self.path, 'r')
        except IOError as err:
            if err.errno != errno.ENOENT:
                raise
            return
        for i, line in enumerate(fp):
            line = line.splitlines()[0].rstrip()
            if not line:
                # Ignore blank lines
                continue
            try:
                key, value = line.rsplit(' ', 1)
            except ValueError:
                raise util.Abort(
                    _('syntax error in %s(%d): key/value pair expected')
                    % (self.path, i + 1))
            if key not in self:
                self.order.append(key)
            super(mapfile, self).__setitem__(key, value)
        fp.close()

    def __setitem__(self, key, value):
        if self.fp is None:
            try:
                self.fp = open(self.path, 'a')
            except IOError as err:
                raise util.Abort(_('could not open map file %r: %s') %
                                 (self.path, err.strerror))
        self.fp.write('%s %s\n' % (key, value))
        self.fp.flush()
        super(mapfile, self).__setitem__(key, value)

    def close(self):
        if self.fp:
            self.fp.close()
            self.fp = None


def parsesplicemap(path):
    """Parse a splicemap, return a child/parents dictionary."""
    if not path:
        return {}
    m = {}
    try:
        fp = open(path, 'r')
        for i, line in enumerate(fp):
            line = line.splitlines()[0].rstrip()
            if not line:
                # Ignore blank lines
                continue
            try:
                child, parents = line.split(' ', 1)
                parents = parents.replace(',', ' ').split()
            except ValueError:
                raise util.Abort(_('syntax error in %s(%d): child parent1'
                                   '[,parent2] expected') % (path, i + 1))
            pp = []
            for p in parents:
                if p not in pp:
                    pp.append(p)
            m[child] = pp
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
    return m


def makedatetimestamp(t):
    """Like util.makedate() but for time t instead of current time"""
    delta = (datetime.datetime.utcfromtimestamp(t) -
             datetime.datetime.fromtimestamp(t))
    tz = delta.days * 86400 + delta.seconds
    return t, tz
apache-2.0
mattesno1/CouchPotatoServer
libs/apscheduler/triggers/interval.py
132
1388
from datetime import datetime, timedelta
from math import ceil

from apscheduler.util import convert_to_datetime, timedelta_seconds


class IntervalTrigger(object):
    """Trigger that fires on a fixed timedelta schedule from start_date."""

    def __init__(self, interval, start_date=None):
        if not isinstance(interval, timedelta):
            raise TypeError('interval must be a timedelta')
        if start_date:
            start_date = convert_to_datetime(start_date)

        self.interval = interval
        self.interval_length = timedelta_seconds(self.interval)
        if self.interval_length == 0:
            # A zero-length interval would fire continuously; clamp to 1s.
            self.interval = timedelta(seconds=1)
            self.interval_length = 1

        if start_date is None:
            # No anchor given: first fire is one interval from now.
            self.start_date = datetime.now() + self.interval
        else:
            self.start_date = convert_to_datetime(start_date)

    def get_next_fire_time(self, start_date):
        """Return the earliest scheduled instant at or after *start_date*."""
        if start_date < self.start_date:
            return self.start_date
        elapsed = timedelta_seconds(start_date - self.start_date)
        intervals_passed = int(ceil(elapsed / self.interval_length))
        return self.start_date + self.interval * intervals_passed

    def __str__(self):
        return 'interval[%s]' % str(self.interval)

    def __repr__(self):
        return "<%s (interval=%s, start_date=%s)>" % (
            self.__class__.__name__, repr(self.interval),
            repr(self.start_date))
gpl-3.0
GrognardsFromHell/TemplePlus
tpdatasrc/tpgamefiles/scr/tpModifiers/sp_magic_circle_outward_fixed.py
1
9636
from templeplus.pymod import PythonModifier from toee import * import tpdp from utilities import * print "Registering sp-Magic Circle Outward Fixed" # args: (0-4) # 0 - spell_id # 1 - duration # 2 - Type Flag (1-Good, 2- Evil, 3- Law, 4- Chaos) # 3 - aoe id # 4 - spare #List of spells protection from evil grants immunity for (should really suppress). #Includes charm/dominate enchantments (not sleep or confusion) and magic jar (if someone ever adds it). immunity_list = [ spell_mass_charm_monster, spell_charm_monster, spell_charm_person, spell_charm_person_or_animal, spell_magic_jar, spell_dominate_animal, spell_dominate_monster, spell_dominate_person, spell_mass_suggestion, spell_suggestion ] #Checks if a character's alignment matches the spell type def CheckAlignment(character, flag): align = character.stat_level_get(stat_alignment) if flag == 1: return align & ALIGNMENT_GOOD elif flag == 2: return align & ALIGNMENT_EVIL elif flag == 3: return align & ALIGNMENT_LAWFUL elif flag == 4: return align & ALIGNMENT_CHAOTIC return 0 #Check if a summoned creature can still attack someone protected def SummonCanAttack(character, flag): align = character.stat_level_get(stat_alignment) if flag == 1: return align & ALIGNMENT_EVIL elif flag == 2: return align & ALIGNMENT_GOOD elif flag == 4: return align & ALIGNMENT_CHAOTIC elif flag == 3: return align & ALIGNMENT_LAWFUL return 0 #Gets the text name for the type of spell def GetCircleSpellType(type): SpellName = "" if type == 1: SpellName = game.get_spell_mesline(284) elif type == 2: SpellName = game.get_spell_mesline(283) elif type == 3: SpellName = game.get_spell_mesline(285) elif type == 4: SpellName = game.get_spell_mesline(282) return SpellName #Gets the text name for the type of effect def GetCircleEffectType(type): SpellName = "" if type == 1: SpellName = game.get_spell_mesline(371) elif type == 2: SpellName = game.get_spell_mesline(370) elif type == 3: SpellName = game.get_spell_mesline(372) elif type == 4: SpellName = 
game.get_spell_mesline(368) return SpellName def MagicCircleOutwardBegin(attachee, args, evt_obj): radius_feet = 10.0 obj_evt_id = attachee.object_event_append(OLC_CRITTERS, radius_feet) args.set_arg(3, obj_evt_id) print "Magic Circle Outward: New Object Event ID: " + str(obj_evt_id) return 0 def MagicCircleOutwardAoEEntered(attachee, args, evt_obj): obj_evt_id = args.get_arg(3) if obj_evt_id != evt_obj.evt_id: print "Magic Circle Outward Aura Entered: ID mismatch " + str(evt_obj.evt_id) + ", stored was: " + str(obj_evt_id) return 0 print "Magic Circle Outward Aura Entered, event ID: " + str(obj_evt_id) tgt = evt_obj.target if tgt == OBJ_HANDLE_NULL: return 0 if attachee == OBJ_HANDLE_NULL: return 0 #All get the effect even the character the spell is on type = args.get_arg(2) spell_id = args.get_arg(0) tgt.condition_add_with_args("Magic Circle Outward Aura", obj_evt_id, type, spell_id) return 0 def MagicCircleOutwardHasSpellActive(attachee, args, evt_obj): if type == 1: if evt_obj.data1 == 284: evt_obj.return_val = 1 elif type == 2: if evt_obj.data1 == 283: evt_obj.return_val = 1 elif type == 3: if evt_obj.data1 == 285: evt_obj.return_val = 1 elif type == 4: if evt_obj.data1 == 282: evt_obj.return_val = 1 return 0 def MagicCircleOutwardKilled(attachee, args, evt_obj): args.remove_spell() args.remove_spell_mod() return 0 def MagicCircleOutwardSpellEnd(attachee, args, evt_obj): spell_id = args.get_arg(0) type = args.get_arg(2) if evt_obj.data1 == spell_id: if type == 1: game.particles( 'sp-Magic Circle against Good-END', attachee) elif type == 2: game.particles( 'sp-Magic Circle against Evil-END', attachee) elif type == 3: game.particles( 'sp-Magic Circle against Law-END', attachee) elif type == 4: game.particles( 'sp-Magic Circle against Chaos-END', attachee) return 0 def HasMagicCircleQuery(attachee, args, evt_obj): type = args.get_arg(2) if evt_obj.data1 == type: evt_obj.return_val = 1 return 0 def MagicCircleOutwardTooltip(attachee, args, evt_obj): type = 
args.get_arg(2) SpellName = GetCircleSpellType(type) evt_obj.append(SpellName) return 0 def MagicCircleOutwardEffectTooltip(attachee, args, evt_obj): type = args.get_arg(2) SpellName = GetCircleSpellType(type) evt_obj.append(tpdp.hash("MAGIC_CIRCLE_OUTWARD_FIXED"), -2, SpellName + "") return 0 magicCircleOutward = PythonModifier("sp-Magic Circle Outward Fixed", 5, 0) #Note: Allows duplicates (for the other versions of the spell) magicCircleOutward.AddHook(ET_OnConditionAdd, EK_NONE, MagicCircleOutwardBegin, ()) magicCircleOutward.AddHook(ET_OnD20Signal, EK_S_Teleport_Reconnect, MagicCircleOutwardBegin, ()) magicCircleOutward.AddHook(ET_OnObjectEvent, EK_OnEnterAoE, MagicCircleOutwardAoEEntered, ()) magicCircleOutward.AddHook(ET_OnD20Query, EK_Q_Critter_Has_Spell_Active, MagicCircleOutwardHasSpellActive, ()) magicCircleOutward.AddHook(ET_OnD20Signal, EK_S_Killed, MagicCircleOutwardKilled, ()) magicCircleOutward.AddHook(ET_OnD20Signal, EK_S_Spell_End, MagicCircleOutwardSpellEnd, ()) magicCircleOutward.AddHook(ET_OnD20PythonQuery, "Has Magic Circle Spell", HasMagicCircleQuery, ()) magicCircleOutward.AddHook(ET_OnGetTooltip, EK_NONE, MagicCircleOutwardTooltip, ()) magicCircleOutward.AddHook(ET_OnGetEffectTooltip, EK_NONE, MagicCircleOutwardEffectTooltip, ()) magicCircleOutward.AddSpellDispelCheckStandard() magicCircleOutward.AddSpellTeleportPrepareStandard() magicCircleOutward.AddSpellTeleportReconnectStandard() magicCircleOutward.AddSpellCountdownStandardHook() def MagicCircleOutwardEffAoEExited(attachee, args, evt_obj): obj_evt_id = args.get_arg(0) if obj_evt_id != evt_obj.evt_id: print "Magic Circle Outward Aura: ID mismatch " + str(evt_obj.evt_id) + ", stored was: " + str(obj_evt_id) return 0 print "Magic Circle Outward Aura (ID " + str(obj_evt_id) +") Exited, critter: " + attachee.description + " " args.condition_remove() return 0 def MagicCircleOutwardEffTooltip(attachee, args, evt_obj): type = args.get_arg(1) SpellName = GetCircleEffectType(type) 
evt_obj.append(SpellName) return 0 def MagicCircleOutwardEffEffectTooltip(attachee, args, evt_obj): type = args.get_arg(1) SpellName = GetCircleEffectType(type) evt_obj.append(tpdp.hash("MAGIC_CIRCLE_OUTWARD_FIXED_EFFECT"), -2, SpellName + "") return 0 def MagicCircleOutwardEffRemove(attachee, args, evt_obj): print "Removing Magic Circle Effect Condition " + attachee.description args.condition_remove() return 0 def MagicCircleOutwardEffACBonus(attachee, args, evt_obj): attacker = evt_obj.attack_packet.attacker if attacker == OBJ_HANDLE_NULL: return 0 type = args.get_arg(1) addBonus = CheckAlignment(attacker, type) if addBonus: evt_obj.bonus_list.add(2, 11, 207) #Deflection bonus return 0 def MagicCircleOutwardEffSavingThrow(attachee, args, evt_obj): caster = evt_obj.obj type = args.get_arg(1) addBonus = CheckAlignment(caster, type) if addBonus: evt_obj.bonus_list.add(2, 15, 207) #Resistance bonus return 0 def MagicCircleOutwardEffDamageResistance(attachee, args, evt_obj): attacker = evt_obj.attack_packet.attacker if attacker == OBJ_HANDLE_NULL: return 0 if attacker.d20_query_has_condition("sp-Summoned"): type = args.get_arg(1) canAttack = SummonCanAttack(attacker, type) if not canAttack: wpn = evt_obj.attack_packet.get_weapon_used() if wpn != OBJ_HANDLE_NULL: return 0 spell_id = args.get_arg(2) spell_packet = tpdp.SpellPacket(spell_id) resisted = spell_packet.check_spell_resistance_force(attacker) if resisted: return 0 evt_obj.damage_packet.add_mod_factor(0.0, D20DT_UNSPECIFIED, 104) #Do no damage at all return 0 def MagicCircleOutwardEffSpellImmunity(attachee, args, evt_obj): sp_pkt = evt_obj.spell_packet spell_enum = sp_pkt.spell_enum if (spell_enum == 0): return 0 # Providing immunity (the effect really should just be suppressed while inside the circle) if spell_enum in immunity_list: evt_obj.return_val = 1 return 0 def MagicCircleOutwardEffPreAdd(attachee, args, evt_obj): val = evt_obj.is_modifier("Magic Circle Outward Aura") #Is it a duplicate? 
if val: type = args.get_arg(1) if type == evt_obj.arg2: #Reject if it is the same type of protection effect evt_obj.return_val = 0 return 0 # Note: Condition allows duplicates for the other versions of the spell magicCircleOutwardEffect = PythonModifier("Magic Circle Outward Aura", 4, 0) #id, alignment flag (1-Good, 2- Evil, 3- Law, 4- Chaos), spell id, spare magicCircleOutwardEffect.AddHook(ET_OnObjectEvent, EK_OnLeaveAoE, MagicCircleOutwardEffAoEExited, ()) magicCircleOutwardEffect.AddHook(ET_OnNewDay, EK_NEWDAY_REST, MagicCircleOutwardEffRemove, ()) magicCircleOutwardEffect.AddHook(ET_OnGetTooltip, EK_NONE, MagicCircleOutwardEffTooltip, ()) magicCircleOutwardEffect.AddHook(ET_OnGetEffectTooltip, EK_NONE, MagicCircleOutwardEffEffectTooltip, ()) magicCircleOutwardEffect.AddHook(ET_OnD20Signal, EK_S_Teleport_Prepare, MagicCircleOutwardEffRemove, ()) magicCircleOutwardEffect.AddHook(ET_OnGetAC, EK_NONE, MagicCircleOutwardEffACBonus, ()) magicCircleOutwardEffect.AddHook(ET_OnSaveThrowLevel, EK_NONE, MagicCircleOutwardEffSavingThrow, () ) magicCircleOutwardEffect.AddHook(ET_OnTakingDamage2, EK_NONE, MagicCircleOutwardEffDamageResistance, ()) magicCircleOutwardEffect.AddHook(ET_OnSpellImmunityCheck, EK_NONE, MagicCircleOutwardEffSpellImmunity, ()) magicCircleOutwardEffect.AddHook(ET_OnConditionAddPre, EK_NONE, MagicCircleOutwardEffPreAdd, ())
mit
ndingwall/scikit-learn
examples/kernel_approximation/plot_scalable_poly_kernels.py
15
7266
"""
=======================================================
Scalable learning with polynomial kernel approximation
=======================================================

This example illustrates the use of :class:`PolynomialCountSketch` to
efficiently generate polynomial kernel feature-space approximations.
This is used to train linear classifiers that approximate the accuracy
of kernelized ones.

.. currentmodule:: sklearn.kernel_approximation

We use the Covtype dataset [2], trying to reproduce the experiments on the
original paper of Tensor Sketch [1], i.e. the algorithm implemented by
:class:`PolynomialCountSketch`.

First, we compute the accuracy of a linear classifier on the original
features. Then, we train linear classifiers on different numbers of
features (`n_components`) generated by :class:`PolynomialCountSketch`,
approximating the accuracy of a kernelized classifier in a scalable manner.
"""
print(__doc__)

# Author: Daniel Lopez-Sanchez <lope@usal.es>
# License: BSD 3 clause

import matplotlib.pyplot as plt
from sklearn.datasets import fetch_covtype
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler, Normalizer
from sklearn.svm import LinearSVC
from sklearn.kernel_approximation import PolynomialCountSketch
from sklearn.pipeline import Pipeline, make_pipeline
import time

# %%
# Load the Covtype dataset, which contains 581,012 samples
# with 54 features each, distributed among 6 classes. The goal of this dataset
# is to predict forest cover type from cartographic variables only
# (no remotely sensed data). After loading, we transform it into a binary
# classification problem to match the version of the dataset in the
# LIBSVM webpage [2], which was the one used in [1].

X, y = fetch_covtype(return_X_y=True)

y[y != 2] = 0
y[y == 2] = 1  # We will try to separate class 2 from the other 6 classes.

# %%
# Here we select 5,000 samples for training and 10,000 for testing.
# To actually reproduce the results in the original Tensor Sketch paper,
# select 100,000 for training.

X_train, X_test, y_train, y_test = train_test_split(X, y, train_size=5_000,
                                                    test_size=10_000,
                                                    random_state=42)

# %%
# Now scale features to the range [0, 1] to match the format of the dataset in
# the LIBSVM webpage, and then normalize to unit length as done in the
# original Tensor Sketch paper [1].

mm = make_pipeline(MinMaxScaler(), Normalizer())
X_train = mm.fit_transform(X_train)
X_test = mm.transform(X_test)

# %%
# As a baseline, train a linear SVM on the original features and print the
# accuracy. We also measure and store accuracies and training times to
# plot them later.

results = {}

lsvm = LinearSVC()
start = time.time()
lsvm.fit(X_train, y_train)
lsvm_time = time.time() - start
lsvm_score = 100 * lsvm.score(X_test, y_test)

results["LSVM"] = {"time": lsvm_time, "score": lsvm_score}
print(f"Linear SVM score on raw features: {lsvm_score:.2f}%")

# %%
# Then we train linear SVMs on the features generated by
# :class:`PolynomialCountSketch` with different values for `n_components`,
# showing that these kernel feature approximations improve the accuracy
# of linear classification. In typical application scenarios, `n_components`
# should be larger than the number of features in the input representation
# in order to achieve an improvement with respect to linear classification.
# As a rule of thumb, the optimum of evaluation score / run time cost is
# typically achieved at around `n_components` = 10 * `n_features`, though this
# might depend on the specific dataset being handled. Note that, since the
# original samples have 54 features, the explicit feature map of the
# polynomial kernel of degree four would have approximately 8.5 million
# features (precisely, 54^4). Thanks to :class:`PolynomialCountSketch`, we can
# condense most of the discriminative information of that feature space into a
# much more compact representation. We repeat the experiment `n_runs` times to
# compensate for the stochastic nature of :class:`PolynomialCountSketch`.

n_runs = 3

for n_components in [250, 500, 1000, 2000]:

    ps_lsvm_time = 0
    ps_lsvm_score = 0
    for _ in range(n_runs):

        pipeline = Pipeline(steps=[("kernel_approximator",
                                    PolynomialCountSketch(
                                        n_components=n_components,
                                        degree=4)),
                                   ("linear_classifier", LinearSVC())])

        start = time.time()
        pipeline.fit(X_train, y_train)
        ps_lsvm_time += time.time() - start
        ps_lsvm_score += 100 * pipeline.score(X_test, y_test)

    ps_lsvm_time /= n_runs
    ps_lsvm_score /= n_runs

    results[f"LSVM + PS({n_components})"] = {
        "time": ps_lsvm_time,
        "score": ps_lsvm_score
    }

    print(f"Linear SVM score on {n_components} PolynomialCountSketch " +
          f"features: {ps_lsvm_score:.2f}%")

# %%
# Train a kernelized SVM to see how well :class:`PolynomialCountSketch`
# is approximating the performance of the kernel. This, of course, may take
# some time, as the SVC class has a relatively poor scalability. This is the
# reason why kernel approximators are so useful:

from sklearn.svm import SVC

ksvm = SVC(C=500., kernel="poly", degree=4, coef0=0, gamma=1.)

start = time.time()
ksvm.fit(X_train, y_train)
ksvm_time = time.time() - start
ksvm_score = 100 * ksvm.score(X_test, y_test)

results["KSVM"] = {"time": ksvm_time, "score": ksvm_score}
print(f"Kernel-SVM score on raw features: {ksvm_score:.2f}%")

# %%
# Finally, plot the results of the different methods against their training
# times. As we can see, the kernelized SVM achieves a higher accuracy,
# but its training time is much larger and, most importantly, will grow
# much faster if the number of training samples increases.

N_COMPONENTS = [250, 500, 1000, 2000]

fig, ax = plt.subplots(figsize=(7, 7))
ax.scatter([results["LSVM"]["time"], ],
           [results["LSVM"]["score"], ],
           label="Linear SVM", c="green", marker="^")

ax.scatter([results["LSVM + PS(250)"]["time"], ],
           [results["LSVM + PS(250)"]["score"], ],
           label="Linear SVM + PolynomialCountSketch", c="blue")

for n_components in N_COMPONENTS:
    ax.scatter([results[f"LSVM + PS({n_components})"]["time"], ],
               [results[f"LSVM + PS({n_components})"]["score"], ],
               c="blue")
    ax.annotate(f"n_comp.={n_components}",
                (results[f"LSVM + PS({n_components})"]["time"],
                 results[f"LSVM + PS({n_components})"]["score"]),
                xytext=(-30, 10), textcoords="offset pixels")

ax.scatter([results["KSVM"]["time"], ],
           [results["KSVM"]["score"], ],
           label="Kernel SVM", c="red", marker="x")

ax.set_xlabel("Training time (s)")
ax.set_ylabel("Accuracy (%)")
ax.legend()
plt.show()

# %%
# References
# ==========
#
# [1] Pham, Ninh and Rasmus Pagh. "Fast and scalable polynomial kernels via
# explicit feature maps." KDD '13 (2013).
# https://doi.org/10.1145/2487575.2487591
#
# [2] LIBSVM binary datasets repository
# https://www.csie.ntu.edu.tw/~cjlin/libsvmtools/datasets/binary.html
bsd-3-clause
ForgottenKahz/CloudOPC
venv/Lib/site.py
784
27543
"""Append module search paths for third-party packages to sys.path. **************************************************************** * This module is automatically imported during initialization. * **************************************************************** In earlier versions of Python (up to 1.5a3), scripts or modules that needed to use site-specific modules would place ``import site'' somewhere near the top of their code. Because of the automatic import, this is no longer necessary (but code that does it still works). This will append site-specific paths to the module search path. On Unix, it starts with sys.prefix and sys.exec_prefix (if different) and appends lib/python<version>/site-packages as well as lib/site-python. It also supports the Debian convention of lib/python<version>/dist-packages. On other platforms (mainly Mac and Windows), it uses just sys.prefix (and sys.exec_prefix, if different, but this is unlikely). The resulting directories, if they exist, are appended to sys.path, and also inspected for path configuration files. FOR DEBIAN, this sys.path is augmented with directories in /usr/local. Local addons go into /usr/local/lib/python<version>/site-packages (resp. /usr/local/lib/site-python), Debian addons install into /usr/{lib,share}/python<version>/dist-packages. A path configuration file is a file whose name has the form <package>.pth; its contents are additional directories (one per line) to be added to sys.path. Non-existing directories (or non-directories) are never added to sys.path; no directory is added to sys.path more than once. Blank lines and lines beginning with '#' are skipped. Lines starting with 'import' are executed. For example, suppose sys.prefix and sys.exec_prefix are set to /usr/local and there is a directory /usr/local/lib/python2.X/site-packages with three subdirectories, foo, bar and spam, and two path configuration files, foo.pth and bar.pth. 
Assume foo.pth contains the following: # foo package configuration foo bar bletch and bar.pth contains: # bar package configuration bar Then the following directories are added to sys.path, in this order: /usr/local/lib/python2.X/site-packages/bar /usr/local/lib/python2.X/site-packages/foo Note that bletch is omitted because it doesn't exist; bar precedes foo because bar.pth comes alphabetically before foo.pth; and spam is omitted because it is not mentioned in either path configuration file. After these path manipulations, an attempt is made to import a module named sitecustomize, which can perform arbitrary additional site-specific customizations. If this import fails with an ImportError exception, it is silently ignored. """ import sys import os try: import __builtin__ as builtins except ImportError: import builtins try: set except NameError: from sets import Set as set # Prefixes for site-packages; add additional prefixes like /usr/local here PREFIXES = [sys.prefix, sys.exec_prefix] # Enable per user site-packages directory # set it to False to disable the feature or True to force the feature ENABLE_USER_SITE = None # for distutils.commands.install USER_SITE = None USER_BASE = None _is_64bit = (getattr(sys, 'maxsize', None) or getattr(sys, 'maxint')) > 2**32 _is_pypy = hasattr(sys, 'pypy_version_info') _is_jython = sys.platform[:4] == 'java' if _is_jython: ModuleType = type(os) def makepath(*paths): dir = os.path.join(*paths) if _is_jython and (dir == '__classpath__' or dir.startswith('__pyclasspath__')): return dir, dir dir = os.path.abspath(dir) return dir, os.path.normcase(dir) def abs__file__(): """Set all module' __file__ attribute to an absolute path""" for m in sys.modules.values(): if ((_is_jython and not isinstance(m, ModuleType)) or hasattr(m, '__loader__')): # only modules need the abspath in Jython. 
and don't mess # with a PEP 302-supplied __file__ continue f = getattr(m, '__file__', None) if f is None: continue m.__file__ = os.path.abspath(f) def removeduppaths(): """ Remove duplicate entries from sys.path along with making them absolute""" # This ensures that the initial path provided by the interpreter contains # only absolute pathnames, even if we're running from the build directory. L = [] known_paths = set() for dir in sys.path: # Filter out duplicate paths (on case-insensitive file systems also # if they only differ in case); turn relative paths into absolute # paths. dir, dircase = makepath(dir) if not dircase in known_paths: L.append(dir) known_paths.add(dircase) sys.path[:] = L return known_paths # XXX This should not be part of site.py, since it is needed even when # using the -S option for Python. See http://www.python.org/sf/586680 def addbuilddir(): """Append ./build/lib.<platform> in case we're running in the build dir (especially for Guido :-)""" from distutils.util import get_platform s = "build/lib.%s-%.3s" % (get_platform(), sys.version) if hasattr(sys, 'gettotalrefcount'): s += '-pydebug' s = os.path.join(os.path.dirname(sys.path[-1]), s) sys.path.append(s) def _init_pathinfo(): """Return a set containing all existing directory entries from sys.path""" d = set() for dir in sys.path: try: if os.path.isdir(dir): dir, dircase = makepath(dir) d.add(dircase) except TypeError: continue return d def addpackage(sitedir, name, known_paths): """Add a new path to known_paths by combining sitedir and 'name' or execute sitedir if it starts with 'import'""" if known_paths is None: _init_pathinfo() reset = 1 else: reset = 0 fullname = os.path.join(sitedir, name) try: f = open(fullname, "rU") except IOError: return try: for line in f: if line.startswith("#"): continue if line.startswith("import"): exec(line) continue line = line.rstrip() dir, dircase = makepath(sitedir, line) if not dircase in known_paths and os.path.exists(dir): sys.path.append(dir) 
known_paths.add(dircase) finally: f.close() if reset: known_paths = None return known_paths def addsitedir(sitedir, known_paths=None): """Add 'sitedir' argument to sys.path if missing and handle .pth files in 'sitedir'""" if known_paths is None: known_paths = _init_pathinfo() reset = 1 else: reset = 0 sitedir, sitedircase = makepath(sitedir) if not sitedircase in known_paths: sys.path.append(sitedir) # Add path component try: names = os.listdir(sitedir) except os.error: return names.sort() for name in names: if name.endswith(os.extsep + "pth"): addpackage(sitedir, name, known_paths) if reset: known_paths = None return known_paths def addsitepackages(known_paths, sys_prefix=sys.prefix, exec_prefix=sys.exec_prefix): """Add site-packages (and possibly site-python) to sys.path""" prefixes = [os.path.join(sys_prefix, "local"), sys_prefix] if exec_prefix != sys_prefix: prefixes.append(os.path.join(exec_prefix, "local")) for prefix in prefixes: if prefix: if sys.platform in ('os2emx', 'riscos') or _is_jython: sitedirs = [os.path.join(prefix, "Lib", "site-packages")] elif _is_pypy: sitedirs = [os.path.join(prefix, 'site-packages')] elif sys.platform == 'darwin' and prefix == sys_prefix: if prefix.startswith("/System/Library/Frameworks/"): # Apple's Python sitedirs = [os.path.join("/Library/Python", sys.version[:3], "site-packages"), os.path.join(prefix, "Extras", "lib", "python")] else: # any other Python distros on OSX work this way sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages")] elif os.sep == '/': sitedirs = [os.path.join(prefix, "lib", "python" + sys.version[:3], "site-packages"), os.path.join(prefix, "lib", "site-python"), os.path.join(prefix, "python" + sys.version[:3], "lib-dynload")] lib64_dir = os.path.join(prefix, "lib64", "python" + sys.version[:3], "site-packages") if (os.path.exists(lib64_dir) and os.path.realpath(lib64_dir) not in [os.path.realpath(p) for p in sitedirs]): if _is_64bit: sitedirs.insert(0, lib64_dir) else: 
sitedirs.append(lib64_dir) try: # sys.getobjects only available in --with-pydebug build sys.getobjects sitedirs.insert(0, os.path.join(sitedirs[0], 'debug')) except AttributeError: pass # Debian-specific dist-packages directories: sitedirs.append(os.path.join(prefix, "local/lib", "python" + sys.version[:3], "dist-packages")) if sys.version[0] == '2': sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[:3], "dist-packages")) else: sitedirs.append(os.path.join(prefix, "lib", "python" + sys.version[0], "dist-packages")) sitedirs.append(os.path.join(prefix, "lib", "dist-python")) else: sitedirs = [prefix, os.path.join(prefix, "lib", "site-packages")] if sys.platform == 'darwin': # for framework builds *only* we add the standard Apple # locations. Currently only per-user, but /Library and # /Network/Library could be added too if 'Python.framework' in prefix: home = os.environ.get('HOME') if home: sitedirs.append( os.path.join(home, 'Library', 'Python', sys.version[:3], 'site-packages')) for sitedir in sitedirs: if os.path.isdir(sitedir): addsitedir(sitedir, known_paths) return None def check_enableusersite(): """Check if user site directory is safe for inclusion The function tests for the command line flag (including environment var), process uid/gid equal to effective uid/gid. None: Disabled for security reasons False: Disabled by user (command line option) True: Safe and enabled """ if hasattr(sys, 'flags') and getattr(sys.flags, 'no_user_site', False): return False if hasattr(os, "getuid") and hasattr(os, "geteuid"): # check process uid == effective uid if os.geteuid() != os.getuid(): return None if hasattr(os, "getgid") and hasattr(os, "getegid"): # check process gid == effective gid if os.getegid() != os.getgid(): return None return True def addusersitepackages(known_paths): """Add a per user site-package to sys.path Each user has its own python directory with site-packages in the home directory. 
USER_BASE is the root directory for all Python versions USER_SITE is the user specific site-packages directory USER_SITE/.. can be used for data. """ global USER_BASE, USER_SITE, ENABLE_USER_SITE env_base = os.environ.get("PYTHONUSERBASE", None) def joinuser(*args): return os.path.expanduser(os.path.join(*args)) #if sys.platform in ('os2emx', 'riscos'): # # Don't know what to put here # USER_BASE = '' # USER_SITE = '' if os.name == "nt": base = os.environ.get("APPDATA") or "~" if env_base: USER_BASE = env_base else: USER_BASE = joinuser(base, "Python") USER_SITE = os.path.join(USER_BASE, "Python" + sys.version[0] + sys.version[2], "site-packages") else: if env_base: USER_BASE = env_base else: USER_BASE = joinuser("~", ".local") USER_SITE = os.path.join(USER_BASE, "lib", "python" + sys.version[:3], "site-packages") if ENABLE_USER_SITE and os.path.isdir(USER_SITE): addsitedir(USER_SITE, known_paths) if ENABLE_USER_SITE: for dist_libdir in ("lib", "local/lib"): user_site = os.path.join(USER_BASE, dist_libdir, "python" + sys.version[:3], "dist-packages") if os.path.isdir(user_site): addsitedir(user_site, known_paths) return known_paths def setBEGINLIBPATH(): """The OS/2 EMX port has optional extension modules that do double duty as DLLs (and must use the .DLL file extension) for other extensions. The library search path needs to be amended so these will be found during module import. Use BEGINLIBPATH so that these are at the start of the library search path. """ dllpath = os.path.join(sys.prefix, "Lib", "lib-dynload") libpath = os.environ['BEGINLIBPATH'].split(';') if libpath[-1]: libpath.append(dllpath) else: libpath[-1] = dllpath os.environ['BEGINLIBPATH'] = ';'.join(libpath) def setquit(): """Define new built-ins 'quit' and 'exit'. These are simply strings that display a hint on how to exit. """ if os.sep == ':': eof = 'Cmd-Q' elif os.sep == '\\': eof = 'Ctrl-Z plus Return' else: eof = 'Ctrl-D (i.e. 
EOF)' class Quitter(object): def __init__(self, name): self.name = name def __repr__(self): return 'Use %s() or %s to exit' % (self.name, eof) def __call__(self, code=None): # Shells like IDLE catch the SystemExit, but listen when their # stdin wrapper is closed. try: sys.stdin.close() except: pass raise SystemExit(code) builtins.quit = Quitter('quit') builtins.exit = Quitter('exit') class _Printer(object): """interactive prompt objects for printing the license text, a list of contributors and the copyright notice.""" MAXLINES = 23 def __init__(self, name, data, files=(), dirs=()): self.__name = name self.__data = data self.__files = files self.__dirs = dirs self.__lines = None def __setup(self): if self.__lines: return data = None for dir in self.__dirs: for filename in self.__files: filename = os.path.join(dir, filename) try: fp = open(filename, "rU") data = fp.read() fp.close() break except IOError: pass if data: break if not data: data = self.__data self.__lines = data.split('\n') self.__linecnt = len(self.__lines) def __repr__(self): self.__setup() if len(self.__lines) <= self.MAXLINES: return "\n".join(self.__lines) else: return "Type %s() to see the full %s text" % ((self.__name,)*2) def __call__(self): self.__setup() prompt = 'Hit Return for more, or q (and Return) to quit: ' lineno = 0 while 1: try: for i in range(lineno, lineno + self.MAXLINES): print(self.__lines[i]) except IndexError: break else: lineno += self.MAXLINES key = None while key is None: try: key = raw_input(prompt) except NameError: key = input(prompt) if key not in ('', 'q'): key = None if key == 'q': break def setcopyright(): """Set 'copyright' and 'credits' in __builtin__""" builtins.copyright = _Printer("copyright", sys.copyright) if _is_jython: builtins.credits = _Printer( "credits", "Jython is maintained by the Jython developers (www.jython.org).") elif _is_pypy: builtins.credits = _Printer( "credits", "PyPy is maintained by the PyPy developers: http://pypy.org/") else: 
builtins.credits = _Printer("credits", """\ Thanks to CWI, CNRI, BeOpen.com, Zope Corporation and a cast of thousands for supporting Python development. See www.python.org for more information.""") here = os.path.dirname(os.__file__) builtins.license = _Printer( "license", "See http://www.python.org/%.3s/license.html" % sys.version, ["LICENSE.txt", "LICENSE"], [os.path.join(here, os.pardir), here, os.curdir]) class _Helper(object): """Define the built-in 'help'. This is a wrapper around pydoc.help (with a twist). """ def __repr__(self): return "Type help() for interactive help, " \ "or help(object) for help about object." def __call__(self, *args, **kwds): import pydoc return pydoc.help(*args, **kwds) def sethelper(): builtins.help = _Helper() def aliasmbcs(): """On Windows, some default encodings are not provided by Python, while they are always available as "mbcs" in each locale. Make them usable by aliasing to "mbcs" in such a case.""" if sys.platform == 'win32': import locale, codecs enc = locale.getdefaultlocale()[1] if enc.startswith('cp'): # "cp***" ? try: codecs.lookup(enc) except LookupError: import encodings encodings._cache[enc] = encodings._unknown encodings.aliases.aliases[enc] = 'mbcs' def setencoding(): """Set the string encoding used by the Unicode implementation. The default is 'ascii', but if you're willing to experiment, you can change this.""" encoding = "ascii" # Default value set by _PyUnicode_Init() if 0: # Enable to support locale aware default string encodings. import locale loc = locale.getdefaultlocale() if loc[1]: encoding = loc[1] if 0: # Enable to switch off string to Unicode coercion and implicit # Unicode to string conversion. encoding = "undefined" if encoding != "ascii": # On Non-Unicode builds this will raise an AttributeError... sys.setdefaultencoding(encoding) # Needs Python Unicode build ! 
def execsitecustomize(): """Run custom site specific code, if available.""" try: import sitecustomize except ImportError: pass def virtual_install_main_packages(): f = open(os.path.join(os.path.dirname(__file__), 'orig-prefix.txt')) sys.real_prefix = f.read().strip() f.close() pos = 2 hardcoded_relative_dirs = [] if sys.path[0] == '': pos += 1 if _is_jython: paths = [os.path.join(sys.real_prefix, 'Lib')] elif _is_pypy: if sys.version_info > (3, 2): cpyver = '%d' % sys.version_info[0] elif sys.pypy_version_info >= (1, 5): cpyver = '%d.%d' % sys.version_info[:2] else: cpyver = '%d.%d.%d' % sys.version_info[:3] paths = [os.path.join(sys.real_prefix, 'lib_pypy'), os.path.join(sys.real_prefix, 'lib-python', cpyver)] if sys.pypy_version_info < (1, 9): paths.insert(1, os.path.join(sys.real_prefix, 'lib-python', 'modified-%s' % cpyver)) hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below # # This is hardcoded in the Python executable, but relative to sys.prefix: for path in paths[:]: plat_path = os.path.join(path, 'plat-%s' % sys.platform) if os.path.exists(plat_path): paths.append(plat_path) elif sys.platform == 'win32': paths = [os.path.join(sys.real_prefix, 'Lib'), os.path.join(sys.real_prefix, 'DLLs')] else: paths = [os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3])] hardcoded_relative_dirs = paths[:] # for the special 'darwin' case below lib64_path = os.path.join(sys.real_prefix, 'lib64', 'python'+sys.version[:3]) if os.path.exists(lib64_path): if _is_64bit: paths.insert(0, lib64_path) else: paths.append(lib64_path) # This is hardcoded in the Python executable, but relative to # sys.prefix. Debian change: we need to add the multiarch triplet # here, which is where the real stuff lives. As per PEP 421, in # Python 3.3+, this lives in sys.implementation, while in Python 2.7 # it lives in sys. try: arch = getattr(sys, 'implementation', sys)._multiarch except AttributeError: # This is a non-multiarch aware Python. Fallback to the old way. 
arch = sys.platform plat_path = os.path.join(sys.real_prefix, 'lib', 'python'+sys.version[:3], 'plat-%s' % arch) if os.path.exists(plat_path): paths.append(plat_path) # This is hardcoded in the Python executable, but # relative to sys.prefix, so we have to fix up: for path in list(paths): tk_dir = os.path.join(path, 'lib-tk') if os.path.exists(tk_dir): paths.append(tk_dir) # These are hardcoded in the Apple's Python executable, # but relative to sys.prefix, so we have to fix them up: if sys.platform == 'darwin': hardcoded_paths = [os.path.join(relative_dir, module) for relative_dir in hardcoded_relative_dirs for module in ('plat-darwin', 'plat-mac', 'plat-mac/lib-scriptpackages')] for path in hardcoded_paths: if os.path.exists(path): paths.append(path) sys.path.extend(paths) def force_global_eggs_after_local_site_packages(): """ Force easy_installed eggs in the global environment to get placed in sys.path after all packages inside the virtualenv. This maintains the "least surprise" result that packages in the virtualenv always mask global packages, never the other way around. """ egginsert = getattr(sys, '__egginsert', 0) for i, path in enumerate(sys.path): if i > egginsert and path.startswith(sys.prefix): egginsert = i sys.__egginsert = egginsert + 1 def virtual_addsitepackages(known_paths): force_global_eggs_after_local_site_packages() return addsitepackages(known_paths, sys_prefix=sys.real_prefix) def fixclasspath(): """Adjust the special classpath sys.path entries for Jython. These entries should follow the base virtualenv lib directories. 
""" paths = [] classpaths = [] for path in sys.path: if path == '__classpath__' or path.startswith('__pyclasspath__'): classpaths.append(path) else: paths.append(path) sys.path = paths sys.path.extend(classpaths) def execusercustomize(): """Run custom user specific code, if available.""" try: import usercustomize except ImportError: pass def main(): global ENABLE_USER_SITE virtual_install_main_packages() abs__file__() paths_in_sys = removeduppaths() if (os.name == "posix" and sys.path and os.path.basename(sys.path[-1]) == "Modules"): addbuilddir() if _is_jython: fixclasspath() GLOBAL_SITE_PACKAGES = not os.path.exists(os.path.join(os.path.dirname(__file__), 'no-global-site-packages.txt')) if not GLOBAL_SITE_PACKAGES: ENABLE_USER_SITE = False if ENABLE_USER_SITE is None: ENABLE_USER_SITE = check_enableusersite() paths_in_sys = addsitepackages(paths_in_sys) paths_in_sys = addusersitepackages(paths_in_sys) if GLOBAL_SITE_PACKAGES: paths_in_sys = virtual_addsitepackages(paths_in_sys) if sys.platform == 'os2emx': setBEGINLIBPATH() setquit() setcopyright() sethelper() aliasmbcs() setencoding() execsitecustomize() if ENABLE_USER_SITE: execusercustomize() # Remove sys.setdefaultencoding() so that users cannot change the # encoding after initialization. The test for presence is needed when # this module is run as a script, because this code is executed twice. if hasattr(sys, "setdefaultencoding"): del sys.setdefaultencoding main() def _script(): help = """\ %s [--user-base] [--user-site] Without arguments print some useful information With arguments print the value of USER_BASE and/or USER_SITE separated by '%s'. 
Exit codes with --user-base or --user-site: 0 - user site directory is enabled 1 - user site directory is disabled by user 2 - uses site directory is disabled by super user or for security reasons >2 - unknown error """ args = sys.argv[1:] if not args: print("sys.path = [") for dir in sys.path: print(" %r," % (dir,)) print("]") def exists(path): if os.path.isdir(path): return "exists" else: return "doesn't exist" print("USER_BASE: %r (%s)" % (USER_BASE, exists(USER_BASE))) print("USER_SITE: %r (%s)" % (USER_SITE, exists(USER_BASE))) print("ENABLE_USER_SITE: %r" % ENABLE_USER_SITE) sys.exit(0) buffer = [] if '--user-base' in args: buffer.append(USER_BASE) if '--user-site' in args: buffer.append(USER_SITE) if buffer: print(os.pathsep.join(buffer)) if ENABLE_USER_SITE: sys.exit(0) elif ENABLE_USER_SITE is False: sys.exit(1) elif ENABLE_USER_SITE is None: sys.exit(2) else: sys.exit(3) else: import textwrap print(textwrap.dedent(help % (sys.argv[0], os.pathsep))) sys.exit(10) if __name__ == '__main__': _script()
mit
ASCrookes/django
tests/urlpatterns_reverse/erroneous_urls.py
199
1350
"""URLconf fixture of deliberately broken URL patterns.

Each entry exercises a distinct failure mode of Django's URL resolver
(bad imports, missing views, non-callables, malformed regex).  The
patterns are intentionally erroneous -- do not "fix" them.
"""
import warnings

from django.conf.urls import url
from django.utils.deprecation import RemovedInDjango110Warning

from . import views

# Test deprecated behavior of passing strings as view to url().
# Some of these can be removed in Django 1.10 as they aren't convertable to
# callables.
with warnings.catch_warnings():
    # Silence the deprecation warning so the fixture imports cleanly;
    # the tests target the resolver errors, not the warning itself.
    warnings.filterwarnings('ignore', category=RemovedInDjango110Warning)
    urlpatterns = [
        # View has erroneous import
        url(r'erroneous_inner/$', views.erroneous_view),
        # Module has erroneous import
        url(r'erroneous_outer/$', 'urlpatterns_reverse.erroneous_views_module.erroneous_view'),
        # Module is an unqualified string
        url(r'erroneous_unqualified/$', 'unqualified_view'),
        # View does not exist
        url(r'missing_inner/$', 'urlpatterns_reverse.views.missing_view'),
        # View is not a callable (string import; arbitrary Python object)
        url(r'uncallable-dotted/$', 'urlpatterns_reverse.views.uncallable'),
        # View is not a callable (explicit import; arbitrary Python object)
        url(r'uncallable-object/$', views.uncallable),
        # Module does not exist
        url(r'missing_outer/$', 'urlpatterns_reverse.missing_module.missing_view'),
        # Regex contains an error (refs #6170) -- unbalanced '(' on purpose
        url(r'(regex_error/$', views.empty_view),
    ]
bsd-3-clause
motion2015/edx-platform
lms/djangoapps/certificates/migrations/0003_auto__add_field_generatedcertificate_enabled.py
188
6869
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'GeneratedCertificate.enabled' db.add_column('certificates_generatedcertificate', 'enabled', self.gf('django.db.models.fields.BooleanField')(default=False), keep_default=False) def backwards(self, orm): # Deleting field 'GeneratedCertificate.enabled' db.delete_column('certificates_generatedcertificate', 'enabled') models = { 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '1'}), 'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}), 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 
'datetime.datetime.now'}), 'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': 
'128'}), 'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}), 'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), 'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}), 'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}) }, 'certificates.generatedcertificate': { 'Meta': {'object_name': 'GeneratedCertificate'}, 'certificate_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'download_url': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}), 'enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['certificates']
agpl-3.0
SrNetoChan/QGIS
python/plugins/processing/algs/gdal/ClipVectorByMask.py
15
4270
# -*- coding: utf-8 -*-

"""
***************************************************************************
    ClipVectorByMask.py
    ---------------------
    Date                 : November 2012
    Copyright            : (C) 2012 by Victor Olaya
    Email                : volayaf at gmail dot com
***************************************************************************
*                                                                         *
*   This program is free software; you can redistribute it and/or modify  *
*   it under the terms of the GNU General Public License as published by  *
*   the Free Software Foundation; either version 2 of the License, or     *
*   (at your option) any later version.                                   *
*                                                                         *
***************************************************************************
"""

__author__ = 'Victor Olaya'
__date__ = 'November 2012'
__copyright__ = '(C) 2012, Victor Olaya'

from qgis.core import (QgsProcessing,
                       QgsProcessingAlgorithm,
                       QgsProcessingParameterDefinition,
                       QgsProcessingParameterString,
                       QgsProcessingParameterFeatureSource,
                       QgsProcessingParameterVectorDestination)
from processing.algs.gdal.GdalAlgorithm import GdalAlgorithm
from processing.algs.gdal.GdalUtils import GdalUtils


class ClipVectorByMask(GdalAlgorithm):
    """Processing algorithm wrapping ``ogr2ogr -clipsrc``: clips an input
    vector layer against a polygon mask layer.
    """

    # Parameter name constants used as keys in the parameters dict.
    INPUT = 'INPUT'
    MASK = 'MASK'
    OPTIONS = 'OPTIONS'
    OUTPUT = 'OUTPUT'

    def __init__(self):
        super().__init__()

    def flags(self):
        # FlagCanCancel is deliberately omitted: the external ogr2ogr
        # process cannot be interrupted mid-run.
        return QgsProcessingAlgorithm.FlagSupportsBatch | QgsProcessingAlgorithm.FlagRequiresMatchingCrs  # cannot cancel!

    def initAlgorithm(self, config=None):
        """Declare input layer, polygon mask, advanced creation options
        and the output destination."""
        self.addParameter(QgsProcessingParameterFeatureSource(self.INPUT,
                                                              self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterFeatureSource(self.MASK,
                                                              self.tr('Mask layer'),
                                                              [QgsProcessing.TypeVectorPolygon]))
        # Free-form ogr2ogr creation options; advanced so the dialog hides
        # it by default.
        options_param = QgsProcessingParameterString(self.OPTIONS,
                                                     self.tr('Additional creation options'),
                                                     defaultValue='',
                                                     optional=True)
        options_param.setFlags(options_param.flags() | QgsProcessingParameterDefinition.FlagAdvanced)
        self.addParameter(options_param)
        self.addParameter(QgsProcessingParameterVectorDestination(self.OUTPUT,
                                                                  self.tr('Clipped (mask)')))

    def name(self):
        # NOTE(review): id says "bypolygon" while the class/display name say
        # "by mask layer" -- kept as-is; changing it would break saved models.
        return 'clipvectorbypolygon'

    def displayName(self):
        return self.tr('Clip vector by mask layer')

    def group(self):
        return self.tr('Vector geoprocessing')

    def groupId(self):
        return 'vectorgeoprocessing'

    def commandName(self):
        return 'ogr2ogr'

    def getConsoleCommands(self, parameters, context, feedback, executing=True):
        """Build the ogr2ogr command line:
        ogr2ogr -clipsrc <mask> -clipsrclayer <maskLayerName> <output> <input> <inputLayerName> [options] [-f format]
        Returns [command, escaped-argument-string].
        """
        inLayer, inLayerName = self.getOgrCompatibleSource(self.INPUT, parameters, context, feedback, executing)
        maskLayer, maskLayerName = self.getOgrCompatibleSource(self.MASK, parameters, context, feedback, executing)
        options = self.parameterAsString(parameters, self.OPTIONS, context)
        outFile = self.parameterAsOutputLayer(parameters, self.OUTPUT, context)
        self.setOutputValue(self.OUTPUT, outFile)

        # Derive the OGR connection string and (optionally) an explicit
        # output driver name from the destination path.
        output, outputFormat = GdalUtils.ogrConnectionStringAndFormat(outFile, context)

        arguments = []
        arguments.append('-clipsrc')
        arguments.append(maskLayer)
        arguments.append('-clipsrclayer')
        arguments.append(maskLayerName)
        arguments.append(output)
        arguments.append(inLayer)
        arguments.append(inLayerName)
        if options:
            # Appended verbatim; the user-supplied string may contain
            # several space-separated ogr2ogr switches.
            arguments.append(options)
        if outputFormat:
            arguments.append('-f {}'.format(outputFormat))

        return [self.commandName(), GdalUtils.escapeAndJoin(arguments)]
gpl-2.0
crunchyroll/ef-open
tests/unit_tests/test_ef_config_resolver.py
1
2007
""" Copyright 2016-2017 Ellation, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import unittest # For local application imports, context_paths must be first despite lexicon ordering import context_paths from ef_config import EFConfig from ef_config_resolver import EFConfigResolver class TestEFConfigResolver(unittest.TestCase): """Tests for 'ef_config_resolver.py'""" def test_account_alias_of_env(self): """Does accountaliasofenv,prod resolve to the prod account alias""" ef_config_resolver = EFConfigResolver() result_config_data = ef_config_resolver.lookup("accountaliasofenv,test") if result_config_data is None: result_config_data = '' self.assertRegexpMatches(result_config_data, "^testaccount$") def test_config_custom_data(self): target_custom_data = "custom_data" EFConfig.CUSTOM_DATA = {"mock_data": "custom_data"} ef_config_resolver = EFConfigResolver() result_custom_data = ef_config_resolver.lookup("customdata,mock_data") self.assertEquals(result_custom_data, target_custom_data) def test_config_custom_data_no_data(self): ef_config_resolver = EFConfigResolver() result_custom_data = ef_config_resolver.lookup("customdata,mock_data") self.assertEquals(result_custom_data, None) def test_config_custom_data_missing_lookup(self): EFConfig.CUSTOM_DATA = {} ef_config_resolver = EFConfigResolver() result_custom_data = ef_config_resolver.lookup("customdata,mock_data") self.assertEquals(result_custom_data, None)
apache-2.0
IEEEDTU/CMS
Assessment/models/AssignmentResponse.py
1
2948
from django.db import models from Profiler.models import Student from .Assignment import * class AssignmentResponseManager(models.Manager): def addAssignmentResponse(self, request): """ add assignment response """ A = Assignment.objects.getAssignmentByCode(request) S = Student.objects.getStudentByRegIdOrRollNo(request) AR = AssignmentResponse( assignment=A, student=S, responseLink=request['responseLink'], status=1 ) AR.save() return AR def evaluateAssignmentResponse(self, request): """ saves evaluation grade of an assignment response """ A = Assignment.objects.getAssignmentByCode(request) S = Student.objects.getStudentByRegIdOrRollNo(request) AR = AssignmentResponse.objects.get(assignment=A, student=S) AR.grade = request['grade'] AR.status = 2 AR.save() return AR def deleteAssignmentResponse(self, request): """ deletes an assignment response """ A = Assignment.objects.getAssignmentByCode(request) S = Student.objects.getStudentByRegIdOrRollNo(request) AR = AssignmentResponse.objects.get(assignment=A, student=S) AR = AR.delete() return AR def retrieveAssignmentResponsesByAssignment(self, request): """ retrieve student's assignment responses on the basis of assignment """ """ note: assignment code is compulsory; submission date and status are optional fields """ A = Assignment.objects.getAssignmentByCode(request) AR = AssignmentResponse.objects.filter(assignment=A) return AR def retrieveAssignmentResponsesByStudent(self, request): """ retrieve student's assignment responses on the basis of student """ """ note: student's dtuRegId or roll no is compulsory; grade and status are optional fields """ S = Student.objects.getStudentByRegIdOrRollNo(request) AR = AssignmentResponse.objects.filter(student=S) return AR class AssignmentResponse(models.Model): NOT_SUBMITTED = 0 SUBMITTED = 1 EVALUATED = 2 STATUS_CHOICES = ((NOT_SUBMITTED, 'Not Submitted'), (SUBMITTED, 'Submitted'), (EVALUATED, 'Evaluated')) # Assignment assignment = models.ForeignKey(Assignment, 
on_delete=models.CASCADE, default=False) # Student student = models.ForeignKey(Student, on_delete=models.CASCADE, default=False) # Response link responseLink = models.URLField(max_length=200) # Grade grade = models.CharField(max_length=2, null=True) # submission date submissionDate = models.DateField(auto_now=False, auto_now_add=True) # status status = models.PositiveIntegerField(choices=STATUS_CHOICES, null=False, blank=False, default=NOT_SUBMITTED) objects = AssignmentResponseManager() def __str__(self): return str(self.assignment) + str(self.student)
mit
jonathanunderwood/numpy
tools/swig/test/testFlat.py
108
6906
#! /usr/bin/env python from __future__ import division, absolute_import, print_function # System imports from distutils.util import get_platform import os import sys import unittest import struct # Import NumPy import numpy as np major, minor = [ int(d) for d in np.__version__.split(".")[:2] ] if major == 0: BadListError = TypeError else: BadListError = ValueError import Flat ###################################################################### class FlatTestCase(unittest.TestCase): def __init__(self, methodName="runTest"): unittest.TestCase.__init__(self, methodName) self.typeStr = "double" self.typeCode = "d" # Test the (type* INPLACE_ARRAY_FLAT, int DIM_FLAT) typemap def testProcess1D(self): "Test Process function 1D array" print(self.typeStr, "... ", end=' ', file=sys.stderr) process = Flat.__dict__[self.typeStr + "Process"] pack_output = '' for i in range(10): pack_output += struct.pack(self.typeCode,i) x = np.frombuffer(pack_output, dtype=self.typeCode) y = x.copy() process(y) self.assertEquals(np.all((x+1)==y),True) def testProcess3D(self): "Test Process function 3D array" print(self.typeStr, "... ", end=' ', file=sys.stderr) process = Flat.__dict__[self.typeStr + "Process"] pack_output = '' for i in range(24): pack_output += struct.pack(self.typeCode,i) x = np.frombuffer(pack_output, dtype=self.typeCode) x.shape = (2,3,4) y = x.copy() process(y) self.assertEquals(np.all((x+1)==y),True) def testProcess3DTranspose(self): "Test Process function 3D array, FORTRAN order" print(self.typeStr, "... ", end=' ', file=sys.stderr) process = Flat.__dict__[self.typeStr + "Process"] pack_output = '' for i in range(24): pack_output += struct.pack(self.typeCode,i) x = np.frombuffer(pack_output, dtype=self.typeCode) x.shape = (2,3,4) y = x.copy() process(y.T) self.assertEquals(np.all((x.T+1)==y.T),True) def testProcessNoncontiguous(self): "Test Process function with non-contiguous array, which should raise an error" print(self.typeStr, "... 
", end=' ', file=sys.stderr) process = Flat.__dict__[self.typeStr + "Process"] pack_output = '' for i in range(24): pack_output += struct.pack(self.typeCode,i) x = np.frombuffer(pack_output, dtype=self.typeCode) x.shape = (2,3,4) self.assertRaises(TypeError, process, x[:,:,0]) ###################################################################### class scharTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "schar" self.typeCode = "b" ###################################################################### class ucharTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "uchar" self.typeCode = "B" ###################################################################### class shortTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "short" self.typeCode = "h" ###################################################################### class ushortTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "ushort" self.typeCode = "H" ###################################################################### class intTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "int" self.typeCode = "i" ###################################################################### class uintTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "uint" self.typeCode = "I" ###################################################################### class longTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "long" self.typeCode = "l" ###################################################################### class ulongTestCase(FlatTestCase): def 
__init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "ulong" self.typeCode = "L" ###################################################################### class longLongTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "longLong" self.typeCode = "q" ###################################################################### class ulongLongTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "ulongLong" self.typeCode = "Q" ###################################################################### class floatTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "float" self.typeCode = "f" ###################################################################### class doubleTestCase(FlatTestCase): def __init__(self, methodName="runTest"): FlatTestCase.__init__(self, methodName) self.typeStr = "double" self.typeCode = "d" ###################################################################### if __name__ == "__main__": # Build the test suite suite = unittest.TestSuite() suite.addTest(unittest.makeSuite( scharTestCase)) suite.addTest(unittest.makeSuite( ucharTestCase)) suite.addTest(unittest.makeSuite( shortTestCase)) suite.addTest(unittest.makeSuite( ushortTestCase)) suite.addTest(unittest.makeSuite( intTestCase)) suite.addTest(unittest.makeSuite( uintTestCase)) suite.addTest(unittest.makeSuite( longTestCase)) suite.addTest(unittest.makeSuite( ulongTestCase)) suite.addTest(unittest.makeSuite( longLongTestCase)) suite.addTest(unittest.makeSuite(ulongLongTestCase)) suite.addTest(unittest.makeSuite( floatTestCase)) suite.addTest(unittest.makeSuite( doubleTestCase)) # Execute the test suite print("Testing 1D Functions of Module Flat") print("NumPy version", np.__version__) print() result = unittest.TextTestRunner(verbosity=2).run(suite) 
sys.exit(bool(result.errors + result.failures))
bsd-3-clause
diox/olympia
src/olympia/amo/tests/test_sitemap.py
1
25742
import os from unittest import mock from datetime import datetime from django.conf import settings from django.test import override_settings from django.urls import reverse from olympia import amo from olympia.addons.models import AddonCategory from olympia.amo.sitemap import ( AccountSitemap, AddonSitemap, AMOSitemap, CategoriesSitemap, CollectionSitemap, get_sitemap_path, get_sitemap_section_pages, get_sitemaps, render_index_xml, ) from olympia.amo.reverse import override_url_prefix from olympia.amo.tests import ( addon_factory, collection_factory, TestCase, user_factory, version_factory, ) from olympia.constants.categories import CATEGORIES from olympia.constants.promoted import RECOMMENDED from olympia.ratings.models import Rating from .test_views import TEST_SITEMAPS_DIR def rating_factory(addon): return Rating.objects.create( addon=addon, version=addon.current_version, rating=2, body='text', user=user_factory(), ) class TestAddonSitemap(TestCase): def setUp(self): it = AddonSitemap.item_tuple self.addon_a = addon_a = addon_factory(slug='addon-a') self.addon_b = addon_b = addon_factory(slug='addon-b') addon_b.update(last_updated=datetime(2020, 1, 1, 1, 1, 1)) self.addon_c = addon_c = addon_factory(slug='addon-c') addon_factory(status=amo.STATUS_NOMINATED) # shouldn't show up self.android_addon = addon_factory( version_kw={'application': amo.ANDROID.id} ) # shouldn't show up in expected self.make_addon_promoted(self.android_addon, RECOMMENDED, approve_version=True) self.expected = [ it(addon_c.last_updated, reverse('addons.detail', args=[addon_c.slug]), 1), it(addon_a.last_updated, reverse('addons.detail', args=[addon_a.slug]), 1), it(addon_b.last_updated, reverse('addons.detail', args=[addon_b.slug]), 1), it( addon_c.last_updated, reverse('addons.ratings.list', args=[addon_c.slug]), 1, ), it( addon_a.last_updated, reverse('addons.ratings.list', args=[addon_a.slug]), 1, ), it( addon_b.last_updated, reverse('addons.ratings.list', args=[addon_b.slug]), 1, ), ] 
def test_basic(self): sitemap = AddonSitemap() items = list(sitemap.items()) assert items == self.expected for item in sitemap.items(): assert sitemap.location(item) == item.url assert '/en-US/firefox/' in sitemap.location(item) assert sitemap.lastmod(item) == item.last_updated def test_rating_pagination(self): # add some ratings to test the rating page pagination rating_factory(self.addon_c) rating_factory(self.addon_c) rating_factory(self.addon_c) rating_factory(self.addon_a) rating_factory(self.addon_a) # only 2 for addon_a patched_drf_setting = dict(settings.REST_FRAMEWORK) patched_drf_setting['PAGE_SIZE'] = 2 sitemap = AddonSitemap() with override_settings(REST_FRAMEWORK=patched_drf_setting): items_with_ratings = list(sitemap.items()) # only one extra url, for a second ratings page, because PAGE_SIZE = 2 extra_rating = AddonSitemap.item_tuple( self.addon_c.last_updated, reverse('addons.ratings.list', args=[self.addon_c.slug]), 2, ) assert extra_rating in items_with_ratings assert set(items_with_ratings) - set(self.expected) == {extra_rating} item = items_with_ratings[-3] assert sitemap.location(item).endswith('/reviews/?page=2') assert sitemap.lastmod(item) == item.last_updated def test_android(self): it = AddonSitemap.item_tuple android_addon = self.android_addon with override_url_prefix(app_name='android'): assert list(AddonSitemap().items()) == [ it( android_addon.last_updated, reverse('addons.detail', args=[android_addon.slug]), 1, ), it( android_addon.last_updated, reverse('addons.ratings.list', args=[android_addon.slug]), 1, ), ] # make some of the Firefox add-ons be Android compatible version_factory(addon=self.addon_a, application=amo.ANDROID.id) self.make_addon_promoted(self.addon_a, RECOMMENDED, approve_version=True) self.addon_a.reload() version_factory(addon=self.addon_b, application=amo.ANDROID.id) # don't make b recommended - should be ignored even though it's compatible assert list(AddonSitemap().items()) == [ it( self.addon_a.last_updated, 
reverse('addons.detail', args=[self.addon_a.slug]), 1, ), it( android_addon.last_updated, reverse('addons.detail', args=[android_addon.slug]), 1, ), it( self.addon_a.last_updated, reverse('addons.ratings.list', args=[self.addon_a.slug]), 1, ), it( android_addon.last_updated, reverse('addons.ratings.list', args=[android_addon.slug]), 1, ), ] def test_amo_sitemap(): sitemap = AMOSitemap() for item in sitemap.items(): urlname, app = item assert sitemap.location(item).endswith(reverse(urlname, add_prefix=False)) if app: assert sitemap.location(item).endswith( f'/{app.short}{reverse(urlname, add_prefix=False)}' ) def test_categories_sitemap(): # without any addons we should still generate a url for each category empty_cats = list(CategoriesSitemap().items()) assert empty_cats == [ *( (category, 1) for category in CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION].values() ), *( (category, 1) for category in CATEGORIES[amo.FIREFOX.id][amo.ADDON_STATICTHEME].values() ), ] # add some addons and check we generate extra pages when frontend would paginate bookmarks_category = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]['bookmarks'] shopping_category = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]['shopping'] AddonCategory.objects.create( addon=addon_factory(category=bookmarks_category), category=shopping_category ) AddonCategory.objects.create( addon=addon_factory(category=shopping_category), category=bookmarks_category ) addon_factory(category=bookmarks_category) addon_factory(category=shopping_category, status=amo.STATUS_NOMINATED) addon_factory( category=shopping_category, version_kw={'application': amo.ANDROID.id} ) # should be 4 addons in shopping (one not public, one not compatible with Firefox, # so 2 public), and 3 in bookmarks patched_drf_setting = dict(settings.REST_FRAMEWORK) patched_drf_setting['PAGE_SIZE'] = 2 with override_settings(REST_FRAMEWORK=patched_drf_setting): cats_with_addons = list(CategoriesSitemap().items()) # only one extra url, for a second 
bookmarks category page, because PAGE_SIZE = 2 extra = (bookmarks_category, 2) assert extra in cats_with_addons assert set(cats_with_addons) - set(empty_cats) == {extra} def test_collection_sitemap(mozilla_user): collection_a = collection_factory( author=mozilla_user, modified=datetime(2020, 1, 1, 1, 1, 1) ) collection_b = collection_factory( author=mozilla_user, modified=datetime(2020, 2, 2, 2, 2, 2) ) collection_factory(author=user_factory()) # not mozilla user sitemap = CollectionSitemap() assert list(sitemap.items()) == [ (collection_b.modified, collection_b.slug, mozilla_user.id), (collection_a.modified, collection_a.slug, mozilla_user.id), ] for item in sitemap.items(): assert sitemap.location(item) == reverse( 'collections.detail', args=[mozilla_user.id, item.slug] ) assert '/en-US/firefox/' in sitemap.location(item) assert sitemap.lastmod(item) == item.modified class TestAccountSitemap(TestCase): def test_basic(self): user_with_themes = user_factory() user_with_extensions = user_factory() user_with_both = user_factory() user_factory(is_public=True) # marked as public, but no addons. extension = addon_factory(users=(user_with_extensions, user_with_both)) theme = addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both) ) sitemap = AccountSitemap() items = list(sitemap.items()) assert items == [ ( theme.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( theme.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 1, ), ( extension.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ] @mock.patch('olympia.amo.sitemap.EXTENSIONS_BY_AUTHORS_PAGE_SIZE', 2) @mock.patch('olympia.amo.sitemap.THEMES_BY_AUTHORS_PAGE_SIZE', 3) def test_pagination(self): user_with_themes = user_factory() user_with_extensions = user_factory() user_with_both = user_factory() user_factory(is_public=True) # marked as public, but no addons. 
addon_factory(users=(user_with_extensions, user_with_both)) addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both) ) extra_extension_a = addon_factory(users=(user_with_extensions, user_with_both)) extra_extension_b = addon_factory(users=(user_with_extensions, user_with_both)) extra_theme_a = addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both) ) extra_theme_b = addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both) ) extra_theme_c = addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both) ) sitemap = AccountSitemap() paginated_items = list(sitemap.items()) assert paginated_items == [ ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_both.id]), 2, 1, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 2, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 1, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 2, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ] # repeat, but after changing some of the addons so they wouldn't be visible extra_theme_a.update(status=amo.STATUS_NOMINATED) assert list(AccountSitemap().items()) == [ # now only one page of themes for both users ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_both.id]), 2, 1, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ( extra_extension_b.last_updated, 
reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ] user_with_both.addonuser_set.filter(addon=extra_extension_a).update( listed=False ) assert list(AccountSitemap().items()) == [ ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_theme_c.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), # user_with_extensions still has 2 pages of extensions though ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ] extra_theme_c.delete() assert list(AccountSitemap().items()) == [ # the date used for lastmod has changed ( extra_theme_b.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_theme_b.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), # user_with_extensions still has 2 pages of extensions though ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ] # and check that deleting roles works too user_with_both.addonuser_set.filter(addon=extra_theme_b).update( role=amo.AUTHOR_ROLE_DELETED ) assert list(AccountSitemap().items()) == [ # the date used for lastmod has changed, and the order too ( extra_theme_b.last_updated, reverse('users.profile', args=[user_with_themes.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ] @mock.patch('olympia.amo.sitemap.EXTENSIONS_BY_AUTHORS_PAGE_SIZE', 2) @mock.patch('olympia.amo.sitemap.THEMES_BY_AUTHORS_PAGE_SIZE', 1) def test_android(self): # 
users with just themes on Android won't be included user_with_themes = user_factory() user_with_extensions = user_factory() user_with_both = user_factory() user_factory(is_public=True) # marked as public, but no addons. extension = addon_factory( users=(user_with_extensions, user_with_both), version_kw={'application': amo.ANDROID.id}, ) self.make_addon_promoted(extension, RECOMMENDED, approve_version=True) extra_extension_a = addon_factory( users=(user_with_extensions, user_with_both), version_kw={'application': amo.ANDROID.id}, ) self.make_addon_promoted(extra_extension_a, RECOMMENDED, approve_version=True) extra_extension_b = addon_factory( users=(user_with_extensions, user_with_both), version_kw={'application': amo.ANDROID.id}, ) # and some addons that should be ignored addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both), version_kw={'application': amo.ANDROID.id}, ) addon_factory( type=amo.ADDON_STATICTHEME, users=(user_with_themes, user_with_both), version_kw={'application': amo.ANDROID.id}, ) firefox_addon = addon_factory( type=amo.ADDON_EXTENSION, users=(user_with_extensions, user_with_both), version_kw={'application': amo.FIREFOX.id}, ) self.make_addon_promoted(firefox_addon, RECOMMENDED, approve_version=True) # there would be 3 addons but one of them isn't promoted with override_url_prefix(app_name='android'): assert list(AccountSitemap().items()) == [ ( extra_extension_a.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_extension_a.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ] self.make_addon_promoted(extra_extension_b, RECOMMENDED, approve_version=True) with override_url_prefix(app_name='android'): assert list(AccountSitemap().items()) == [ ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_both.id]), 2, 1, ), ( 
extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ] # delete user_with_both from extra_extension_b user_with_both.addonuser_set.filter(addon=extra_extension_b).update( role=amo.AUTHOR_ROLE_DELETED ) with override_url_prefix(app_name='android'): assert list(AccountSitemap().items()) == [ ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 1, 1, ), ( extra_extension_b.last_updated, reverse('users.profile', args=[user_with_extensions.id]), 2, 1, ), ( extra_extension_a.last_updated, reverse('users.profile', args=[user_with_both.id]), 1, 1, ), ] def test_get_sitemap_section_pages(): addon_factory() addon_factory() addon_factory() sitemaps = get_sitemaps() pages = get_sitemap_section_pages(sitemaps) assert pages == [ ('amo', None, 1), ('addons', 'firefox', 1), ('addons', 'android', 1), ('categories', 'firefox', 1), ('collections', 'firefox', 1), ('collections', 'android', 1), ('users', 'firefox', 1), ('users', 'android', 1), ] with mock.patch.object(AddonSitemap, 'limit', 25): pages = get_sitemap_section_pages(sitemaps) # 2 pages per addon * 3 addons * 10 locales = 60 urls for addons; 3 pages @ 25pp assert len(sitemaps.get(('addons', amo.FIREFOX))._items()) == 60 assert pages == [ ('amo', None, 1), ('addons', 'firefox', 1), ('addons', 'firefox', 2), ('addons', 'firefox', 3), ('addons', 'android', 1), ('categories', 'firefox', 1), ('collections', 'firefox', 1), ('collections', 'android', 1), ('users', 'firefox', 1), ('users', 'android', 1), ] # test the default pagination limit def items_mock(self): return [ AccountSitemap.item_tuple(datetime.now(), user_id, 7, 8) for user_id in range(0, 401) ] with mock.patch.object(AccountSitemap, 'items', items_mock): # 401 mock user pages * 10 locales = 4010 urls for addons; 3 pages @ 2000pp pages = get_sitemap_section_pages(sitemaps) assert pages 
== [ ('amo', None, 1), ('addons', 'firefox', 1), ('addons', 'android', 1), ('categories', 'firefox', 1), ('collections', 'firefox', 1), ('collections', 'android', 1), ('users', 'firefox', 1), ('users', 'firefox', 2), ('users', 'firefox', 3), ('users', 'android', 1), ('users', 'android', 2), ('users', 'android', 3), ] def test_render_index_xml(): with mock.patch('olympia.amo.sitemap.get_sitemap_section_pages') as pages_mock: pages_mock.return_value = [ ('amo', None, 1), ('addons', 'firefox', 1), ('addons', 'firefox', 2), ('addons', 'android', 1), ('addons', 'android', 2), ] built = render_index_xml(sitemaps={}) with open(os.path.join(TEST_SITEMAPS_DIR, 'sitemap.xml')) as sitemap: assert built == sitemap.read() def test_sitemap_render(): def items_mock(self): return [ AddonSitemap.item_tuple( datetime(2020, 10, 2, 0, 0, 0), reverse('addons.detail', args=['delicious-barbeque']), ), AddonSitemap.item_tuple( datetime(2020, 10, 1, 0, 0, 0), reverse('addons.detail', args=['spicy-sandwich']), ), AddonSitemap.item_tuple( datetime(2020, 9, 30, 0, 0, 0), reverse('addons.detail', args=['delicious-chocolate']), ), AddonSitemap.item_tuple( datetime(2020, 10, 2, 0, 0, 0), reverse('addons.ratings.list', args=['delicious-barbeque']), ), AddonSitemap.item_tuple( datetime(2020, 10, 1, 0, 0, 0), reverse('addons.ratings.list', args=['spicy-sandwich']), ), AddonSitemap.item_tuple( datetime(2020, 9, 30, 0, 0, 0), reverse('addons.ratings.list', args=['delicious-chocolate']), ), ] with mock.patch.object(AddonSitemap, 'items', items_mock): firefox_built = AddonSitemap().render('firefox', 1) firefox_file = os.path.join(TEST_SITEMAPS_DIR, 'sitemap-addons-firefox.xml') with open(firefox_file) as sitemap: assert firefox_built == sitemap.read() android_built = AddonSitemap().render('android', 1) android_file = os.path.join(TEST_SITEMAPS_DIR, 'sitemap-addons-android.xml') with open(android_file) as sitemap: assert android_built == sitemap.read() def test_get_sitemap_path(): path = 
settings.SITEMAP_STORAGE_PATH assert get_sitemap_path(None, None) == f'{path}/sitemap.xml' assert get_sitemap_path('foo', None) == f'{path}/sitemap-foo.xml' assert get_sitemap_path('foo', 'bar') == f'{path}/sitemap-foo-bar.xml' assert get_sitemap_path('foo', None, 1) == f'{path}/sitemap-foo.xml' assert get_sitemap_path('foo', None, 2) == f'{path}/sitemap-foo-2.xml' assert get_sitemap_path('foo', 'bar', 1) == f'{path}/sitemap-foo-bar.xml' assert get_sitemap_path('foo', 'bar', 2) == f'{path}/sitemap-foo-bar-2.xml'
bsd-3-clause
imruahmed/microblog
flask/lib/python2.7/site-packages/pip/_vendor/distlib/markers.py
1261
6282
# -*- coding: utf-8 -*- # # Copyright (C) 2012-2013 Vinay Sajip. # Licensed to the Python Software Foundation under a contributor agreement. # See LICENSE.txt and CONTRIBUTORS.txt. # """Parser for the environment markers micro-language defined in PEP 345.""" import ast import os import sys import platform from .compat import python_implementation, string_types from .util import in_venv __all__ = ['interpret'] class Evaluator(object): """ A limited evaluator for Python expressions. """ operators = { 'eq': lambda x, y: x == y, 'gt': lambda x, y: x > y, 'gte': lambda x, y: x >= y, 'in': lambda x, y: x in y, 'lt': lambda x, y: x < y, 'lte': lambda x, y: x <= y, 'not': lambda x: not x, 'noteq': lambda x, y: x != y, 'notin': lambda x, y: x not in y, } allowed_values = { 'sys_platform': sys.platform, 'python_version': '%s.%s' % sys.version_info[:2], # parsing sys.platform is not reliable, but there is no other # way to get e.g. 2.7.2+, and the PEP is defined with sys.version 'python_full_version': sys.version.split(' ', 1)[0], 'os_name': os.name, 'platform_in_venv': str(in_venv()), 'platform_release': platform.release(), 'platform_version': platform.version(), 'platform_machine': platform.machine(), 'platform_python_implementation': python_implementation(), } def __init__(self, context=None): """ Initialise an instance. :param context: If specified, names are looked up in this mapping. """ self.context = context or {} self.source = None def get_fragment(self, offset): """ Get the part of the source which is causing a problem. """ fragment_len = 10 s = '%r' % (self.source[offset:offset + fragment_len]) if offset + fragment_len < len(self.source): s += '...' return s def get_handler(self, node_type): """ Get a handler for the specified AST node type. """ return getattr(self, 'do_%s' % node_type, None) def evaluate(self, node, filename=None): """ Evaluate a source string or node, using ``filename`` when displaying errors. 
""" if isinstance(node, string_types): self.source = node kwargs = {'mode': 'eval'} if filename: kwargs['filename'] = filename try: node = ast.parse(node, **kwargs) except SyntaxError as e: s = self.get_fragment(e.offset) raise SyntaxError('syntax error %s' % s) node_type = node.__class__.__name__.lower() handler = self.get_handler(node_type) if handler is None: if self.source is None: s = '(source not available)' else: s = self.get_fragment(node.col_offset) raise SyntaxError("don't know how to evaluate %r %s" % ( node_type, s)) return handler(node) def get_attr_key(self, node): assert isinstance(node, ast.Attribute), 'attribute node expected' return '%s.%s' % (node.value.id, node.attr) def do_attribute(self, node): if not isinstance(node.value, ast.Name): valid = False else: key = self.get_attr_key(node) valid = key in self.context or key in self.allowed_values if not valid: raise SyntaxError('invalid expression: %s' % key) if key in self.context: result = self.context[key] else: result = self.allowed_values[key] return result def do_boolop(self, node): result = self.evaluate(node.values[0]) is_or = node.op.__class__ is ast.Or is_and = node.op.__class__ is ast.And assert is_or or is_and if (is_and and result) or (is_or and not result): for n in node.values[1:]: result = self.evaluate(n) if (is_or and result) or (is_and and not result): break return result def do_compare(self, node): def sanity_check(lhsnode, rhsnode): valid = True if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str): valid = False #elif (isinstance(lhsnode, ast.Attribute) # and isinstance(rhsnode, ast.Attribute)): # klhs = self.get_attr_key(lhsnode) # krhs = self.get_attr_key(rhsnode) # valid = klhs != krhs if not valid: s = self.get_fragment(node.col_offset) raise SyntaxError('Invalid comparison: %s' % s) lhsnode = node.left lhs = self.evaluate(lhsnode) result = True for op, rhsnode in zip(node.ops, node.comparators): sanity_check(lhsnode, rhsnode) op = op.__class__.__name__.lower() 
if op not in self.operators: raise SyntaxError('unsupported operation: %r' % op) rhs = self.evaluate(rhsnode) result = self.operators[op](lhs, rhs) if not result: break lhs = rhs lhsnode = rhsnode return result def do_expression(self, node): return self.evaluate(node.body) def do_name(self, node): valid = False if node.id in self.context: valid = True result = self.context[node.id] elif node.id in self.allowed_values: valid = True result = self.allowed_values[node.id] if not valid: raise SyntaxError('invalid expression: %s' % node.id) return result def do_str(self, node): return node.s def interpret(marker, execution_context=None): """ Interpret a marker and return a result depending on environment. :param marker: The marker to interpret. :type marker: str :param execution_context: The context used for name lookup. :type execution_context: mapping """ return Evaluator(execution_context).evaluate(marker.strip())
bsd-3-clause
beblount/Steer-Clear-Backend-Web
env/Lib/site-packages/sqlalchemy/sql/expression.py
49
5833
# sql/expression.py # Copyright (C) 2005-2015 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php """Defines the public namespace for SQL expression constructs. Prior to version 0.9, this module contained all of "elements", "dml", "default_comparator" and "selectable". The module was broken up and most "factory" functions were moved to be grouped with their associated class. """ __all__ = [ 'Alias', 'ClauseElement', 'ColumnCollection', 'ColumnElement', 'CompoundSelect', 'Delete', 'FromClause', 'Insert', 'Join', 'Select', 'Selectable', 'TableClause', 'Update', 'alias', 'and_', 'asc', 'between', 'bindparam', 'case', 'cast', 'column', 'delete', 'desc', 'distinct', 'except_', 'except_all', 'exists', 'extract', 'func', 'modifier', 'collate', 'insert', 'intersect', 'intersect_all', 'join', 'label', 'literal', 'literal_column', 'not_', 'null', 'nullsfirst', 'nullslast', 'or_', 'outparam', 'outerjoin', 'over', 'select', 'subquery', 'table', 'text', 'tuple_', 'type_coerce', 'union', 'union_all', 'update'] from .visitors import Visitable from .functions import func, modifier, FunctionElement, Function from ..util.langhelpers import public_factory from .elements import ClauseElement, ColumnElement,\ BindParameter, UnaryExpression, BooleanClauseList, \ Label, Cast, Case, ColumnClause, TextClause, Over, Null, \ True_, False_, BinaryExpression, Tuple, TypeClause, Extract, \ Grouping, not_, \ collate, literal_column, between,\ literal, outparam, type_coerce, ClauseList, FunctionFilter from .elements import SavepointClause, RollbackToSavepointClause, \ ReleaseSavepointClause from .base import ColumnCollection, Generative, Executable, \ PARSE_AUTOCOMMIT from .selectable import Alias, Join, Select, Selectable, TableClause, \ CompoundSelect, CTE, FromClause, FromGrouping, SelectBase, \ alias, GenerativeSelect, \ subquery, HasPrefixes, HasSuffixes, 
Exists, ScalarSelect, TextAsFrom from .dml import Insert, Update, Delete, UpdateBase, ValuesBase # factory functions - these pull class-bound constructors and classmethods # from SQL elements and selectables into public functions. This allows # the functions to be available in the sqlalchemy.sql.* namespace and # to be auto-cross-documenting from the function to the class itself. and_ = public_factory(BooleanClauseList.and_, ".expression.and_") or_ = public_factory(BooleanClauseList.or_, ".expression.or_") bindparam = public_factory(BindParameter, ".expression.bindparam") select = public_factory(Select, ".expression.select") text = public_factory(TextClause._create_text, ".expression.text") table = public_factory(TableClause, ".expression.table") column = public_factory(ColumnClause, ".expression.column") over = public_factory(Over, ".expression.over") label = public_factory(Label, ".expression.label") case = public_factory(Case, ".expression.case") cast = public_factory(Cast, ".expression.cast") extract = public_factory(Extract, ".expression.extract") tuple_ = public_factory(Tuple, ".expression.tuple_") except_ = public_factory(CompoundSelect._create_except, ".expression.except_") except_all = public_factory( CompoundSelect._create_except_all, ".expression.except_all") intersect = public_factory( CompoundSelect._create_intersect, ".expression.intersect") intersect_all = public_factory( CompoundSelect._create_intersect_all, ".expression.intersect_all") union = public_factory(CompoundSelect._create_union, ".expression.union") union_all = public_factory( CompoundSelect._create_union_all, ".expression.union_all") exists = public_factory(Exists, ".expression.exists") nullsfirst = public_factory( UnaryExpression._create_nullsfirst, ".expression.nullsfirst") nullslast = public_factory( UnaryExpression._create_nullslast, ".expression.nullslast") asc = public_factory(UnaryExpression._create_asc, ".expression.asc") desc = public_factory(UnaryExpression._create_desc, 
".expression.desc") distinct = public_factory( UnaryExpression._create_distinct, ".expression.distinct") true = public_factory(True_._instance, ".expression.true") false = public_factory(False_._instance, ".expression.false") null = public_factory(Null._instance, ".expression.null") join = public_factory(Join._create_join, ".expression.join") outerjoin = public_factory(Join._create_outerjoin, ".expression.outerjoin") insert = public_factory(Insert, ".expression.insert") update = public_factory(Update, ".expression.update") delete = public_factory(Delete, ".expression.delete") funcfilter = public_factory( FunctionFilter, ".expression.funcfilter") # internal functions still being called from tests and the ORM, # these might be better off in some other namespace from .base import _from_objects from .elements import _literal_as_text, _clause_element_as_expr,\ _is_column, _labeled, _only_column_elements, _string_or_unprintable, \ _truncated_label, _clone, _cloned_difference, _cloned_intersection,\ _column_as_key, _literal_as_binds, _select_iterables, \ _corresponding_column_or_error, _literal_as_label_reference, \ _expression_literal_as_text from .selectable import _interpret_as_from # old names for compatibility _Executable = Executable _BindParamClause = BindParameter _Label = Label _SelectBase = SelectBase _BinaryExpression = BinaryExpression _Cast = Cast _Null = Null _False = False_ _True = True_ _TextClause = TextClause _UnaryExpression = UnaryExpression _Case = Case _Tuple = Tuple _Over = Over _Generative = Generative _TypeClause = TypeClause _Extract = Extract _Exists = Exists _Grouping = Grouping _FromGrouping = FromGrouping _ScalarSelect = ScalarSelect
mit
rcbops/nova-buildpackage
nova/manager.py
2
7540
# vim: tabstop=4 shiftwidth=4 softtabstop=4

# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
#    Licensed under the Apache License, Version 2.0 (the "License"); you may
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

"""Base Manager class.

Managers are responsible for a certain aspect of the system.  It is a logical
grouping of code relating to a portion of the system.  In general other
components should be using the manager to make changes to the components that
it is responsible for.

For example, other components that need to deal with volumes in some way,
should do so by calling methods on the VolumeManager instead of directly
changing fields in the database.  This allows us to keep all of the code
relating to volumes in the same place.

We have adopted a basic strategy of Smart managers and dumb data, which means
rather than attaching methods to data objects, components should call manager
methods that act on the data.

Methods on managers that can be executed locally should be called directly. If
a particular method must execute on a remote host, this should be done via rpc
to the service that wraps the manager.

Managers should be responsible for most of the db access, and
non-implementation specific data.  Anything implementation specific that can't
be generalized should be done by the Driver.

In general, we prefer to have one manager with multiple drivers for different
implementations, but sometimes it makes sense to have multiple managers.  You
can think of it this way: Abstract different overall strategies at the manager
level(FlatNetwork vs VlanNetwork), and different implementations at the driver
level(LinuxNetDriver vs CiscoNetDriver).

Managers will often provide methods for initial setup of a host or periodic
tasks to a wrapping service.

This module provides Manager, a base class for managers.
"""

from nova.db import base
from nova import flags
from nova import log as logging
from nova.scheduler import api
from nova import version

FLAGS = flags.FLAGS
LOG = logging.getLogger('nova.manager')


def periodic_task(*args, **kwargs):
    """Decorator to indicate that a method is a periodic task.

    This decorator can be used in two ways:

        1. Without arguments '@periodic_task', this will be run on every tick
           of the periodic scheduler.

        2. With arguments, @periodic_task(ticks_between_runs=N), this will be
           run on every N ticks of the periodic scheduler.
    """
    def decorator(f):
        # Tag the function so ManagerMeta can discover it later.
        f._periodic_task = True
        f._ticks_between_runs = kwargs.pop('ticks_between_runs', 0)
        return f

    # NOTE(sirp): The `if` is necessary to allow the decorator to be used with
    # and without parens.
    #
    # In the 'with-parens' case (kwargs or empty parens), this function needs
    # to return a decorator function since the interpreter will invoke it
    # like:
    #
    #   periodic_task(*args, **kwargs)(f)
    #
    # In the 'without-parens' case, the original function will be passed
    # in as the first argument, like:
    #
    #   periodic_task(f)
    #
    # Bug fix: the original dispatched on `if kwargs`, so the bare-parens
    # form `@periodic_task()` (no args, no kwargs) fell through to
    # `decorator(args[0])` and crashed with IndexError.  Dispatching on
    # `args` handles all three spellings correctly.
    if args:
        return decorator(args[0])
    return decorator


class ManagerMeta(type):
    def __init__(cls, names, bases, dict_):
        """Metaclass that allows us to collect decorated periodic tasks."""
        super(ManagerMeta, cls).__init__(names, bases, dict_)

        # NOTE(sirp): if the attribute is not present then we must be the base
        # class, so, go ahead an initialize it. If the attribute is present,
        # then we're a subclass so make a copy of it so we don't step on our
        # parent's toes.
        try:
            cls._periodic_tasks = cls._periodic_tasks[:]
        except AttributeError:
            cls._periodic_tasks = []

        try:
            cls._ticks_to_skip = cls._ticks_to_skip.copy()
        except AttributeError:
            cls._ticks_to_skip = {}

        # Register every method tagged by the @periodic_task decorator.
        for value in cls.__dict__.values():
            if getattr(value, '_periodic_task', False):
                task = value
                name = task.__name__
                cls._periodic_tasks.append((name, task))
                cls._ticks_to_skip[name] = task._ticks_between_runs


class Manager(base.Base):
    __metaclass__ = ManagerMeta

    def __init__(self, host=None, db_driver=None):
        # Fall back to the configured host name when none is supplied.
        if not host:
            host = FLAGS.host
        self.host = host
        super(Manager, self).__init__(db_driver)

    def periodic_tasks(self, context, raise_on_error=False):
        """Tasks to be run at a periodic interval.

        :param context: request context passed to each task
        :param raise_on_error: when True, re-raise task exceptions instead
            of logging and continuing with the remaining tasks
        """
        for task_name, task in self._periodic_tasks:
            full_task_name = '.'.join([self.__class__.__name__, task_name])

            ticks_to_skip = self._ticks_to_skip[task_name]
            if ticks_to_skip > 0:
                LOG.debug(_("Skipping %(full_task_name)s, %(ticks_to_skip)s"
                            " ticks left until next run"), locals())
                self._ticks_to_skip[task_name] -= 1
                continue

            # Reset the countdown before running so a failing task still
            # honors its configured interval.
            self._ticks_to_skip[task_name] = task._ticks_between_runs
            LOG.debug(_("Running periodic task %(full_task_name)s"), locals())

            try:
                task(self, context)
            except Exception as e:
                if raise_on_error:
                    raise
                LOG.exception(_("Error during %(full_task_name)s: %(e)s"),
                              locals())

    def init_host(self):
        """Handle initialization if this is a standalone service.

        Child classes should override this method.
        """
        pass

    def service_version(self, context):
        """Return the running service's version string."""
        return version.version_string()

    def service_config(self, context):
        """Return a dict snapshot of all configured flag values."""
        config = {}
        for key in FLAGS:
            config[key] = FLAGS.get(key, None)
        return config


class SchedulerDependentManager(Manager):
    """Periodically send capability updates to the Scheduler services.

    Services that need to update the Scheduler of their capabilities
    should derive from this class. Otherwise they can derive from
    manager.Manager directly. Updates are only sent after
    update_service_capabilities is called with non-None values.
    """

    def __init__(self, host=None, db_driver=None, service_name='undefined'):
        self.last_capabilities = None
        self.service_name = service_name
        super(SchedulerDependentManager, self).__init__(host, db_driver)

    def update_service_capabilities(self, capabilities):
        """Remember these capabilities to send on next periodic update."""
        self.last_capabilities = capabilities

    @periodic_task
    def _publish_service_capabilities(self, context):
        """Pass data back to the scheduler at a periodic interval."""
        if self.last_capabilities:
            LOG.debug(_('Notifying Schedulers of capabilities ...'))
            api.update_service_capabilities(context, self.service_name,
                                            self.host, self.last_capabilities)
apache-2.0
arthurprs/aerospike-client-python
examples/client/info_node.py
2
3760
# -*- coding: utf-8 -*-
################################################################################
# Copyright 2013-2015 Aerospike, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################

from __future__ import print_function

import aerospike
import sys

from optparse import OptionParser

################################################################################
# Options Parsing
################################################################################

usage = "usage: %prog [options] [REQUEST]"

# add_help_option is disabled because -h is reused for --host below.
optparser = OptionParser(usage=usage, add_help_option=False)

optparser.add_option(
    "--help", dest="help", action="store_true",
    help="Displays this message.")

optparser.add_option(
    "-U", "--username", dest="username", type="string", metavar="<USERNAME>",
    help="Username to connect to database.")

optparser.add_option(
    "-P", "--password", dest="password", type="string", metavar="<PASSWORD>",
    help="Password to connect to database.")

optparser.add_option(
    "-h", "--host", dest="host", type="string", default="127.0.0.1",
    metavar="<ADDRESS>", help="Address of Aerospike server.")

optparser.add_option(
    "-p", "--port", dest="port", type="int", default=3000,
    metavar="<PORT>", help="Port of the Aerospike server.")

(options, args) = optparser.parse_args()

if options.help:
    optparser.print_help()
    print()
    sys.exit(1)

################################################################################
# Client Configuration
################################################################################

config = {
    'hosts': [(options.host, options.port)]
}

################################################################################
# Application
################################################################################

exitCode = 0

try:
    # ------------------------------------------------------------------
    # Connect to Cluster
    # ------------------------------------------------------------------
    client = aerospike.client(config).connect(options.username,
                                              options.password)

    # ------------------------------------------------------------------
    # Perform Operation
    # ------------------------------------------------------------------
    try:
        # Default to the "statistics" info command; any positional
        # arguments are joined into the request string instead.
        request = "statistics"
        if len(args) > 0:
            request = ' '.join(args)

        response = client.info_node(request, config['hosts'][0])
        print("---")
        print("Response of info_node request is: ")
        print(response)

    except Exception as e:
        print("error: {0}".format(e), file=sys.stderr)
        exitCode = 2

    # ------------------------------------------------------------------
    # Close Connection to Cluster
    # ------------------------------------------------------------------
    client.close()

# Bug fix: the original used `except Exception, eargs`, which is
# Python 2-only syntax and a SyntaxError under Python 3; the `as` form
# (PEP 3110) is valid on both.
except Exception as eargs:
    print("error: {0}".format(eargs), file=sys.stderr)
    exitCode = 3

################################################################################
# Exit
################################################################################

sys.exit(exitCode)
apache-2.0
ojii/sandlib
lib/lib-python/2.7/test/test_dict.py
4
20279
import unittest from test import test_support import UserDict, random, string import gc, weakref class DictTest(unittest.TestCase): def test_constructor(self): # calling built-in types without argument must return empty self.assertEqual(dict(), {}) self.assertIsNot(dict(), {}) def test_literal_constructor(self): # check literal constructor for different sized dicts # (to exercise the BUILD_MAP oparg). for n in (0, 1, 6, 256, 400): items = [(''.join(random.sample(string.letters, 8)), i) for i in range(n)] random.shuffle(items) formatted_items = ('{!r}: {:d}'.format(k, v) for k, v in items) dictliteral = '{' + ', '.join(formatted_items) + '}' self.assertEqual(eval(dictliteral), dict(items)) def test_bool(self): self.assertIs(not {}, True) self.assertTrue({1: 2}) self.assertIs(bool({}), False) self.assertIs(bool({1: 2}), True) def test_keys(self): d = {} self.assertEqual(d.keys(), []) d = {'a': 1, 'b': 2} k = d.keys() self.assertTrue(d.has_key('a')) self.assertTrue(d.has_key('b')) self.assertRaises(TypeError, d.keys, None) def test_values(self): d = {} self.assertEqual(d.values(), []) d = {1:2} self.assertEqual(d.values(), [2]) self.assertRaises(TypeError, d.values, None) def test_items(self): d = {} self.assertEqual(d.items(), []) d = {1:2} self.assertEqual(d.items(), [(1, 2)]) self.assertRaises(TypeError, d.items, None) def test_has_key(self): d = {} self.assertFalse(d.has_key('a')) d = {'a': 1, 'b': 2} k = d.keys() k.sort() self.assertEqual(k, ['a', 'b']) self.assertRaises(TypeError, d.has_key) def test_contains(self): d = {} self.assertNotIn('a', d) self.assertFalse('a' in d) self.assertTrue('a' not in d) d = {'a': 1, 'b': 2} self.assertIn('a', d) self.assertIn('b', d) self.assertNotIn('c', d) self.assertRaises(TypeError, d.__contains__) def test_len(self): d = {} self.assertEqual(len(d), 0) d = {'a': 1, 'b': 2} self.assertEqual(len(d), 2) def test_getitem(self): d = {'a': 1, 'b': 2} self.assertEqual(d['a'], 1) self.assertEqual(d['b'], 2) d['c'] = 3 d['a'] = 4 
self.assertEqual(d['c'], 3) self.assertEqual(d['a'], 4) del d['b'] self.assertEqual(d, {'a': 4, 'c': 3}) self.assertRaises(TypeError, d.__getitem__) class BadEq(object): def __eq__(self, other): raise Exc() def __hash__(self): return 24 d = {} d[BadEq()] = 42 self.assertRaises(KeyError, d.__getitem__, 23) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.__getitem__, x) def test_clear(self): d = {1:1, 2:2, 3:3} d.clear() self.assertEqual(d, {}) self.assertRaises(TypeError, d.clear, None) def test_update(self): d = {} d.update({1:100}) d.update({2:20}) d.update({1:1, 2:2, 3:3}) self.assertEqual(d, {1:1, 2:2, 3:3}) d.update() self.assertEqual(d, {1:1, 2:2, 3:3}) self.assertRaises((TypeError, AttributeError), d.update, None) class SimpleUserDict: def __init__(self): self.d = {1:1, 2:2, 3:3} def keys(self): return self.d.keys() def __getitem__(self, i): return self.d[i] d.clear() d.update(SimpleUserDict()) self.assertEqual(d, {1:1, 2:2, 3:3}) class Exc(Exception): pass d.clear() class FailingUserDict: def keys(self): raise Exc self.assertRaises(Exc, d.update, FailingUserDict()) class FailingUserDict: def keys(self): class BogonIter: def __init__(self): self.i = 1 def __iter__(self): return self def next(self): if self.i: self.i = 0 return 'a' raise Exc return BogonIter() def __getitem__(self, key): return key self.assertRaises(Exc, d.update, FailingUserDict()) class FailingUserDict: def keys(self): class BogonIter: def __init__(self): self.i = ord('a') def __iter__(self): return self def next(self): if self.i <= ord('z'): rtn = chr(self.i) self.i += 1 return rtn raise StopIteration return BogonIter() def __getitem__(self, key): raise Exc self.assertRaises(Exc, d.update, FailingUserDict()) class badseq(object): def __iter__(self): return self def next(self): raise Exc() self.assertRaises(Exc, {}.update, badseq()) 
self.assertRaises(ValueError, {}.update, [(1, 2, 3)]) def test_fromkeys(self): self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) d = {} self.assertIsNot(d.fromkeys('abc'), d) self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0}) self.assertEqual(d.fromkeys([]), {}) def g(): yield 1 self.assertEqual(d.fromkeys(g()), {1:None}) self.assertRaises(TypeError, {}.fromkeys, 3) class dictlike(dict): pass self.assertEqual(dictlike.fromkeys('a'), {'a':None}) self.assertEqual(dictlike().fromkeys('a'), {'a':None}) self.assertIsInstance(dictlike.fromkeys('a'), dictlike) self.assertIsInstance(dictlike().fromkeys('a'), dictlike) class mydict(dict): def __new__(cls): return UserDict.UserDict() ud = mydict.fromkeys('ab') self.assertEqual(ud, {'a':None, 'b':None}) self.assertIsInstance(ud, UserDict.UserDict) self.assertRaises(TypeError, dict.fromkeys) class Exc(Exception): pass class baddict1(dict): def __init__(self): raise Exc() self.assertRaises(Exc, baddict1.fromkeys, [1]) class BadSeq(object): def __iter__(self): return self def next(self): raise Exc() self.assertRaises(Exc, dict.fromkeys, BadSeq()) class baddict2(dict): def __setitem__(self, key, value): raise Exc() self.assertRaises(Exc, baddict2.fromkeys, [1]) # test fast path for dictionary inputs d = dict(zip(range(6), range(6))) self.assertEqual(dict.fromkeys(d, 0), dict(zip(range(6), [0]*6))) def test_copy(self): d = {1:1, 2:2, 3:3} self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) self.assertEqual({}.copy(), {}) self.assertRaises(TypeError, d.copy, None) def test_get(self): d = {} self.assertIs(d.get('c'), None) self.assertEqual(d.get('c', 3), 3) d = {'a': 1, 'b': 2} self.assertIs(d.get('c'), None) self.assertEqual(d.get('c', 3), 3) self.assertEqual(d.get('a'), 1) self.assertEqual(d.get('a', 3), 1) self.assertRaises(TypeError, d.get) self.assertRaises(TypeError, d.get, None, None, None) def test_setdefault(self): # dict.setdefault() d = {} 
self.assertIs(d.setdefault('key0'), None) d.setdefault('key0', []) self.assertIs(d.setdefault('key0'), None) d.setdefault('key', []).append(3) self.assertEqual(d['key'][0], 3) d.setdefault('key', []).append(4) self.assertEqual(len(d['key']), 2) self.assertRaises(TypeError, d.setdefault) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.setdefault, x, []) def test_popitem(self): # dict.popitem() for copymode in -1, +1: # -1: b has same structure as a # +1: b is a.copy() for log2size in range(12): size = 2**log2size a = {} b = {} for i in range(size): a[repr(i)] = i if copymode < 0: b[repr(i)] = i if copymode > 0: b = a.copy() for i in range(size): ka, va = ta = a.popitem() self.assertEqual(va, int(ka)) kb, vb = tb = b.popitem() self.assertEqual(vb, int(kb)) if test_support.check_impl_detail(): self.assertFalse(copymode < 0 and ta != tb) self.assertFalse(a) self.assertFalse(b) d = {} self.assertRaises(KeyError, d.popitem) def test_pop(self): # Tests for pop with specified key d = {} k, v = 'abc', 'def' d[k] = v self.assertRaises(KeyError, d.pop, 'ghi') self.assertEqual(d.pop(k), v) self.assertEqual(len(d), 0) self.assertRaises(KeyError, d.pop, k) # verify longs/ints get same value when key > 32 bits # (for 64-bit archs). See SF bug #689659. 
x = 4503599627370496L y = 4503599627370496 h = {x: 'anything', y: 'something else'} self.assertEqual(h[x], h[y]) self.assertEqual(d.pop(k, v), v) d[k] = v self.assertEqual(d.pop(k, 1), v) self.assertRaises(TypeError, d.pop) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.pop, x) def test_mutatingiteration(self): # changing dict size during iteration d = {} d[1] = 1 with self.assertRaises(RuntimeError): for i in d: d[i+1] = 1 def test_repr(self): d = {} self.assertEqual(repr(d), '{}') d[1] = 2 self.assertEqual(repr(d), '{1: 2}') d = {} d[1] = d self.assertEqual(repr(d), '{1: {...}}') class Exc(Exception): pass class BadRepr(object): def __repr__(self): raise Exc() d = {1: BadRepr()} self.assertRaises(Exc, repr, d) def test_le(self): self.assertFalse({} < {}) self.assertFalse({1: 2} < {1L: 2L}) class Exc(Exception): pass class BadCmp(object): def __eq__(self, other): raise Exc() def __hash__(self): return 42 d1 = {BadCmp(): 1} d2 = {1: 1} with self.assertRaises(Exc): d1 < d2 def test_missing(self): # Make sure dict doesn't have a __missing__ method self.assertFalse(hasattr(dict, "__missing__")) self.assertFalse(hasattr({}, "__missing__")) # Test several cases: # (D) subclass defines __missing__ method returning a value # (E) subclass defines __missing__ method raising RuntimeError # (F) subclass sets __missing__ instance variable (no effect) # (G) subclass doesn't define __missing__ at a all class D(dict): def __missing__(self, key): return 42 d = D({1: 2, 3: 4}) self.assertEqual(d[1], 2) self.assertEqual(d[3], 4) self.assertNotIn(2, d) self.assertNotIn(2, d.keys()) self.assertEqual(d[2], 42) class E(dict): def __missing__(self, key): raise RuntimeError(key) e = E() with self.assertRaises(RuntimeError) as c: e[42] self.assertEqual(c.exception.args, (42,)) class F(dict): def __init__(self): # An instance variable __missing__ 
should have no effect self.__missing__ = lambda key: None f = F() with self.assertRaises(KeyError) as c: f[42] self.assertEqual(c.exception.args, (42,)) class G(dict): pass g = G() with self.assertRaises(KeyError) as c: g[42] self.assertEqual(c.exception.args, (42,)) def test_tuple_keyerror(self): # SF #1576657 d = {} with self.assertRaises(KeyError) as c: d[(1,)] self.assertEqual(c.exception.args, ((1,),)) def test_bad_key(self): # Dictionary lookups should fail if __cmp__() raises an exception. class CustomException(Exception): pass class BadDictKey: def __hash__(self): return hash(self.__class__) def __cmp__(self, other): if isinstance(other, self.__class__): raise CustomException return other d = {} x1 = BadDictKey() x2 = BadDictKey() d[x1] = 1 for stmt in ['d[x2] = 2', 'z = d[x2]', 'x2 in d', 'd.has_key(x2)', 'd.get(x2)', 'd.setdefault(x2, 42)', 'd.pop(x2)', 'd.update({x2: 2})']: with self.assertRaises(CustomException): exec stmt in locals() def test_resize1(self): # Dict resizing bug, found by Jack Jansen in 2.2 CVS development. # This version got an assert failure in debug build, infinite loop in # release build. Unfortunately, provoking this kind of stuff requires # a mix of inserts and deletes hitting exactly the right hash codes in # exactly the right order, and I can't think of a randomized approach # that would be *likely* to hit a failing case in reasonable time. d = {} for i in range(5): d[i] = i for i in range(5): del d[i] for i in range(5, 9): # i==8 was the problem d[i] = i def test_resize2(self): # Another dict resizing bug (SF bug #1456209). # This caused Segmentation faults or Illegal instructions. 
class X(object): def __hash__(self): return 5 def __eq__(self, other): if resizing: d.clear() return False d = {} resizing = False d[X()] = 1 d[X()] = 2 d[X()] = 3 d[X()] = 4 d[X()] = 5 # now trigger a resize resizing = True d[9] = 6 def test_empty_presized_dict_in_freelist(self): # Bug #3537: if an empty but presized dict with a size larger # than 7 was in the freelist, it triggered an assertion failure with self.assertRaises(ZeroDivisionError): d = {'a': 1 // 0, 'b': None, 'c': None, 'd': None, 'e': None, 'f': None, 'g': None, 'h': None} d = {} def test_container_iterator(self): # Bug #3680: tp_traverse was not implemented for dictiter objects class C(object): pass iterators = (dict.iteritems, dict.itervalues, dict.iterkeys) for i in iterators: obj = C() ref = weakref.ref(obj) container = {obj: 1} obj.x = i(container) del obj, container gc.collect() self.assertIs(ref(), None, "Cycle was not collected") def _not_tracked(self, t): # Nested containers can take several collections to untrack gc.collect() gc.collect() self.assertFalse(gc.is_tracked(t), t) def _tracked(self, t): self.assertTrue(gc.is_tracked(t), t) gc.collect() gc.collect() self.assertTrue(gc.is_tracked(t), t) @test_support.cpython_only def test_track_literals(self): # Test GC-optimization of dict literals x, y, z, w = 1.5, "a", (1, None), [] self._not_tracked({}) self._not_tracked({x:(), y:x, z:1}) self._not_tracked({1: "a", "b": 2}) self._not_tracked({1: 2, (None, True, False, ()): int}) self._not_tracked({1: object()}) # Dicts with mutable elements are always tracked, even if those # elements are not tracked right now. 
self._tracked({1: []}) self._tracked({1: ([],)}) self._tracked({1: {}}) self._tracked({1: set()}) @test_support.cpython_only def test_track_dynamic(self): # Test GC-optimization of dynamically-created dicts class MyObject(object): pass x, y, z, w, o = 1.5, "a", (1, object()), [], MyObject() d = dict() self._not_tracked(d) d[1] = "a" self._not_tracked(d) d[y] = 2 self._not_tracked(d) d[z] = 3 self._not_tracked(d) self._not_tracked(d.copy()) d[4] = w self._tracked(d) self._tracked(d.copy()) d[4] = None self._not_tracked(d) self._not_tracked(d.copy()) # dd isn't tracked right now, but it may mutate and therefore d # which contains it must be tracked. d = dict() dd = dict() d[1] = dd self._not_tracked(dd) self._tracked(d) dd[1] = d self._tracked(dd) d = dict.fromkeys([x, y, z]) self._not_tracked(d) dd = dict() dd.update(d) self._not_tracked(dd) d = dict.fromkeys([x, y, z, o]) self._tracked(d) dd = dict() dd.update(d) self._tracked(dd) d = dict(x=x, y=y, z=z) self._not_tracked(d) d = dict(x=x, y=y, z=z, w=w) self._tracked(d) d = dict() d.update(x=x, y=y, z=z) self._not_tracked(d) d.update(w=w) self._tracked(d) d = dict([(x, y), (z, 1)]) self._not_tracked(d) d = dict([(x, y), (z, w)]) self._tracked(d) d = dict() d.update([(x, y), (z, 1)]) self._not_tracked(d) d.update([(x, y), (z, w)]) self._tracked(d) @test_support.cpython_only def test_track_subtypes(self): # Dict subtypes are always tracked class MyDict(dict): pass self._tracked(MyDict()) from test import mapping_tests class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = dict class Dict(dict): pass class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = Dict def test_main(): with test_support.check_py3k_warnings( ('dict(.has_key..| inequality comparisons) not supported in 3.x', DeprecationWarning)): test_support.run_unittest( DictTest, GeneralMappingTests, SubclassMappingTests, ) if __name__ == "__main__": test_main()
bsd-3-clause
shawnferry/ansible
lib/ansible/module_utils/openstack.py
198
4502
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c) 2014 Hewlett-Packard Development Company, L.P.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
#    * Redistributions of source code must retain the above copyright
#      notice, this list of conditions and the following disclaimer.
#    * Redistributions in binary form must reproduce the above copyright notice,
#      this list of conditions and the following disclaimer in the documentation
#      and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import os


def openstack_argument_spec():
    """Build the argument spec for the deprecated old OpenStack modules.

    Defaults are seeded from the standard OS_* environment variables so
    ad-hoc command-line use picks up an existing OpenStack RC file.

    :return: dict suitable for AnsibleModule(argument_spec=...)
    """
    # DEPRECATED: This argument spec is only used for the deprecated old
    # OpenStack modules. It turns out that modern OpenStack auth is WAY
    # more complex than this.
    # Consume standard OpenStack environment variables.
    # This is mainly only useful for ad-hoc command line operation as
    # in playbooks one would assume variables would be used appropriately
    OS_AUTH_URL = os.environ.get('OS_AUTH_URL', 'http://127.0.0.1:35357/v2.0/')
    OS_PASSWORD = os.environ.get('OS_PASSWORD', None)
    OS_REGION_NAME = os.environ.get('OS_REGION_NAME', None)
    OS_USERNAME = os.environ.get('OS_USERNAME', 'admin')
    OS_TENANT_NAME = os.environ.get('OS_TENANT_NAME', OS_USERNAME)

    spec = dict(
        login_username=dict(default=OS_USERNAME),
        auth_url=dict(default=OS_AUTH_URL),
        region_name=dict(default=OS_REGION_NAME),
        availability_zone=dict(default=None),
    )
    # Only mark credentials required when the environment does not
    # already provide them.
    if OS_PASSWORD:
        spec['login_password'] = dict(default=OS_PASSWORD)
    else:
        spec['login_password'] = dict(required=True)
    if OS_TENANT_NAME:
        spec['login_tenant_name'] = dict(default=OS_TENANT_NAME)
    else:
        spec['login_tenant_name'] = dict(required=True)
    return spec


def openstack_find_nova_addresses(addresses, ext_tag, key_name=None):
    """Extract IP addresses from a Nova server's ``addresses`` mapping.

    :param addresses: mapping of network name -> list of interface dicts
    :param ext_tag: value of 'OS-EXT-IPS:type' to match (e.g. 'fixed')
    :param key_name: if given, every address on the network with this
        name is included regardless of its type tag
    :return: list of address strings
    """
    ret = []
    # Bug fix: the original used the Python 2-only dict.iteritems(),
    # which raises AttributeError under Python 3; .items() works on both.
    for (k, v) in addresses.items():
        if key_name and k == key_name:
            ret.extend([addrs['addr'] for addrs in v])
        else:
            for interface_spec in v:
                if interface_spec.get('OS-EXT-IPS:type') == ext_tag:
                    ret.append(interface_spec['addr'])
    return ret


def openstack_full_argument_spec(**kwargs):
    """Build the standard argument spec shared by modern OpenStack modules.

    :param kwargs: extra per-module argument definitions; these override
        the standard entries on key collision
    :return: dict suitable for AnsibleModule(argument_spec=...)
    """
    spec = dict(
        cloud=dict(default=None),
        auth_type=dict(default=None),
        auth=dict(default=None),
        region_name=dict(default=None),
        availability_zone=dict(default=None),
        verify=dict(default=True, aliases=['validate_certs']),
        cacert=dict(default=None),
        cert=dict(default=None),
        key=dict(default=None),
        wait=dict(default=True, type='bool'),
        timeout=dict(default=180, type='int'),
        api_timeout=dict(default=None, type='int'),
        endpoint_type=dict(
            default='public',
            choices=['public', 'internal', 'admin']
        )
    )
    spec.update(kwargs)
    return spec


def openstack_module_kwargs(**kwargs):
    """Filter module kwargs down to the keys AnsibleModule understands.

    :param kwargs: arbitrary keyword arguments from a module
    :return: dict containing only the recognized constraint keys
    """
    ret = {}
    # The original also had an `if key in ret: ret[key].extend(...)` branch,
    # but ret starts empty and the probed keys are distinct, so that branch
    # was unreachable dead code; behavior is unchanged without it.
    for key in ('mutually_exclusive', 'required_together', 'required_one_of'):
        if key in kwargs:
            ret[key] = kwargs[key]
    return ret
gpl-3.0
Harand97/Magic-ping
server/server.py
1
9573
import socket
import struct
import logging
import datetime
import os
import signal
from Crypto.Cipher import XOR

# ICMP message types used by the protocol (RFC 792 values).
ECHO_REPLY = 0
ECHO_REQUEST = 8
PORT = 14900


def set_logger(log_file_name):
    """Configure logging for the server.

    :param log_file_name: name of the log file
    :return: -
    """
    logging.basicConfig(format='%(filename)s[LINE:%(lineno)d]# %(levelname)-8s [%(asctime)s] %(message)s',
                        level=logging.DEBUG, filename=log_file_name)


def handle_sigint(signal, frame):
    """Handle SIGINT (Ctrl+C): log the shutdown and exit cleanly."""
    logging.info('Work was stopped')
    print('\nInterrupting. Server was stopped')
    exit(0)


def check_sum(inf_string):
    """Compute the checksum of an ICMP packet using the RFC 1071 algorithm.

    :param inf_string: packet as a string
    :return: the resulting checksum

    NOTE(review): callers pass ``str(bytes)``, i.e. the *repr* of a bytes
    object ("b'...'"), not the raw octets — presumably the client computes
    the checksum the same way, so both sides agree; verify against client.
    """
    hash_sum = 0
    # Sum 16-bit little-endian words over the even-length prefix.
    count_to = (len(inf_string) // 2) * 2
    count = 0
    while count < count_to:
        this_val = ord(inf_string[count + 1]) * 256 + ord(inf_string[count])
        hash_sum += this_val
        hash_sum &= 0xffffffff
        count += 2
    # Odd trailing byte, if any.
    if count_to < len(inf_string):
        hash_sum += ord(inf_string[count_to])
        hash_sum &= 0xffffffff
    # Fold carries into the low 16 bits, then take the one's complement.
    hash_sum = (hash_sum >> 16) + hash_sum & 0xffff
    hash_sum += hash_sum >> 16
    answer = ~hash_sum
    answer &= 0xffff
    # Swap to network byte order.
    answer = answer >> 8 | (answer << 8 & 0xffff)
    return answer


def check_correct(packet):
    """Verify a delivered packet by recomputing its checksum.

    :param packet: the delivered packet (IP header occupies bytes 0..19)
    :return: True if the packet is intact, otherwise False
    """
    packet_type, code, expected_sum, client_id, num = struct.unpack('bbHHH', packet[20:28])
    message = packet[28:len(packet)]
    # Rebuild the header with a zeroed checksum field, as required by RFC 1071.
    header = struct.pack('bbHHH', packet_type, code, 0, client_id, num)
    real_sum = check_sum(str(header + message))
    return real_sum == expected_sum


def create_packet(server_id, num, message):
    """Build an ICMP echo-reply packet.

    :param server_id: identifier field of the packet header
    :param num: sequence number of the packet
    :param message: payload of the packet
    :return: the assembled packet
    """
    # First pack with checksum 0, compute the checksum, then repack with it.
    header = struct.pack('bbHHH', ECHO_REPLY, 0, 0, server_id, num)
    packet = header + message
    check = check_sum(str(packet))
    header = struct.pack('bbHHH', ECHO_REPLY, 0, check, server_id, num)
    return header + message


def give_unique_name(file_name):
    """Derive a unique file name from the given one.

    Exists so the server stays correct when two different files arrive with
    the same name. "Uniqueness" is achieved by prefixing a number to the
    name; name collisions are assumed to be rare.

    :param file_name: original file name
    :return: a unique file name derived from the original
    """
    if os.path.exists(file_name):
        i = 1
        while True:
            if os.path.exists(str(i) + '_' + file_name):
                i += 1
            else:
                break
        return str(i) + '_' + file_name
    else:
        return file_name


def listen(key):
    """Main server loop: exchange packets with clients for the whole run.

    :param key: key for XOR encryption
    :return: -
    """
    # Raw ICMP socket: requires elevated privileges on most systems.
    s = socket.socket(socket.AF_INET, socket.SOCK_RAW, socket.IPPROTO_ICMP)
    s.bind(('', PORT))
    logging.info('Start working')
    cipher = XOR.new(key)
    # File that accumulates a report about every received file.
    # connects is a dict of the form
    # {address: [open file, current packet number, transfer info, encryption flag]}
    info = open('work_history.txt', 'a')
    connects = {}
    while True:
        data, address = s.recvfrom(60000)
        # The server only handles echo requests.
        if struct.unpack('b', data[20:21])[0] != ECHO_REQUEST:
            continue
        address = (socket.gethostbyname(address[0]), PORT)
        logging.info('Received packet from {} '.format(address[0]))
        packet_num = struct.unpack('H', data[26:28])[0]
        # Validate the packet; notify the client if it arrived corrupted.
        if not check_correct(data):
            logging.debug('Corrupted data in packet')
            print('Data corrupted')
            s.sendto(create_packet(0, packet_num, 'corrupted'.encode()), address)
            continue
        # Unknown IP address means no transfer from this client is in progress.
        if address[0] not in connects.keys():
            # The client sends an empty packet as its final message; if it is
            # not in the current connections, it must have re-sent that packet
            # after a timeout or after receiving a corrupted reply.
            if len(data) == 28:
                s.sendto(create_packet(0, packet_num, b''), address)
                continue
            # Otherwise open a new file for the new connection and generate a
            # name for it. about_file records the new client's details so they
            # can be written to the report on successful completion.
            file_name, crypt = (data[28:len(data)]).decode().split(' ')
            crypt = int(crypt)
            uniq_name = give_unique_name(file_name)
            about_file = '\nFile: {}\nSaved as: {}\nReceived from: {}\nTime: {}\n'.format(
                file_name, uniq_name, address[0], datetime.datetime.now()
            )
            logging.info('New client. Opening new file {} for acceptance'.format(uniq_name))
            connects[address[0]] = [open(uniq_name, 'wb'), 0, about_file, crypt]
            # Grant other users access to the received file.
            os.chmod(uniq_name, 0o777)
            s.sendto(create_packet(0, packet_num, 'opened'.encode()), address)
        # Only the next expected packet is accepted (with 16-bit wrap-around).
        elif packet_num == connects[address[0]][1] + 1 or connects[address[0]][1] + 1 == 0xffff and packet_num == 0:
            # An empty packet from the client signals end of transfer: tear
            # down the connection and write its info to the report file.
            if len(data) == 28:
                logging.info('Received end of file. Closing file')
                s.sendto(create_packet(0, packet_num, b''), address)
                item = connects.pop(address[0])
                item[0].close()
                print('\nNew file received.\nInformation, that will be written in work_history.txt:{}'.
                      format(item[2]))
                info.write(item[2])
            # Otherwise just advance the packet counter and append the payload
            # to the file being received.
            else:
                logging.debug('Correct data in packet')
                connects[address[0]][1] = packet_num
                message = data[28:len(data)]
                if connects[address[0]][3]:
                    message = cipher.decrypt(message)
                connects[address[0]][0].write(message)
                packet = create_packet(0, packet_num, 'correct'.encode())
                s.sendto(packet, address)
        # The client may re-send the same packet if the server's acknowledgment
        # was lost or corrupted; answer with "again" in that case.
        elif packet_num == connects[address[0]][1]:
            logging.debug('again')
            s.sendto(create_packet(0, packet_num, 'again'.encode()), address)


if __name__ == '__main__':
    signal.signal(signal.SIGINT, handle_sigint)
    # config.txt: first line is the log file name, the rest is the 32-byte key.
    conf = open('config.txt', 'r')
    try:
        set_logger(conf.readline()[:-1])
        listen(conf.read(32))
    except EOFError:
        logging.error('Wrong format of config file')
mit
crmccreary/openerp_server
openerp/addons/stock_invoice_directly/__openerp__.py
9
1743
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Invoice Picking Directly', 'version': '1.0', "category" : "Warehouse Management", 'complexity': "easy", 'description': """ Invoice Wizard for Delivery. ============================ When you send or deliver goods, this module automatically launch the invoicing wizard if the delivery is to be invoiced. """, 'author': 'OpenERP SA', 'website': 'http://www.openerp.com', 'images': ['images/create_invoice.jpeg'], 'depends': ['delivery', 'stock'], 'init_xml': [], 'update_xml': [], 'demo_xml': [], 'test': ['test/stock_invoice_directly.yml'], 'installable': True, 'auto_install': False, 'certificate': '0081385081261', } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
noba3/KoTos
addons/script.module.youtube.dl/lib/youtube_dl/extractor/screenwavemedia.py
58
5429
# encoding: utf-8
from __future__ import unicode_literals

import re

from .common import InfoExtractor
from ..utils import (
    int_or_none,
    unified_strdate,
    js_to_json,
)


class ScreenwaveMediaIE(InfoExtractor):
    """Extractor for videos hosted on the Screenwave Media player."""

    _VALID_URL = r'https?://player\d?\.screenwavemedia\.com/(?:play/)?[a-zA-Z]+\.php\?.*\bid=(?P<id>[A-Za-z0-9-]+)'
    EMBED_PATTERN = r'src=(["\'])(?P<url>(?:https?:)?//player\d?\.screenwavemedia\.com/(?:play/)?[a-zA-Z]+\.php\?.*\bid=.+?)\1'
    _TESTS = [{
        'url': 'http://player.screenwavemedia.com/play/play.php?playerdiv=videoarea&companiondiv=squareAd&id=Cinemassacre-19911',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        # The player page carries the title and the streaming server address.
        playerdata = self._download_webpage(
            'http://player.screenwavemedia.com/player.php?id=%s' % video_id,
            video_id, 'Downloading player webpage')

        vidtitle = self._search_regex(
            r'\'vidtitle\'\s*:\s*"([^"]+)"', playerdata, 'vidtitle').replace('\\/', '/')

        # player.js embeds a JavaScript "sources" array with template
        # placeholders for the server and the video id.
        playerconfig = self._download_webpage(
            'http://player.screenwavemedia.com/player.js',
            video_id, 'Downloading playerconfig webpage')

        videoserver = self._search_regex(r'SWMServer\s*=\s*"([\d\.]+)"', playerdata, 'videoserver')

        # Strip /* ... */ comments, substitute the JS template placeholders,
        # convert the JS literal to JSON and parse it; non-fatal so the
        # hardcoded fallback below can kick in when the JS layout changes.
        sources = self._parse_json(
            js_to_json(
                re.sub(
                    r'(?s)/\*.*?\*/', '',
                    self._search_regex(
                        r"sources\s*:\s*(\[[^\]]+?\])", playerconfig,
                        'sources',
                    ).replace(
                        "' + thisObj.options.videoserver + '",
                        videoserver
                    ).replace(
                        "' + playerVidId + '",
                        video_id
                    )
                )
            ),
            video_id, fatal=False
        )

        # Fallback to hardcoded sources if JS changes again
        if not sources:
            self.report_warning('Falling back to a hardcoded list of streams')
            sources = [{
                'file': 'http://%s/vod/%s_%s.mp4' % (videoserver, video_id, format_id),
                'type': 'mp4',
                'label': format_label,
            } for format_id, format_label in (
                ('low', '144p Low'), ('med', '160p Med'), ('high', '360p High'), ('hd1', '720p HD1'))]
            sources.append({
                'file': 'http://%s/vod/smil:%s.smil/playlist.m3u8' % (videoserver, video_id),
                'type': 'hls',
            })

        formats = []
        for source in sources:
            if source['type'] == 'hls':
                formats.extend(self._extract_m3u8_formats(source['file'], video_id))
            else:
                file_ = source.get('file')
                if not file_:
                    continue
                format_label = source.get('label')
                # Format id is the suffix between the last '_' and the extension.
                format_id = self._search_regex(
                    r'_(.+?)\.[^.]+$', file_, 'format id', default=None)
                # Height is parsed from labels like "720p HD1".
                height = int_or_none(self._search_regex(
                    r'^(\d+)[pP]', format_label, 'height', default=None))
                formats.append({
                    'url': source['file'],
                    'format_id': format_id,
                    'format': format_label,
                    'ext': source.get('type'),
                    'height': height,
                })
        self._sort_formats(formats)

        return {
            'id': video_id,
            'title': vidtitle,
            'formats': formats,
        }


class TeamFourIE(InfoExtractor):
    """Extractor for teamfourstar.com pages, which embed a Screenwave player."""

    _VALID_URL = r'https?://(?:www\.)?teamfourstar\.com/video/(?P<id>[a-z0-9\-]+)/?'
    _TEST = {
        'url': 'http://teamfourstar.com/video/a-moment-with-tfs-episode-4/',
        'info_dict': {
            'id': 'TeamFourStar-5292a02f20bfa',
            'ext': 'mp4',
            'upload_date': '20130401',
            'description': 'Check out this and more on our website: http://teamfourstar.com\nTFS Store: http://sharkrobot.com/team-four-star\nFollow on Twitter: http://twitter.com/teamfourstar\nLike on FB: http://facebook.com/teamfourstar',
            'title': 'A Moment With TFS Episode 4',
        }
    }

    def _real_extract(self, url):
        display_id = self._match_id(url)
        webpage = self._download_webpage(url, display_id)

        playerdata_url = self._search_regex(
            r'src="(http://player\d?\.screenwavemedia\.com/(?:play/)?[a-zA-Z]+\.php\?[^"]*\bid=.+?)"',
            webpage, 'player data URL')

        video_title = self._html_search_regex(
            r'<div class="heroheadingtitle">(?P<title>.+?)</div>',
            webpage, 'title')
        video_date = unified_strdate(self._html_search_regex(
            r'<div class="heroheadingdate">(?P<date>.+?)</div>',
            webpage, 'date', fatal=False))
        video_description = self._html_search_regex(
            r'(?s)<div class="postcontent">(?P<description>.+?)</div>',
            webpage, 'description', fatal=False)
        video_thumbnail = self._og_search_thumbnail(webpage)

        # url_transparent defers actual extraction to ScreenwaveMediaIE while
        # keeping the metadata scraped from this page.
        return {
            '_type': 'url_transparent',
            'display_id': display_id,
            'title': video_title,
            'description': video_description,
            'upload_date': video_date,
            'thumbnail': video_thumbnail,
            'url': playerdata_url,
        }
gpl-2.0
m-hogue/nifi
nifi-nar-bundles/nifi-scripting-bundle/nifi-scripting-processors/src/test/resources/jython/callbacks/read_first_line.py
38
1649
#! /usr/bin/python # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # import sys import traceback from org.apache.nifi.processor.io import InputStreamCallback from java.io import BufferedReader, InputStreamReader class ReadFirstLine(InputStreamCallback) : __line = None; def __init__(self) : pass def getLine(self) : return self.__line def process(self, input) : try : reader = InputStreamReader(input) bufferedReader = BufferedReader(reader) self.__line = bufferedReader.readLine() except : print "Exception in Reader:" print '-' * 60 traceback.print_exc(file=sys.stdout) print '-' * 60 raise finally : if bufferedReader is not None : bufferedReader.close() if reader is not None : reader.close()
apache-2.0
martinkaberg/sshca
scripts/get-cert-no-mfa.py
1
1056
from awsrequests import AwsRequester
import boto3
import click
import subprocess


@click.command()
@click.option('--host', help="Hostname of the api gateway")
@click.option('--stage', help="Deployment stage")
@click.option('--public-key-file', type=click.Path(exists=True), help="ssh public key file")
def main(host, stage, public_key_file):
    """Request a signed SSH certificate for ``public_key_file`` from the
    CA's API gateway and write it next to the key as ``<name>-cert.pub``,
    then display it with ``ssh-keygen -L``.

    Exits with status 1 (after printing the server response) when the API
    does not answer 200.
    """
    # ``with`` closes the file on exit; the original's extra f.close() calls
    # were redundant.
    with open(public_key_file) as f:
        pub_key = f.read()

    req = AwsRequester(
        "eu-west-1",
    )
    response = req.post(
        "https://{}/{}/{}".format(host, stage, "cert"),
        json={
            "public_key_to_sign": pub_key
        },
        verify=True
    )

    # Bug fix: was ``is not 200`` — an identity comparison with an int
    # literal, which is implementation-dependent (and a SyntaxWarning on
    # CPython >= 3.8). Equality is what was meant.
    if response.status_code != 200:
        print(response.text)
        exit(1)

    # Replace the key file's extension with "-cert.pub" for the output path.
    cert_file = '.'.join(public_key_file.split(".")[:-1]) + "-cert.pub"
    with open(cert_file, "w+") as f:
        f.write(response.text)

    # Show the freshly written certificate's details.
    key_gen_cmd = [
        "ssh-keygen",
        "-L",
        "-f",
        cert_file
    ]
    subprocess.call(key_gen_cmd)


if __name__ == '__main__':
    main()
bsd-3-clause
leafjungle/luigi
test/scheduler_visualisation_test.py
49
13542
# -*- coding: utf-8 -*-
#
# Copyright 2012-2015 Spotify AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for the scheduler APIs that back the visualiser UI: graph(),
dep_graph(), task_list(), task_search(), fetch_error() and worker_list()."""

from __future__ import division

import os
import tempfile
import time

from helpers import unittest

import luigi
import luigi.notifications
import luigi.scheduler
import luigi.worker

luigi.notifications.DEBUG = True

# All task outputs go into one throwaway directory shared by the module.
tempdir = tempfile.mkdtemp()


class DummyTask(luigi.Task):
    # Trivial task: completing it just touches its output file.
    task_id = luigi.Parameter()

    def run(self):
        f = self.output().open('w')
        f.close()

    def output(self):
        return luigi.LocalTarget(os.path.join(tempdir, str(self.task_id)))


class FactorTask(luigi.Task):
    # Recursively factors ``product``, yielding a dependency tree/diamond —
    # used to exercise dep_graph() shapes.
    product = luigi.Parameter()

    def requires(self):
        for factor in range(2, self.product):
            if self.product % factor == 0:
                yield FactorTask(factor)
                yield FactorTask(self.product // factor)
                return

    def run(self):
        f = self.output().open('w')
        f.close()

    def output(self):
        return luigi.LocalTarget(os.path.join(tempdir, 'luigi_test_factor_%d' % self.product))


class BadReqTask(luigi.Task):
    # requires() raises (via assert) when succeed is False, producing a
    # dependency with UNKNOWN status.
    succeed = luigi.BoolParameter()

    def requires(self):
        assert self.succeed
        yield BadReqTask(False)

    def run(self):
        pass

    def complete(self):
        return False


class FailingTask(luigi.Task):
    # Always fails at run time with a known message (see test_fetch_error).
    task_id = luigi.Parameter()

    def run(self):
        raise Exception("Error Message")


class SchedulerVisualisationTest(unittest.TestCase):

    def setUp(self):
        self.scheduler = luigi.scheduler.CentralPlannerScheduler()

    def tearDown(self):
        pass

    def _assert_complete(self, tasks):
        for t in tasks:
            self.assert_(t.complete())

    def _build(self, tasks):
        # Run the given tasks with a single in-process worker against the
        # scheduler under test.
        w = luigi.worker.Worker(scheduler=self.scheduler, worker_processes=1)
        for t in tasks:
            w.add(t)
        w.run()
        w.stop()

    def _remote(self):
        # The "remote" scheduler interface is just the scheduler itself here.
        return self.scheduler

    def _test_run(self, workers):
        # NOTE(review): _build() takes no ``workers`` kwarg, so this helper
        # would raise TypeError if called; it appears to be unused.
        tasks = [DummyTask(i) for i in range(20)]
        self._build(tasks, workers=workers)
        self._assert_complete(tasks)

    def test_graph(self):
        # Bracket the build with timestamps so start_time can be bounded.
        start = time.time()
        tasks = [DummyTask(task_id=1), DummyTask(task_id=2)]
        self._build(tasks)
        self._assert_complete(tasks)
        end = time.time()

        remote = self._remote()
        graph = remote.graph()
        self.assertEqual(len(graph), 2)
        self.assert_(u'DummyTask(task_id=1)' in graph)
        d1 = graph[u'DummyTask(task_id=1)']
        self.assertEqual(d1[u'status'], u'DONE')
        self.assertEqual(d1[u'deps'], [])
        self.assertGreaterEqual(d1[u'start_time'], start)
        self.assertLessEqual(d1[u'start_time'], end)
        d2 = graph[u'DummyTask(task_id=2)']
        self.assertEqual(d2[u'status'], u'DONE')
        self.assertEqual(d2[u'deps'], [])
        self.assertGreaterEqual(d2[u'start_time'], start)
        self.assertLessEqual(d2[u'start_time'], end)

    def _assert_all_done(self, tasks):
        self._assert_all(tasks, u'DONE')

    def _assert_all(self, tasks, status):
        for task in tasks.values():
            self.assertEqual(task[u'status'], status)

    def test_dep_graph_single(self):
        self._build([FactorTask(1)])
        remote = self._remote()
        dep_graph = remote.dep_graph('FactorTask(product=1)')
        self.assertEqual(len(dep_graph), 1)
        self._assert_all_done(dep_graph)

        d1 = dep_graph.get(u'FactorTask(product=1)')
        self.assertEqual(type(d1), type({}))
        self.assertEqual(d1[u'deps'], [])

    def test_dep_graph_not_found(self):
        # Querying a task the scheduler never saw yields an empty graph.
        self._build([FactorTask(1)])
        remote = self._remote()
        dep_graph = remote.dep_graph('FactorTask(product=5)')
        self.assertEqual(len(dep_graph), 0)

    def test_dep_graph_tree(self):
        # 30 factors into 15 and 2; 15 into 3 and 5 — a 5-node tree.
        self._build([FactorTask(30)])
        remote = self._remote()
        dep_graph = remote.dep_graph('FactorTask(product=30)')
        self.assertEqual(len(dep_graph), 5)
        self._assert_all_done(dep_graph)

        d30 = dep_graph[u'FactorTask(product=30)']
        self.assertEqual(sorted(d30[u'deps']), [u'FactorTask(product=15)', 'FactorTask(product=2)'])

        d2 = dep_graph[u'FactorTask(product=2)']
        self.assertEqual(sorted(d2[u'deps']), [])

        d15 = dep_graph[u'FactorTask(product=15)']
        self.assertEqual(sorted(d15[u'deps']), [u'FactorTask(product=3)', 'FactorTask(product=5)'])

        d3 = dep_graph[u'FactorTask(product=3)']
        self.assertEqual(sorted(d3[u'deps']), [])

        d5 = dep_graph[u'FactorTask(product=5)']
        self.assertEqual(sorted(d5[u'deps']), [])

    def test_dep_graph_missing_deps(self):
        # A dependency whose requires() blew up shows as UNKNOWN.
        self._build([BadReqTask(True)])
        dep_graph = self._remote().dep_graph('BadReqTask(succeed=True)')
        self.assertEqual(len(dep_graph), 2)

        suc = dep_graph[u'BadReqTask(succeed=True)']
        self.assertEqual(suc[u'deps'], [u'BadReqTask(succeed=False)'])

        fail = dep_graph[u'BadReqTask(succeed=False)']
        self.assertEqual(fail[u'name'], 'BadReqTask')
        self.assertEqual(fail[u'params'], {'succeed': 'False'})
        self.assertEqual(fail[u'status'], 'UNKNOWN')

    def test_dep_graph_diamond(self):
        # 12 -> (2, 6); 6 -> (2, 3): FactorTask(2) is shared, forming a diamond.
        self._build([FactorTask(12)])
        remote = self._remote()
        dep_graph = remote.dep_graph('FactorTask(product=12)')
        self.assertEqual(len(dep_graph), 4)
        self._assert_all_done(dep_graph)

        d12 = dep_graph[u'FactorTask(product=12)']
        self.assertEqual(sorted(d12[u'deps']), [u'FactorTask(product=2)', 'FactorTask(product=6)'])

        d6 = dep_graph[u'FactorTask(product=6)']
        self.assertEqual(sorted(d6[u'deps']), [u'FactorTask(product=2)', 'FactorTask(product=3)'])

        d3 = dep_graph[u'FactorTask(product=3)']
        self.assertEqual(sorted(d3[u'deps']), [])

        d2 = dep_graph[u'FactorTask(product=2)']
        self.assertEqual(sorted(d2[u'deps']), [])

    def test_task_list_single(self):
        self._build([FactorTask(7)])
        remote = self._remote()
        tasks_done = remote.task_list('DONE', '')
        self.assertEqual(len(tasks_done), 1)
        self._assert_all_done(tasks_done)

        t7 = tasks_done.get(u'FactorTask(product=7)')
        self.assertEqual(type(t7), type({}))

        # An empty status filter returns everything; other statuses are empty.
        self.assertEqual(remote.task_list('', ''), tasks_done)
        self.assertEqual(remote.task_list('FAILED', ''), {})
        self.assertEqual(remote.task_list('PENDING', ''), {})

    def test_task_list_failed(self):
        self._build([FailingTask(8)])
        remote = self._remote()
        failed = remote.task_list('FAILED', '')
        self.assertEqual(len(failed), 1)

        f8 = failed.get(u'FailingTask(task_id=8)')
        self.assertEqual(f8[u'status'], u'FAILED')

        self.assertEqual(remote.task_list('DONE', ''), {})
        self.assertEqual(remote.task_list('PENDING', ''), {})

    def test_task_list_upstream_status(self):
        # A: external + incomplete -> MISSING_INPUT; F fails -> D/E UPSTREAM_FAILED.
        class A(luigi.ExternalTask):
            pass

        class B(luigi.ExternalTask):

            def complete(self):
                return True

        class C(luigi.Task):

            def requires(self):
                return [A(), B()]

        class F(luigi.Task):

            def run(self):
                raise Exception()

        class D(luigi.Task):

            def requires(self):
                return [F()]

        class E(luigi.Task):

            def requires(self):
                return [C(), D()]

        self._build([E()])
        remote = self._remote()

        done = remote.task_list('DONE', '')
        self.assertEqual(len(done), 1)
        db = done.get('B()')
        self.assertEqual(db['status'], 'DONE')

        missing_input = remote.task_list('PENDING', 'UPSTREAM_MISSING_INPUT')
        self.assertEqual(len(missing_input), 2)

        pa = missing_input.get(u'A()')
        self.assertEqual(pa['status'], 'PENDING')
        self.assertEqual(remote._upstream_status('A()', {}), 'UPSTREAM_MISSING_INPUT')

        pc = missing_input.get(u'C()')
        self.assertEqual(pc['status'], 'PENDING')
        self.assertEqual(remote._upstream_status('C()', {}), 'UPSTREAM_MISSING_INPUT')

        upstream_failed = remote.task_list('PENDING', 'UPSTREAM_FAILED')
        self.assertEqual(len(upstream_failed), 2)
        pe = upstream_failed.get(u'E()')
        self.assertEqual(pe['status'], 'PENDING')
        self.assertEqual(remote._upstream_status('E()', {}), 'UPSTREAM_FAILED')

        pe = upstream_failed.get(u'D()')
        self.assertEqual(pe['status'], 'PENDING')
        self.assertEqual(remote._upstream_status('D()', {}), 'UPSTREAM_FAILED')

        pending = dict(missing_input)
        pending.update(upstream_failed)
        self.assertEqual(remote.task_list('PENDING', ''), pending)
        self.assertEqual(remote.task_list('PENDING', 'UPSTREAM_RUNNING'), {})

        failed = remote.task_list('FAILED', '')
        self.assertEqual(len(failed), 1)
        fd = failed.get('F()')
        self.assertEqual(fd['status'], 'FAILED')

        all = dict(pending)
        all.update(done)
        all.update(failed)
        self.assertEqual(remote.task_list('', ''), all)
        self.assertEqual(remote.task_list('RUNNING', ''), {})

    def test_task_search(self):
        self._build([FactorTask(8)])
        self._build([FailingTask(8)])
        remote = self._remote()
        all_tasks = remote.task_search('Task')
        self.assertEqual(len(all_tasks), 2)
        self._assert_all(all_tasks['DONE'], 'DONE')
        self._assert_all(all_tasks['FAILED'], 'FAILED')

    def test_fetch_error(self):
        self._build([FailingTask(8)])
        remote = self._remote()
        error = remote.fetch_error("FailingTask(task_id=8)")
        self.assertEqual(error["taskId"], "FailingTask(task_id=8)")
        self.assertTrue("Error Message" in error["error"])
        self.assertTrue("Runtime error" in error["error"])
        self.assertTrue("Traceback" in error["error"])

    def test_inverse_deps(self):
        # inverse_dep_graph walks *downstream*: X -> Y -> Z(1), Z(2) -> ZZ.
        class X(luigi.Task):
            pass

        class Y(luigi.Task):

            def requires(self):
                return [X()]

        class Z(luigi.Task):
            id = luigi.Parameter()

            def requires(self):
                return [Y()]

        class ZZ(luigi.Task):

            def requires(self):
                return [Z(1), Z(2)]

        self._build([ZZ()])
        dep_graph = self._remote().inverse_dep_graph('X()')

        def assert_has_deps(task_id, deps):
            self.assertTrue(task_id in dep_graph, '%s not in dep_graph %s' % (task_id, dep_graph))
            task = dep_graph[task_id]
            self.assertEqual(sorted(task['deps']), sorted(deps), '%s does not have deps %s' % (task_id, deps))

        assert_has_deps('X()', ['Y()'])
        assert_has_deps('Y()', ['Z(id=1)', 'Z(id=2)'])
        assert_has_deps('Z(id=1)', ['ZZ()'])
        assert_has_deps('Z(id=2)', ['ZZ()'])
        assert_has_deps('ZZ()', [])

    def test_simple_worker_list(self):
        class X(luigi.Task):

            def run(self):
                self._complete = True

            def complete(self):
                return getattr(self, '_complete', False)
        self._build([X()])

        workers = self._remote().worker_list()
        self.assertEqual(1, len(workers))
        worker = workers[0]
        self.assertEqual('X()', worker['first_task'])
        self.assertEqual(0, worker['num_pending'])
        self.assertEqual(0, worker['num_uniques'])
        self.assertEqual(0, worker['num_running'])
        self.assertEqual(1, worker['workers'])

    def test_worker_list_pending_uniques(self):
        # Y is shared between both workers; Z is unique to w2 / Y unique to w1.
        class X(luigi.Task):

            def complete(self):
                return False

        class Y(X):

            def requires(self):
                return X()

        class Z(Y):
            pass

        w1 = luigi.worker.Worker(scheduler=self.scheduler, worker_processes=1)
        w2 = luigi.worker.Worker(scheduler=self.scheduler, worker_processes=1)
        w1.add(Y())
        w2.add(Z())

        workers = self._remote().worker_list()
        self.assertEqual(2, len(workers))
        for worker in workers:
            self.assertEqual(2, worker['num_pending'])
            self.assertEqual(1, worker['num_uniques'])
            self.assertEqual(0, worker['num_running'])

    def test_worker_list_running(self):
        class X(luigi.Task):
            n = luigi.IntParameter()

        w = luigi.worker.Worker(scheduler=self.scheduler, worker_processes=3)
        w.add(X(0))
        w.add(X(1))
        w.add(X(2))
        w.add(X(3))

        # Three _get_work calls put three of the four tasks in RUNNING state.
        w._get_work()
        w._get_work()
        w._get_work()

        workers = self._remote().worker_list()
        self.assertEqual(1, len(workers))
        worker = workers[0]

        self.assertEqual(3, worker['num_running'])
        self.assertEqual(1, worker['num_pending'])
        self.assertEqual(1, worker['num_uniques'])


if __name__ == '__main__':
    unittest.main()
apache-2.0
Neamar/django
django/contrib/messages/api.py
512
3202
from django.contrib.messages import constants
from django.contrib.messages.storage import default_storage
from django.http import HttpRequest

__all__ = (
    'add_message', 'get_messages',
    'get_level', 'set_level',
    'debug', 'info', 'success', 'warning', 'error',
    'MessageFailure',
)


class MessageFailure(Exception):
    # Raised by add_message() when message storage is absent and the caller
    # did not ask for silent failure.
    pass


def add_message(request, level, message, extra_tags='', fail_silently=False):
    """
    Attempts to add a message to the request using the 'messages' app.

    Raises TypeError for non-HttpRequest arguments (a common mistake is
    passing a view's ``self``), and MessageFailure when the messages
    middleware is not installed — unless ``fail_silently`` is set.
    """
    if not isinstance(request, HttpRequest):
        raise TypeError("add_message() argument must be an HttpRequest object, "
                        "not '%s'." % request.__class__.__name__)
    if hasattr(request, '_messages'):
        return request._messages.add(level, message, extra_tags)
    if not fail_silently:
        raise MessageFailure('You cannot add messages without installing '
                             'django.contrib.messages.middleware.MessageMiddleware')


def get_messages(request):
    """
    Returns the message storage on the request if it exists, otherwise returns
    an empty list.
    """
    if hasattr(request, '_messages'):
        return request._messages
    else:
        return []


def get_level(request):
    """
    Returns the minimum level of messages to be recorded.

    The default level is the ``MESSAGE_LEVEL`` setting. If this is not found,
    the ``INFO`` level is used.
    """
    # Fall back to a fresh default storage when middleware hasn't attached one.
    if hasattr(request, '_messages'):
        storage = request._messages
    else:
        storage = default_storage(request)
    return storage.level


def set_level(request, level):
    """
    Sets the minimum level of messages to be recorded, returning ``True`` if
    the level was recorded successfully.

    If set to ``None``, the default level will be used (see the ``get_level``
    method).
    """
    if not hasattr(request, '_messages'):
        return False
    request._messages.level = level
    return True


def debug(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``DEBUG`` level.
    """
    add_message(request, constants.DEBUG, message, extra_tags=extra_tags,
                fail_silently=fail_silently)


def info(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``INFO`` level.
    """
    add_message(request, constants.INFO, message, extra_tags=extra_tags,
                fail_silently=fail_silently)


def success(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``SUCCESS`` level.
    """
    add_message(request, constants.SUCCESS, message, extra_tags=extra_tags,
                fail_silently=fail_silently)


def warning(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``WARNING`` level.
    """
    add_message(request, constants.WARNING, message, extra_tags=extra_tags,
                fail_silently=fail_silently)


def error(request, message, extra_tags='', fail_silently=False):
    """
    Adds a message with the ``ERROR`` level.
    """
    add_message(request, constants.ERROR, message, extra_tags=extra_tags,
                fail_silently=fail_silently)
bsd-3-clause
dariemp/odoo
addons/email_template/html2text.py
440
14143
#!/usr/bin/env python """html2text: Turn HTML into equivalent Markdown-structured text.""" __version__ = "2.36" __author__ = "Aaron Swartz (me@aaronsw.com)" __copyright__ = "(C) 2004-2008 Aaron Swartz. GNU GPL 3." __contributors__ = ["Martin 'Joey' Schulze", "Ricardo Reyes", "Kevin Jay North"] # TODO: # Support decoded entities with unifiable. if not hasattr(__builtins__, 'True'): True, False = 1, 0 import re, sys, urllib, htmlentitydefs, codecs import sgmllib import urlparse sgmllib.charref = re.compile('&#([xX]?[0-9a-fA-F]+)[^0-9a-fA-F]') try: from textwrap import wrap except: pass # Use Unicode characters instead of their ascii psuedo-replacements UNICODE_SNOB = 0 # Put the links after each paragraph instead of at the end. LINKS_EACH_PARAGRAPH = 0 # Wrap long lines at position. 0 for no wrapping. (Requires Python 2.3.) BODY_WIDTH = 78 # Don't show internal links (href="#local-anchor") -- corresponding link targets # won't be visible in the plain text file anyway. SKIP_INTERNAL_LINKS = False ### Entity Nonsense ### def name2cp(k): if k == 'apos': return ord("'") if hasattr(htmlentitydefs, "name2codepoint"): # requires Python 2.3 return htmlentitydefs.name2codepoint[k] else: k = htmlentitydefs.entitydefs[k] if k.startswith("&#") and k.endswith(";"): return int(k[2:-1]) # not in latin-1 return ord(codecs.latin_1_decode(k)[0]) unifiable = {'rsquo':"'", 'lsquo':"'", 'rdquo':'"', 'ldquo':'"', 'copy':'(C)', 'mdash':'--', 'nbsp':' ', 'rarr':'->', 'larr':'<-', 'middot':'*', 'ndash':'-', 'oelig':'oe', 'aelig':'ae', 'agrave':'a', 'aacute':'a', 'acirc':'a', 'atilde':'a', 'auml':'a', 'aring':'a', 'egrave':'e', 'eacute':'e', 'ecirc':'e', 'euml':'e', 'igrave':'i', 'iacute':'i', 'icirc':'i', 'iuml':'i', 'ograve':'o', 'oacute':'o', 'ocirc':'o', 'otilde':'o', 'ouml':'o', 'ugrave':'u', 'uacute':'u', 'ucirc':'u', 'uuml':'u'} unifiable_n = {} for k in unifiable.keys(): unifiable_n[name2cp(k)] = unifiable[k] def charref(name): if name[0] in ['x','X']: c = int(name[1:], 16) else: c = 
int(name) if not UNICODE_SNOB and c in unifiable_n.keys(): return unifiable_n[c] else: return unichr(c) def entityref(c): if not UNICODE_SNOB and c in unifiable.keys(): return unifiable[c] else: try: name2cp(c) except KeyError: return "&" + c else: return unichr(name2cp(c)) def replaceEntities(s): s = s.group(1) if s[0] == "#": return charref(s[1:]) else: return entityref(s) r_unescape = re.compile(r"&(#?[xX]?(?:[0-9a-fA-F]+|\w{1,8}));") def unescape(s): return r_unescape.sub(replaceEntities, s) def fixattrs(attrs): # Fix bug in sgmllib.py if not attrs: return attrs newattrs = [] for attr in attrs: newattrs.append((attr[0], unescape(attr[1]))) return newattrs ### End Entity Nonsense ### def onlywhite(line): """Return true if the line does only consist of whitespace characters.""" for c in line: if c is not ' ' and c is not ' ': return c is ' ' return line def optwrap(text): """Wrap all paragraphs in the provided text.""" if not BODY_WIDTH: return text assert wrap, "Requires Python 2.3." result = '' newlines = 0 for para in text.split("\n"): if len(para) > 0: if para[0] is not ' ' and para[0] is not '-' and para[0] is not '*': for line in wrap(para, BODY_WIDTH): result += line + "\n" result += "\n" newlines = 2 else: if not onlywhite(para): result += para + "\n" newlines = 1 else: if newlines < 2: result += "\n" newlines += 1 return result def hn(tag): if tag[0] == 'h' and len(tag) == 2: try: n = int(tag[1]) if n in range(1, 10): return n except ValueError: return 0 class _html2text(sgmllib.SGMLParser): def __init__(self, out=sys.stdout.write, baseurl=''): sgmllib.SGMLParser.__init__(self) if out is None: self.out = self.outtextf else: self.out = out self.outtext = u'' self.quiet = 0 self.p_p = 0 self.outcount = 0 self.start = 1 self.space = 0 self.a = [] self.astack = [] self.acount = 0 self.list = [] self.blockquote = 0 self.pre = 0 self.startpre = 0 self.lastWasNL = 0 self.abbr_title = None # current abbreviation definition self.abbr_data = None # last inner HTML 
(for abbr being defined) self.abbr_list = {} # stack of abbreviations to write later self.baseurl = baseurl def outtextf(self, s): self.outtext += s def close(self): sgmllib.SGMLParser.close(self) self.pbr() self.o('', 0, 'end') return self.outtext def handle_charref(self, c): self.o(charref(c)) def handle_entityref(self, c): self.o(entityref(c)) def unknown_starttag(self, tag, attrs): self.handle_tag(tag, attrs, 1) def unknown_endtag(self, tag): self.handle_tag(tag, None, 0) def previousIndex(self, attrs): """ returns the index of certain set of attributes (of a link) in the self.a list If the set of attributes is not found, returns None """ if not attrs.has_key('href'): return None i = -1 for a in self.a: i += 1 match = 0 if a.has_key('href') and a['href'] == attrs['href']: if a.has_key('title') or attrs.has_key('title'): if (a.has_key('title') and attrs.has_key('title') and a['title'] == attrs['title']): match = True else: match = True if match: return i def handle_tag(self, tag, attrs, start): attrs = fixattrs(attrs) if hn(tag): self.p() if start: self.o(hn(tag)*"#" + ' ') if tag in ['p', 'div']: self.p() if tag == "br" and start: self.o(" \n") if tag == "hr" and start: self.p() self.o("* * *") self.p() if tag in ["head", "style", 'script']: if start: self.quiet += 1 else: self.quiet -= 1 if tag in ["body"]: self.quiet = 0 # sites like 9rules.com never close <head> if tag == "blockquote": if start: self.p(); self.o('> ', 0, 1); self.start = 1 self.blockquote += 1 else: self.blockquote -= 1 self.p() if tag in ['em', 'i', 'u']: self.o("_") if tag in ['strong', 'b']: self.o("**") if tag == "code" and not self.pre: self.o('`') #TODO: `` `this` `` if tag == "abbr": if start: attrsD = {} for (x, y) in attrs: attrsD[x] = y attrs = attrsD self.abbr_title = None self.abbr_data = '' if attrs.has_key('title'): self.abbr_title = attrs['title'] else: if self.abbr_title != None: self.abbr_list[self.abbr_data] = self.abbr_title self.abbr_title = None self.abbr_data = '' if 
tag == "a": if start: attrsD = {} for (x, y) in attrs: attrsD[x] = y attrs = attrsD if attrs.has_key('href') and not (SKIP_INTERNAL_LINKS and attrs['href'].startswith('#')): self.astack.append(attrs) self.o("[") else: self.astack.append(None) else: if self.astack: a = self.astack.pop() if a: i = self.previousIndex(a) if i is not None: a = self.a[i] else: self.acount += 1 a['count'] = self.acount a['outcount'] = self.outcount self.a.append(a) self.o("][" + `a['count']` + "]") if tag == "img" and start: attrsD = {} for (x, y) in attrs: attrsD[x] = y attrs = attrsD if attrs.has_key('src'): attrs['href'] = attrs['src'] alt = attrs.get('alt', '') i = self.previousIndex(attrs) if i is not None: attrs = self.a[i] else: self.acount += 1 attrs['count'] = self.acount attrs['outcount'] = self.outcount self.a.append(attrs) self.o("![") self.o(alt) self.o("]["+`attrs['count']`+"]") if tag == 'dl' and start: self.p() if tag == 'dt' and not start: self.pbr() if tag == 'dd' and start: self.o(' ') if tag == 'dd' and not start: self.pbr() if tag in ["ol", "ul"]: if start: self.list.append({'name':tag, 'num':0}) else: if self.list: self.list.pop() self.p() if tag == 'li': if start: self.pbr() if self.list: li = self.list[-1] else: li = {'name':'ul', 'num':0} self.o(" "*len(self.list)) #TODO: line up <ol><li>s > 9 correctly. if li['name'] == "ul": self.o("* ") elif li['name'] == "ol": li['num'] += 1 self.o(`li['num']`+". 
") self.start = 1 else: self.pbr() if tag in ["table", "tr"] and start: self.p() if tag == 'td': self.pbr() if tag == "pre": if start: self.startpre = 1 self.pre = 1 else: self.pre = 0 self.p() def pbr(self): if self.p_p == 0: self.p_p = 1 def p(self): self.p_p = 2 def o(self, data, puredata=0, force=0): if self.abbr_data is not None: self.abbr_data += data if not self.quiet: if puredata and not self.pre: data = re.sub('\s+', ' ', data) if data and data[0] == ' ': self.space = 1 data = data[1:] if not data and not force: return if self.startpre: #self.out(" :") #TODO: not output when already one there self.startpre = 0 bq = (">" * self.blockquote) if not (force and data and data[0] == ">") and self.blockquote: bq += " " if self.pre: bq += " " data = data.replace("\n", "\n"+bq) if self.start: self.space = 0 self.p_p = 0 self.start = 0 if force == 'end': # It's the end. self.p_p = 0 self.out("\n") self.space = 0 if self.p_p: self.out(('\n'+bq)*self.p_p) self.space = 0 if self.space: if not self.lastWasNL: self.out(' ') self.space = 0 if self.a and ((self.p_p == 2 and LINKS_EACH_PARAGRAPH) or force == "end"): if force == "end": self.out("\n") newa = [] for link in self.a: if self.outcount > link['outcount']: self.out(" ["+`link['count']`+"]: " + urlparse.urljoin(self.baseurl, link['href'])) if link.has_key('title'): self.out(" ("+link['title']+")") self.out("\n") else: newa.append(link) if self.a != newa: self.out("\n") # Don't need an extra line when nothing was done. 
self.a = newa if self.abbr_list and force == "end": for abbr, definition in self.abbr_list.items(): self.out(" *[" + abbr + "]: " + definition + "\n") self.p_p = 0 self.out(data) self.lastWasNL = data and data[-1] == '\n' self.outcount += 1 def handle_data(self, data): if r'\/script>' in data: self.quiet -= 1 self.o(data, 1) def unknown_decl(self, data): pass def wrapwrite(text): sys.stdout.write(text.encode('utf8')) def html2text_file(html, out=wrapwrite, baseurl=''): h = _html2text(out, baseurl) h.feed(html) h.feed("") return h.close() def html2text(html, baseurl=''): return optwrap(html2text_file(html, None, baseurl)) if __name__ == "__main__": baseurl = '' if sys.argv[1:]: arg = sys.argv[1] if arg.startswith('http://'): baseurl = arg j = urllib.urlopen(baseurl) try: from feedparser import _getCharacterEncoding as enc except ImportError: enc = lambda x, y: ('utf-8', 1) text = j.read() encoding = enc(j.headers, text)[0] if encoding == 'us-ascii': encoding = 'utf-8' data = text.decode(encoding) else: encoding = 'utf8' if len(sys.argv) > 2: encoding = sys.argv[2] f = open(arg, 'r') try: data = f.read().decode(encoding) finally: f.close() else: data = sys.stdin.read().decode('utf8') wrapwrite(html2text(data, baseurl)) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
cyrusin/tornado
demos/s3server/s3server.py
98
9650
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Implementation of an S3-like storage server based on local files.

Useful to test features that will eventually run on S3, or if you want
to run something locally that was once running on S3.

We don't support all the features of S3, but it does work with the
standard S3 client for the most basic semantics. To use the standard
S3 client with this module:

    c = S3.AWSAuthConnection("", "", server="localhost", port=8888,
                             is_secure=False)
    c.create_bucket("mybucket")
    c.put("mybucket", "mykey", "a value")
    print c.get("mybucket", "mykey").body

"""

import bisect
import datetime
import hashlib
import os
import os.path
import urllib

from tornado import escape
from tornado import httpserver
from tornado import ioloop
from tornado import web


def start(port, root_directory="/tmp/s3", bucket_depth=0):
    """Starts the mock S3 server on the given port at the given path.

    Blocks in the IOLoop until the process is interrupted.
    """
    application = S3Application(root_directory, bucket_depth)
    http_server = httpserver.HTTPServer(application)
    http_server.listen(port)
    ioloop.IOLoop.current().start()


class S3Application(web.Application):
    """Implementation of an S3-like storage server based on local files.

    If bucket depth is given, we break files up into multiple directories
    to prevent hitting file system limits for number of files in each
    directories. 1 means one level of directories, 2 means 2, etc.
    """
    def __init__(self, root_directory, bucket_depth=0):
        web.Application.__init__(self, [
            (r"/", RootHandler),
            (r"/([^/]+)/(.+)", ObjectHandler),
            (r"/([^/]+)/", BucketHandler),
        ])
        self.directory = os.path.abspath(root_directory)
        if not os.path.exists(self.directory):
            os.makedirs(self.directory)
        self.bucket_depth = bucket_depth


class BaseRequestHandler(web.RequestHandler):
    """Shared helpers for rendering S3-style XML and mapping object paths."""

    SUPPORTED_METHODS = ("PUT", "GET", "DELETE")

    def render_xml(self, value):
        """Finish the request with *value* rendered as an S3 XML document.

        *value* must be a dict with exactly one key: the root element name.
        """
        assert isinstance(value, dict) and len(value) == 1
        self.set_header("Content-Type", "application/xml; charset=UTF-8")
        name = value.keys()[0]
        parts = []
        parts.append('<' + escape.utf8(name) +
                     ' xmlns="http://doc.s3.amazonaws.com/2006-03-01">')
        self._render_parts(value.values()[0], parts)
        parts.append('</' + escape.utf8(name) + '>')
        self.finish('<?xml version="1.0" encoding="UTF-8"?>\n' +
                    ''.join(parts))

    def _render_parts(self, value, parts=None):
        """Recursively append the XML serialization of *value* to *parts*.

        Strings are escaped, numbers stringified, datetimes formatted as
        S3-style UTC timestamps, and dicts become nested elements (list
        values produce one element per item).
        """
        # Fix: the original used a mutable default argument (parts=[]),
        # which would accumulate output across calls that rely on the
        # default. Use the None-sentinel idiom instead.
        if parts is None:
            parts = []
        if isinstance(value, (unicode, bytes)):
            parts.append(escape.xhtml_escape(value))
        elif isinstance(value, int) or isinstance(value, long):
            parts.append(str(value))
        elif isinstance(value, datetime.datetime):
            parts.append(value.strftime("%Y-%m-%dT%H:%M:%S.000Z"))
        elif isinstance(value, dict):
            for name, subvalue in value.iteritems():
                if not isinstance(subvalue, list):
                    subvalue = [subvalue]
                for subsubvalue in subvalue:
                    parts.append('<' + escape.utf8(name) + '>')
                    self._render_parts(subsubvalue, parts)
                    parts.append('</' + escape.utf8(name) + '>')
        else:
            raise Exception("Unknown S3 value type %r", value)

    def _object_path(self, bucket, object_name):
        """Return the absolute on-disk path for an object.

        With bucket_depth > 0, objects are sharded into subdirectories
        named by prefixes of the md5 of the object name (2 hex chars per
        level) to avoid filesystem per-directory file limits.
        """
        if self.application.bucket_depth < 1:
            return os.path.abspath(os.path.join(
                self.application.directory, bucket, object_name))
        hash = hashlib.md5(object_name).hexdigest()
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket))
        for i in range(self.application.bucket_depth):
            path = os.path.join(path, hash[:2 * (i + 1)])
        return os.path.join(path, object_name)


class RootHandler(BaseRequestHandler):
    """GET / lists all buckets (ListAllMyBucketsResult)."""

    def get(self):
        names = os.listdir(self.application.directory)
        buckets = []
        for name in names:
            path = os.path.join(self.application.directory, name)
            info = os.stat(path)
            buckets.append({
                "Name": name,
                "CreationDate": datetime.datetime.utcfromtimestamp(
                    info.st_ctime),
            })
        self.render_xml({"ListAllMyBucketsResult": {
            "Buckets": {"Bucket": buckets},
        }})


class BucketHandler(BaseRequestHandler):
    """GET/PUT/DELETE on a bucket: list, create, remove."""

    def get(self, bucket_name):
        """List a bucket's keys (ListBucketResult).

        Supports the standard S3 ``prefix``, ``marker`` and ``max-keys``
        query arguments, plus a non-standard ``terse`` flag that omits
        per-key stat information.
        """
        prefix = self.get_argument("prefix", u"")
        marker = self.get_argument("marker", u"")
        max_keys = int(self.get_argument("max-keys", 50000))
        path = os.path.abspath(os.path.join(self.application.directory,
                                            bucket_name))
        terse = int(self.get_argument("terse", 0))
        # Path-traversal guard: refuse anything resolving outside the root.
        if not path.startswith(self.application.directory) or \
           not os.path.isdir(path):
            raise web.HTTPError(404)
        object_names = []
        for root, dirs, files in os.walk(path):
            for file_name in files:
                object_names.append(os.path.join(root, file_name))
        # Strip the bucket path plus any hash-shard directory components
        # (2*(i+1) hex chars + '/' per depth level) to recover key names.
        skip = len(path) + 1
        for i in range(self.application.bucket_depth):
            skip += 2 * (i + 1) + 1
        object_names = [n[skip:] for n in object_names]
        object_names.sort()
        contents = []

        start_pos = 0
        if marker:
            start_pos = bisect.bisect_right(object_names, marker, start_pos)
        if prefix:
            start_pos = bisect.bisect_left(object_names, prefix, start_pos)

        truncated = False
        for object_name in object_names[start_pos:]:
            if not object_name.startswith(prefix):
                break
            if len(contents) >= max_keys:
                truncated = True
                break
            object_path = self._object_path(bucket_name, object_name)
            c = {"Key": object_name}
            if not terse:
                info = os.stat(object_path)
                c.update({
                    "LastModified": datetime.datetime.utcfromtimestamp(
                        info.st_mtime),
                    "Size": info.st_size,
                })
            contents.append(c)
            marker = object_name
        self.render_xml({"ListBucketResult": {
            "Name": bucket_name,
            "Prefix": prefix,
            "Marker": marker,
            "MaxKeys": max_keys,
            "IsTruncated": truncated,
            "Contents": contents,
        }})

    def put(self, bucket_name):
        """Create a bucket; 403 if it already exists or escapes the root."""
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket_name))
        if not path.startswith(self.application.directory) or \
           os.path.exists(path):
            raise web.HTTPError(403)
        os.makedirs(path)
        self.finish()

    def delete(self, bucket_name):
        """Delete an empty bucket; 404 if missing, 403 if non-empty."""
        path = os.path.abspath(os.path.join(
            self.application.directory, bucket_name))
        if not path.startswith(self.application.directory) or \
           not os.path.isdir(path):
            raise web.HTTPError(404)
        if len(os.listdir(path)) > 0:
            raise web.HTTPError(403)
        os.rmdir(path)
        self.set_status(204)
        self.finish()


class ObjectHandler(BaseRequestHandler):
    """GET/PUT/DELETE on a single object within a bucket."""

    def get(self, bucket, object_name):
        object_name = urllib.unquote(object_name)
        path = self._object_path(bucket, object_name)
        if not path.startswith(self.application.directory) or \
           not os.path.isfile(path):
            raise web.HTTPError(404)
        info = os.stat(path)
        self.set_header("Content-Type", "application/unknown")
        self.set_header("Last-Modified", datetime.datetime.utcfromtimestamp(
            info.st_mtime))
        object_file = open(path, "rb")
        try:
            self.finish(object_file.read())
        finally:
            object_file.close()

    def put(self, bucket, object_name):
        object_name = urllib.unquote(object_name)
        bucket_dir = os.path.abspath(os.path.join(
            self.application.directory, bucket))
        if not bucket_dir.startswith(self.application.directory) or \
           not os.path.isdir(bucket_dir):
            raise web.HTTPError(404)
        path = self._object_path(bucket, object_name)
        if not path.startswith(bucket_dir) or os.path.isdir(path):
            raise web.HTTPError(403)
        directory = os.path.dirname(path)
        if not os.path.exists(directory):
            os.makedirs(directory)
        # Fix: close the file even if writing the body raises, so a failed
        # PUT does not leak a file descriptor (GET already did this).
        object_file = open(path, "w")
        try:
            object_file.write(self.request.body)
        finally:
            object_file.close()
        self.finish()

    def delete(self, bucket, object_name):
        object_name = urllib.unquote(object_name)
        path = self._object_path(bucket, object_name)
        if not path.startswith(self.application.directory) or \
           not os.path.isfile(path):
            raise web.HTTPError(404)
        os.unlink(path)
        self.set_status(204)
        self.finish()
apache-2.0
popazerty/enigma2
lib/python/Screens/DVD.py
2
20284
import os from enigma import eTimer, iPlayableService, iServiceInformation, eServiceReference, iServiceKeys, getDesktop from Screens.Screen import Screen from Screens.MessageBox import MessageBox from Screens.ChoiceBox import ChoiceBox from Screens.HelpMenu import HelpableScreen from Screens.InfoBarGenerics import InfoBarSeek, InfoBarPVRState, InfoBarCueSheetSupport, InfoBarShowHide, InfoBarNotifications, InfoBarAudioSelection, InfoBarSubtitleSupport, InfoBarLongKeyDetection from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap from Components.Label import Label from Components.Pixmap import Pixmap from Components.ServiceEventTracker import ServiceEventTracker, InfoBarBase from Components.config import config from Tools.Directories import pathExists from Components.Harddisk import harddiskmanager lastpath = "" class DVDSummary(Screen): def __init__(self, session, parent): Screen.__init__(self, session, parent) self["Title"] = Label("") self["Time"] = Label("") self["Chapter"] = Label("") def updateChapter(self, chapter): self["Chapter"].setText(chapter) def setTitle(self, title): self["Title"].setText(title) class DVDOverlay(Screen): def __init__(self, session, args = None, height = None): desktop_size = getDesktop(0).size() w = desktop_size.width() h = desktop_size.height() if height is not None: h = height DVDOverlay.skin = """<screen name="DVDOverlay" position="0,0" size="%d,%d" flags="wfNoBorder" zPosition="-1" backgroundColor="transparent" />""" %(w, h) Screen.__init__(self, session) class ChapterZap(Screen): skin = """ <screen name="ChapterZap" position="235,255" size="250,60" title="Chapter" > <widget name="chapter" position="35,15" size="110,25" font="Regular;23" /> <widget name="number" position="145,15" size="80,25" halign="right" font="Regular;23" /> </screen>""" def quit(self): self.Timer.stop() self.close(0) def keyOK(self): self.Timer.stop() self.close(int(self["number"].getText())) def keyNumberGlobal(self, number): 
self.Timer.start(3000, True) #reset timer self.field += str(number) self["number"].setText(self.field) if len(self.field) >= 4: self.keyOK() def __init__(self, session, number): Screen.__init__(self, session) self.field = str(number) self["chapter"] = Label(_("Chapter:")) self["number"] = Label(self.field) self["actions"] = NumberActionMap( [ "SetupActions" ], { "cancel": self.quit, "ok": self.keyOK, "1": self.keyNumberGlobal, "2": self.keyNumberGlobal, "3": self.keyNumberGlobal, "4": self.keyNumberGlobal, "5": self.keyNumberGlobal, "6": self.keyNumberGlobal, "7": self.keyNumberGlobal, "8": self.keyNumberGlobal, "9": self.keyNumberGlobal, "0": self.keyNumberGlobal }) self.Timer = eTimer() self.Timer.callback.append(self.keyOK) self.Timer.start(3000, True) class DVDPlayer(Screen, InfoBarBase, InfoBarNotifications, InfoBarSeek, InfoBarPVRState, InfoBarShowHide, HelpableScreen, InfoBarCueSheetSupport, InfoBarAudioSelection, InfoBarSubtitleSupport, InfoBarLongKeyDetection): ALLOW_SUSPEND = Screen.SUSPEND_PAUSES ENABLE_RESUME_SUPPORT = True def save_infobar_seek_config(self): self.saved_config_speeds_forward = config.seek.speeds_forward.value self.saved_config_speeds_backward = config.seek.speeds_backward.value self.saved_config_enter_forward = config.seek.enter_forward.value self.saved_config_enter_backward = config.seek.enter_backward.value self.saved_config_seek_on_pause = config.seek.on_pause.value self.saved_config_seek_speeds_slowmotion = config.seek.speeds_slowmotion.value def change_infobar_seek_config(self): config.seek.speeds_forward.value = [2, 4, 6, 8, 16, 32, 64] config.seek.speeds_backward.value = [2, 4, 6, 8, 16, 32, 64] config.seek.speeds_slowmotion.value = [ 2, 3, 4, 6 ] config.seek.enter_forward.value = "2" config.seek.enter_backward.value = "2" config.seek.on_pause.value = "play" def restore_infobar_seek_config(self): config.seek.speeds_forward.value = self.saved_config_speeds_forward config.seek.speeds_backward.value = 
self.saved_config_speeds_backward config.seek.speeds_slowmotion.value = self.saved_config_seek_speeds_slowmotion config.seek.enter_forward.value = self.saved_config_enter_forward config.seek.enter_backward.value = self.saved_config_enter_backward config.seek.on_pause.value = self.saved_config_seek_on_pause def __init__(self, session, dvd_device=None, dvd_filelist=None, args=None): if not dvd_filelist: dvd_filelist = [] Screen.__init__(self, session) InfoBarBase.__init__(self) InfoBarNotifications.__init__(self) InfoBarCueSheetSupport.__init__(self, actionmap = "MediaPlayerCueSheetActions") InfoBarShowHide.__init__(self) InfoBarAudioSelection.__init__(self) InfoBarSubtitleSupport.__init__(self) HelpableScreen.__init__(self) self.save_infobar_seek_config() self.change_infobar_seek_config() InfoBarSeek.__init__(self) InfoBarPVRState.__init__(self) InfoBarLongKeyDetection.__init__(self) self.oldService = self.session.nav.getCurrentlyPlayingServiceOrGroup() self.session.nav.stopService() self["audioLabel"] = Label("n/a") self["subtitleLabel"] = Label("") self["angleLabel"] = Label("") self["chapterLabel"] = Label("") self["anglePix"] = Pixmap() self["anglePix"].hide() self.last_audioTuple = None self.last_subtitleTuple = None self.last_angleTuple = None self.totalChapters = 0 self.currentChapter = 0 self.totalTitles = 0 self.currentTitle = 0 self.__event_tracker = ServiceEventTracker(screen=self, eventmap= { iPlayableService.evStopped: self.__serviceStopped, iPlayableService.evUser: self.__timeUpdated, iPlayableService.evUser+1: self.__statePlay, iPlayableService.evUser+2: self.__statePause, iPlayableService.evUser+3: self.__osdFFwdInfoAvail, iPlayableService.evUser+4: self.__osdFBwdInfoAvail, iPlayableService.evUser+5: self.__osdStringAvail, iPlayableService.evUser+6: self.__osdAudioInfoAvail, iPlayableService.evUser+7: self.__osdSubtitleInfoAvail, iPlayableService.evUser+8: self.__chapterUpdated, iPlayableService.evUser+9: self.__titleUpdated, 
iPlayableService.evUser+11: self.__menuOpened, iPlayableService.evUser+12: self.__menuClosed, iPlayableService.evUser+13: self.__osdAngleInfoAvail }) self["DVDPlayerDirectionActions"] = ActionMap(["DirectionActions"], { #MENU KEY DOWN ACTIONS "left": self.keyLeft, "right": self.keyRight, "up": self.keyUp, "down": self.keyDown, #MENU KEY REPEATED ACTIONS "leftRepeated": self.doNothing, "rightRepeated": self.doNothing, "upRepeated": self.doNothing, "downRepeated": self.doNothing, #MENU KEY UP ACTIONS "leftUp": self.doNothing, "rightUp": self.doNothing, "upUp": self.doNothing, "downUp": self.doNothing, }) self["OkCancelActions"] = ActionMap(["OkCancelActions"], { "ok": self.keyOk, "cancel": self.keyCancel, }) self["DVDPlayerPlaybackActions"] = HelpableActionMap(self, "DVDPlayerActions", { #PLAYER ACTIONS "dvdMenu": (self.enterDVDMenu, _("show DVD main menu")), "toggleInfo": (self.toggleInfo, _("toggle time, chapter, audio, subtitle info")), "nextChapter": (self.nextChapter, _("forward to the next chapter")), "prevChapter": (self.prevChapter, _("rewind to the previous chapter")), "nextTitle": (self.nextTitle, _("jump forward to the next title")), "prevTitle": (self.prevTitle, _("jump back to the previous title")), "tv": (self.askLeavePlayer, _("exit DVD player or return to file browser")), "dvdAudioMenu": (self.enterDVDAudioMenu, _("(show optional DVD audio menu)")), "AudioSelection": (self.enterAudioSelection, _("Select audio track")), "nextAudioTrack": (self.nextAudioTrack, _("switch to the next audio track")), "nextSubtitleTrack": (self.nextSubtitleTrack, _("switch to the next subtitle language")), "nextAngle": (self.nextAngle, _("switch to the next angle")), "seekBeginning": self.seekBeginning, }, -2) self["NumberActions"] = NumberActionMap( [ "NumberActions"], { "1": self.keyNumberGlobal, "2": self.keyNumberGlobal, "3": self.keyNumberGlobal, "4": self.keyNumberGlobal, "5": self.keyNumberGlobal, "6": self.keyNumberGlobal, "7": self.keyNumberGlobal, "8": 
self.keyNumberGlobal, "9": self.keyNumberGlobal, "0": self.keyNumberGlobal, }) self.onClose.append(self.__onClose) try: from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier hotplugNotifier.append(self.hotplugCB) except: pass self.autoplay = dvd_device or dvd_filelist if dvd_device: self.physicalDVD = True else: self.scanHotplug() self.dvd_filelist = dvd_filelist self.onFirstExecBegin.append(self.opened) self.service = None self.in_menu = False def keyNumberGlobal(self, number): print "You pressed number " + str(number) self.session.openWithCallback(self.numberEntered, ChapterZap, number) def numberEntered(self, retval): # print self.servicelist if retval > 0: self.zapToNumber(retval) def getServiceInterface(self, iface): service = self.service if service: attr = getattr(service, iface, None) if callable(attr): return attr() return None def __serviceStopped(self): self.dvdScreen.hide() subs = self.getServiceInterface("subtitle") if subs: subs.disableSubtitles(self.session.current_dialog.instance) def serviceStarted(self): #override InfoBarShowHide function self.dvdScreen.show() def doEofInternal(self, playing): if self.in_menu: self.hide() def __menuOpened(self): self.hide() self.in_menu = True self["NumberActions"].setEnabled(False) def __menuClosed(self): self.show() self.in_menu = False self["NumberActions"].setEnabled(True) def setChapterLabel(self): chapterLCD = "Menu" chapterOSD = "DVD Menu" if self.currentTitle > 0: chapterLCD = "%s %d" % (_("Chap."), self.currentChapter) chapterOSD = "DVD %s %d/%d" % (_("Chapter"), self.currentChapter, self.totalChapters) chapterOSD += " (%s %d/%d)" % (_("Title"), self.currentTitle, self.totalTitles) self["chapterLabel"].setText(chapterOSD) try: self.session.summary.updateChapter(chapterLCD) except: pass def doNothing(self): pass def toggleInfo(self): if not self.in_menu: self.toggleShow() print "toggleInfo" def __timeUpdated(self): print "timeUpdated" def __statePlay(self): print "statePlay" def 
__statePause(self): print "statePause" def __osdFFwdInfoAvail(self): self.setChapterLabel() print "FFwdInfoAvail" def __osdFBwdInfoAvail(self): self.setChapterLabel() print "FBwdInfoAvail" def __osdStringAvail(self): print "StringAvail" def __osdAudioInfoAvail(self): info = self.getServiceInterface("info") audioTuple = info and info.getInfoObject(iServiceInformation.sUser+6) print "AudioInfoAvail ", repr(audioTuple) if audioTuple: #audioString = "%d: %s (%s)" % (audioTuple[0], audioTuple[1],audioTuple[2]) audioString = "%s (%s)" % (audioTuple[1],audioTuple[2]) self["audioLabel"].setText(audioString) if audioTuple != self.last_audioTuple and not self.in_menu: self.doShow() self.last_audioTuple = audioTuple def __osdSubtitleInfoAvail(self): info = self.getServiceInterface("info") subtitleTuple = info and info.getInfoObject(iServiceInformation.sUser+7) print "SubtitleInfoAvail ", repr(subtitleTuple) if subtitleTuple: subtitleString = "" if subtitleTuple[0] is not 0: #subtitleString = "%d: %s" % (subtitleTuple[0], subtitleTuple[1]) subtitleString = "%s" % subtitleTuple[1] self["subtitleLabel"].setText(subtitleString) if subtitleTuple != self.last_subtitleTuple and not self.in_menu: self.doShow() self.last_subtitleTuple = subtitleTuple def __osdAngleInfoAvail(self): info = self.getServiceInterface("info") angleTuple = info and info.getInfoObject(iServiceInformation.sUser+8) print "AngleInfoAvail ", repr(angleTuple) if angleTuple: angleString = "" if angleTuple[1] > 1: angleString = "%d / %d" % (angleTuple[0], angleTuple[1]) self["anglePix"].show() else: self["anglePix"].hide() self["angleLabel"].setText(angleString) if angleTuple != self.last_angleTuple and not self.in_menu: self.doShow() self.last_angleTuple = angleTuple def __chapterUpdated(self): info = self.getServiceInterface("info") if info: self.currentChapter = info.getInfo(iServiceInformation.sCurrentChapter) self.totalChapters = info.getInfo(iServiceInformation.sTotalChapters) self.setChapterLabel() print 
"__chapterUpdated: %d/%d" % (self.currentChapter, self.totalChapters) def __titleUpdated(self): info = self.getServiceInterface("info") if info: self.currentTitle = info.getInfo(iServiceInformation.sCurrentTitle) self.totalTitles = info.getInfo(iServiceInformation.sTotalTitles) self.setChapterLabel() print "__titleUpdated: %d/%d" % (self.currentTitle, self.totalTitles) if not self.in_menu: self.doShow() def askLeavePlayer(self): if self.autoplay: self.exitCB((None,"exit")) return choices = [(_("Exit"), "exit"), (_("Continue playing"), "play")] if self.physicalDVD: cur = self.session.nav.getCurrentlyPlayingServiceOrGroup() if cur and not cur.toString().endswith(harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD())): choices.insert(0,(_("Play DVD"), "playPhysical" )) self.session.openWithCallback(self.exitCB, ChoiceBox, title=_("Leave DVD player?"), list = choices) def sendKey(self, key): keys = self.getServiceInterface("keys") if keys: keys.keyPressed(key) return keys def enterAudioSelection(self): self.audioSelection() def nextAudioTrack(self): self.sendKey(iServiceKeys.keyUser) def nextSubtitleTrack(self): self.sendKey(iServiceKeys.keyUser+1) def enterDVDAudioMenu(self): self.sendKey(iServiceKeys.keyUser+2) def nextChapter(self): self.sendKey(iServiceKeys.keyUser+3) def prevChapter(self): self.sendKey(iServiceKeys.keyUser+4) def nextTitle(self): self.sendKey(iServiceKeys.keyUser+5) def prevTitle(self): self.sendKey(iServiceKeys.keyUser+6) def enterDVDMenu(self): self.sendKey(iServiceKeys.keyUser+7) def nextAngle(self): self.sendKey(iServiceKeys.keyUser+8) def seekBeginning(self): if self.service: seekable = self.getSeek() if seekable: seekable.seekTo(0) def zapToNumber(self, number): if self.service: seekable = self.getSeek() if seekable: print "seek to chapter %d" % number seekable.seekChapter(number) # MENU ACTIONS def keyRight(self): self.sendKey(iServiceKeys.keyRight) def keyLeft(self): self.sendKey(iServiceKeys.keyLeft) def keyUp(self): 
self.sendKey(iServiceKeys.keyUp) def keyDown(self): self.sendKey(iServiceKeys.keyDown) def keyOk(self): if self.sendKey(iServiceKeys.keyOk) and not self.in_menu: self.toggleInfo() def keyCancel(self): self.askLeavePlayer() def opened(self): if self.autoplay and self.dvd_filelist: # opened via autoplay self.FileBrowserClosed(self.dvd_filelist[0]) elif self.autoplay and self.physicalDVD: self.playPhysicalCB(True) elif self.physicalDVD: # opened from menu with dvd in drive self.session.openWithCallback(self.playPhysicalCB, MessageBox, text=_("Do you want to play DVD in drive?"), timeout=5 ) def playPhysicalCB(self, answer): if answer: harddiskmanager.setDVDSpeed(harddiskmanager.getCD(), 1) self.FileBrowserClosed(harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD())) def FileBrowserClosed(self, val): curref = self.session.nav.getCurrentlyPlayingServiceOrGroup() print "FileBrowserClosed", val if val is None: self.askLeavePlayer() else: isopathname = "/VIDEO_TS.ISO" if os.path.exists(val + isopathname): val += isopathname newref = eServiceReference(4369, 0, val) print "play", newref.toString() if curref is None or curref != newref: if newref.toString().endswith("/VIDEO_TS") or newref.toString().endswith("/"): names = newref.toString().rsplit("/",3) if names[2].startswith("Disk ") or names[2].startswith("DVD "): name = str(names[1]) + " - " + str(names[2]) else: name = names[2] print "setting name to: ", self.service newref.setName(str(name)) # Construct a path for the IFO header assuming it exists ifofilename = val if not ifofilename.upper().endswith("/VIDEO_TS"): ifofilename += "/VIDEO_TS" files = [("/VIDEO_TS.IFO", 0x100), ("/VTS_01_0.IFO", 0x100), ("/VTS_01_0.IFO", 0x200)] # ( filename, offset ) for name in files: (status, isNTSC, isLowResolution) = self.readVideoAtributes( ifofilename, name ) if status: break height = getDesktop(0).size().height() print "[DVD] height:", height if isNTSC: height = height * 576 / 480 print "[DVD] NTSC height:", height if 
isLowResolution: height *= 2 print "[DVD] LowResolution:", height self.dvdScreen = self.session.instantiateDialog(DVDOverlay, height=height) self.session.nav.playService(newref) self.service = self.session.nav.getCurrentService() print "self.service", self.service print "cur_dlg", self.session.current_dialog subs = self.getServiceInterface("subtitle") if subs: subs.enableSubtitles(self.dvdScreen.instance, None) def readVideoAtributes(self, isofilename, checked_file): (name, offset) = checked_file isofilename += name print "[DVD] file", name status = False isNTSC = False isLowResolution = False ifofile = None try: # Try to read the IFO header to determine PAL/NTSC format and the resolution ifofile = open(isofilename, "r") ifofile.seek(offset) video_attr_high = ord(ifofile.read(1)) if video_attr_high != 0: status = True video_attr_low = ord(ifofile.read(1)) print "[DVD] %s: video_attr_high = %x" % ( name, video_attr_high ), "video_attr_low = %x" % video_attr_low isNTSC = (video_attr_high & 0x10 == 0) isLowResolution = (video_attr_low & 0x18 == 0x18) except: # If the service is an .iso or .img file we assume it is PAL # Sorry we cannot open image files here. 
print "[DVD] Cannot read file or is ISO/IMG" finally: if ifofile is not None: ifofile.close() return status, isNTSC, isLowResolution def exitCB(self, answer): if answer is not None: if answer[1] == "exit": if self.service: self.service = None self.close() elif answer[1] == "playPhysical": if self.service: self.service = None self.playPhysicalCB(True) else: pass def __onClose(self): self.restore_infobar_seek_config() self.session.nav.playService(self.oldService) try: from Plugins.SystemPlugins.Hotplug.plugin import hotplugNotifier hotplugNotifier.remove(self.hotplugCB) except: pass def playLastCB(self, answer): # overwrite infobar cuesheet function print "playLastCB", answer, self.resume_point if self.service: if answer: seekable = self.getSeek() if seekable: seekable.seekTo(self.resume_point) pause = self.service.pause() pause.unpause() self.hideAfterResume() def showAfterCuesheetOperation(self): if not self.in_menu: self.show() def createSummary(self): return DVDSummary #override some InfoBarSeek functions def doEof(self): self.setSeekState(self.SEEK_STATE_PLAY) def calcRemainingTime(self): return 0 def hotplugCB(self, dev, media_state): print "[hotplugCB]", dev, media_state if dev == harddiskmanager.getCD(): if media_state == "1": self.scanHotplug() else: self.physicalDVD = False def scanHotplug(self): devicepath = harddiskmanager.getAutofsMountpoint(harddiskmanager.getCD()) if pathExists(devicepath): from Components.Scanner import scanDevice res = scanDevice(devicepath) list = [ (r.description, r, res[r], self.session) for r in res ] if list: (desc, scanner, files, session) = list[0] for file in files: print file if file.mimetype == "video/x-dvd": print "physical dvd found:", devicepath self.physicalDVD = True return self.physicalDVD = False
gpl-2.0
dlazz/ansible
lib/ansible/modules/storage/netapp/na_elementsw_snapshot_restore.py
52
6143
#!/usr/bin/python # (c) 2018, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or # https://www.gnu.org/licenses/gpl-3.0.txt) """ Element Software Snapshot Restore """ from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = ''' module: na_elementsw_snapshot_restore short_description: NetApp Element Software Restore Snapshot extends_documentation_fragment: - netapp.solidfire version_added: '2.7' author: NetApp Ansible Team (@carchi8py) <ng-ansibleteam@netapp.com> description: - Element OS Cluster restore snapshot to volume. options: src_volume_id: description: - ID or Name of source active volume. required: true src_snapshot_id: description: - ID or Name of an existing snapshot. required: true dest_volume_name: description: - New Name of destination for restoring the snapshot required: true account_id: description: - Account ID or Name of Parent/Source Volume. 
required: true ''' EXAMPLES = """ - name: Restore snapshot to volume tags: - elementsw_create_snapshot_restore na_elementsw_snapshot_restore: hostname: "{{ elementsw_hostname }}" username: "{{ elementsw_username }}" password: "{{ elementsw_password }}" account_id: ansible-1 src_snapshot_id: snapshot_20171021 src_volume_id: volume-playarea dest_volume_name: dest-volume-area """ RETURN = """ msg: description: Success message returned: success type: str """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils from ansible.module_utils.netapp_elementsw_module import NaElementSWModule HAS_SF_SDK = netapp_utils.has_sf_sdk() class ElementOSSnapshotRestore(object): """ Element OS Restore from snapshot """ def __init__(self): self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( account_id=dict(required=True, type='str'), src_volume_id=dict(required=True, type='str'), dest_volume_name=dict(required=True, type='str'), src_snapshot_id=dict(required=True, type='str') )) self.module = AnsibleModule( argument_spec=self.argument_spec, supports_check_mode=True ) input_params = self.module.params self.account_id = input_params['account_id'] self.src_volume_id = input_params['src_volume_id'] self.dest_volume_name = input_params['dest_volume_name'] self.src_snapshot_id = input_params['src_snapshot_id'] if HAS_SF_SDK is False: self.module.fail_json( msg="Unable to import the SolidFire Python SDK") else: self.sfe = netapp_utils.create_sf_connection(module=self.module) self.elementsw_helper = NaElementSWModule(self.sfe) # add telemetry attributes self.attributes = self.elementsw_helper.set_element_attributes(source='na_elementsw_snapshot_restore') def get_account_id(self): """ Get account id if found """ try: # Update and return self.account_id self.account_id = self.elementsw_helper.account_exists(self.account_id) return 
self.account_id except Exception as err: self.module.fail_json(msg="Error: account_id %s does not exist" % self.account_id, exception=to_native(err)) def get_snapshot_id(self): """ Return snapshot details if found """ src_snapshot = self.elementsw_helper.get_snapshot(self.src_snapshot_id, self.src_volume_id) # Update and return self.src_snapshot_id if src_snapshot: self.src_snapshot_id = src_snapshot.snapshot_id # Return self.src_snapshot_id return self.src_snapshot_id return None def restore_snapshot(self): """ Restore Snapshot to Volume """ try: self.sfe.clone_volume(volume_id=self.src_volume_id, name=self.dest_volume_name, snapshot_id=self.src_snapshot_id, attributes=self.attributes) except Exception as exception_object: self.module.fail_json( msg='Error restore snapshot %s' % (to_native(exception_object)), exception=traceback.format_exc()) def apply(self): """ Check, process and initiate restore snapshot to volume operation """ changed = False result_message = None snapshot_detail = None self.get_account_id() src_vol_id = self.elementsw_helper.volume_exists(self.src_volume_id, self.account_id) if src_vol_id is not None: # Update self.src_volume_id self.src_volume_id = src_vol_id if self.get_snapshot_id() is not None: # Addressing idempotency by comparing volume does not exist with same volume name if self.elementsw_helper.volume_exists(self.dest_volume_name, self.account_id) is None: self.restore_snapshot() changed = True else: result_message = "No changes requested, Skipping changes" else: self.module.fail_json(msg="Snapshot id not found %s" % self.src_snapshot_id) else: self.module.fail_json(msg="Volume id not found %s" % self.src_volume_id) self.module.exit_json(changed=changed, msg=result_message) def main(): """ Main function """ na_elementsw_snapshot_restore = ElementOSSnapshotRestore() na_elementsw_snapshot_restore.apply() if __name__ == '__main__': main()
gpl-3.0
ivelum/django-debug-toolbar
debug_toolbar/panels/__init__.py
22
5054
from __future__ import absolute_import, unicode_literals import warnings from django.template.loader import render_to_string from debug_toolbar import settings as dt_settings from debug_toolbar.utils import get_name_from_obj class Panel(object): """ Base class for panels. """ def __init__(self, toolbar): self.toolbar = toolbar # Private panel properties @property def panel_id(self): return self.__class__.__name__ @property def enabled(self): # Check to see if settings has a default value for it if get_name_from_obj(self) in dt_settings.CONFIG['DISABLE_PANELS']: default = 'off' else: default = 'on' # The user's cookies should override the default value return self.toolbar.request.COOKIES.get('djdt' + self.panel_id, default) == 'on' # Titles and content @property def nav_title(self): """ Title shown in the side bar. Defaults to :attr:`title`. """ return self.title @property def nav_subtitle(self): """ Subtitle shown in the side bar. Defaults to the empty string. """ return '' @property def has_content(self): """ ``True`` if the panel can be displayed in full screen, ``False`` if it's only shown in the side bar. Defaults to ``True``. """ return True @property def title(self): """ Title shown in the panel when it's displayed in full screen. Mandatory, unless the panel sets :attr:`has_content` to ``False``. """ raise NotImplementedError @property def template(self): """ Template used to render :attr:`content`. Mandatory, unless the panel sets :attr:`has_content` to ``False`` or overrides `attr`:content`. """ raise NotImplementedError @property def content(self): """ Content of the panel when it's displayed in full screen. By default this renders the template defined by :attr:`template`. Statistics stored with :meth:`record_stats` are available in the template's context. """ if self.has_content: return render_to_string(self.template, self.get_stats()) # URLs for panel-specific views @classmethod def get_urls(cls): """ Return URLpatterns, if the panel has its own views. 
""" return [] # Enable and disable (expensive) instrumentation, must be idempotent def enable_instrumentation(self): """ Enable instrumentation to gather data for this panel. This usually means monkey-patching (!) or registering signal receivers. Any instrumentation with a non-negligible effect on performance should be installed by this method rather than at import time. Unless the toolbar or this panel is disabled, this method will be called early in :class:`DebugToolbarMiddleware.process_request`. It should be idempotent. """ def disable_instrumentation(self): """ Disable instrumentation to gather data for this panel. This is the opposite of :meth:`enable_instrumentation`. Unless the toolbar or this panel is disabled, this method will be called late in :class:`DebugToolbarMiddleware.process_response`. It should be idempotent. """ # Store and retrieve stats (shared between panels for no good reason) def record_stats(self, stats): """ Store data gathered by the panel. ``stats`` is a :class:`dict`. Each call to ``record_stats`` updates the statistics dictionary. """ self.toolbar.stats.setdefault(self.panel_id, {}).update(stats) def get_stats(self): """ Access data stored by the panel. Returns a :class:`dict`. """ return self.toolbar.stats.get(self.panel_id, {}) # Standard middleware methods def process_request(self, request): """ Like process_request in Django's middleware. Write panel logic related to the request there. Save data with :meth:`record_stats`. """ def process_view(self, request, view_func, view_args, view_kwargs): """ Like process_view in Django's middleware. Write panel logic related to the view there. Save data with :meth:`record_stats`. """ def process_response(self, request, response): """ Like process_response in Django's middleware. Write panel logic related to the response there. Post-process data gathered while the view executed. Save data with :meth:`record_stats`. """ # Backward-compatibility for 1.0, remove in 2.0. 
class DebugPanel(Panel): def __init__(self, *args, **kwargs): warnings.warn("DebugPanel was renamed to Panel.", DeprecationWarning) super(DebugPanel, self).__init__(*args, **kwargs)
bsd-3-clause
windedge/odoo
addons/website_quote/models/order.py
33
16099
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import api from openerp.osv import osv, fields import uuid import time import datetime import openerp.addons.decimal_precision as dp class sale_quote_template(osv.osv): _name = "sale.quote.template" _description = "Sale Quotation Template" _columns = { 'name': fields.char('Quotation Template', required=True), 'website_description': fields.html('Description', translate=True), 'quote_line': fields.one2many('sale.quote.line', 'quote_id', 'Quote Template Lines', copy=True), 'note': fields.text('Terms and conditions'), 'options': fields.one2many('sale.quote.option', 'template_id', 'Optional Products Lines', copy=True), 'number_of_days': fields.integer('Quote Duration', help='Number of days for the validaty date computation of the quotation'), } def open_template(self, cr, uid, quote_id, context=None): return { 'type': 'ir.actions.act_url', 'target': 'self', 'url': '/quote/template/%d' % quote_id[0] } class sale_quote_line(osv.osv): _name = "sale.quote.line" _description = "Quotation Template Lines" _columns = { 'quote_id': 
fields.many2one('sale.quote.template', 'Quotation Template Reference', required=True, ondelete='cascade', select=True), 'name': fields.text('Description', required=True, translate=True), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True), 'website_description': fields.related('product_id', 'product_tmpl_id', 'quote_description', string='Line Description', type='html', translate=True), 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')), 'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')), 'product_uom_qty': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')), 'product_uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True), } _defaults = { 'product_uom_qty': 1, 'discount': 0.0, } def on_change_product_id(self, cr, uid, ids, product, context=None): vals = {} product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context) name = product_obj.name if product_obj.description_sale: name += '\n' + product_obj.description_sale vals.update({ 'price_unit': product_obj.lst_price, 'product_uom_id': product_obj.uom_id.id, 'website_description': product_obj and (product_obj.quote_description or product_obj.website_description) or '', 'name': name, }) return {'value': vals} def _inject_quote_description(self, cr, uid, values, context=None): values = dict(values or {}) if not values.get('website_description') and values.get('product_id'): product = self.pool['product.product'].browse(cr, uid, values['product_id'], context=context) values['website_description'] = product.quote_description or product.website_description or '' return values def create(self, cr, uid, values, context=None): values = self._inject_quote_description(cr, uid, values, context) ret = super(sale_quote_line, self).create(cr, uid, values, context=context) # hack because create don t make 
the job for a related field if values.get('website_description'): self.write(cr, uid, ret, {'website_description': values['website_description']}, context=context) return ret def write(self, cr, uid, ids, values, context=None): values = self._inject_quote_description(cr, uid, values, context) return super(sale_quote_line, self).write(cr, uid, ids, values, context=context) class sale_order_line(osv.osv): _inherit = "sale.order.line" _description = "Sales Order Line" _columns = { 'website_description': fields.html('Line Description'), 'option_line_id': fields.one2many('sale.order.option', 'line_id', 'Optional Products Lines'), } def _inject_quote_description(self, cr, uid, values, context=None): values = dict(values or {}) if not values.get('website_description') and values.get('product_id'): product = self.pool['product.product'].browse(cr, uid, values['product_id'], context=context) values['website_description'] = product.quote_description or product.website_description return values def create(self, cr, uid, values, context=None): values = self._inject_quote_description(cr, uid, values, context) ret = super(sale_order_line, self).create(cr, uid, values, context=context) # hack because create don t make the job for a related field if values.get('website_description'): self.write(cr, uid, ret, {'website_description': values['website_description']}, context=context) return ret def write(self, cr, uid, ids, values, context=None): values = self._inject_quote_description(cr, uid, values, context) return super(sale_order_line, self).write(cr, uid, ids, values, context=context) class sale_order(osv.osv): _inherit = 'sale.order' def _get_total(self, cr, uid, ids, name, arg, context=None): res = {} for order in self.browse(cr, uid, ids, context=context): total = 0.0 for line in order.order_line: total += line.price_subtotal + line.price_unit * ((line.discount or 0.0) / 100.0) * line.product_uom_qty res[order.id] = total return res _columns = { 'access_token': 
fields.char('Security Token', required=True, copy=False), 'template_id': fields.many2one('sale.quote.template', 'Quote Template', readonly=True, states={'draft': [('readonly', False)], 'sent': [('readonly', False)]}), 'website_description': fields.html('Description'), 'options' : fields.one2many('sale.order.option', 'order_id', 'Optional Products Lines', copy=True), 'validity_date': fields.date('Expiry Date'), 'amount_undiscounted': fields.function(_get_total, string='Amount Before Discount', type="float", digits_compute=dp.get_precision('Account')) } _defaults = { 'access_token': lambda self, cr, uid, ctx={}: str(uuid.uuid4()) } def open_quotation(self, cr, uid, quote_id, context=None): quote = self.browse(cr, uid, quote_id[0], context=context) return { 'type': 'ir.actions.act_url', 'target': 'self', 'url': '/quote/%s' % (quote.id) } def onchange_template_id(self, cr, uid, ids, template_id, partner=False, fiscal_position=False, context=None): if not template_id: return True if context is None: context = {} context = dict(context, lang=self.pool.get('res.partner').browse(cr, uid, partner, context).lang) lines = [(5,)] quote_template = self.pool.get('sale.quote.template').browse(cr, uid, template_id, context=context) for line in quote_template.quote_line: res = self.pool.get('sale.order.line').product_id_change(cr, uid, False, False, line.product_id.id, line.product_uom_qty, line.product_uom_id.id, line.product_uom_qty, line.product_uom_id.id, line.name, partner, False, True, time.strftime('%Y-%m-%d'), False, fiscal_position, True, context) data = res.get('value', {}) if 'tax_id' in data: data['tax_id'] = [(6, 0, data['tax_id'])] data.update({ 'name': line.name, 'price_unit': line.price_unit, 'discount': line.discount, 'product_uom_qty': line.product_uom_qty, 'product_id': line.product_id.id, 'product_uom': line.product_uom_id.id, 'website_description': line.website_description, 'state': 'draft', }) lines.append((0, 0, data)) options = [] for option in 
quote_template.options: options.append((0, 0, { 'product_id': option.product_id.id, 'name': option.name, 'quantity': option.quantity, 'uom_id': option.uom_id.id, 'price_unit': option.price_unit, 'discount': option.discount, 'website_description': option.website_description, })) date = False if quote_template.number_of_days > 0: date = (datetime.datetime.now() + datetime.timedelta(quote_template.number_of_days)).strftime("%Y-%m-%d") data = {'order_line': lines, 'website_description': quote_template.website_description, 'note': quote_template.note, 'options': options, 'validity_date': date} return {'value': data} def recommended_products(self, cr, uid, ids, context=None): order_line = self.browse(cr, uid, ids[0], context=context).order_line product_pool = self.pool.get('product.product') products = [] for line in order_line: products += line.product_id.product_tmpl_id.recommended_products(context=context) return products def get_access_action(self, cr, uid, id, context=None): """ Override method that generated the link to access the document. Instead of the classic form view, redirect to the online quote if exists. 
""" quote = self.browse(cr, uid, id, context=context) if not quote.template_id: return super(sale_order, self).get_access_action(cr, uid, id, context=context) return { 'type': 'ir.actions.act_url', 'url': '/quote/%s' % id, 'target': 'self', 'res_id': id, } def action_quotation_send(self, cr, uid, ids, context=None): action = super(sale_order, self).action_quotation_send(cr, uid, ids, context=context) ir_model_data = self.pool.get('ir.model.data') quote_template_id = self.read(cr, uid, ids, ['template_id'], context=context)[0]['template_id'] if quote_template_id: try: template_id = ir_model_data.get_object_reference(cr, uid, 'website_quote', 'email_template_edi_sale')[1] except ValueError: pass else: action['context'].update({ 'default_template_id': template_id, 'default_use_template': True }) return action class sale_quote_option(osv.osv): _name = "sale.quote.option" _description = "Quote Option" _columns = { 'template_id': fields.many2one('sale.quote.template', 'Quotation Template Reference', ondelete='cascade', select=True, required=True), 'name': fields.text('Description', required=True, translate=True), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)], required=True), 'website_description': fields.html('Option Description', translate=True), 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')), 'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')), 'uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True), 'quantity': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')), } _defaults = { 'quantity': 1, } def on_change_product_id(self, cr, uid, ids, product, context=None): vals = {} product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context) vals.update({ 'price_unit': product_obj.list_price, 'website_description': 
product_obj.product_tmpl_id.quote_description, 'name': product_obj.name, 'uom_id': product_obj.product_tmpl_id.uom_id.id, }) if product_obj.description_sale: vals['name'] += '\n'+product_obj.description_sale return {'value': vals} class sale_order_option(osv.osv): _name = "sale.order.option" _description = "Sale Options" _columns = { 'order_id': fields.many2one('sale.order', 'Sale Order Reference', ondelete='cascade', select=True), 'line_id': fields.many2one('sale.order.line', on_delete="set null"), 'name': fields.text('Description', required=True), 'product_id': fields.many2one('product.product', 'Product', domain=[('sale_ok', '=', True)]), 'website_description': fields.html('Line Description'), 'price_unit': fields.float('Unit Price', required=True, digits_compute= dp.get_precision('Product Price')), 'discount': fields.float('Discount (%)', digits_compute= dp.get_precision('Discount')), 'uom_id': fields.many2one('product.uom', 'Unit of Measure ', required=True), 'quantity': fields.float('Quantity', required=True, digits_compute= dp.get_precision('Product UoS')), } _defaults = { 'quantity': 1, } # TODO master: to remove, replaced by onchange of the new api def on_change_product_id(self, cr, uid, ids, product, context=None): vals = {} if not product: return vals product_obj = self.pool.get('product.product').browse(cr, uid, product, context=context) vals.update({ 'price_unit': product_obj.list_price, 'website_description': product_obj and (product_obj.quote_description or product_obj.website_description), 'name': product_obj.name, 'uom_id': product_obj.product_tmpl_id.uom_id.id, }) if product_obj.description_sale: vals['name'] += '\n'+product_obj.description_sale return {'value': vals} @api.onchange('product_id') def _onchange_product_id(self): product = self.product_id.with_context(lang=self.order_id.partner_id.lang) self.price_unit = product.list_price self.website_description = product.quote_description or product.website_description self.name = product.name if 
product.description_sale: self.name += '\n' + product.description_sale self.uom_id = product.product_tmpl_id.uom_id if product and self.order_id.pricelist_id: partner_id = self.order_id.partner_id.id pricelist = self.order_id.pricelist_id.id self.price_unit = self.order_id.pricelist_id.price_get(product.id, self.quantity, partner_id)[pricelist] class product_template(osv.Model): _inherit = "product.template" _columns = { 'website_description': fields.html('Description for the website'), # hack, if website_sale is not installed 'quote_description': fields.html('Description for the quote'), }
agpl-3.0
phoenixsbk/kvmmgr
packaging/setup/ovirt_engine_setup/engine_common/constants.py
2
8564
# # ovirt-engine-setup -- ovirt engine setup # Copyright (C) 2013-2014 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Constants.""" import os import platform import gettext _ = lambda m: gettext.dgettext(message=m, domain='ovirt-engine-setup') from otopi import util from . import config from ovirt_engine_setup import constants as osetupcons from ovirt_engine_setup.constants import classproperty from ovirt_engine_setup.constants import osetupattrsclass from ovirt_engine_setup.constants import osetupattrs @util.export class FileLocations(object): SYSCONFDIR = '/etc' OVIRT_ENGINE_COMMON_DATADIR = config.ENGINE_COMMON_DATADIR OVIRT_ENGINE_PKIDIR = config.ENGINE_PKIDIR OVIRT_ENGINE_PKICERTSDIR = os.path.join( OVIRT_ENGINE_PKIDIR, 'certs', ) OVIRT_ENGINE_PKIKEYSDIR = os.path.join( OVIRT_ENGINE_PKIDIR, 'keys', ) DIR_HTTPD = os.path.join( osetupcons.FileLocations.SYSCONFDIR, 'httpd', ) HTTPD_CONF_OVIRT_ROOT = os.path.join( DIR_HTTPD, 'conf.d', 'ovirt-engine-root-redirect.conf', ) HTTPD_CONF_OVIRT_ROOT_TEMPLATE = os.path.join( osetupcons.FileLocations.OVIRT_SETUP_DATADIR, 'conf', 'ovirt-engine-root-redirect.conf.in', ) HTTPD_CONF_SSL = os.path.join( DIR_HTTPD, 'conf.d', 'ssl.conf', ) JBOSS_HOME = os.path.join( osetupcons.FileLocations.DATADIR, 'jboss-as', ) OVIRT_ENGINE_SYSCTL = os.path.join( SYSCONFDIR, 'sysctl.d', 'ovirt-postgresql.conf', ) OVIRT_ENGINE_PKI_APACHE_CA_CERT = os.path.join( OVIRT_ENGINE_PKIDIR, 'apache-ca.pem', ) OVIRT_ENGINE_PKI_APACHE_CERT = 
os.path.join( OVIRT_ENGINE_PKICERTSDIR, 'apache.cer', ) OVIRT_ENGINE_PKI_APACHE_KEY = os.path.join( OVIRT_ENGINE_PKIKEYSDIR, 'apache.key.nopass', ) @util.export class Defaults(object): DEFAULT_SYSTEM_USER_ROOT = 'root' DEFAULT_SYSTEM_USER_VDSM = 'vdsm' DEFAULT_SYSTEM_GROUP_KVM = 'kvm' DEFAULT_SYSTEM_USER_APACHE = 'apache' DEFAULT_SYSTEM_USER_POSTGRES = 'postgres' @classproperty def DEFAULT_SYSTEM_SHMMAX(self): SHMMAX = { 'x86_64': 68719476736, 'i686': 4294967295, 'ppc64': 137438953472, 'default': 4294967295, } return SHMMAX.get(platform.machine(), SHMMAX['default']) DEFAULT_PKI_COUNTRY = 'US' DEFAULT_PKI_STORE_PASS = 'mypass' DEFAULT_NETWORK_HTTP_PORT = 80 DEFAULT_NETWORK_HTTPS_PORT = 443 DEFAULT_NETWORK_JBOSS_HTTP_PORT = 8080 DEFAULT_NETWORK_JBOSS_HTTPS_PORT = 8443 DEFAULT_NETWORK_JBOSS_AJP_PORT = 8702 DEFAULT_NETWORK_JBOSS_DEBUG_ADDRESS = '127.0.0.1:8787' DEFAULT_HTTPD_SERVICE = 'httpd' DEFAULT_POSTGRES_PROVISIONING_PGDATA_DIR = os.path.join( osetupcons.FileLocations.LOCALSTATEDIR, 'lib', 'pgsql', 'data', ) DEFAULT_POSTGRES_PROVISIONING_PG_CONF = os.path.join( DEFAULT_POSTGRES_PROVISIONING_PGDATA_DIR, 'postgresql.conf', ) DEFAULT_POSTGRES_PROVISIONING_PG_HBA = os.path.join( DEFAULT_POSTGRES_PROVISIONING_PGDATA_DIR, 'pg_hba.conf', ) DEFAULT_POSTGRES_PROVISIONING_PG_VERSION = os.path.join( DEFAULT_POSTGRES_PROVISIONING_PGDATA_DIR, 'PG_VERSION', ) DEFAULT_POSTGRES_PROVISIONING_SERVICE = 'postgresql' DEFAULT_POSTGRES_PROVISIONING_MAX_CONN = 150 DEFAULT_POSTGRES_PROVISIONING_LISTEN_ADDRESS = "'*'" @util.export class Stages(object): APACHE_RESTART = 'osetup.apache.core.restart' CORE_ENGINE_START = 'osetup.core.engine.start' DB_CONNECTION_SETUP = 'osetup.db.connection.setup' DB_CONNECTION_CUSTOMIZATION = 'osetup.db.connection.customization' DB_OWNERS_CONNECTIONS_CUSTOMIZED = \ 'osetup.db.owners.connections.customized' DB_CONNECTION_STATUS = 'osetup.db.connection.status' DB_CREDENTIALS_AVAILABLE_EARLY = 'osetup.db.connection.credentials.early' 
DB_CREDENTIALS_AVAILABLE_LATE = 'osetup.db.connection.credentials.late' DB_CONNECTION_AVAILABLE = 'osetup.db.connection.available' DB_SCHEMA = 'osetup.db.schema' CONFIG_DB_ENCRYPTION_AVAILABLE = 'osetup.config.encryption.available' NETWORK_OWNERS_CONFIG_CUSTOMIZED = \ 'osetup.network.owners.config.customized' DIALOG_TITLES_S_ALLINONE = 'osetup.dialog.titles.allinone.start' DIALOG_TITLES_S_APACHE = 'osetup.dialog.titles.apache.start' DIALOG_TITLES_S_DATABASE = 'osetup.dialog.titles.database.start' DIALOG_TITLES_S_PKI = 'osetup.dialog.titles.pki.start' DIALOG_TITLES_E_ALLINONE = 'osetup.dialog.titles.allinone.end' DIALOG_TITLES_E_APACHE = 'osetup.dialog.titles.apache.end' DIALOG_TITLES_E_DATABASE = 'osetup.dialog.titles.database.end' DIALOG_TITLES_E_PKI = 'osetup.dialog.titles.pki.end' DIALOG_TITLES_S_ENGINE = 'osetup.dialog.titles.engine.start' DIALOG_TITLES_E_ENGINE = 'osetup.dialog.titles.engine.end' RENAME_PKI_CONF_MISC = 'osetup.rename.pki.conf.misc' @util.export @util.codegen @osetupattrsclass class SystemEnv(object): USER_APACHE = 'OVESETUP_SYSTEM/userApache' USER_POSTGRES = 'OVESETUP_SYSTEM/userPostgres' USER_ROOT = 'OVESETUP_SYSTEM/userRoot' USER_VDSM = 'OVESETUP_SYSTEM/userVdsm' GROUP_KVM = 'OVESETUP_SYSTEM/groupKvm' SHMMAX = 'OVESETUP_SYSTEM/shmmax' @util.export @util.codegen @osetupattrsclass class ConfigEnv(object): JAVA_HOME = 'OVESETUP_CONFIG/javaHome' JBOSS_HOME = 'OVESETUP_CONFIG/jbossHome' PUBLIC_HTTP_PORT = 'OVESETUP_CONFIG/publicHttpPort' # internal use PUBLIC_HTTPS_PORT = 'OVESETUP_CONFIG/publicHttpsPort' # internal use HTTP_PORT = 'OVESETUP_CONFIG/httpPort' HTTPS_PORT = 'OVESETUP_CONFIG/httpsPort' JBOSS_HTTP_PORT = 'OVESETUP_CONFIG/jbossHttpPort' JBOSS_HTTPS_PORT = 'OVESETUP_CONFIG/jbossHttpsPort' JBOSS_AJP_PORT = 'OVESETUP_CONFIG/jbossAjpPort' JBOSS_DIRECT_HTTP_PORT = 'OVESETUP_CONFIG/jbossDirectHttpPort' JBOSS_DIRECT_HTTPS_PORT = 'OVESETUP_CONFIG/jbossDirectHttpsPort' JBOSS_DEBUG_ADDRESS = 'OVESETUP_CONFIG/jbossDebugAddress' JBOSS_NEEDED = 
'OVESETUP_CONFIG/jbossNeeded' JAVA_NEEDED = 'OVESETUP_CONFIG/javaNeeded' @util.export @util.codegen @osetupattrsclass class ProvisioningEnv(object): @osetupattrs( answerfile=True, summary=True, description=_('Configure local Engine database'), ) def POSTGRES_PROVISIONING_ENABLED(self): return 'OVESETUP_PROVISIONING/postgresProvisioningEnabled' POSTGRES_CONF = 'OVESETUP_PROVISIONING/postgresConf' POSTGRES_PG_HBA = 'OVESETUP_PROVISIONING/postgresPgHba' POSTGRES_PG_VERSION = 'OVESETUP_PROVISIONING/postgresPgVersion' POSTGRES_SERVICE = 'OVESETUP_PROVISIONING/postgresService' POSTGRES_MAX_CONN = 'OVESETUP_PROVISIONING/postgresMaxConn' POSTGRES_LISTEN_ADDRESS = 'OVESETUP_PROVISIONING/postgresListenAddress' @util.export @util.codegen @osetupattrsclass class ApacheEnv(object): @osetupattrs( postinstallfile=True, ) def CONFIGURED(self): return 'OVESETUP_APACHE/configured' @osetupattrs( answerfile=True, summary=True, description=_('Set application as default page'), ) def CONFIGURE_ROOT_REDIRECTION(self): return 'OVESETUP_APACHE/configureRootRedirection' @osetupattrs( answerfile=True, summary=True, description=_('Configure Apache SSL'), ) def CONFIGURE_SSL(self): return 'OVESETUP_APACHE/configureSsl' CONFIGURE_ROOT_REDIRECTIOND_DEFAULT = \ 'OVESETUP_APACHE/configureRootRedirectionDefault' ENABLE = 'OVESETUP_APACHE/enable' HTTPD_CONF_OVIRT_ROOT = 'OVESETUP_APACHE/configFileOvirtRoot' HTTPD_CONF_SSL = 'OVESETUP_APACHE/configFileSsl' HTTPD_SERVICE = 'OVESETUP_APACHE/httpdService' NEED_RESTART = 'OVESETUP_APACHE/needRestart' # vim: expandtab tabstop=4 shiftwidth=4
apache-2.0
nkcr/WebIndex
app/venv/lib/python3.5/site-packages/werkzeug/contrib/sessions.py
256
12577
# -*- coding: utf-8 -*- r""" werkzeug.contrib.sessions ~~~~~~~~~~~~~~~~~~~~~~~~~ This module contains some helper classes that help one to add session support to a python WSGI application. For full client-side session storage see :mod:`~werkzeug.contrib.securecookie` which implements a secure, client-side session storage. Application Integration ======================= :: from werkzeug.contrib.sessions import SessionMiddleware, \ FilesystemSessionStore app = SessionMiddleware(app, FilesystemSessionStore()) The current session will then appear in the WSGI environment as `werkzeug.session`. However it's recommended to not use the middleware but the stores directly in the application. However for very simple scripts a middleware for sessions could be sufficient. This module does not implement methods or ways to check if a session is expired. That should be done by a cronjob and storage specific. For example to prune unused filesystem sessions one could check the modified time of the files. It sessions are stored in the database the new() method should add an expiration timestamp for the session. For better flexibility it's recommended to not use the middleware but the store and session object directly in the application dispatching:: session_store = FilesystemSessionStore() def application(environ, start_response): request = Request(environ) sid = request.cookies.get('cookie_name') if sid is None: request.session = session_store.new() else: request.session = session_store.get(sid) response = get_the_response_object(request) if request.session.should_save: session_store.save(request.session) response.set_cookie('cookie_name', request.session.sid) return response(environ, start_response) :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. 
""" import re import os import tempfile from os import path from time import time from random import random from hashlib import sha1 from pickle import dump, load, HIGHEST_PROTOCOL from werkzeug.datastructures import CallbackDict from werkzeug.utils import dump_cookie, parse_cookie from werkzeug.wsgi import ClosingIterator from werkzeug.posixemulation import rename from werkzeug._compat import PY2, text_type from werkzeug.filesystem import get_filesystem_encoding _sha1_re = re.compile(r'^[a-f0-9]{40}$') def _urandom(): if hasattr(os, 'urandom'): return os.urandom(30) return text_type(random()).encode('ascii') def generate_key(salt=None): if salt is None: salt = repr(salt).encode('ascii') return sha1(b''.join([ salt, str(time()).encode('ascii'), _urandom() ])).hexdigest() class ModificationTrackingDict(CallbackDict): __slots__ = ('modified',) def __init__(self, *args, **kwargs): def on_update(self): self.modified = True self.modified = False CallbackDict.__init__(self, on_update=on_update) dict.update(self, *args, **kwargs) def copy(self): """Create a flat copy of the dict.""" missing = object() result = object.__new__(self.__class__) for name in self.__slots__: val = getattr(self, name, missing) if val is not missing: setattr(result, name, val) return result def __copy__(self): return self.copy() class Session(ModificationTrackingDict): """Subclass of a dict that keeps track of direct object changes. Changes in mutable structures are not tracked, for those you have to set `modified` to `True` by hand. """ __slots__ = ModificationTrackingDict.__slots__ + ('sid', 'new') def __init__(self, data, sid, new=False): ModificationTrackingDict.__init__(self, data) self.sid = sid self.new = new def __repr__(self): return '<%s %s%s>' % ( self.__class__.__name__, dict.__repr__(self), self.should_save and '*' or '' ) @property def should_save(self): """True if the session should be saved. .. 
versionchanged:: 0.6 By default the session is now only saved if the session is modified, not if it is new like it was before. """ return self.modified class SessionStore(object): """Baseclass for all session stores. The Werkzeug contrib module does not implement any useful stores besides the filesystem store, application developers are encouraged to create their own stores. :param session_class: The session class to use. Defaults to :class:`Session`. """ def __init__(self, session_class=None): if session_class is None: session_class = Session self.session_class = session_class def is_valid_key(self, key): """Check if a key has the correct format.""" return _sha1_re.match(key) is not None def generate_key(self, salt=None): """Simple function that generates a new session key.""" return generate_key(salt) def new(self): """Generate a new session.""" return self.session_class({}, self.generate_key(), True) def save(self, session): """Save a session.""" def save_if_modified(self, session): """Save if a session class wants an update.""" if session.should_save: self.save(session) def delete(self, session): """Delete a session.""" def get(self, sid): """Get a session for this sid or a new session object. This method has to check if the session key is valid and create a new session if that wasn't the case. """ return self.session_class({}, sid, True) #: used for temporary files by the filesystem session store _fs_transaction_suffix = '.__wz_sess' class FilesystemSessionStore(SessionStore): """Simple example session store that saves sessions on the filesystem. This store works best on POSIX systems and Windows Vista / Windows Server 2008 and newer. .. versionchanged:: 0.6 `renew_missing` was added. Previously this was considered `True`, now the default changed to `False` and it can be explicitly deactivated. :param path: the path to the folder used for storing the sessions. If not provided the default temporary directory is used. 
:param filename_template: a string template used to give the session a filename. ``%s`` is replaced with the session id. :param session_class: The session class to use. Defaults to :class:`Session`. :param renew_missing: set to `True` if you want the store to give the user a new sid if the session was not yet saved. """ def __init__(self, path=None, filename_template='werkzeug_%s.sess', session_class=None, renew_missing=False, mode=0o644): SessionStore.__init__(self, session_class) if path is None: path = tempfile.gettempdir() self.path = path if isinstance(filename_template, text_type) and PY2: filename_template = filename_template.encode( get_filesystem_encoding()) assert not filename_template.endswith(_fs_transaction_suffix), \ 'filename templates may not end with %s' % _fs_transaction_suffix self.filename_template = filename_template self.renew_missing = renew_missing self.mode = mode def get_session_filename(self, sid): # out of the box, this should be a strict ASCII subset but # you might reconfigure the session object to have a more # arbitrary string. if isinstance(sid, text_type) and PY2: sid = sid.encode(get_filesystem_encoding()) return path.join(self.path, self.filename_template % sid) def save(self, session): fn = self.get_session_filename(session.sid) fd, tmp = tempfile.mkstemp(suffix=_fs_transaction_suffix, dir=self.path) f = os.fdopen(fd, 'wb') try: dump(dict(session), f, HIGHEST_PROTOCOL) finally: f.close() try: rename(tmp, fn) os.chmod(fn, self.mode) except (IOError, OSError): pass def delete(self, session): fn = self.get_session_filename(session.sid) try: os.unlink(fn) except OSError: pass def get(self, sid): if not self.is_valid_key(sid): return self.new() try: f = open(self.get_session_filename(sid), 'rb') except IOError: if self.renew_missing: return self.new() data = {} else: try: try: data = load(f) except Exception: data = {} finally: f.close() return self.session_class(data, sid, False) def list(self): """Lists all sessions in the store. 
.. versionadded:: 0.6 """ before, after = self.filename_template.split('%s', 1) filename_re = re.compile(r'%s(.{5,})%s$' % (re.escape(before), re.escape(after))) result = [] for filename in os.listdir(self.path): #: this is a session that is still being saved. if filename.endswith(_fs_transaction_suffix): continue match = filename_re.match(filename) if match is not None: result.append(match.group(1)) return result class SessionMiddleware(object): """A simple middleware that puts the session object of a store provided into the WSGI environ. It automatically sets cookies and restores sessions. However a middleware is not the preferred solution because it won't be as fast as sessions managed by the application itself and will put a key into the WSGI environment only relevant for the application which is against the concept of WSGI. The cookie parameters are the same as for the :func:`~dump_cookie` function just prefixed with ``cookie_``. Additionally `max_age` is called `cookie_age` and not `cookie_max_age` because of backwards compatibility. 
""" def __init__(self, app, store, cookie_name='session_id', cookie_age=None, cookie_expires=None, cookie_path='/', cookie_domain=None, cookie_secure=None, cookie_httponly=False, environ_key='werkzeug.session'): self.app = app self.store = store self.cookie_name = cookie_name self.cookie_age = cookie_age self.cookie_expires = cookie_expires self.cookie_path = cookie_path self.cookie_domain = cookie_domain self.cookie_secure = cookie_secure self.cookie_httponly = cookie_httponly self.environ_key = environ_key def __call__(self, environ, start_response): cookie = parse_cookie(environ.get('HTTP_COOKIE', '')) sid = cookie.get(self.cookie_name, None) if sid is None: session = self.store.new() else: session = self.store.get(sid) environ[self.environ_key] = session def injecting_start_response(status, headers, exc_info=None): if session.should_save: self.store.save(session) headers.append(('Set-Cookie', dump_cookie(self.cookie_name, session.sid, self.cookie_age, self.cookie_expires, self.cookie_path, self.cookie_domain, self.cookie_secure, self.cookie_httponly))) return start_response(status, headers, exc_info) return ClosingIterator(self.app(environ, injecting_start_response), lambda: self.store.save_if_modified(session))
mit
tschijnmo/programmabletuple
programmabletuple/tests/programmabletuple_test.py
1
7639
""" Unit test for the programmable tuple metaclass """ import unittest import itertools from programmabletuple import ProgrammableTuple, ProgrammableExpr # # The programmable tuples class definition # ======================================== # # Some utility functions # ---------------------- # def _get_full_name(first_name, last_name): """Gets the full name""" return ', '.join([last_name, first_name]) def _get_sui(self): """Gets the age in conventional Asian way Here the new borns starts at one year old. """ return self.age + 1 # # The actual classes # ------------------ # class PersonPT(ProgrammableTuple, auto_defining=True): """A toy person class It just has three simple fields, first name, last name, and age, the full name is also given in a data field. The defining fields are going to be assigned automatically. """ __data_fields__ = [ 'full_name', ] def __init__(self, first_name, last_name, age): """Initialize a person The full name will be set as well. """ self.full_name = _get_full_name(first_name, last_name) sui = property(_get_sui) class PersonPE(ProgrammableExpr): """A toy person class as programmable expression It is just like the above class. Just the defining fields are going to be assigned manually and it is not a tuple subclass. """ __data_fields__ = [ 'full_name', ] def __init__(self, first_name, last_name, age): """Initialize a person The full name will be set as well. 
""" self.first_name = first_name self.last_name = last_name self.age = age self.full_name = _get_full_name(first_name, last_name) sui = property(_get_sui) # # Subclass definition # =================== # class JohnsonsPT(PersonPT): """Members of the Johnson family""" def __init__(self, first_name, age): self.super().__init__(first_name, 'Johnson', age) def is_johnsons(self): return True class JohnsonsPE(PersonPE): """Members of the Johnson family""" def __init__(self, first_name, age): self.super().__init__(first_name, 'Johnson', age) def is_johnsons(self): return True # # The tests # ========= # class ImmutableClassTest(unittest.TestCase): """Test suite for the programmable tuple metaclass""" def setUp(self): self.jsmith_pt = PersonPT('John', 'Smith', 49) self.jsmith_pe = PersonPE('John', 'Smith', 49) self.jsmiths = [self.jsmith_pt, self.jsmith_pe] self.ajohnson_pt = JohnsonsPT('Andy', 8) self.ajohnson_pe = JohnsonsPE('Andy', 8) # # Tests of the essential behaviour of programmable tuples # def test_access(self): """Tests the access of the fields of the person""" for jsmith in self.jsmiths: self.assertEqual(jsmith.first_name, 'John') self.assertEqual(jsmith.last_name, 'Smith') self.assertEqual(jsmith.age, 49) self.assertEqual(jsmith.full_name, 'Smith, John') def test_method(self): """Tests if the method defined in the class can be called""" for jsmith in self.jsmiths: self.assertEqual(jsmith.sui, 50) def test_immutability(self): """Tests if the attributes are really not mutable""" def mutate_pt(): self.jsmith_pt.age = 15 def mutate_pe(): self.jsmith_pe.age = 15 self.assertRaises(AttributeError, mutate_pt) self.assertRaises(AttributeError, mutate_pe) def test_subclassing(self): """Tests if the subclassing is working properly""" for andy in [self.ajohnson_pt, self.ajohnson_pe]: self.assertEqual(andy.first_name, 'Andy') self.assertEqual(andy.last_name, 'Johnson') self.assertEqual(andy.age, 8) self.assertEqual(andy.sui, 9) self.assertEqual(andy.full_name, 'Johnson, 
Andy') self.assertTrue(andy.is_johnsons()) def test_hashing(self): """Tests the correctness of hashing and equality testing""" equal_ones = [] # Each entry is a list of equal ones. Different # entries are not equal. for i in self.jsmiths: equal_ones.append([ i, type(i)('John', 'Smith', 49) ]) equal_ones.append([type(i)('John', 'Smith', 3)]) for i, v in enumerate(equal_ones): # Assert that each pair within the chunk are equal and the same # hash. for j, k in itertools.combinations(v, 2): self.assertEqual(hash(j), hash(k)) self.assertEqual(j, k) continue # Assert than each member of the chunk is not equal and has # different hash with anything else. for j in v: for k in itertools.chain.from_iterable(equal_ones[i + 1:]): self.assertNotEqual(hash(j), hash(k)) self.assertNotEqual(j, k) continue continue # Continue to the next chunk. continue # # Tests of the utilities in the mixin class # def test_update(self): """Tests updating a defining attribute""" for jsmith in self.jsmiths: doug = jsmith._update(first_name='Doug') self.assertEqual(doug.first_name, 'Doug') self.assertEqual(doug.last_name, 'Smith') self.assertEqual(doug.full_name, 'Smith, Doug') self.assertEqual(doug.age, 49) def test_replace(self): """Tests forced replacement of an attribute""" for jsmith in self.jsmiths: doug_inconsistent = jsmith._replace(first_name='Doug') self.assertEqual(doug_inconsistent.first_name, 'Doug') self.assertEqual(doug_inconsistent.last_name, 'Smith') self.assertEqual(doug_inconsistent.full_name, 'Smith, John') def test_formating(self): """Tests the formatting as repr and str""" # We need to test all combinations, repr and str, with PT and PE. 
repr_args = "(first_name='John', last_name='Smith', age=49)" str_args = "(first_name=John, last_name=Smith, age=49)" for head, person in [ ('PersonPT', self.jsmith_pt), ('PersonPE', self.jsmith_pe) ]: self.assertEqual(repr(person), head + repr_args) self.assertEqual(str(person), head + str_args) continue def test_asdict(self): """Tests the asdict methods Here only the naive encoding and decoding are tested, not the complicated recursive cases. """ for jsmith in self.jsmiths: # Tests the conversion to dictionaries. def_dict = jsmith._asdict() full_dict = jsmith._asdict(full=True) for i in [def_dict, full_dict]: self.assertEqual(i['first_name'], 'John') self.assertEqual(i['last_name'], 'Smith') self.assertEqual(i['age'], 49) self.assertEqual(len(def_dict), 3) self.assertEqual(full_dict['full_name'], 'Smith, John') self.assertEqual(len(full_dict), 4) # Tests the loading from dictionaries. resolved_jsmith = jsmith._load_from_dict(def_dict) self.assertEqual(jsmith, resolved_jsmith) resolved_jsmith = jsmith._load_from_dict(full_dict, full=True) self.assertEqual(jsmith, resolved_jsmith)
mit
vicente-gonzalez-ruiz/YAPT
workshops/programacion_python_ESO/using_sprites.py
1
4288
import pygame import threading import time from lib import colors as color # In Pygame, the Sprite class helps to detect collisions between # sprites. This is not our case (yet) because there is only a bouncing # ball in the screen, and the limits of the screen should not be # considered as an huge sprite with a big hole inside. However, we are # going to use it only to know how to instantiate the class (allocate # a sprite-ball). class Ball(pygame.sprite.Sprite): def __init__(self, ball_color, ball_width, ball_height, initial_x_coordinate, initial_y_coordinate): # Call the parent class (Sprite) constructor. Compulsory. super().__init__() # Size of the sprite. self.image = pygame.Surface([ball_width, ball_height]) # Color of the transparent pixels of the sprite. self.image.fill(color.black) self.image.set_colorkey(color.black) # Fetch the rectangle object that has the dimensions of the # image. This should return [ball_width, ball_height] (see # https://www.pygame.org/docs/ref/surface.html#pygame.Surface.get_rect). Notice # that "self.rect", which controls the position of the sprite # in the screen, is a attribute used by the parent class to # blit the sprite, so, you cannot change its name. self.rect = self.image.get_rect() # Draw the (squared) ball. pygame.draw.rect(self.image, ball_color, [self.rect.x, self.rect.y, ball_width, ball_height]) # Initial position of the ball in the screen. self.rect.x = initial_x_coordinate self.rect.y = initial_y_coordinate # Initial direction of the ball. self.x_direction_step = 1 # Go to the right, one pixel self.y_direction_step = 1 # Go to bottom, one pixel # This method controls the sprite behaviour # (https://www.pygame.org/docs/ref/sprite.html#pygame.sprite.Sprite.update) # and we will use it to control were to move (blit) the ball. It # is called by Sprite.draw() (see below). 
def update(self): self.rect.x += self.x_direction_step self.rect.y += self.y_direction_step #print(f"{self.x_coordinate} {self.y_coordinate}") # Basic initialization stuff (audio and video). pygame.mixer.pre_init(44100, -16, 1, 512) pygame.init() # Create the screen. screen_width = 800 screen_height = 600 screen = pygame.display.set_mode((screen_width, screen_height)) pygame.display.set_caption("A bouncing squared sprite") # Sound of the ball when it bounces off the walls. ping_sound = pygame.mixer.Sound(file="4391__noisecollector__pongblipf-5.wav") # Create the (sprite) ball. ball_height = 16 ball_width = 16 # Maximum FPS max_FPS = 60 # Place of the starting ball. initial_x_coordinate = screen_width//2 - ball_width//2 initial_y_coordinate = 3*screen_height//4 - ball_height//2 print(f"{initial_x_coordinate} {initial_y_coordinate}") # The ball sprite. ball = Ball(color.white, ball_width, ball_height, initial_x_coordinate, initial_y_coordinate) #ball = Ball(color.white, ball_width, ball_height, 0, 0) # All sprites of this list are drawn by a single call of Sprite.call() # (see below). 
all_sprites_list = pygame.sprite.Group() all_sprites_list.add(ball) clock = pygame.time.Clock() running = True def print_outputs(): while running: FPS = clock.get_fps() # Frames Per Second print(f"FPS={FPS:03.2f}") time.sleep(1) # 1 second print("Goodbye") print_outputs__thread = threading.Thread(target = print_outputs) print_outputs__thread.start() while running: for event in pygame.event.get(): if event.type == pygame.QUIT: running = False all_sprites_list.update() if (ball.rect.x + ball_width) > screen_width or ball.rect.x < 0: ball.x_direction_step = -ball.x_direction_step ping_sound.play() elif (ball.rect.y + ball_height) > screen_height or ball.rect.y < 0: ball.y_direction_step = -ball.y_direction_step ping_sound.play() screen.fill(color.black) all_sprites_list.draw(screen) pygame.display.update() clock.tick(max_FPS) # Set max FPS print_outputs__thread.join() # Waits until the thread terminates pygame.quit()
cc0-1.0
NarlikarLab/DIVERSITY
weblogoMod/test_corebio.py
2
1385
#!/usr/bin/env python import unittest def suite(): modules_to_test = ( 'test_corebio.test_array_io', 'test_corebio.test_astral', 'test_corebio.test_clustal_io', 'test_corebio.test_data', 'test_corebio.test_db', 'test_corebio.test_dssp', 'test_corebio.test_fasta_io', 'test_corebio.test_genbank_io', 'test_corebio.test_intelligenetics_io', 'test_corebio.test_matrix', 'test_corebio.test_moremath', 'test_corebio.test_msf_io', 'test_corebio.test_nbrf_io', 'test_corebio.test_nexus', 'test_corebio.test_nexus_io', 'test_corebio.test_null_io', 'test_corebio.test_phylip_io', 'test_corebio.test_plain_io', 'test_corebio.test_ssearch_io', 'test_corebio.test_scop', 'test_corebio.test_secstruc', 'test_corebio.test_seq', 'test_corebio.test_seq_io', 'test_corebio.test_stockholm_io', 'test_corebio.test_stride', 'test_corebio.test_table_io', 'test_corebio.test_transform', 'test_corebio.test_utils', ) alltests = unittest.TestSuite() for module in modules_to_test : alltests.addTest(unittest.defaultTestLoader.loadTestsFromName(module)) return alltests if __name__ == '__main__': unittest.main(defaultTest='suite')
gpl-3.0
toontownfunserver/Panda3D-1.9.0
direct/directscripts/eggcacher.py
2
3086
############################################################################## # # eggcacher # # EggCacher searches a directory for egg files, and loads # them all into the model-cache. This is used as part of the # panda installation process. # ############################################################################## import os,sys,gc from pandac.PandaModules import * class EggCacher: def __init__(self, args): maindir = Filename.fromOsSpecific(os.getcwd()).getFullpath() ExecutionEnvironment.setEnvironmentVariable("MAIN_DIR", maindir) self.bamcache = BamCache.getGlobalPtr() self.pandaloader = PandaLoader() self.loaderopts = LoaderOptions() if (self.bamcache.getActive() == 0): print "The model cache is not currently active." print "You must set a model-cache-dir in your config file." sys.exit(1) self.parseArgs(args) files = self.scanPaths(self.paths) self.processFiles(files) def parseArgs(self, args): self.concise = 0 self.pzkeep = 0 while len(args): if (args[0]=="--concise"): self.concise = 1 args = args[1:] elif (args[0]=="--pzkeep"): self.pzkeep = 1 args = args[1:] else: break if (len(args) < 1): print "Usage: eggcacher options file-or-directory" sys.exit(1) self.paths = args def scanPath(self, eggs, path): if (os.path.exists(path)==0): print "No such file or directory: "+path return if (os.path.isdir(path)): for f in os.listdir(path): self.scanPath(eggs, os.path.join(path,f)) return if (path.endswith(".egg")): size = os.path.getsize(path) eggs.append((path,size)) return if (path.endswith(".egg.pz")): size = os.path.getsize(path) if (self.pzkeep): eggs.append((path,size)) else: eggs.append((path[:-3],size)) def scanPaths(self, paths): eggs = [] for path in paths: abs = os.path.abspath(path) self.scanPath(eggs,path) return eggs def processFiles(self, files): total = 0 for (path,size) in files: total += size progress = 0 for (path,size) in files: fn = Filename.fromOsSpecific(path) cached = self.bamcache.lookup(fn,"bam") percent = (progress * 100) / total report 
= path if (self.concise): report = os.path.basename(report) print "Preprocessing Models %2d%% %s" % (percent, report) sys.stdout.flush() if (cached) and (cached.hasData()==0): self.pandaloader.loadSync(fn, self.loaderopts) gc.collect() ModelPool.releaseAllModels() TexturePool.releaseAllTextures() progress += size cacher = EggCacher(sys.argv[1:])
bsd-3-clause
jjscarafia/odoo
addons/account/__init__.py
19
1408
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import partner import account import installer import account_invoice import account_bank_statement import account_bank import account_cash_statement import account_move_line import account_analytic_line import account_financial_report import wizard import report import product import ir_sequence import company import res_currency import edi import res_config # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
sudheesh001/oh-mainline
vendor/packages/django-celery/djcelery/tests/test_backends/test_database.py
17
3043
from __future__ import absolute_import from datetime import datetime, timedelta from celery import current_app from celery import states from celery.result import AsyncResult from celery.task import PeriodicTask from celery.utils import gen_unique_id from djcelery.backends.database import DatabaseBackend from djcelery.tests.utils import unittest class SomeClass(object): def __init__(self, data): self.data = data class MyPeriodicTask(PeriodicTask): name = "c.u.my-periodic-task-244" run_every = timedelta(seconds=1) def run(self, **kwargs): return 42 class TestDatabaseBackend(unittest.TestCase): def test_backend(self): b = DatabaseBackend() tid = gen_unique_id() self.assertEqual(b.get_status(tid), states.PENDING) self.assertIsNone(b.get_result(tid)) b.mark_as_done(tid, 42) self.assertEqual(b.get_status(tid), states.SUCCESS) self.assertEqual(b.get_result(tid), 42) tid2 = gen_unique_id() result = {"foo": "baz", "bar": SomeClass(12345)} b.mark_as_done(tid2, result) # is serialized properly. rindb = b.get_result(tid2) self.assertEqual(rindb.get("foo"), "baz") self.assertEqual(rindb.get("bar").data, 12345) tid3 = gen_unique_id() try: raise KeyError("foo") except KeyError, exception: pass b.mark_as_failure(tid3, exception) self.assertEqual(b.get_status(tid3), states.FAILURE) self.assertIsInstance(b.get_result(tid3), KeyError) def test_forget(self): b = DatabaseBackend() tid = gen_unique_id() b.mark_as_done(tid, {"foo": "bar"}) x = AsyncResult(tid) self.assertEqual(x.result.get("foo"), "bar") x.forget() self.assertIsNone(x.result) def test_taskset_store(self): b = DatabaseBackend() tid = gen_unique_id() self.assertIsNone(b.restore_taskset(tid)) result = {"foo": "baz", "bar": SomeClass(12345)} b.save_taskset(tid, result) rindb = b.restore_taskset(tid) self.assertIsNotNone(rindb) self.assertEqual(rindb.get("foo"), "baz") self.assertEqual(rindb.get("bar").data, 12345) b.delete_taskset(tid) self.assertIsNone(b.restore_taskset(tid)) def test_cleanup(self): b = DatabaseBackend() 
b.TaskModel._default_manager.all().delete() ids = [gen_unique_id() for _ in xrange(3)] for i, res in enumerate((16, 32, 64)): b.mark_as_done(ids[i], res) self.assertEqual(b.TaskModel._default_manager.count(), 3) then = datetime.now() - \ current_app.conf.CELERY_TASK_RESULT_EXPIRES * 2 # Have to avoid save() because it applies the auto_now=True. b.TaskModel._default_manager.filter(task_id__in=ids[:-1]) \ .update(date_done=then) b.cleanup() self.assertEqual(b.TaskModel._default_manager.count(), 1)
agpl-3.0
zamattiac/osf.io
api/base/exceptions.py
7
8974
import httplib as http

from django.utils.translation import ugettext_lazy as _

from rest_framework import status
from rest_framework.exceptions import APIException, AuthenticationFailed


def dict_error_formatting(errors, index=None):
    """
    Formats all dictionary error messages for both single and bulk requests
    into a list of JSON-API error objects.

    :param dict errors: serializer error dict mapping field name -> message(s).
    :param index: position of the item within a bulk payload, or None for a
        single-item request (used when building the JSON pointer).
    """
    formatted_error_list = []

    # Error objects may have the following members. Title and id removed to avoid clash with "title" and "id" field errors.
    top_level_error_keys = ['links', 'status', 'code', 'detail', 'source', 'meta']

    # Resource objects must contain at least 'id' and 'type'
    resource_object_identifiers = ['type', 'id']

    if index is None:
        index = ''
    else:
        index = str(index) + '/'

    for error_key, error_description in errors.iteritems():
        if isinstance(error_description, basestring):
            error_description = [error_description]

        if error_key in top_level_error_keys:
            formatted_error_list.extend({error_key: description} for description in error_description)
        elif error_key in resource_object_identifiers:
            formatted_error_list.extend([{'source': {'pointer': '/data/{}'.format(index) + error_key}, 'detail': reason}
                                         for reason in error_description])
        elif error_key == 'non_field_errors':
            # Bug fix: the original used a dict comprehension
            # ({'detail': d for d in ...}) which collapsed every message into a
            # single {'detail': <last message>} object.  Emit one error object
            # per message instead.
            formatted_error_list.extend([{'detail': description} for description in error_description])
        else:
            formatted_error_list.extend([{'source': {'pointer': '/data/{}attributes/'.format(index) + error_key}, 'detail': reason}
                                         for reason in error_description])

    return formatted_error_list


def json_api_exception_handler(exc, context):
    """ Custom exception handler that returns errors object as an array """

    # We're deliberately not stripping html from exception detail.
    # This creates potential vulnerabilities to script injection attacks
    # when returning raw user input into error messages.
    #
    # Fortunately, Django's templating language strips markup by default,
    # but if our frontend changes we may lose that protection.
    # TODO: write tests to ensure our html frontend strips html

    # Import inside method to avoid errors when the OSF is loaded without Django
    from rest_framework.views import exception_handler
    response = exception_handler(exc, context)

    errors = []

    if response:
        message = response.data

        # Signal the client that a two-factor OTP code is required.
        if isinstance(exc, TwoFactorRequiredError):
            response['X-OSF-OTP'] = 'required; app'

        if isinstance(exc, JSONAPIException):
            errors.extend([{'source': exc.source or {}, 'detail': exc.detail, 'meta': exc.meta or {}}])
        elif isinstance(message, dict):
            errors.extend(dict_error_formatting(message, None))
        else:
            if isinstance(message, basestring):
                message = [message]
            for index, error in enumerate(message):
                if isinstance(error, dict):
                    errors.extend(dict_error_formatting(error, index))
                else:
                    errors.append({'detail': error})

        response.data = {'errors': errors}

    return response


class EndpointNotImplementedError(APIException):
    status_code = status.HTTP_501_NOT_IMPLEMENTED
    default_detail = _('This endpoint is not yet implemented.')


class ServiceUnavailableError(APIException):
    status_code = status.HTTP_503_SERVICE_UNAVAILABLE
    default_detail = _('Service is unavailable at this time.')


class JSONAPIException(APIException):
    """Inherits from the base DRF API exception and adds extra metadata to support JSONAPI error objects

    :param str detail: a human-readable explanation specific to this occurrence of the problem
    :param dict source: A dictionary containing references to the source of the error.
        See http://jsonapi.org/format/#error-objects.
        Example: ``source={'pointer': '/data/attributes/title'}``
    :param dict meta: A meta object containing non-standard meta info about the error.
    """
    status_code = status.HTTP_400_BAD_REQUEST

    def __init__(self, detail=None, source=None, meta=None):
        super(JSONAPIException, self).__init__(detail=detail)
        self.source = source
        self.meta = meta


# Custom Exceptions the Django Rest Framework does not support
class Gone(JSONAPIException):
    status_code = status.HTTP_410_GONE
    default_detail = ('The requested resource is no longer available.')


class Conflict(APIException):
    status_code = status.HTTP_409_CONFLICT
    default_detail = ('Resource identifier does not match server endpoint.')


class JSONAPIParameterException(JSONAPIException):
    # Error whose source is a query-string parameter.
    def __init__(self, detail=None, parameter=None):
        source = {
            'parameter': parameter
        }
        super(JSONAPIParameterException, self).__init__(detail=detail, source=source)


class JSONAPIAttributeException(JSONAPIException):
    # Error whose source is a resource attribute.
    def __init__(self, detail=None, attribute=None):
        source = {
            'pointer': '/data/attributes/{}'.format(attribute)
        }
        super(JSONAPIAttributeException, self).__init__(detail=detail, source=source)


class InvalidQueryStringError(JSONAPIParameterException):
    """Raised when client passes an invalid value to a query string parameter."""
    default_detail = 'Query string contains an invalid value.'
    status_code = http.BAD_REQUEST


class InvalidFilterOperator(JSONAPIParameterException):
    """Raised when client passes an invalid operator to a query param filter."""
    status_code = http.BAD_REQUEST

    def __init__(self, detail=None, value=None, valid_operators=('eq', 'lt', 'lte', 'gt', 'gte', 'contains', 'icontains')):
        if value and not detail:
            valid_operators = ', '.join(valid_operators)
            detail = "Value '{0}' is not a supported filter operator; use one of {1}.".format(
                value,
                valid_operators
            )
        super(InvalidFilterOperator, self).__init__(detail=detail, parameter='filter')


class InvalidFilterValue(JSONAPIParameterException):
    """Raised when client passes an invalid value to a query param filter."""
    status_code = http.BAD_REQUEST

    def __init__(self, detail=None, value=None, field_type=None):
        if not detail:
            detail = "Value '{0}' is not valid".format(value)
            if field_type:
                detail += " for a filter on type {0}".format(
                    field_type
                )
            detail += "."
        super(InvalidFilterValue, self).__init__(detail=detail, parameter='filter')


class InvalidFilterError(JSONAPIParameterException):
    """Raised when client passes an malformed filter in the query string."""
    default_detail = _('Query string contains a malformed filter.')
    status_code = http.BAD_REQUEST

    def __init__(self, detail=None):
        super(InvalidFilterError, self).__init__(detail=detail, parameter='filter')


class InvalidFilterComparisonType(JSONAPIParameterException):
    """Raised when client tries to filter on a field that is not a date or number type"""
    default_detail = _('Comparison operators are only supported for dates and numbers.')
    status_code = http.BAD_REQUEST


class InvalidFilterMatchType(JSONAPIParameterException):
    """Raised when client tries to do a match filter on a field that is not a string or a list"""
    default_detail = _('Match operators are only supported for strings and lists.')
    status_code = http.BAD_REQUEST


class InvalidFilterFieldError(JSONAPIParameterException):
    """Raised when client tries to filter on a field that is not supported"""
    default_detail = _('Query contained one or more filters for invalid fields.')
    status_code = http.BAD_REQUEST

    def __init__(self, detail=None, parameter=None, value=None):
        if value and not detail:
            detail = "Value '{}' is not a filterable field.".format(value)
        super(InvalidFilterFieldError, self).__init__(detail=detail, parameter=parameter)


class UnconfirmedAccountError(APIException):
    status_code = 400
    default_detail = _('Please confirm your account before using the API.')


class DeactivatedAccountError(APIException):
    status_code = 400
    default_detail = _('Making API requests with credentials associated with a deactivated account is not allowed.')


class TwoFactorRequiredError(AuthenticationFailed):
    default_detail = _('Must specify two-factor authentication OTP code.')
    pass


class InvalidModelValueError(JSONAPIException):
    status_code = 400
    default_detail = _('Invalid value in POST/PUT/PATCH request.')


class TargetNotSupportedError(Exception):
    """Raised if a TargetField is used for a resource that isn't supported."""
    pass


class RelationshipPostMakesNoChanges(Exception):
    """Raised when a post is on a relationship that already exists, so view can return a 204"""
    pass
apache-2.0
RyanYoung25/tensorflow
tensorflow/python/kernel_tests/slice_op_test.py
6
9650
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================

"""Functional tests for slice op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
from six.moves import xrange  # pylint: disable=redefined-builtin
import tensorflow as tf


class SliceTest(tf.test.TestCase):
  """Checks tf.slice / __getitem__ against NumPy slicing on CPU and GPU."""

  def _testEmpty(self, use_gpu):
    inp = np.random.rand(4, 4).astype("f")
    for k in xrange(4):
      with self.test_session(use_gpu=use_gpu):
        a = tf.constant(inp, shape=[4, 4], dtype=tf.float32)
        slice_t = a[2, k:k]
        slice_val = slice_t.eval()
      self.assertAllEqual(slice_val, inp[2, k:k])

  def testEmptyAll(self):
    self._testEmpty(use_gpu=False)
    self._testEmpty(use_gpu=True)

  def _testInt32(self, use_gpu):
    inp = np.random.rand(4, 4).astype("i")
    for k in xrange(4):
      with self.test_session(use_gpu=use_gpu):
        a = tf.constant(inp, shape=[4, 4], dtype=tf.int32)
        slice_t = a[2, k:k]
        slice_val = slice_t.eval()
      self.assertAllEqual(slice_val, inp[2, k:k])

  def testInt32(self):
    # Bug fix: this previously called self._testEmpty, so the int32 slicing
    # path was never exercised.
    self._testInt32(use_gpu=False)
    self._testInt32(use_gpu=True)

  def _testSelectAll(self, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      inp = np.random.rand(4, 4, 4, 4).astype("f")
      a = tf.constant(inp, shape=[4, 4, 4, 4],
                      dtype=tf.float32)

      slice_explicit_t = tf.slice(a, [0, 0, 0, 0], [-1, -1, -1, -1])
      slice_implicit_t = a[:, :, :, :]

      self.assertAllEqual(inp, slice_explicit_t.eval())
      self.assertAllEqual(inp, slice_implicit_t.eval())
      self.assertEqual(inp.shape, slice_explicit_t.get_shape())
      self.assertEqual(inp.shape, slice_implicit_t.get_shape())

  def testSelectAll(self):
    for _ in range(10):
      self._testSelectAll(use_gpu=False)
      self._testSelectAll(use_gpu=True)

  def _testSingleDimension(self, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      inp = np.random.rand(10).astype("f")
      a = tf.constant(inp, shape=[10], dtype=tf.float32)

      hi = np.random.random_integers(0, 9)
      scalar_t = a[hi]
      scalar_val = scalar_t.eval()
      self.assertAllEqual(scalar_val, inp[hi])

      lo = np.random.random_integers(0, hi)
      slice_t = a[lo:hi]
      slice_val = slice_t.eval()
      self.assertAllEqual(slice_val, inp[lo:hi])

  def testSingleDimension(self):
    for _ in range(10):
      self._testSingleDimension(use_gpu=False)
      self._testSingleDimension(use_gpu=True)

  def _testSliceMatrixDim0(self, x, begin, size, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      tf_ans = tf.slice(x, [begin, 0], [size, x.shape[1]]).eval()
    np_ans = x[begin:begin+size, :]
    self.assertAllEqual(tf_ans, np_ans)

  def testSliceMatrixDim0(self):
    for use_gpu in [False, True]:
      x = np.random.rand(8, 4).astype("f")
      self._testSliceMatrixDim0(x, 1, 2, use_gpu)
      self._testSliceMatrixDim0(x, 3, 3, use_gpu)
      y = np.random.rand(8, 7).astype("f")  # 7 * sizeof(float) is not aligned
      self._testSliceMatrixDim0(y, 1, 2, use_gpu)
      self._testSliceMatrixDim0(y, 3, 3, use_gpu)

  def _testIndexAndSlice(self, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      inp = np.random.rand(4, 4).astype("f")
      a = tf.constant(inp, shape=[4, 4], dtype=tf.float32)

      x, y = np.random.random_integers(0, 3, size=2).tolist()
      slice_t = a[x, 0:y]
      slice_val = slice_t.eval()
    self.assertAllEqual(slice_val, inp[x, 0:y])

  def testSingleElementAll(self):
    for _ in range(10):
      self._testIndexAndSlice(use_gpu=False)
      self._testIndexAndSlice(use_gpu=True)

  def _testSimple(self, use_gpu):
    with self.test_session(use_gpu=use_gpu) as sess:
      inp = np.random.rand(4, 4).astype("f")
      a = tf.constant([float(x) for x in inp.ravel(order="C")],
                      shape=[4, 4], dtype=tf.float32)
      slice_t = tf.slice(a, [0, 0], [2, 2])
      slice2_t = a[:2, :2]
      slice_val, slice2_val = sess.run([slice_t, slice2_t])
    self.assertAllEqual(slice_val, inp[:2, :2])
    self.assertAllEqual(slice2_val, inp[:2, :2])
    self.assertEqual(slice_val.shape, slice_t.get_shape())
    self.assertEqual(slice2_val.shape, slice2_t.get_shape())

  def testSimpleAll(self):
    self._testSimple(use_gpu=False)
    self._testSimple(use_gpu=True)

  def _testComplex(self, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      inp = np.random.rand(4, 10, 10, 4).astype("f")
      a = tf.constant(inp, dtype=tf.float32)

      # Slice along every axis, with a random (possibly empty) range on axis 2.
      x = np.random.random_integers(0, 9)
      z = np.random.random_integers(0, 9)
      y = np.random.random_integers(0, z)
      slice_t = a[:, x, y:z, :]
      self.assertAllEqual(slice_t.eval(), inp[:, x, y:z, :])

  def testComplex(self):
    for _ in range(10):
      self._testComplex(use_gpu=False)
      self._testComplex(use_gpu=True)

  def _RunAndVerifyResult(self, use_gpu):
    # Random dims of rank 6
    input_shape = np.random.randint(0, 20, size=6)
    inp = np.random.rand(*input_shape).astype("f")
    with self.test_session(use_gpu=use_gpu) as sess:
      a = tf.constant([float(x) for x in inp.ravel(order="C")],
                      shape=input_shape, dtype=tf.float32)
      indices = [0 if x == 0 else np.random.randint(x) for x in input_shape]
      sizes = [np.random.randint(0, input_shape[i] - indices[i] + 1)
               for i in range(6)]
      slice_t = tf.slice(a, indices, sizes)
      slice2_t = a[indices[0]:indices[0]+sizes[0],
                   indices[1]:indices[1]+sizes[1],
                   indices[2]:indices[2]+sizes[2],
                   indices[3]:indices[3]+sizes[3],
                   indices[4]:indices[4]+sizes[4],
                   indices[5]:indices[5]+sizes[5]]

      slice_val, slice2_val = sess.run([slice_t, slice2_t])

    expected_val = inp[indices[0]:indices[0]+sizes[0],
                       indices[1]:indices[1]+sizes[1],
                       indices[2]:indices[2]+sizes[2],
                       indices[3]:indices[3]+sizes[3],
                       indices[4]:indices[4]+sizes[4],
                       indices[5]:indices[5]+sizes[5]]
    self.assertAllEqual(slice_val, expected_val)
    self.assertAllEqual(slice2_val, expected_val)
    self.assertEqual(expected_val.shape, slice_t.get_shape())
    self.assertEqual(expected_val.shape, slice2_t.get_shape())

  def testRandom(self):
    for _ in range(10):
      self._RunAndVerifyResult(use_gpu=False)
      self._RunAndVerifyResult(use_gpu=True)

  def _testGradientSlice(self, input_shape, slice_begin, slice_size, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      num_inputs = np.prod(input_shape)
      num_grads = np.prod(slice_size)
      inp = np.random.rand(num_inputs).astype("f").reshape(input_shape)
      a = tf.constant([float(x) for x in inp.ravel(order="C")],
                      shape=input_shape, dtype=tf.float32)
      slice_t = tf.slice(a, slice_begin, slice_size)
      grads = np.random.rand(num_grads).astype("f").reshape(slice_size)
      grad_tensor = tf.constant(grads)
      grad = tf.gradients(slice_t, [a], grad_tensor)[0]
      result = grad.eval()

    # Create a zero tensor of the input shape ane place
    # the grads into the right location to compare against TensorFlow.
    np_ans = np.zeros(input_shape)
    slices = []
    for i in xrange(len(input_shape)):
      slices.append(slice(slice_begin[i], slice_begin[i] + slice_size[i]))
    np_ans[slices] = grads

    self.assertAllClose(np_ans, result)

  def _testGradientVariableSize(self, use_gpu):
    with self.test_session(use_gpu=use_gpu):
      inp = tf.constant([1.0, 2.0, 3.0], name="in")
      out = tf.slice(inp, [1], [-1])
      grad_actual = tf.gradients(out, inp)[0].eval()
    self.assertAllClose([0., 1., 1.], grad_actual)

  def _testGradientsSimple(self, use_gpu):
    # Slice the middle square out of a 4x4 input
    self._testGradientSlice([4, 4], [1, 1], [2, 2], use_gpu)

    # Slice the upper left square out of a 4x4 input
    self._testGradientSlice([4, 4], [0, 0], [2, 2], use_gpu)

    # Slice a non-square input starting from (2,1)
    self._testGradientSlice([4, 4], [2, 1], [1, 2], use_gpu)

    # Slice a 3D tensor
    self._testGradientSlice([3, 3, 3], [0, 1, 0], [2, 1, 1], use_gpu)

    # Use -1 as a slice dimension.
    self._testGradientVariableSize(use_gpu)

  def testGradientsAll(self):
    self._testGradientsSimple(use_gpu=False)
    self._testGradientsSimple(use_gpu=True)

  def testNotIterable(self):
    # NOTE(mrry): If we register __getitem__ as an overloaded
    # operator, Python will valiantly attempt to iterate over the
    # Tensor from 0 to infinity.  This test ensures that this
    # unintended behavior is prevented.
    c = tf.constant(5.0)
    with self.assertRaisesWithPredicateMatch(
        TypeError,
        lambda e: "'Tensor' object is not iterable" in str(e)):
      for _ in c:
        pass


if __name__ == "__main__":
  tf.test.main()
apache-2.0
matmutant/sl4a
python/src/Lib/ctypes/test/test_random_things.py
51
2859
from ctypes import * import unittest, sys def callback_func(arg): 42 / arg raise ValueError(arg) if sys.platform == "win32": class call_function_TestCase(unittest.TestCase): # _ctypes.call_function is deprecated and private, but used by # Gary Bishp's readline module. If we have it, we must test it as well. def test(self): from _ctypes import call_function windll.kernel32.LoadLibraryA.restype = c_void_p windll.kernel32.GetProcAddress.argtypes = c_void_p, c_char_p windll.kernel32.GetProcAddress.restype = c_void_p hdll = windll.kernel32.LoadLibraryA("kernel32") funcaddr = windll.kernel32.GetProcAddress(hdll, "GetModuleHandleA") self.failUnlessEqual(call_function(funcaddr, (None,)), windll.kernel32.GetModuleHandleA(None)) class CallbackTracbackTestCase(unittest.TestCase): # When an exception is raised in a ctypes callback function, the C # code prints a traceback. # # This test makes sure the exception types *and* the exception # value is printed correctly. # # Changed in 0.9.3: No longer is '(in callback)' prepended to the # error message - instead a additional frame for the C code is # created, then a full traceback printed. When SystemExit is # raised in a callback function, the interpreter exits. 
def capture_stderr(self, func, *args, **kw): # helper - call function 'func', and return the captured stderr import StringIO old_stderr = sys.stderr logger = sys.stderr = StringIO.StringIO() try: func(*args, **kw) finally: sys.stderr = old_stderr return logger.getvalue() def test_ValueError(self): cb = CFUNCTYPE(c_int, c_int)(callback_func) out = self.capture_stderr(cb, 42) self.failUnlessEqual(out.splitlines()[-1], "ValueError: 42") def test_IntegerDivisionError(self): cb = CFUNCTYPE(c_int, c_int)(callback_func) out = self.capture_stderr(cb, 0) self.failUnlessEqual(out.splitlines()[-1][:19], "ZeroDivisionError: ") def test_FloatDivisionError(self): cb = CFUNCTYPE(c_int, c_double)(callback_func) out = self.capture_stderr(cb, 0.0) self.failUnlessEqual(out.splitlines()[-1][:19], "ZeroDivisionError: ") def test_TypeErrorDivisionError(self): cb = CFUNCTYPE(c_int, c_char_p)(callback_func) out = self.capture_stderr(cb, "spam") self.failUnlessEqual(out.splitlines()[-1], "TypeError: " "unsupported operand type(s) for /: 'int' and 'str'") if __name__ == '__main__': unittest.main()
apache-2.0
alirizakeles/zato
code/zato-web-admin/src/zato/admin/static/brython/_brython/Lib/site-packages/ui/slider.py
2
2354
from . import widget
from browser import doc, html


class Slider(widget.Widget):
    """A horizontal jQuery-UI-styled slider for Brython.

    The handle is dragged with the mouse; drag state lives in closures
    bound to DOM events on the handle element.
    """

    def __init__(self, id=None, label=False):
        # NOTE(review): `label` is accepted but never used -- confirm intent.
        self._div_shell = html.DIV(
            Class="ui-slider ui-slider-horizontal ui-widget ui-widget-content ui-corner-all")
        widget.Widget.__init__(self, self._div_shell, 'slider', id)
        self._handle = html.A(
            Class="ui-slider-handle ui-state-default ui-corner-all",
            Href='#', style={'left': '0px'})
        self._value = 0
        self._isMouseDown = False

        def startSlide(e):
            # Begin a drag: record where the mouse went down and where the
            # handle currently sits; the upper bound keeps the handle inside
            # the shell.
            self._isMouseDown = True
            self._upperBound = self._div_shell.offsetWidth - self._handle.offsetWidth
            pos = widget.getMousePosition(e)
            self._startMouseX = pos['x']
            # NOTE(review): `parseInt` is a JavaScript global, not a Python
            # builtin -- presumably resolved through Brython's JS bridge;
            # confirm, otherwise this should be int().
            self._lastElementLeft = parseInt(self._handle.style.left)
            updatePosition(e)

        def updatePosition(e):
            # Move the handle by the mouse delta, clamped to [0, upperBound].
            pos = widget.getMousePosition(e)
            #print('mose pos',pos)
            _newPos = self._lastElementLeft + pos['x'] - self._startMouseX
            _newPos = max(0, _newPos)
            _newPos = min(_newPos, self._upperBound)
            self._handle.style.left = '%spx' % _newPos
            #print('new position',self._handle.style.left)
            self._lastElementLeft = _newPos

        def moving(e):
            # Track the mouse only while the button is held down.
            if self._isMouseDown:
                updatePosition(e)

        def dropCallback(e):
            # End of drag: stop following mouse movement.
            self._isMouseDown = False
            self._handle.unbind('mousemove', moving)

        self._handle.bind('mousemove', moving)
        self._handle.bind('mouseup', dropCallback)
        #self._handle.bind('mouseout', dropCallback)
        self._handle.bind('mousedown', startSlide)

        def mouseover(e):
            # Add the jQuery-UI hover class.
            _class = self._handle.getAttribute('class')
            self._handle.setAttribute('class', '%s %s' % (_class, 'ui-state-hover'))

        def mouseout(e):
            # Leaving the handle cancels any drag and clears the hover class.
            self._isMouseDown = False
            _class = self._handle.getAttribute('class')
            self._handle.setAttribute('class', _class.replace('ui-state-hover', ''))

        self._handle.bind('mouseover', mouseover)
        self._handle.bind('mouseout', mouseout)

        # Insert the handle into the slider shell (Brython DOM "<=" operator).
        self._div_shell <= self._handle

    def get_value(self):
        # NOTE(review): _value is never updated by dragging in this version,
        # so this always returns the initial 0 -- verify against callers.
        return self._value

    #def set_value(self, value):
    #    self._value=value
    #    self._handle.style.left='%spx' % value
gpl-3.0
hack4impact/maps4all-jlc-sp2
app/__init__.py
1
2931
import os

# NOTE(review): the flask.ext.* namespace is deprecated (removed in Flask 1.0);
# these imports assume a pre-1.0 Flask -- migrate to flask_* when upgrading.
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.assets import Environment
from flask.ext.wtf import CsrfProtect
from flask.ext.compress import Compress
from flask.ext.rq import RQ

from config import config
from assets import app_css, app_js, vendor_css, vendor_js

basedir = os.path.abspath(os.path.dirname(__file__))

# Extension singletons; each is bound to the app inside create_app().
mail = Mail()
db = SQLAlchemy()
csrf = CsrfProtect()
compress = Compress()

# Set up Flask-Login
login_manager = LoginManager()
# TODO: Ideally this should be strong, but that led to bugs. Once this is
# fixed, switch protection mode back to 'strong'
login_manager.session_protection = 'basic'
login_manager.login_view = 'account.login'


def create_app(config_name):
    """Application factory: build and configure a Flask app for the named
    config entry (looked up in config.config).

    Blueprint and extension imports are deferred into this function to
    avoid circular imports at module load time.
    """
    app = Flask(__name__)
    app.config.from_object(config[config_name])
    config[config_name].init_app(app)

    # Set up extensions
    mail.init_app(app)
    db.init_app(app)
    login_manager.init_app(app)
    csrf.init_app(app)
    compress.init_app(app)
    RQ(app)

    # Register Jinja template functions
    from utils import register_template_utils
    register_template_utils(app)

    # Set up asset pipeline
    assets_env = Environment(app)
    dirs = ['assets/styles', 'assets/scripts']
    for path in dirs:
        assets_env.append_path(os.path.join(basedir, path))
    assets_env.url_expire = True
    assets_env.register('app_css', app_css)
    assets_env.register('app_js', app_js)
    assets_env.register('vendor_css', vendor_css)
    assets_env.register('vendor_js', vendor_js)

    # Configure SSL if platform supports it
    if not app.debug and not app.testing and not app.config['SSL_DISABLE']:
        from flask.ext.sslify import SSLify
        SSLify(app)

    # Create app blueprints
    from main import main as main_blueprint
    app.register_blueprint(main_blueprint)

    from account import account as account_blueprint
    app.register_blueprint(account_blueprint, url_prefix='/account')

    from admin import admin as admin_blueprint
    app.register_blueprint(admin_blueprint, url_prefix='/admin')

    from bulk_resource import bulk_resource as bulk_resource_blueprint
    app.register_blueprint(bulk_resource_blueprint, url_prefix='/bulk-resource')

    from descriptor import descriptor as descriptor_blueprint
    app.register_blueprint(descriptor_blueprint, url_prefix='/descriptor')

    from single_resource import single_resource as single_resource_blueprint
    app.register_blueprint(single_resource_blueprint, url_prefix='/single-resource')

    from suggestion import suggestion as suggestion_blueprint
    app.register_blueprint(suggestion_blueprint, url_prefix='/suggestion')

    from contact import contact as contact_blueprint
    app.register_blueprint(contact_blueprint, url_prefix='/contact')

    return app
mit
hugobessa/django-shared-schema-tenants
shared_schema_tenants_custom_data/views.py
1
8116
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from rest_framework import status, generics, viewsets
from rest_framework.views import APIView
from rest_framework.response import Response

from shared_schema_tenants.utils import import_from_string
from shared_schema_tenants_custom_data.permissions import DjangoTenantSpecificTablePermissions
from shared_schema_tenants_custom_data.settings import get_setting
from shared_schema_tenants_custom_data.models import (
    TenantSpecificTable, TenantSpecificFieldDefinition)
from shared_schema_tenants_custom_data.serializers import (
    get_tenant_specific_table_row_serializer_class,
    TenantSpecificTableSerializer,
    TenantSpecificFieldsModelDefinitionsUpdateSerializer)
from shared_schema_tenants_custom_data.helpers.custom_tables_helpers import get_custom_table_manager


class CustomizableModelsList(APIView):
    """Lists (paginated, searchable) both tenant-specific custom tables and
    the configured customizable Django models; POST creates a custom table."""

    def get_permissions(self):
        # Permission classes are configured as dotted paths in settings.
        return [
            import_from_string(permission)()
            for permission in get_setting('CUSTOMIZABLE_MODELS_LIST_CREATE_PERMISSIONS')
        ]

    def get_queryset(self):
        """Return the custom-table queryset and the customizable model names,
        both filtered by the optional ?search= and ?filter= parameters."""
        custom_tables = TenantSpecificTable.objects.all()
        customizable_models_names = get_setting('CUSTOMIZABLE_MODELS')

        search = self.request.GET.get('search')
        if search:
            custom_tables = custom_tables.filter(name__icontains=search)
            customizable_models_names = [
                m.replace('.', get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR')).lower()
                for m in customizable_models_names
                if search in m.replace('.', get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR')).lower()
            ]

        # ?filter= narrows the listing to only custom tables or only models.
        filter_results = self.request.GET.get('filter')
        if filter_results == get_setting('CUSTOM_TABLES_FILTER_KEYWORD'):
            customizable_models_names = []
        elif filter_results == 'customizable_models':
            custom_tables = custom_tables.none()

        return {
            'custom_tables': custom_tables.order_by('name'),
            'customizable_models_names': sorted(customizable_models_names),
        }

    def get_custom_tables_names(self, custom_tables):
        # Custom tables are namespaced, e.g. "<label><sep><name>".
        return [
            get_setting('CUSTOM_TABLES_LABEL') +
            get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR') + t
            for t in custom_tables.values_list('name', flat=True)
        ]

    def paginate_results(self, custom_tables, customizable_models_names):
        """Paginate the combined listing: model names first, then custom
        tables. Without ?page/?length the full un-wrapped list is returned."""
        page_number = self.request.GET.get('page')
        page_length = self.request.GET.get('length')
        total_count = len(customizable_models_names + self.get_custom_tables_names(custom_tables))
        if not page_number or not page_length:
            return [{'name': n} for n in (
                customizable_models_names + self.get_custom_tables_names(custom_tables)
            )]

        page_number = int(page_number)
        page_length = int(page_length)
        first_item_index = (page_number - 1) * page_length
        last_item_index = first_item_index + page_length

        # Page falls entirely within the model-name section.
        if len(customizable_models_names) > last_item_index:
            return {
                'count': total_count,
                'results': [{'name': n} for n in (
                    customizable_models_names[first_item_index:last_item_index]
                )],
            }

        # Page straddles the boundary: remaining model names, then tables.
        if len(customizable_models_names) >= first_item_index:
            selected_customizable_models_names = customizable_models_names[first_item_index:]
            return {
                'count': total_count,
                'results': [{'name': n} for n in (
                    selected_customizable_models_names +
                    self.get_custom_tables_names(custom_tables[
                        0:page_length - len(selected_customizable_models_names)])
                )],
            }

        # Page lies entirely within the custom-tables section.
        # Bug fix: the slice indices must be offset by the number of model
        # names already consumed; the original sliced custom_tables with the
        # absolute indices and returned the wrong page.
        models_count = len(customizable_models_names)
        return {
            'count': total_count,
            'results': [{'name': n} for n in (
                self.get_custom_tables_names(custom_tables[
                    first_item_index - models_count:last_item_index - models_count])
            )]
        }

    def get(self, request, *args, **kwargs):
        return Response(self.paginate_results(**self.get_queryset()))

    def post(self, request, *args, **kwargs):
        serializer = TenantSpecificTableSerializer(
            data=self.request.data, context={'request': request, 'view': self})
        if serializer.is_valid():
            serializer.save()
            return Response(serializer.data, status=status.HTTP_201_CREATED)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)


class CustomTableDetails(generics.RetrieveUpdateDestroyAPIView):
    """Retrieve/update/destroy a custom table or a customizable model's
    tenant-specific field definitions, addressed by slug."""

    def get_permissions(self):
        # NOTE(review): 'UTPADE' looks like a typo for 'UPDATE', but the key
        # must match the name declared in settings -- fix both together.
        return [
            import_from_string(permission)()
            for permission in get_setting('CUSTOMIZABLE_MODELS_RETRIEVE_UTPADE_DESTROY_PERMISSIONS')
        ]

    def get_queryset(self):
        table_slug = self.kwargs['slug']
        table_slug_parts = table_slug.split(get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR'))
        app = table_slug_parts[0]
        if app == get_setting('CUSTOM_TABLES_LABEL'):
            return TenantSpecificTable.objects.all()
        return ContentType.objects.filter()

    def get_object(self):
        # Resolve the slug to either a TenantSpecificTable or a ContentType,
        # caching the result on self for repeated access.
        if not hasattr(self, 'object'):
            table_slug = self.kwargs['slug']
            table_slug_parts = table_slug.split(get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR'))
            app = table_slug_parts[0]
            try:
                if app == get_setting('CUSTOM_TABLES_LABEL'):
                    self.object = self.get_queryset().get(name=table_slug_parts[1])
                elif (table_slug in [m.replace('.', get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR')).lower()
                                     for m in get_setting('CUSTOMIZABLE_MODELS')]):
                    self.object = ContentType.objects.get_by_natural_key(*table_slug_parts)
                else:
                    raise Http404()
            except ObjectDoesNotExist:
                raise Http404()

        return self.object

    def get_serializer_class(self):
        obj = self.get_object()
        if type(obj).__name__ == 'TenantSpecificTable':
            return TenantSpecificTableSerializer
        return TenantSpecificFieldsModelDefinitionsUpdateSerializer

    def destroy(self, request, *args, **kwargs):
        # Field definitions are keyed generically, so they must be removed
        # explicitly before/with the owning object.
        obj = self.get_object()
        if type(obj).__name__ == 'TenantSpecificTable':
            TenantSpecificFieldDefinition.objects.filter(
                table_content_type=ContentType.objects.get_for_model(TenantSpecificTable),
                table_id=obj.id
            ).delete()
            obj.delete()
        else:
            TenantSpecificFieldDefinition.objects.filter(
                table_content_type=obj
            ).delete()
        return Response()


class TenantSpecificTableRowViewset(viewsets.ModelViewSet):
    """CRUD over the rows of one tenant-specific custom table (slug-addressed)."""

    permission_classes = [DjangoTenantSpecificTablePermissions]

    def get_queryset(self):
        table_slug = self.kwargs['slug']
        if get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR') in table_slug:
            table_slug_parts = table_slug.split(get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR'))
            if table_slug_parts[0] == get_setting('CUSTOM_TABLES_LABEL'):
                try:
                    return get_custom_table_manager(table_slug_parts[1]).all()
                except TenantSpecificTable.DoesNotExist:
                    pass
        raise Http404()

    def get_serializer_class(self):
        table_slug = self.kwargs['slug']
        if get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR') in table_slug:
            table_slug_parts = table_slug.split(get_setting('CUSTOMIZABLE_TABLES_LABEL_SEPARATOR'))
            if table_slug_parts[0] == get_setting('CUSTOM_TABLES_LABEL'):
                try:
                    return get_tenant_specific_table_row_serializer_class(table_slug_parts[1])
                except TenantSpecificTable.DoesNotExist:
                    pass
        raise Http404()
mit
uber-common/opentracing-python-instrumentation
opentracing_instrumentation/client_hooks/_dbapi2.py
1
9176
# Copyright (c) 2015-2017 Uber Technologies, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import

from builtins import object
import contextlib2
import wrapt
from opentracing.ext import tags as ext_tags

from ._current_span import current_span_func
from .. import utils
from ..local_span import func_span

# Utils for instrumenting DB API v2 compatible drivers.
# PEP-249 - https://www.python.org/dev/peps/pep-0249/

# Pseudo-statements used for transaction boundaries; they become the span's
# operation name and suppress the 'sql' tag.
_BEGIN = 'begin-trans'
_COMMIT = 'commit'
_ROLLBACK = 'rollback'
_TRANS_TAGS = [_BEGIN, _COMMIT, _ROLLBACK]

# Sentinel distinguishing "argument not given" from an explicit None.
NO_ARG = object()


def db_span(sql_statement,
            module_name,
            sql_parameters=None,
            connect_params=None,
            cursor_params=None):
    """Return a context manager wrapping *sql_statement* in a client span.

    If there is no active span, a no-op context manager is returned.  The
    span's operation name is '<module>:<first SQL word>' (or the transaction
    pseudo-statement), and the statement/params/connection info are attached
    as tags.
    """
    span = current_span_func()

    @contextlib2.contextmanager
    def empty_ctx_mgr():
        yield None

    if span is None:
        return empty_ctx_mgr()

    # Python-3-only branch (bytes is not str): decode byte statements so the
    # tag values are text.
    if bytes is not str and isinstance(sql_statement, bytes):
        sql_statement = sql_statement.decode('utf-8', errors='ignore')

    statement = sql_statement.strip()
    add_sql_tag = True
    if sql_statement in _TRANS_TAGS:
        operation = sql_statement
        add_sql_tag = False
    else:
        # Operation is the leading SQL verb, e.g. SELECT / INSERT.
        space_idx = statement.find(' ')
        if space_idx == -1:
            operation = ''  # unrecognized format of the query
        else:
            operation = statement[0:space_idx]

    tags = {ext_tags.SPAN_KIND: ext_tags.SPAN_KIND_RPC_CLIENT}
    if add_sql_tag:
        tags['sql'] = statement
    if sql_parameters:
        tags['sql.params'] = sql_parameters
    if connect_params:
        tags['sql.conn'] = connect_params
    if cursor_params:
        tags['sql.cursor'] = cursor_params

    return utils.start_child_span(
        operation_name='%s:%s' % (module_name, operation),
        parent=span,
        tags=tags
    )


class CursorWrapper(wrapt.ObjectProxy):
    """Transparent proxy around a DB API cursor that traces execute calls."""

    __slots__ = ('_module_name', '_connect_params', '_cursor_params')

    def __init__(self, cursor, module_name,
                 connect_params=None, cursor_params=None):
        super(CursorWrapper, self).__init__(wrapped=cursor)
        self._module_name = module_name
        self._connect_params = connect_params
        self._cursor_params = cursor_params
        # We could also start a span now and then override close() to capture
        # the life time of the cursor

    def execute(self, sql, params=NO_ARG):
        # NO_ARG keeps the underlying call's arity identical to the caller's,
        # since some drivers distinguish execute(sql) from execute(sql, None).
        with db_span(sql_statement=sql,
                     sql_parameters=params if params is not NO_ARG else None,
                     module_name=self._module_name,
                     connect_params=self._connect_params,
                     cursor_params=self._cursor_params):
            if params is NO_ARG:
                return self.__wrapped__.execute(sql)
            else:
                return self.__wrapped__.execute(sql, params)

    def executemany(self, sql, seq_of_parameters):
        with db_span(sql_statement=sql, sql_parameters=seq_of_parameters,
                     module_name=self._module_name,
                     connect_params=self._connect_params,
                     cursor_params=self._cursor_params):
            return self.__wrapped__.executemany(sql, seq_of_parameters)

    def callproc(self, proc_name, params=NO_ARG):
        with db_span(sql_statement='sproc:%s' % proc_name,
                     sql_parameters=params if params is not NO_ARG else None,
                     module_name=self._module_name,
                     connect_params=self._connect_params,
                     cursor_params=self._cursor_params):
            if params is NO_ARG:
                return self.__wrapped__.callproc(proc_name)
            else:
                return self.__wrapped__.callproc(proc_name, params)


class ConnectionFactory(object):
    """
    Wraps connect_func of the DB API v2 module by creating a wrapper object
    for the actual connection.
    """

    def __init__(self, connect_func, module_name, conn_wrapper_ctor=None,
                 cursor_wrapper=CursorWrapper):
        self._connect_func = connect_func
        self._module_name = module_name
        if hasattr(connect_func, '__name__'):
            self._connect_func_name = '%s:%s' % (module_name,
                                                 connect_func.__name__)
        else:
            self._connect_func_name = '%s:%s' % (module_name, connect_func)
        self._wrapper_ctor = conn_wrapper_ctor \
            if conn_wrapper_ctor is not None else ConnectionWrapper
        self._cursor_wrapper = cursor_wrapper

    def __call__(self, *args, **kwargs):
        # Scrub credentials and non-serializable conversion maps before the
        # connect parameters are recorded as a span tag.
        safe_kwargs = kwargs
        if 'passwd' in kwargs or 'password' in kwargs or 'conv' in kwargs:
            safe_kwargs = dict(kwargs)
            if 'passwd' in safe_kwargs:
                del safe_kwargs['passwd']
            if 'password' in safe_kwargs:
                del safe_kwargs['password']
            if 'conv' in safe_kwargs:  # don't log conversion functions
                del safe_kwargs['conv']
        connect_params = (args, safe_kwargs) if args or safe_kwargs else None
        tags = {ext_tags.SPAN_KIND: ext_tags.SPAN_KIND_RPC_CLIENT}
        with func_span(self._connect_func_name, tags=tags):
            return self._wrapper_ctor(
                connection=self._connect_func(*args, **kwargs),
                module_name=self._module_name,
                connect_params=connect_params,
                cursor_wrapper=self._cursor_wrapper)


class ConnectionWrapper(wrapt.ObjectProxy):
__slots__ = ('_module_name', '_connect_params', '_cursor_wrapper') def __init__(self, connection, module_name, connect_params, cursor_wrapper): super(ConnectionWrapper, self).__init__(wrapped=connection) self._module_name = module_name self._connect_params = connect_params self._cursor_wrapper = cursor_wrapper def cursor(self, *args, **kwargs): return self._cursor_wrapper( cursor=self.__wrapped__.cursor(*args, **kwargs), module_name=self._module_name, connect_params=self._connect_params, cursor_params=(args, kwargs) if args or kwargs else None) def begin(self): with db_span(sql_statement=_BEGIN, module_name=self._module_name): return self.__wrapped__.begin() def commit(self): with db_span(sql_statement=_COMMIT, module_name=self._module_name): return self.__wrapped__.commit() def rollback(self): with db_span(sql_statement=_ROLLBACK, module_name=self._module_name): return self.__wrapped__.rollback() class ContextManagerConnectionWrapper(ConnectionWrapper): """ Extends ConnectionWrapper by implementing `__enter__` and `__exit__` methods of the context manager API, for connections that can be used in as context managers to control the transactions, e.g. .. code-block:: python with MySQLdb.connect(...) as cursor: cursor.execute(...) """ def __init__(self, connection, module_name, connect_params, cursor_wrapper): super(ContextManagerConnectionWrapper, self).__init__( connection=connection, module_name=module_name, connect_params=connect_params, cursor_wrapper=cursor_wrapper ) def __getattr__(self, name): # Tip suggested here: # https://gist.github.com/mjallday/3d4c92e7e6805af1e024. 
if name == '_sqla_unwrap': return self.__wrapped__ return super(ContextManagerConnectionWrapper, self).__getattr__(name) def __enter__(self): with func_span('%s:begin_transaction' % self._module_name): cursor = self.__wrapped__.__enter__() return CursorWrapper(cursor=cursor, module_name=self._module_name, connect_params=self._connect_params) def __exit__(self, exc, value, tb): outcome = _COMMIT if exc is None else _ROLLBACK with db_span(sql_statement=outcome, module_name=self._module_name): return self.__wrapped__.__exit__(exc, value, tb)
mit
lenstr/rethinkdb
external/v8_3.30.33.16/build/gyp/pylib/gyp/easy_xml_test.py
2698
3270
#!/usr/bin/env python # Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Unit tests for the easy_xml.py file. """ import gyp.easy_xml as easy_xml import unittest import StringIO class TestSequenceFunctions(unittest.TestCase): def setUp(self): self.stderr = StringIO.StringIO() def test_EasyXml_simple(self): self.assertEqual( easy_xml.XmlToString(['test']), '<?xml version="1.0" encoding="utf-8"?><test/>') self.assertEqual( easy_xml.XmlToString(['test'], encoding='Windows-1252'), '<?xml version="1.0" encoding="Windows-1252"?><test/>') def test_EasyXml_simple_with_attributes(self): self.assertEqual( easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]), '<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>') def test_EasyXml_escaping(self): original = '<test>\'"\r&\nfoo' converted = '&lt;test&gt;\'&quot;&#xD;&amp;&#xA;foo' converted_apos = converted.replace("'", '&apos;') self.assertEqual( easy_xml.XmlToString(['test3', {'a': original}, original]), '<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' % (converted, converted_apos)) def test_EasyXml_pretty(self): self.assertEqual( easy_xml.XmlToString( ['test3', ['GrandParent', ['Parent1', ['Child'] ], ['Parent2'] ] ], pretty=True), '<?xml version="1.0" encoding="utf-8"?>\n' '<test3>\n' ' <GrandParent>\n' ' <Parent1>\n' ' <Child/>\n' ' </Parent1>\n' ' <Parent2/>\n' ' </GrandParent>\n' '</test3>\n') def test_EasyXml_complex(self): # We want to create: target = ( '<?xml version="1.0" encoding="utf-8"?>' '<Project>' '<PropertyGroup Label="Globals">' '<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>' '<Keyword>Win32Proj</Keyword>' '<RootNamespace>automated_ui_tests</RootNamespace>' '</PropertyGroup>' '<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>' '<PropertyGroup ' 'Condition="\'$(Configuration)|$(Platform)\'==' '\'Debug|Win32\'" 
Label="Configuration">' '<ConfigurationType>Application</ConfigurationType>' '<CharacterSet>Unicode</CharacterSet>' '</PropertyGroup>' '</Project>') xml = easy_xml.XmlToString( ['Project', ['PropertyGroup', {'Label': 'Globals'}, ['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'], ['Keyword', 'Win32Proj'], ['RootNamespace', 'automated_ui_tests'] ], ['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}], ['PropertyGroup', {'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'", 'Label': 'Configuration'}, ['ConfigurationType', 'Application'], ['CharacterSet', 'Unicode'] ] ]) self.assertEqual(xml, target) if __name__ == '__main__': unittest.main()
agpl-3.0
openhatch/oh-mainline
vendor/packages/python-openid/examples/consumer.py
60
18266
#!/usr/bin/env python """ Simple example for an OpenID consumer. Once you understand this example you'll know the basics of OpenID and using the Python OpenID library. You can then move on to more robust examples, and integrating OpenID into your application. """ __copyright__ = 'Copyright 2005-2008, Janrain, Inc.' from Cookie import SimpleCookie import cgi import urlparse import cgitb import sys def quoteattr(s): qs = cgi.escape(s, 1) return '"%s"' % (qs,) from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler try: import openid except ImportError: sys.stderr.write(""" Failed to import the OpenID library. In order to use this example, you must either install the library (see INSTALL in the root of the distribution) or else add the library to python's import path (the PYTHONPATH environment variable). For more information, see the README in the root of the library distribution.""") sys.exit(1) from openid.store import memstore from openid.store import filestore from openid.consumer import consumer from openid.oidutil import appendArgs from openid.cryptutil import randomString from openid.fetchers import setDefaultFetcher, Urllib2Fetcher from openid.extensions import pape, sreg # Used with an OpenID provider affiliate program. OPENID_PROVIDER_NAME = 'MyOpenID' OPENID_PROVIDER_URL ='https://www.myopenid.com/affiliate_signup?affiliate_id=39' class OpenIDHTTPServer(HTTPServer): """http server that contains a reference to an OpenID consumer and knows its base URL. 
""" def __init__(self, store, *args, **kwargs): HTTPServer.__init__(self, *args, **kwargs) self.sessions = {} self.store = store if self.server_port != 80: self.base_url = ('http://%s:%s/' % (self.server_name, self.server_port)) else: self.base_url = 'http://%s/' % (self.server_name,) class OpenIDRequestHandler(BaseHTTPRequestHandler): """Request handler that knows how to verify an OpenID identity.""" SESSION_COOKIE_NAME = 'pyoidconsexsid' session = None def getConsumer(self, stateless=False): if stateless: store = None else: store = self.server.store return consumer.Consumer(self.getSession(), store) def getSession(self): """Return the existing session or a new session""" if self.session is not None: return self.session # Get value of cookie header that was sent cookie_str = self.headers.get('Cookie') if cookie_str: cookie_obj = SimpleCookie(cookie_str) sid_morsel = cookie_obj.get(self.SESSION_COOKIE_NAME, None) if sid_morsel is not None: sid = sid_morsel.value else: sid = None else: sid = None # If a session id was not set, create a new one if sid is None: sid = randomString(16, '0123456789abcdef') session = None else: session = self.server.sessions.get(sid) # If no session exists for this session ID, create one if session is None: session = self.server.sessions[sid] = {} session['id'] = sid self.session = session return session def setSessionCookie(self): sid = self.getSession()['id'] session_cookie = '%s=%s;' % (self.SESSION_COOKIE_NAME, sid) self.send_header('Set-Cookie', session_cookie) def do_GET(self): """Dispatching logic. There are three paths defined: / - Display an empty form asking for an identity URL to verify /verify - Handle form submission, initiating OpenID verification /process - Handle a redirect from an OpenID server Any other path gets a 404 response. This function also parses the query parameters. If an exception occurs in this function, a traceback is written to the requesting browser. 
""" try: self.parsed_uri = urlparse.urlparse(self.path) self.query = {} for k, v in cgi.parse_qsl(self.parsed_uri[4]): self.query[k] = v.decode('utf-8') path = self.parsed_uri[2] if path == '/': self.render() elif path == '/verify': self.doVerify() elif path == '/process': self.doProcess() elif path == '/affiliate': self.doAffiliate() else: self.notFound() except (KeyboardInterrupt, SystemExit): raise except: self.send_response(500) self.send_header('Content-type', 'text/html') self.setSessionCookie() self.end_headers() self.wfile.write(cgitb.html(sys.exc_info(), context=10)) def doVerify(self): """Process the form submission, initating OpenID verification. """ # First, make sure that the user entered something openid_url = self.query.get('openid_identifier') if not openid_url: self.render('Enter an OpenID Identifier to verify.', css_class='error', form_contents=openid_url) return immediate = 'immediate' in self.query use_sreg = 'use_sreg' in self.query use_pape = 'use_pape' in self.query use_stateless = 'use_stateless' in self.query oidconsumer = self.getConsumer(stateless = use_stateless) try: request = oidconsumer.begin(openid_url) except consumer.DiscoveryFailure, exc: fetch_error_string = 'Error in discovery: %s' % ( cgi.escape(str(exc[0]))) self.render(fetch_error_string, css_class='error', form_contents=openid_url) else: if request is None: msg = 'No OpenID services found for <code>%s</code>' % ( cgi.escape(openid_url),) self.render(msg, css_class='error', form_contents=openid_url) else: # Then, ask the library to begin the authorization. # Here we find out the identity server that will verify the # user's identity, and get a token that allows us to # communicate securely with the identity server. 
if use_sreg: self.requestRegistrationData(request) if use_pape: self.requestPAPEDetails(request) trust_root = self.server.base_url return_to = self.buildURL('process') if request.shouldSendRedirect(): redirect_url = request.redirectURL( trust_root, return_to, immediate=immediate) self.send_response(302) self.send_header('Location', redirect_url) self.writeUserHeader() self.end_headers() else: form_html = request.htmlMarkup( trust_root, return_to, form_tag_attrs={'id':'openid_message'}, immediate=immediate) self.wfile.write(form_html) def requestRegistrationData(self, request): sreg_request = sreg.SRegRequest( required=['nickname'], optional=['fullname', 'email']) request.addExtension(sreg_request) def requestPAPEDetails(self, request): pape_request = pape.Request([pape.AUTH_PHISHING_RESISTANT]) request.addExtension(pape_request) def doProcess(self): """Handle the redirect from the OpenID server. """ oidconsumer = self.getConsumer() # Ask the library to check the response that the server sent # us. Status is a code indicating the response type. info is # either None or a string containing more information about # the return type. url = 'http://'+self.headers.get('Host')+self.path info = oidconsumer.complete(self.query, url) sreg_resp = None pape_resp = None css_class = 'error' display_identifier = info.getDisplayIdentifier() if info.status == consumer.FAILURE and display_identifier: # In the case of failure, if info is non-None, it is the # URL that we were verifying. We include it in the error # message to help the user figure out what happened. fmt = "Verification of %s failed: %s" message = fmt % (cgi.escape(display_identifier), info.message) elif info.status == consumer.SUCCESS: # Success means that the transaction completed without # error. If info is None, it means that the user cancelled # the verification. css_class = 'alert' # This is a successful verification attempt. If this # was a real application, we would do our login, # comment posting, etc. here. 
fmt = "You have successfully verified %s as your identity." message = fmt % (cgi.escape(display_identifier),) sreg_resp = sreg.SRegResponse.fromSuccessResponse(info) pape_resp = pape.Response.fromSuccessResponse(info) if info.endpoint.canonicalID: # You should authorize i-name users by their canonicalID, # rather than their more human-friendly identifiers. That # way their account with you is not compromised if their # i-name registration expires and is bought by someone else. message += (" This is an i-name, and its persistent ID is %s" % (cgi.escape(info.endpoint.canonicalID),)) elif info.status == consumer.CANCEL: # cancelled message = 'Verification cancelled' elif info.status == consumer.SETUP_NEEDED: if info.setup_url: message = '<a href=%s>Setup needed</a>' % ( quoteattr(info.setup_url),) else: # This means auth didn't succeed, but you're welcome to try # non-immediate mode. message = 'Setup needed' else: # Either we don't understand the code or there is no # openid_url included with the error. Give a generic # failure message. The library should supply debug # information in a log. message = 'Verification failed.' 
self.render(message, css_class, display_identifier, sreg_data=sreg_resp, pape_data=pape_resp) def doAffiliate(self): """Direct the user sign up with an affiliate OpenID provider.""" sreg_req = sreg.SRegRequest(['nickname'], ['fullname', 'email']) href = sreg_req.toMessage().toURL(OPENID_PROVIDER_URL) message = """Get an OpenID at <a href=%s>%s</a>""" % ( quoteattr(href), OPENID_PROVIDER_NAME) self.render(message) def renderSREG(self, sreg_data): if not sreg_data: self.wfile.write( '<div class="alert">No registration data was returned</div>') else: sreg_list = sreg_data.items() sreg_list.sort() self.wfile.write( '<h2>Registration Data</h2>' '<table class="sreg">' '<thead><tr><th>Field</th><th>Value</th></tr></thead>' '<tbody>') odd = ' class="odd"' for k, v in sreg_list: field_name = sreg.data_fields.get(k, k) value = cgi.escape(v.encode('UTF-8')) self.wfile.write( '<tr%s><td>%s</td><td>%s</td></tr>' % (odd, field_name, value)) if odd: odd = '' else: odd = ' class="odd"' self.wfile.write('</tbody></table>') def renderPAPE(self, pape_data): if not pape_data: self.wfile.write( '<div class="alert">No PAPE data was returned</div>') else: self.wfile.write('<div class="alert">Effective Auth Policies<ul>') for policy_uri in pape_data.auth_policies: self.wfile.write('<li><tt>%s</tt></li>' % (cgi.escape(policy_uri),)) if not pape_data.auth_policies: self.wfile.write('<li>No policies were applied.</li>') self.wfile.write('</ul></div>') def buildURL(self, action, **query): """Build a URL relative to the server base_url, with the given query parameters added.""" base = urlparse.urljoin(self.server.base_url, action) return appendArgs(base, query) def notFound(self): """Render a page with a 404 return code and a message.""" fmt = 'The path <q>%s</q> was not understood by this server.' 
msg = fmt % (self.path,) openid_url = self.query.get('openid_identifier') self.render(msg, 'error', openid_url, status=404) def render(self, message=None, css_class='alert', form_contents=None, status=200, title="Python OpenID Consumer Example", sreg_data=None, pape_data=None): """Render a page.""" self.send_response(status) self.pageHeader(title) if message: self.wfile.write("<div class='%s'>" % (css_class,)) self.wfile.write(message) self.wfile.write("</div>") if sreg_data is not None: self.renderSREG(sreg_data) if pape_data is not None: self.renderPAPE(pape_data) self.pageFooter(form_contents) def pageHeader(self, title): """Render the page header""" self.setSessionCookie() self.wfile.write('''\ Content-type: text/html; charset=UTF-8 <html> <head><title>%s</title></head> <style type="text/css"> * { font-family: verdana,sans-serif; } body { width: 50em; margin: 1em; } div { padding: .5em; } tr.odd td { background-color: #dddddd; } table.sreg { border: 1px solid black; border-collapse: collapse; } table.sreg th { border-bottom: 1px solid black; } table.sreg td, table.sreg th { padding: 0.5em; text-align: left; } table { margin: 0; padding: 0; } .alert { border: 1px solid #e7dc2b; background: #fff888; } .error { border: 1px solid #ff0000; background: #ffaaaa; } #verify-form { border: 1px solid #777777; background: #dddddd; margin-top: 1em; padding-bottom: 0em; } </style> <body> <h1>%s</h1> <p> This example consumer uses the <a href= "http://github.com/openid/python-openid" >Python OpenID</a> library. It just verifies that the identifier that you enter is your identifier. 
</p> ''' % (title, title)) def pageFooter(self, form_contents): """Render the page footer""" if not form_contents: form_contents = '' self.wfile.write('''\ <div id="verify-form"> <form method="get" accept-charset="UTF-8" action=%s> Identifier: <input type="text" name="openid_identifier" value=%s /> <input type="submit" value="Verify" /><br /> <input type="checkbox" name="immediate" id="immediate" /><label for="immediate">Use immediate mode</label> <input type="checkbox" name="use_sreg" id="use_sreg" /><label for="use_sreg">Request registration data</label> <input type="checkbox" name="use_pape" id="use_pape" /><label for="use_pape">Request phishing-resistent auth policy (PAPE)</label> <input type="checkbox" name="use_stateless" id="use_stateless" /><label for="use_stateless">Use stateless mode</label> </form> </div> </body> </html> ''' % (quoteattr(self.buildURL('verify')), quoteattr(form_contents))) def main(host, port, data_path, weak_ssl=False): # Instantiate OpenID consumer store and OpenID consumer. If you # were connecting to a database, you would create the database # connection and instantiate an appropriate store here. if data_path: store = filestore.FileOpenIDStore(data_path) else: store = memstore.MemoryStore() if weak_ssl: setDefaultFetcher(Urllib2Fetcher()) addr = (host, port) server = OpenIDHTTPServer(store, addr, OpenIDRequestHandler) print 'Server running at:' print server.base_url server.serve_forever() if __name__ == '__main__': host = 'localhost' port = 8001 weak_ssl = False try: import optparse except ImportError: pass # Use defaults (for Python 2.2) else: parser = optparse.OptionParser('Usage:\n %prog [options]') parser.add_option( '-d', '--data-path', dest='data_path', help='Data directory for storing OpenID consumer state. ' 'Setting this option implies using a "FileStore."') parser.add_option( '-p', '--port', dest='port', type='int', default=port, help='Port on which to listen for HTTP requests. 
' 'Defaults to port %default.') parser.add_option( '-s', '--host', dest='host', default=host, help='Host on which to listen for HTTP requests. ' 'Also used for generating URLs. Defaults to %default.') parser.add_option( '-w', '--weakssl', dest='weakssl', default=False, action='store_true', help='Skip ssl cert verification') options, args = parser.parse_args() if args: parser.error('Expected no arguments. Got %r' % args) host = options.host port = options.port data_path = options.data_path weak_ssl = options.weakssl main(host, port, data_path, weak_ssl)
agpl-3.0
tkerola/chainer
tests/chainer_tests/distributions_tests/test_log_normal.py
8
1258
import numpy from chainer import distributions from chainer import testing from chainer import utils @testing.parameterize(*testing.product({ 'shape': [(2, 3), ()], 'is_variable': [True, False], 'sample_shape': [(3, 2), ()], })) @testing.fix_random() @testing.with_requires('scipy') class TestLogNormal(testing.distribution_unittest): scipy_onebyone = True def setUp_configure(self): from scipy import stats self.dist = distributions.LogNormal self.scipy_dist = stats.lognorm self.test_targets = set([ 'batch_shape', 'entropy', 'event_shape', 'log_prob', 'mean', 'sample', 'support', 'variance']) mu = utils.force_array( numpy.random.uniform(-1, 1, self.shape).astype(numpy.float32)) sigma = utils.force_array(numpy.exp(numpy.random.uniform( -1, 0, self.shape)).astype(numpy.float32)) self.params = {'mu': mu, 'sigma': sigma} self.scipy_params = {'s': sigma, 'scale': numpy.exp(mu)} self.support = 'positive' def sample_for_test(self): smp = numpy.random.lognormal( size=self.sample_shape + self.shape).astype(numpy.float32) return smp testing.run_module(__name__, __file__)
mit
carpedm20/fbchat
fbchat/_models/_quick_reply.py
1
2323
import attr from . import Attachment from .._common import attrs_default from typing import Any, Optional @attrs_default class QuickReply: """Represents a quick reply.""" #: Payload of the quick reply payload = attr.ib(None, type=Any) #: External payload for responses external_payload = attr.ib(None, type=Any) #: Additional data data = attr.ib(None, type=Any) #: Whether it's a response for a quick reply is_response = attr.ib(False, type=bool) @attrs_default class QuickReplyText(QuickReply): """Represents a text quick reply.""" #: Title of the quick reply title = attr.ib(None, type=Optional[str]) #: URL of the quick reply image image_url = attr.ib(None, type=Optional[str]) #: Type of the quick reply _type = "text" @attrs_default class QuickReplyLocation(QuickReply): """Represents a location quick reply (Doesn't work on mobile).""" #: Type of the quick reply _type = "location" @attrs_default class QuickReplyPhoneNumber(QuickReply): """Represents a phone number quick reply (Doesn't work on mobile).""" #: URL of the quick reply image image_url = attr.ib(None, type=Optional[str]) #: Type of the quick reply _type = "user_phone_number" @attrs_default class QuickReplyEmail(QuickReply): """Represents an email quick reply (Doesn't work on mobile).""" #: URL of the quick reply image image_url = attr.ib(None, type=Optional[str]) #: Type of the quick reply _type = "user_email" def graphql_to_quick_reply(q, is_response=False): data = dict() _type = q.get("content_type").lower() if q.get("payload"): data["payload"] = q["payload"] if q.get("data"): data["data"] = q["data"] if q.get("image_url") and _type is not QuickReplyLocation._type: data["image_url"] = q["image_url"] data["is_response"] = is_response if _type == QuickReplyText._type: if q.get("title") is not None: data["title"] = q["title"] rtn = QuickReplyText(**data) elif _type == QuickReplyLocation._type: rtn = QuickReplyLocation(**data) elif _type == QuickReplyPhoneNumber._type: rtn = QuickReplyPhoneNumber(**data) elif 
_type == QuickReplyEmail._type: rtn = QuickReplyEmail(**data) return rtn
bsd-3-clause
Vassy/odoo
addons/base_vat/__init__.py
440
1081
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import res_company import base_vat # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
estheraragaos/speakerfight
fabfile.py
19
2002
# coding: utf-8 from os import environ from fabric.api import env, cd, run from fabric.colors import yellow, green REPOSITORY = 'git@github.com:luanfonceca/speakerfight.git' REMOTE = 'origin' BRANCH = 'master' env.hosts = ['speakerfight.com'] env.user = 'root' env.password = environ.get('PASSWORD') env.app_dir = '/home/speakerfight' env.project_name = 'speakerfight' env.virtualenv_dir = '/home/virtualenv' def _run(command, pip='python'): run('{venv}/bin/{target} {command}'.format( venv=env.virtualenv_dir, command=command, target=pip)) def _update_app(): with cd(env.app_dir): print yellow('Fetch the Code') run('git pull {remote} {branch}'.format( remote=REMOTE, branch=BRANCH)) print yellow('Update the Python Requirements') _run('install -r requirements.txt --quiet', 'pip') print yellow('Cleanning the .pyc files') _run('manage.py clean_pyc') print yellow('Migrate the DB') _run('manage.py migrate --noinput --verbosity=0') print yellow('Collecting the static files') _run('manage.py collectstatic --noinput --verbosity=0') print yellow('Compiling the strings') _run('manage.py compilemessages') print green('App succefully updated') def _restart_app(): print yellow('Restart the Uwsgi') run('service uwsgi restart') print yellow('Restart the Nginx') run('service nginx restart') print green('Services succefully restarted') def deploy(): _update_app() _restart_app() print green('Deploy succefully done!') def load_initial_data(): fixtures = [ 'deck/fixtures/user.json', 'deck/fixtures/event.json', 'deck/fixtures/proposal.json', ] with cd(env.app_dir): print yellow('Collecting the initial data') for fixture in fixtures: _run('manage.py loaddata {}'.format(fixture)) print green('Data succefully loaded')
mit
cloudera/thrift_sasl
thrift_sasl/__init__.py
1
7713
# Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. # """ SASL transports for Thrift. """ # Initially copied from the Impala repo from __future__ import absolute_import import six import sys import struct from thrift.transport.TTransport import (TTransportException, TTransportBase, CReadableTransport) # TODO: Check whether the following distinction is necessary. Does not appear to # break anything when `io.BytesIO` is used everywhere, but there may be some edge # cases where things break down. if sys.version_info[0] == 3: from io import BytesIO as BufferIO else: from cStringIO import StringIO as BufferIO class TSaslClientTransport(TTransportBase, CReadableTransport): START = 1 OK = 2 BAD = 3 ERROR = 4 COMPLETE = 5 def __init__(self, sasl_client_factory, mechanism, trans): """ @param sasl_client_factory: a callable that returns a new sasl.Client object @param mechanism: the SASL mechanism (e.g. "GSSAPI") @param trans: the underlying transport over which to communicate. 
""" self._trans = trans self.sasl_client_factory = sasl_client_factory self.sasl = None self.mechanism = mechanism self.__wbuf = BufferIO() self.__rbuf = BufferIO() self.opened = False self.encode = None def isOpen(self): try: is_open = self._trans.isOpen # Thrift except AttributeError: is_open = self._trans.is_open # thriftpy return is_open() def is_open(self): return self.isOpen() def open(self): if not self.isOpen(): self._trans.open() if self.sasl is not None: raise TTransportException( type=TTransportException.NOT_OPEN, message="Already open!") self.sasl = self.sasl_client_factory() ret, chosen_mech, initial_response = self.sasl.start(self.mechanism) if not ret: raise TTransportException(type=TTransportException.NOT_OPEN, message=("Could not start SASL: %s" % self.sasl.getError())) # Send initial response self._send_message(self.START, chosen_mech) self._send_message(self.OK, initial_response) # SASL negotiation loop while True: status, payload = self._recv_sasl_message() if status not in (self.OK, self.COMPLETE): raise TTransportException(type=TTransportException.NOT_OPEN, message=("Bad status: %d (%s)" % (status, payload))) if status == self.COMPLETE: break ret, response = self.sasl.step(payload) if not ret: raise TTransportException(type=TTransportException.NOT_OPEN, message=("Bad SASL result: %s" % (self.sasl.getError()))) self._send_message(self.OK, response) def _send_message(self, status, body): header = struct.pack(">BI", status, len(body)) body = six.ensure_binary(body) self._trans.write(header + body) self._trans.flush() def _recv_sasl_message(self): header = self._trans_read_all(5) status, length = struct.unpack(">BI", header) if length > 0: payload = self._trans_read_all(length) else: payload = "" return status, payload def write(self, data): self.__wbuf.write(data) def flush(self): buffer = self.__wbuf.getvalue() # The first time we flush data, we send it to sasl.encode() # If the length doesn't change, then we must be using a QOP # of auth and we 
should no longer call sasl.encode(), otherwise # we encode every time. if self.encode == None: success, encoded = self.sasl.encode(buffer) if not success: raise TTransportException(type=TTransportException.UNKNOWN, message=self.sasl.getError()) if (len(encoded)==len(buffer)): self.encode = False self._flushPlain(buffer) else: self.encode = True self._trans.write(encoded) elif self.encode: self._flushEncoded(buffer) else: self._flushPlain(buffer) self._trans.flush() self.__wbuf = BufferIO() def _flushEncoded(self, buffer): # sasl.ecnode() does the encoding and adds the length header, so nothing # to do but call it and write the result. success, encoded = self.sasl.encode(buffer) if not success: raise TTransportException(type=TTransportException.UNKNOWN, message=self.sasl.getError()) self._trans.write(encoded) def _flushPlain(self, buffer): # When we have QOP of auth, sasl.encode() will pass the input to the output # but won't put a length header, so we have to do that. # Note stolen from TFramedTransport: # N.B.: Doing this string concatenation is WAY cheaper than making # two separate calls to the underlying socket object. Socket writes in # Python turn out to be REALLY expensive, but it seems to do a pretty # good job of managing string buffer operations without excessive copies self._trans.write(struct.pack(">I", len(buffer)) + buffer) def read(self, sz): ret = self.__rbuf.read(sz) if len(ret) == sz: return ret self._read_frame() return ret + self.__rbuf.read(sz - len(ret)) def _read_frame(self): header = self._trans_read_all(4) (length,) = struct.unpack(">I", header) if self.encode: # If the frames are encoded (i.e. 
you're using a QOP of auth-int or # auth-conf), then make sure to include the header in the bytes you send to # sasl.decode() encoded = header + self._trans_read_all(length) success, decoded = self.sasl.decode(encoded) if not success: raise TTransportException(type=TTransportException.UNKNOWN, message=self.sasl.getError()) else: # If the frames are not encoded, just pass it through decoded = self._trans_read_all(length) self.__rbuf = BufferIO(decoded) def _trans_read_all(self, sz): try: read_all = self._trans.readAll # Thrift except AttributeError: def read_all(sz): # thriftpy buff = b'' have = 0 while have < sz: chunk = self._trans.read(sz - have) have += len(chunk) buff += chunk if len(chunk) == 0: raise TTransportException(type=TTransportException.END_OF_FILE, message="End of file reading from transport") return buff return read_all(sz) def close(self): self._trans.close() self.sasl = None # Implement the CReadableTransport interface. # Stolen shamelessly from TFramedTransport @property def cstringio_buf(self): return self.__rbuf def cstringio_refill(self, prefix, reqlen): # self.__rbuf will already be empty here because fastbinary doesn't # ask for a refill until the previous buffer is empty. Therefore, # we can start reading new frames immediately. while len(prefix) < reqlen: self._read_frame() prefix += self.__rbuf.getvalue() self.__rbuf = BufferIO(prefix) return self.__rbuf
apache-2.0
mapycz/mapnik
scons/scons-local-2.5.1/SCons/Tool/sunlink.py
3
2564
"""SCons.Tool.sunlink Tool-specific initialization for the Sun Solaris (Forte) linker. There normally shouldn't be any need to import this module directly. It will usually be imported through the generic SCons.Tool.Tool() selection method. """ # # Copyright (c) 2001 - 2016 The SCons Foundation # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY # KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE # WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. # __revision__ = "src/engine/SCons/Tool/sunlink.py rel_2.5.1:3735:9dc6cee5c168 2016/11/03 14:02:02 bdbaddog" import os import os.path import SCons.Util import link ccLinker = None # search for the acc compiler and linker front end try: dirs = os.listdir('/opt') except (IOError, OSError): # Not being able to read the directory because it doesn't exist # (IOError) or isn't readable (OSError) is okay. 
dirs = [] for d in dirs: linker = '/opt/' + d + '/bin/CC' if os.path.exists(linker): ccLinker = linker break def generate(env): """Add Builders and construction variables for Forte to an Environment.""" link.generate(env) env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -G') env['RPATHPREFIX'] = '-R' env['RPATHSUFFIX'] = '' env['_RPATH'] = '${_concat(RPATHPREFIX, RPATH, RPATHSUFFIX, __env__)}' # Support for versioned libraries link._setup_versioned_lib_variables(env, tool = 'sunlink', use_soname = True) env['LINKCALLBACKS'] = link._versioned_lib_callbacks() def exists(env): return ccLinker # Local Variables: # tab-width:4 # indent-tabs-mode:nil # End: # vim: set expandtab tabstop=4 shiftwidth=4:
lgpl-2.1
rseubert/scikit-learn
sklearn/linear_model/tests/test_bayes.py
30
1812
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
#         Fabian Pedregosa <fabian.pedregosa@inria.fr>
#
# License: BSD 3 clause

import numpy as np

from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import SkipTest
from sklearn.linear_model.bayes import BayesianRidge, ARDRegression
from sklearn import datasets
from sklearn.utils.testing import assert_array_almost_equal


def test_bayesian_on_diabetes():
    """
    Test BayesianRidge on diabetes
    """
    raise SkipTest("XFailed Test")
    diabetes = datasets.load_diabetes()
    X, y = diabetes.data, diabetes.target

    reg = BayesianRidge(compute_score=True)

    # Case 1: more samples than features.
    reg.fit(X, y)
    # The evidence (score) must grow at every iteration.
    assert_array_equal(np.diff(reg.scores_) > 0, True)

    # Case 2: more features than samples.
    X, y = X[:5, :], y[:5]
    reg.fit(X, y)
    # Same monotonicity property must hold.
    assert_array_equal(np.diff(reg.scores_) > 0, True)


def test_toy_bayesian_ridge_object():
    """
    Test BayesianRidge on toy
    """
    X = np.array([[1], [2], [6], [8], [10]])
    y = np.array([1, 2, 6, 8, 10])

    reg = BayesianRidge(compute_score=True)
    reg.fit(X, y)

    # The fitted model should approximately reproduce the identity function.
    probe = [[1], [3], [4]]
    assert_array_almost_equal(reg.predict(probe), [1, 3, 4], 2)


def test_toy_ard_object():
    """
    Test BayesianRegression ARD classifier
    """
    X = np.array([[1], [2], [3]])
    y = np.array([1, 2, 3])

    reg = ARDRegression(compute_score=True)
    reg.fit(X, y)

    # The fitted model should approximately reproduce the identity function.
    probe = [[1], [3], [4]]
    assert_array_almost_equal(reg.predict(probe), [1, 3, 4], 2)
bsd-3-clause
0x0all/SASM
Windows/MinGW64/opt/lib/python2.7/sre_parse.py
49
27505
# # Secret Labs' Regular Expression Engine # # convert re-style regular expression to sre pattern # # Copyright (c) 1998-2001 by Secret Labs AB. All rights reserved. # # See the sre.py file for information on usage and redistribution. # """Internal support module for sre""" # XXX: show string offset and offending character for all errors import sys from sre_constants import * from _sre import MAXREPEAT SPECIAL_CHARS = ".\\[{()*+?^$|" REPEAT_CHARS = "*+?{" DIGITS = set("0123456789") OCTDIGITS = set("01234567") HEXDIGITS = set("0123456789abcdefABCDEF") WHITESPACE = set(" \t\n\r\v\f") ESCAPES = { r"\a": (LITERAL, ord("\a")), r"\b": (LITERAL, ord("\b")), r"\f": (LITERAL, ord("\f")), r"\n": (LITERAL, ord("\n")), r"\r": (LITERAL, ord("\r")), r"\t": (LITERAL, ord("\t")), r"\v": (LITERAL, ord("\v")), r"\\": (LITERAL, ord("\\")) } CATEGORIES = { r"\A": (AT, AT_BEGINNING_STRING), # start of string r"\b": (AT, AT_BOUNDARY), r"\B": (AT, AT_NON_BOUNDARY), r"\d": (IN, [(CATEGORY, CATEGORY_DIGIT)]), r"\D": (IN, [(CATEGORY, CATEGORY_NOT_DIGIT)]), r"\s": (IN, [(CATEGORY, CATEGORY_SPACE)]), r"\S": (IN, [(CATEGORY, CATEGORY_NOT_SPACE)]), r"\w": (IN, [(CATEGORY, CATEGORY_WORD)]), r"\W": (IN, [(CATEGORY, CATEGORY_NOT_WORD)]), r"\Z": (AT, AT_END_STRING), # end of string } FLAGS = { # standard flags "i": SRE_FLAG_IGNORECASE, "L": SRE_FLAG_LOCALE, "m": SRE_FLAG_MULTILINE, "s": SRE_FLAG_DOTALL, "x": SRE_FLAG_VERBOSE, # extensions "t": SRE_FLAG_TEMPLATE, "u": SRE_FLAG_UNICODE, } class Pattern: # master pattern object. 
keeps track of global attributes def __init__(self): self.flags = 0 self.open = [] self.groups = 1 self.groupdict = {} def opengroup(self, name=None): gid = self.groups self.groups = gid + 1 if name is not None: ogid = self.groupdict.get(name, None) if ogid is not None: raise error, ("redefinition of group name %s as group %d; " "was group %d" % (repr(name), gid, ogid)) self.groupdict[name] = gid self.open.append(gid) return gid def closegroup(self, gid): self.open.remove(gid) def checkgroup(self, gid): return gid < self.groups and gid not in self.open class SubPattern: # a subpattern, in intermediate form def __init__(self, pattern, data=None): self.pattern = pattern if data is None: data = [] self.data = data self.width = None def dump(self, level=0): nl = 1 seqtypes = type(()), type([]) for op, av in self.data: print level*" " + op,; nl = 0 if op == "in": # member sublanguage print; nl = 1 for op, a in av: print (level+1)*" " + op, a elif op == "branch": print; nl = 1 i = 0 for a in av[1]: if i > 0: print level*" " + "or" a.dump(level+1); nl = 1 i = i + 1 elif type(av) in seqtypes: for a in av: if isinstance(a, SubPattern): if not nl: print a.dump(level+1); nl = 1 else: print a, ; nl = 0 else: print av, ; nl = 0 if not nl: print def __repr__(self): return repr(self.data) def __len__(self): return len(self.data) def __delitem__(self, index): del self.data[index] def __getitem__(self, index): if isinstance(index, slice): return SubPattern(self.pattern, self.data[index]) return self.data[index] def __setitem__(self, index, code): self.data[index] = code def insert(self, index, code): self.data.insert(index, code) def append(self, code): self.data.append(code) def getwidth(self): # determine the width (min, max) for this subpattern if self.width: return self.width lo = hi = 0L UNITCODES = (ANY, RANGE, IN, LITERAL, NOT_LITERAL, CATEGORY) REPEATCODES = (MIN_REPEAT, MAX_REPEAT) for op, av in self.data: if op is BRANCH: i = sys.maxint j = 0 for av in av[1]: l, h = 
av.getwidth() i = min(i, l) j = max(j, h) lo = lo + i hi = hi + j elif op is CALL: i, j = av.getwidth() lo = lo + i hi = hi + j elif op is SUBPATTERN: i, j = av[1].getwidth() lo = lo + i hi = hi + j elif op in REPEATCODES: i, j = av[2].getwidth() lo = lo + long(i) * av[0] hi = hi + long(j) * av[1] elif op in UNITCODES: lo = lo + 1 hi = hi + 1 elif op == SUCCESS: break self.width = int(min(lo, sys.maxint)), int(min(hi, sys.maxint)) return self.width class Tokenizer: def __init__(self, string): self.string = string self.index = 0 self.__next() def __next(self): if self.index >= len(self.string): self.next = None return char = self.string[self.index] if char[0] == "\\": try: c = self.string[self.index + 1] except IndexError: raise error, "bogus escape (end of line)" char = char + c self.index = self.index + len(char) self.next = char def match(self, char, skip=1): if char == self.next: if skip: self.__next() return 1 return 0 def get(self): this = self.next self.__next() return this def tell(self): return self.index, self.next def seek(self, index): self.index, self.next = index def isident(char): return "a" <= char <= "z" or "A" <= char <= "Z" or char == "_" def isdigit(char): return "0" <= char <= "9" def isname(name): # check that group name is a valid string if not isident(name[0]): return False for char in name[1:]: if not isident(char) and not isdigit(char): return False return True def _class_escape(source, escape): # handle escape code inside character class code = ESCAPES.get(escape) if code: return code code = CATEGORIES.get(escape) if code and code[0] == IN: return code try: c = escape[1:2] if c == "x": # hexadecimal escape (exactly two digits) while source.next in HEXDIGITS and len(escape) < 4: escape = escape + source.get() escape = escape[2:] if len(escape) != 2: raise error, "bogus escape: %s" % repr("\\" + escape) return LITERAL, int(escape, 16) & 0xff elif c in OCTDIGITS: # octal escape (up to three digits) while source.next in OCTDIGITS and 
len(escape) < 4: escape = escape + source.get() escape = escape[1:] return LITERAL, int(escape, 8) & 0xff elif c in DIGITS: raise error, "bogus escape: %s" % repr(escape) if len(escape) == 2: return LITERAL, ord(escape[1]) except ValueError: pass raise error, "bogus escape: %s" % repr(escape) def _escape(source, escape, state): # handle escape code in expression code = CATEGORIES.get(escape) if code: return code code = ESCAPES.get(escape) if code: return code try: c = escape[1:2] if c == "x": # hexadecimal escape while source.next in HEXDIGITS and len(escape) < 4: escape = escape + source.get() if len(escape) != 4: raise ValueError return LITERAL, int(escape[2:], 16) & 0xff elif c == "0": # octal escape while source.next in OCTDIGITS and len(escape) < 4: escape = escape + source.get() return LITERAL, int(escape[1:], 8) & 0xff elif c in DIGITS: # octal escape *or* decimal group reference (sigh) if source.next in DIGITS: escape = escape + source.get() if (escape[1] in OCTDIGITS and escape[2] in OCTDIGITS and source.next in OCTDIGITS): # got three octal digits; this is an octal escape escape = escape + source.get() return LITERAL, int(escape[1:], 8) & 0xff # not an octal escape, so this is a group reference group = int(escape[1:]) if group < state.groups: if not state.checkgroup(group): raise error, "cannot refer to open group" return GROUPREF, group raise ValueError if len(escape) == 2: return LITERAL, ord(escape[1]) except ValueError: pass raise error, "bogus escape: %s" % repr(escape) def _parse_sub(source, state, nested=1): # parse an alternation: a|b|c items = [] itemsappend = items.append sourcematch = source.match while 1: itemsappend(_parse(source, state)) if sourcematch("|"): continue if not nested: break if not source.next or sourcematch(")", 0): break else: raise error, "pattern not properly closed" if len(items) == 1: return items[0] subpattern = SubPattern(state) subpatternappend = subpattern.append # check if all items share a common prefix while 1: 
prefix = None for item in items: if not item: break if prefix is None: prefix = item[0] elif item[0] != prefix: break else: # all subitems start with a common "prefix". # move it out of the branch for item in items: del item[0] subpatternappend(prefix) continue # check next one break # check if the branch can be replaced by a character set for item in items: if len(item) != 1 or item[0][0] != LITERAL: break else: # we can store this as a character set instead of a # branch (the compiler may optimize this even more) set = [] setappend = set.append for item in items: setappend(item[0]) subpatternappend((IN, set)) return subpattern subpattern.append((BRANCH, (None, items))) return subpattern def _parse_sub_cond(source, state, condgroup): item_yes = _parse(source, state) if source.match("|"): item_no = _parse(source, state) if source.match("|"): raise error, "conditional backref with more than two branches" else: item_no = None if source.next and not source.match(")", 0): raise error, "pattern not properly closed" subpattern = SubPattern(state) subpattern.append((GROUPREF_EXISTS, (condgroup, item_yes, item_no))) return subpattern _PATTERNENDERS = set("|)") _ASSERTCHARS = set("=!<") _LOOKBEHINDASSERTCHARS = set("=!") _REPEATCODES = set([MIN_REPEAT, MAX_REPEAT]) def _parse(source, state): # parse a simple pattern subpattern = SubPattern(state) # precompute constants into local variables subpatternappend = subpattern.append sourceget = source.get sourcematch = source.match _len = len PATTERNENDERS = _PATTERNENDERS ASSERTCHARS = _ASSERTCHARS LOOKBEHINDASSERTCHARS = _LOOKBEHINDASSERTCHARS REPEATCODES = _REPEATCODES while 1: if source.next in PATTERNENDERS: break # end of subpattern this = sourceget() if this is None: break # end of pattern if state.flags & SRE_FLAG_VERBOSE: # skip whitespace and comments if this in WHITESPACE: continue if this == "#": while 1: this = sourceget() if this in (None, "\n"): break continue if this and this[0] not in SPECIAL_CHARS: 
subpatternappend((LITERAL, ord(this))) elif this == "[": # character set set = [] setappend = set.append ## if sourcematch(":"): ## pass # handle character classes if sourcematch("^"): setappend((NEGATE, None)) # check remaining characters start = set[:] while 1: this = sourceget() if this == "]" and set != start: break elif this and this[0] == "\\": code1 = _class_escape(source, this) elif this: code1 = LITERAL, ord(this) else: raise error, "unexpected end of regular expression" if sourcematch("-"): # potential range this = sourceget() if this == "]": if code1[0] is IN: code1 = code1[1][0] setappend(code1) setappend((LITERAL, ord("-"))) break elif this: if this[0] == "\\": code2 = _class_escape(source, this) else: code2 = LITERAL, ord(this) if code1[0] != LITERAL or code2[0] != LITERAL: raise error, "bad character range" lo = code1[1] hi = code2[1] if hi < lo: raise error, "bad character range" setappend((RANGE, (lo, hi))) else: raise error, "unexpected end of regular expression" else: if code1[0] is IN: code1 = code1[1][0] setappend(code1) # XXX: <fl> should move set optimization to compiler! 
if _len(set)==1 and set[0][0] is LITERAL: subpatternappend(set[0]) # optimization elif _len(set)==2 and set[0][0] is NEGATE and set[1][0] is LITERAL: subpatternappend((NOT_LITERAL, set[1][1])) # optimization else: # XXX: <fl> should add charmap optimization here subpatternappend((IN, set)) elif this and this[0] in REPEAT_CHARS: # repeat previous item if this == "?": min, max = 0, 1 elif this == "*": min, max = 0, MAXREPEAT elif this == "+": min, max = 1, MAXREPEAT elif this == "{": if source.next == "}": subpatternappend((LITERAL, ord(this))) continue here = source.tell() min, max = 0, MAXREPEAT lo = hi = "" while source.next in DIGITS: lo = lo + source.get() if sourcematch(","): while source.next in DIGITS: hi = hi + sourceget() else: hi = lo if not sourcematch("}"): subpatternappend((LITERAL, ord(this))) source.seek(here) continue if lo: min = int(lo) if min >= MAXREPEAT: raise OverflowError("the repetition number is too large") if hi: max = int(hi) if max >= MAXREPEAT: raise OverflowError("the repetition number is too large") if max < min: raise error("bad repeat interval") else: raise error, "not supported" # figure out which item to repeat if subpattern: item = subpattern[-1:] else: item = None if not item or (_len(item) == 1 and item[0][0] == AT): raise error, "nothing to repeat" if item[0][0] in REPEATCODES: raise error, "multiple repeat" if sourcematch("?"): subpattern[-1] = (MIN_REPEAT, (min, max, item)) else: subpattern[-1] = (MAX_REPEAT, (min, max, item)) elif this == ".": subpatternappend((ANY, None)) elif this == "(": group = 1 name = None condgroup = None if sourcematch("?"): group = 0 # options if sourcematch("P"): # python extensions if sourcematch("<"): # named group: skip forward to end of name name = "" while 1: char = sourceget() if char is None: raise error, "unterminated name" if char == ">": break name = name + char group = 1 if not name: raise error("missing group name") if not isname(name): raise error("bad character in group name %r" % 
name) elif sourcematch("="): # named backreference name = "" while 1: char = sourceget() if char is None: raise error, "unterminated name" if char == ")": break name = name + char if not name: raise error("missing group name") if not isname(name): raise error("bad character in backref group name " "%r" % name) gid = state.groupdict.get(name) if gid is None: raise error, "unknown group name" subpatternappend((GROUPREF, gid)) continue else: char = sourceget() if char is None: raise error, "unexpected end of pattern" raise error, "unknown specifier: ?P%s" % char elif sourcematch(":"): # non-capturing group group = 2 elif sourcematch("#"): # comment while 1: if source.next is None or source.next == ")": break sourceget() if not sourcematch(")"): raise error, "unbalanced parenthesis" continue elif source.next in ASSERTCHARS: # lookahead assertions char = sourceget() dir = 1 if char == "<": if source.next not in LOOKBEHINDASSERTCHARS: raise error, "syntax error" dir = -1 # lookbehind char = sourceget() p = _parse_sub(source, state) if not sourcematch(")"): raise error, "unbalanced parenthesis" if char == "=": subpatternappend((ASSERT, (dir, p))) else: subpatternappend((ASSERT_NOT, (dir, p))) continue elif sourcematch("("): # conditional backreference group condname = "" while 1: char = sourceget() if char is None: raise error, "unterminated name" if char == ")": break condname = condname + char group = 2 if not condname: raise error("missing group name") if isname(condname): condgroup = state.groupdict.get(condname) if condgroup is None: raise error, "unknown group name" else: try: condgroup = int(condname) except ValueError: raise error, "bad character in group name" else: # flags if not source.next in FLAGS: raise error, "unexpected end of pattern" while source.next in FLAGS: state.flags = state.flags | FLAGS[sourceget()] if group: # parse group contents if group == 2: # anonymous group group = None else: group = state.opengroup(name) if condgroup: p = 
_parse_sub_cond(source, state, condgroup) else: p = _parse_sub(source, state) if not sourcematch(")"): raise error, "unbalanced parenthesis" if group is not None: state.closegroup(group) subpatternappend((SUBPATTERN, (group, p))) else: while 1: char = sourceget() if char is None: raise error, "unexpected end of pattern" if char == ")": break raise error, "unknown extension" elif this == "^": subpatternappend((AT, AT_BEGINNING)) elif this == "$": subpattern.append((AT, AT_END)) elif this and this[0] == "\\": code = _escape(source, this, state) subpatternappend(code) else: raise error, "parser error" return subpattern def parse(str, flags=0, pattern=None): # parse 're' pattern into list of (opcode, argument) tuples source = Tokenizer(str) if pattern is None: pattern = Pattern() pattern.flags = flags pattern.str = str p = _parse_sub(source, pattern, 0) tail = source.get() if tail == ")": raise error, "unbalanced parenthesis" elif tail: raise error, "bogus characters at end of regular expression" if flags & SRE_FLAG_DEBUG: p.dump() if not (flags & SRE_FLAG_VERBOSE) and p.pattern.flags & SRE_FLAG_VERBOSE: # the VERBOSE flag was switched on inside the pattern. to be # on the safe side, we'll parse the whole thing again... 
return parse(str, p.pattern.flags) return p def parse_template(source, pattern): # parse 're' replacement string into list of literals and # group references s = Tokenizer(source) sget = s.get p = [] a = p.append def literal(literal, p=p, pappend=a): if p and p[-1][0] is LITERAL: p[-1] = LITERAL, p[-1][1] + literal else: pappend((LITERAL, literal)) sep = source[:0] if type(sep) is type(""): makechar = chr else: makechar = unichr while 1: this = sget() if this is None: break # end of replacement string if this and this[0] == "\\": # group c = this[1:2] if c == "g": name = "" if s.match("<"): while 1: char = sget() if char is None: raise error, "unterminated group name" if char == ">": break name = name + char if not name: raise error, "missing group name" try: index = int(name) if index < 0: raise error, "negative group number" except ValueError: if not isname(name): raise error, "bad character in group name" try: index = pattern.groupindex[name] except KeyError: raise IndexError, "unknown group name" a((MARK, index)) elif c == "0": if s.next in OCTDIGITS: this = this + sget() if s.next in OCTDIGITS: this = this + sget() literal(makechar(int(this[1:], 8) & 0xff)) elif c in DIGITS: isoctal = False if s.next in DIGITS: this = this + sget() if (c in OCTDIGITS and this[2] in OCTDIGITS and s.next in OCTDIGITS): this = this + sget() isoctal = True literal(makechar(int(this[1:], 8) & 0xff)) if not isoctal: a((MARK, int(this[1:]))) else: try: this = makechar(ESCAPES[this][1]) except KeyError: pass literal(this) else: literal(this) # convert template to groups and literals lists i = 0 groups = [] groupsappend = groups.append literals = [None] * len(p) for c, s in p: if c is MARK: groupsappend((i, s)) # literal[i] is already None else: literals[i] = s i = i + 1 return groups, literals def expand_template(template, match): g = match.group sep = match.string[:0] groups, literals = template literals = literals[:] try: for index, group in groups: literals[index] = s = g(group) if 
s is None: raise error, "unmatched group" except IndexError: raise error, "invalid group reference" return sep.join(literals)
gpl-3.0
apache/libcloud
libcloud/loadbalancer/drivers/brightbox.py
58
4831
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from libcloud.utils.py3 import httplib

from libcloud.common.brightbox import BrightboxConnection
from libcloud.loadbalancer.base import Driver, Algorithm, Member
from libcloud.loadbalancer.base import LoadBalancer
from libcloud.loadbalancer.types import State
from libcloud.utils.misc import reverse_dict

API_VERSION = '1.0'


class BrightboxLBDriver(Driver):
    """Brightbox Cloud load balancer driver."""

    connectionCls = BrightboxConnection

    name = 'Brightbox'
    website = 'http://www.brightbox.co.uk/'

    # Maps Brightbox API status strings onto libcloud State values.
    LB_STATE_MAP = {
        'creating': State.PENDING,
        'active': State.RUNNING,
        'deleting': State.UNKNOWN,
        'deleted': State.UNKNOWN,
        'failing': State.UNKNOWN,
        'failed': State.UNKNOWN,
    }

    _VALUE_TO_ALGORITHM_MAP = {
        'round-robin': Algorithm.ROUND_ROBIN,
        'least-connections': Algorithm.LEAST_CONNECTIONS
    }

    _ALGORITHM_TO_VALUE_MAP = reverse_dict(_VALUE_TO_ALGORITHM_MAP)

    def list_protocols(self):
        """Return the protocols supported by Brightbox load balancers."""
        return ['tcp', 'http']

    def list_balancers(self):
        """List all load balancers in the account."""
        data = self.connection.request('/%s/load_balancers' % API_VERSION) \
                   .object

        return list(map(self._to_balancer, data))

    def create_balancer(self, name, port, protocol, algorithm, members):
        """Create a new load balancer.

        The same ``port``/``protocol`` pair is used both for the single
        listener and for the healthcheck.
        """
        response = self._post(
            '/%s/load_balancers' % API_VERSION,
            {'name': name,
             'nodes': list(map(self._member_to_node, members)),
             'policy': self._algorithm_to_value(algorithm),
             'listeners': [{'in': port, 'out': port, 'protocol': protocol}],
             'healthcheck': {'type': protocol, 'port': port}}
        )

        return self._to_balancer(response.object)

    def destroy_balancer(self, balancer):
        """Destroy a load balancer; True when the API accepted the request."""
        response = self.connection.request('/%s/load_balancers/%s' %
                                           (API_VERSION, balancer.id),
                                           method='DELETE')

        return response.status == httplib.ACCEPTED

    def get_balancer(self, balancer_id):
        """Fetch a single load balancer by its id."""
        data = self.connection.request(
            '/%s/load_balancers/%s' % (API_VERSION, balancer_id)).object
        return self._to_balancer(data)

    def balancer_attach_compute_node(self, balancer, node):
        # Brightbox nodes are plain {'node': id} references, so attaching a
        # compute node is identical to attaching a member.
        return self.balancer_attach_member(balancer, node)

    def balancer_attach_member(self, balancer, member):
        """Attach a member to the balancer and return it."""
        path = '/%s/load_balancers/%s/add_nodes' % (API_VERSION, balancer.id)

        self._post(path, {'nodes': [self._member_to_node(member)]})

        return member

    def balancer_detach_member(self, balancer, member):
        """Detach a member; True when the API accepted the request."""
        path = '/%s/load_balancers/%s/remove_nodes' % (API_VERSION,
                                                       balancer.id)

        response = self._post(path,
                              {'nodes': [self._member_to_node(member)]})

        return response.status == httplib.ACCEPTED

    def balancer_list_members(self, balancer):
        """List the members currently attached to the balancer."""
        path = '/%s/load_balancers/%s' % (API_VERSION, balancer.id)

        data = self.connection.request(path).object

        def func(data):
            return self._node_to_member(data, balancer)

        return list(map(func, data['nodes']))

    def _post(self, path, data=None):
        """POST ``data`` as JSON to ``path``.

        ``data`` defaults to None instead of a mutable ``{}`` literal so the
        default object is not shared between calls (classic Python pitfall).
        """
        if data is None:
            data = {}
        headers = {'Content-Type': 'application/json'}

        return self.connection.request(path, data=data, headers=headers,
                                       method='POST')

    def _to_balancer(self, data):
        """Convert an API load-balancer dict into a LoadBalancer object."""
        return LoadBalancer(
            id=data['id'],
            name=data['name'],
            state=self.LB_STATE_MAP.get(data['status'], State.UNKNOWN),
            ip=self._public_ip(data),
            # Only the first listener's inbound port is surfaced.
            port=data['listeners'][0]['in'],
            driver=self.connection.driver
        )

    def _member_to_node(self, member):
        """Convert a Member into the {'node': id} reference the API expects."""
        return {'node': member.id}

    def _node_to_member(self, data, balancer):
        # The node listing carries no address/port details, hence ip/port None.
        return Member(id=data['id'], ip=None, port=None, balancer=balancer)

    def _public_ip(self, data):
        """Return the first cloud IP of the balancer, or None if it has none."""
        if data['cloud_ips']:
            return data['cloud_ips'][0]['public_ip']
        return None
apache-2.0
tempbottle/kbengine
kbe/src/lib/python/Lib/turtledemo/paint.py
94
1291
#!/usr/bin/env python3 """ turtle-example-suite: tdemo_paint.py A simple event-driven paint program - left mouse button moves turtle - middle mouse button changes color - right mouse button toogles betweem pen up (no line drawn when the turtle moves) and pen down (line is drawn). If pen up follows at least two pen-down moves, the polygon that includes the starting point is filled. ------------------------------------------- Play around by clicking into the canvas using all three mouse buttons. ------------------------------------------- To exit press STOP button ------------------------------------------- """ from turtle import * def switchupdown(x=0, y=0): if pen()["pendown"]: end_fill() up() else: down() begin_fill() def changecolor(x=0, y=0): global colors colors = colors[1:]+colors[:1] color(colors[0]) def main(): global colors shape("circle") resizemode("user") shapesize(.5) width(3) colors=["red", "green", "blue", "yellow"] color(colors[0]) switchupdown() onscreenclick(goto,1) onscreenclick(changecolor,2) onscreenclick(switchupdown,3) return "EVENTLOOP" if __name__ == "__main__": msg = main() print(msg) mainloop()
lgpl-3.0
samukasmk/pythonbrasil_mobile
app/utils/patch_browser.py
4
2296
from kivy.utils import platform from kivy.core.window import Window import webbrowser webbrowser._opened = False if platform == 'android': from jnius import autoclass from android.runnable import run_on_ui_thread WebView = autoclass('android.webkit.WebView') print "webview" WebViewClient = autoclass('android.webkit.WebViewClient') print "got client" activity = autoclass('org.kivy.android.PythonActivity').mActivity print "activity" import os @run_on_ui_thread def initiate_webview(): webview = WebView(activity) webbrowser._webview = webview webbrowser._view_cached = activity.getCurrentFocus() settings = webbrowser._webview.getSettings() settings.setJavaScriptEnabled(True) settings.setUseWideViewPort(True) # enables viewport html meta tags settings.setLoadWithOverviewMode(True) # uses viewport settings.setSupportZoom(True) # enables zoom settings.setBuiltInZoomControls(True) # enables zoom controls wvc = WebViewClient() webbrowser._webview.setWebViewClient(wvc) initiate_webview() def _webopen(*args, **kwargs): #print '9'*90 @run_on_ui_thread def webopen(*args, **kwargs): # open webview here url = args[0] webview = webbrowser._webview webview.resumeTimers() webview.clearHistory() webview.loadUrl("about:blank") webview.clearCache(True) webview.freeMemory() activity.setContentView(webview) webbrowser._webview.loadUrl('{}'.format(url)) webbrowser._opened = True webopen(*args, **kwargs) return True @run_on_ui_thread def close(*args): if not webbrowser._webview: print "no_webview"*20 return wv = webbrowser._webview wv.clearHistory() wv.clearCache(True) wv.loadUrl("about:blank") print 'abt bank'*3 wv.freeMemory() print 'free mem'*3 wv.pauseTimers() print 'pause timers'*3 activity.setContentView(webbrowser._view_cached) webbrowser._opened = False webbrowser.open = _webopen webbrowser.close = close
agpl-3.0
tchx84/sugar-toolkit-gtk3
src/sugar3/profile.py
7
6880
# Copyright (C) 2006-2007, Red Hat, Inc. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., 59 Temple Place - Suite 330, # Boston, MA 02111-1307, USA. """User settings/configuration loading. """ from gi.repository import Gio import os import logging from ConfigParser import ConfigParser from sugar3 import env from sugar3 import util from sugar3.graphics.xocolor import XoColor _profile = None class Profile(object): """Local user's current options/profile information The profile is also responsible for loading the user's public and private ssh keys from disk. 
Attributes: pubkey -- public ssh key privkey_hash -- SHA has of the child's public key """ def __init__(self, path): self._pubkey = None self._privkey_hash = None def _get_pubkey(self): if self._pubkey is None: self._pubkey = self._load_pubkey() return self._pubkey pubkey = property(fget=_get_pubkey) def _get_privkey_hash(self): if self._privkey_hash is None: self._privkey_hash = self._hash_private_key() return self._privkey_hash privkey_hash = property(fget=_get_privkey_hash) def is_valid(self): settings = Gio.Settings('org.sugarlabs.user') nick = settings.get_string('nick') color = settings.get_string('color') return nick is not '' and \ color is not '' and \ self.pubkey is not None and \ self.privkey_hash is not None def _load_pubkey(self): key_path = os.path.join(env.get_profile_path(), 'owner.key.pub') if not os.path.exists(key_path): return None try: f = open(key_path, 'r') lines = f.readlines() f.close() except IOError: logging.exception('Error reading public key') return None magic = 'ssh-dss ' for l in lines: l = l.strip() if not l.startswith(magic): continue return l[len(magic):] else: logging.error('Error parsing public key.') return None def _hash_private_key(self): key_path = os.path.join(env.get_profile_path(), 'owner.key') if not os.path.exists(key_path): return None try: f = open(key_path, 'r') lines = f.readlines() f.close() except IOError: logging.exception('Error reading private key') return None key = "" begin_found = False end_found = False for l in lines: l = l.strip() if l.startswith('-----BEGIN DSA PRIVATE KEY-----'): begin_found = True continue if l.startswith('-----END DSA PRIVATE KEY-----'): end_found = True continue key += l if not (len(key) and begin_found and end_found): logging.error('Error parsing public key.') return None # hash it key_hash = util.sha_data(key) return util.printable_hash(key_hash) def convert_profile(self): cp = ConfigParser() path = os.path.join(env.get_profile_path(), 'config') cp.read([path]) settings = 
Gio.Settings('org.sugarlabs.user') if cp.has_option('Buddy', 'NickName'): name = cp.get('Buddy', 'NickName') # decode nickname from ascii-safe chars to unicode nick = name.decode('utf-8') settings.set_string('nick', nick) if cp.has_option('Buddy', 'Color'): color = cp.get('Buddy', 'Color') settings.set_string('color', color) if cp.has_option('Jabber', 'Server'): server = cp.get('Jabber', 'Server') settings = Gio.Settings('org.sugarlabs.collaboration') settings.set_string('jabber-server', server) if cp.has_option('Date', 'Timezone'): timezone = cp.get('Date', 'Timezone') settings = Gio.Settings('org.sugarlabs.date') settings.set_string('timezone', timezone) settings = Gio.Settings('org.sugarlabs.frame') if cp.has_option('Frame', 'HotCorners'): delay = float(cp.get('Frame', 'HotCorners')) settings.set_int('corner-delay', int(delay)) if cp.has_option('Frame', 'WarmEdges'): delay = float(cp.get('Frame', 'WarmEdges')) settings.set_int('edge-delay', int(delay)) if cp.has_option('Server', 'Backup1'): backup1 = cp.get('Server', 'Backup1') settings = Gio.Settings('org.sugarlabs') settings.set_string('backup-url', backup1) if cp.has_option('Sound', 'Volume'): volume = float(cp.get('Sound', 'Volume')) settings = Gio.Settings('org.sugarlabs.sound') settings.set_int('volume', int(volume)) settings = Gio.Settings('org.sugarlabs.power') if cp.has_option('Power', 'AutomaticPM'): state = cp.get('Power', 'AutomaticPM') if state.lower() == 'true': settings.set_boolean('automatic', True) if cp.has_option('Power', 'ExtremePM'): state = cp.get('Power', 'ExtremePM') if state.lower() == 'true': settings.set_boolean('extreme', True) if cp.has_option('Shell', 'FavoritesLayout'): layout = cp.get('Shell', 'FavoritesLayout') settings = Gio.Settings('org.sugarlabs.desktop') settings.set_string('favorites-layout', layout) del cp try: os.unlink(path) except OSError: logging.error('Error removing old profile.') def get_profile(): global _profile if not _profile: path = 
os.path.join(env.get_profile_path(), 'config') _profile = Profile(path) return _profile def get_nick_name(): settings = Gio.Settings('org.sugarlabs.user') return settings.get_string('nick') def get_color(): settings = Gio.Settings('org.sugarlabs.user') color = settings.get_string('color') return XoColor(color) def get_pubkey(): return get_profile().pubkey
lgpl-2.1
scottferg/web-console
django/conf/locale/nn/formats.py
27
1311
DATE_FORMAT = 'j. F Y' TIME_FORMAT = 'H:i' DATETIME_FORMAT = 'j. F Y H:i' YEAR_MONTH_FORMAT = 'F Y' MONTH_DAY_FORMAT = 'j. F' SHORT_DATE_FORMAT = 'd.m.Y' SHORT_DATETIME_FORMAT = 'd.m.Y H:i' FIRST_DAY_OF_WEEK = 1 # Monday DATE_INPUT_FORMATS = ( '%Y-%m-%d', '%j.%m.%Y', '%j.%m.%y', # '2006-10-25', '25.10.2006', '25.10.06' '%Y-%m-%j', # '2006-10-25', # '%j. %b %Y', '%j %b %Y', # '25. okt 2006', '25 okt 2006' # '%j. %b. %Y', '%j %b. %Y', # '25. okt. 2006', '25 okt. 2006' # '%j. %B %Y', '%j %B %Y', # '25. oktober 2006', '25 oktober 2006' ) TIME_INPUT_FORMATS = ( '%H:%M:%S', # '14:30:59' '%H:%M', # '14:30' ) DATETIME_INPUT_FORMATS = ( '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59' '%Y-%m-%d %H:%M', # '2006-10-25 14:30' '%Y-%m-%d', # '2006-10-25' '%Y-%m-%j', # '2006-10-25' '%j.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59' '%j.%m.%Y %H:%M', # '25.10.2006 14:30' '%j.%m.%Y', # '25.10.2006' '%j.%m.%y %H:%M:%S', # '25.10.06 14:30:59' '%j.%m.%y %H:%M', # '25.10.06 14:30' '%j.%m.%y', # '25.10.06' ) DECIMAL_SEPARATOR = ',' THOUSAND_SEPARATOR = ' ' NUMBER_GROUPING = 3
bsd-3-clause
ikeikeikeike/tastypie-queryset-client
queryset_client/tests/base/models.py
2
4567
from django.db import ( models, transaction ) from django.utils.translation import ugettext class Inbox(models.Model): """ Inbox """ did = models.CharField(ugettext("device id"), max_length="250") ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class Meta: verbose_name = ugettext("inbox") verbose_name_plural = ugettext("inboxes") def __unicode__(self): return u"{0}".format(self.did) class Message(models.Model): """ Messages """ subject = models.CharField(ugettext("subject"), max_length="250") body = models.TextField(ugettext("body")) ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class Meta: verbose_name = ugettext("message") verbose_name_plural = ugettext("messages") def __unicode__(self): return u"{0}".format(self.subject) class InboxMessage(models.Model): """ InboxMessage """ inbox = models.ForeignKey(Inbox, unique=False, db_index=True, help_text=ugettext("inbox == mailto")) message = models.ForeignKey(Message, unique=False, db_index=True, null=True) mailfrom = models.CharField(ugettext("mailfrom"), max_length="250", blank=True, null=True, help_text=ugettext("Anyway")) read = models.NullBooleanField(ugettext("read"), db_index=True, help_text=ugettext("null=new, 0=unread, 1=read")) ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class Meta: verbose_name = ugettext("inbox_message") verbose_name_plural = ugettext("inbox_messages") unique_together = (("inbox", "message")) # multi unique key def __unicode__(self): return u"{0} ({1})".format(self.inbox, self.message) class InboxMessageMany(models.Model): """ InboxMessage Many """ inbox_message = models.ManyToManyField(InboxMessage, null=True) ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class 
Operation(models.Model): """ """ opid = models.CharField(ugettext("opid"), max_length="250", help_text=ugettext("operation id (UUID)")) name = models.CharField(ugettext("name"), max_length="250", help_text=ugettext("operation type name")) meta = models.TextField(ugettext("meta"), help_text=ugettext("operation meta data.")) ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class Meta: verbose_name = ugettext("operation") verbose_name_plural = ugettext("operations") def __unicode__(self): return u"name: {0}, opid: {1}".format(self.name, self.opid) class Status(models.Model): operation = models.ForeignKey(Operation, unique=False, db_index=True) contenttype = models.ForeignKey(InboxMessage, unique=False, db_index=True, help_text=ugettext("# TODO: fix contenttype framework")) status = models.CharField(ugettext("status"), db_index=True, max_length="20", default="ready") reason = models.CharField(ugettext("reason"), max_length="250", blank=True) trace = models.TextField(ugettext("stack trace"), blank=True) ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class Meta: verbose_name = ugettext("status") verbose_name_plural = ugettext("statuses") unique_together = (("operation", "contenttype")) # multi unique key def __unicode__(self): return u"operation: {0}, contenttype: {1}, status: {2}".format(self.operation, self.contenttype, self.status) class Strict(models.Model): """ For strict field """ integer_test = models.IntegerField('integer', null=True) float_test = models.FloatField('float', null=True) decimal_test = models.DecimalField('decimal', max_digits=5, decimal_places=2, null=True) # boolean_test = models.BooleanField('boolean') # generic_ip_address_test = models. 
ctime = models.DateTimeField(ugettext("ctime"), auto_now_add=True) utime = models.DateTimeField(ugettext("utime"), auto_now=True) class Meta: verbose_name = ugettext("strict") verbose_name_plural = ugettext("stricts") def __unicode__(self): return u"{0}".format(self.pk)
mit
SUSE/azure-sdk-for-python
azure-mgmt-web/azure/mgmt/web/models/resource.py
3
1651
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. # -------------------------------------------------------------------------- from msrest.serialization import Model class Resource(Model): """Azure resource. Variables are only populated by the server, and will be ignored when sending a request. :ivar id: Resource Id. :vartype id: str :param name: Resource Name. :type name: str :param kind: Kind of resource. :type kind: str :param location: Resource Location. :type location: str :param type: Resource type. :type type: str :param tags: Resource tags. :type tags: dict """ _validation = { 'id': {'readonly': True}, 'location': {'required': True}, } _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'kind': {'key': 'kind', 'type': 'str'}, 'location': {'key': 'location', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'tags': {'key': 'tags', 'type': '{str}'}, } def __init__(self, location, name=None, kind=None, type=None, tags=None): self.id = None self.name = name self.kind = kind self.location = location self.type = type self.tags = tags
mit
andrewcmyers/tensorflow
tensorflow/python/debug/lib/debug_utils_test.py
79
13093
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for TensorFlow Debugger (tfdbg) Utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.core.protobuf import config_pb2 from tensorflow.python.client import session from tensorflow.python.debug.lib import debug_utils from tensorflow.python.framework import constant_op from tensorflow.python.framework import test_util from tensorflow.python.ops import math_ops # Import resource_variable_ops for the variables-to-tensor implicit conversion. 
from tensorflow.python.ops import resource_variable_ops # pylint: disable=unused-import from tensorflow.python.ops import variables from tensorflow.python.platform import googletest class DebugUtilsTest(test_util.TensorFlowTestCase): @classmethod def setUpClass(cls): cls._sess = session.Session() with cls._sess: cls._a_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]]) cls._b_init_val = np.array([[2.0], [-1.0]]) cls._c_val = np.array([[-4.0], [np.nan]]) cls._a_init = constant_op.constant( cls._a_init_val, shape=[2, 2], name="a1_init") cls._b_init = constant_op.constant( cls._b_init_val, shape=[2, 1], name="b_init") cls._a = variables.Variable(cls._a_init, name="a1") cls._b = variables.Variable(cls._b_init, name="b") cls._c = constant_op.constant(cls._c_val, shape=[2, 1], name="c") # Matrix product of a and b. cls._p = math_ops.matmul(cls._a, cls._b, name="p1") # Sum of two vectors. cls._s = math_ops.add(cls._p, cls._c, name="s") cls._graph = cls._sess.graph # These are all the expected nodes in the graph: # Two variables (a, b), each with four nodes (Variable, init, Assign, # read). # One constant (c). # One add operation and one matmul operation. cls._expected_num_nodes = 4 * 2 + 1 + 1 + 1 def setUp(self): self._run_options = config_pb2.RunOptions() def _verify_watches(self, watch_opts, expected_output_slot, expected_debug_ops, expected_debug_urls): """Verify a list of debug tensor watches. This requires all watches in the watch list have exactly the same output_slot, debug_ops and debug_urls. Args: watch_opts: Repeated protobuf field of DebugTensorWatch. expected_output_slot: Expected output slot index, as an integer. expected_debug_ops: Expected debug ops, as a list of strings. expected_debug_urls: Expected debug URLs, as a list of strings. Returns: List of node names from the list of debug tensor watches. 
""" node_names = [] for watch in watch_opts: node_names.append(watch.node_name) self.assertEqual(expected_output_slot, watch.output_slot) self.assertEqual(expected_debug_ops, watch.debug_ops) self.assertEqual(expected_debug_urls, watch.debug_urls) return node_names def testAddDebugTensorWatches_defaultDebugOp(self): debug_utils.add_debug_tensor_watch( self._run_options, "foo/node_a", 1, debug_urls="file:///tmp/tfdbg_1") debug_utils.add_debug_tensor_watch( self._run_options, "foo/node_b", 0, debug_urls="file:///tmp/tfdbg_2") debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts self.assertEqual(2, len(debug_watch_opts)) watch_0 = debug_watch_opts[0] watch_1 = debug_watch_opts[1] self.assertEqual("foo/node_a", watch_0.node_name) self.assertEqual(1, watch_0.output_slot) self.assertEqual("foo/node_b", watch_1.node_name) self.assertEqual(0, watch_1.output_slot) # Verify default debug op name. self.assertEqual(["DebugIdentity"], watch_0.debug_ops) self.assertEqual(["DebugIdentity"], watch_1.debug_ops) # Verify debug URLs. self.assertEqual(["file:///tmp/tfdbg_1"], watch_0.debug_urls) self.assertEqual(["file:///tmp/tfdbg_2"], watch_1.debug_urls) def testAddDebugTensorWatches_explicitDebugOp(self): debug_utils.add_debug_tensor_watch( self._run_options, "foo/node_a", 0, debug_ops="DebugNanCount", debug_urls="file:///tmp/tfdbg_1") debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts self.assertEqual(1, len(debug_watch_opts)) watch_0 = debug_watch_opts[0] self.assertEqual("foo/node_a", watch_0.node_name) self.assertEqual(0, watch_0.output_slot) # Verify default debug op name. self.assertEqual(["DebugNanCount"], watch_0.debug_ops) # Verify debug URLs. 
self.assertEqual(["file:///tmp/tfdbg_1"], watch_0.debug_urls) def testAddDebugTensorWatches_multipleDebugOps(self): debug_utils.add_debug_tensor_watch( self._run_options, "foo/node_a", 0, debug_ops=["DebugNanCount", "DebugIdentity"], debug_urls="file:///tmp/tfdbg_1") debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts self.assertEqual(1, len(debug_watch_opts)) watch_0 = debug_watch_opts[0] self.assertEqual("foo/node_a", watch_0.node_name) self.assertEqual(0, watch_0.output_slot) # Verify default debug op name. self.assertEqual(["DebugNanCount", "DebugIdentity"], watch_0.debug_ops) # Verify debug URLs. self.assertEqual(["file:///tmp/tfdbg_1"], watch_0.debug_urls) def testAddDebugTensorWatches_multipleURLs(self): debug_utils.add_debug_tensor_watch( self._run_options, "foo/node_a", 0, debug_ops="DebugNanCount", debug_urls=["file:///tmp/tfdbg_1", "file:///tmp/tfdbg_2"]) debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts self.assertEqual(1, len(debug_watch_opts)) watch_0 = debug_watch_opts[0] self.assertEqual("foo/node_a", watch_0.node_name) self.assertEqual(0, watch_0.output_slot) # Verify default debug op name. self.assertEqual(["DebugNanCount"], watch_0.debug_ops) # Verify debug URLs. self.assertEqual(["file:///tmp/tfdbg_1", "file:///tmp/tfdbg_2"], watch_0.debug_urls) def testWatchGraph_allNodes(self): debug_utils.watch_graph( self._run_options, self._graph, debug_ops=["DebugIdentity", "DebugNanCount"], debug_urls="file:///tmp/tfdbg_1") debug_watch_opts = self._run_options.debug_options.debug_tensor_watch_opts self.assertEqual(self._expected_num_nodes, len(debug_watch_opts)) # Verify that each of the nodes in the graph with output tensors in the # graph have debug tensor watch. node_names = self._verify_watches(debug_watch_opts, 0, ["DebugIdentity", "DebugNanCount"], ["file:///tmp/tfdbg_1"]) # Verify the node names. 
self.assertTrue("a1_init" in node_names) self.assertTrue("a1" in node_names) self.assertTrue("a1/Assign" in node_names) self.assertTrue("a1/read" in node_names) self.assertTrue("b_init" in node_names) self.assertTrue("b" in node_names) self.assertTrue("b/Assign" in node_names) self.assertTrue("b/read" in node_names) self.assertTrue("c" in node_names) self.assertTrue("p1" in node_names) self.assertTrue("s" in node_names) def testWatchGraph_nodeNameWhitelist(self): debug_utils.watch_graph( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", node_name_regex_whitelist="(a1$|a1_init$|a1/.*|p1$)") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertEqual( sorted(["a1_init", "a1", "a1/Assign", "a1/read", "p1"]), sorted(node_names)) def testWatchGraph_opTypeWhitelist(self): debug_utils.watch_graph( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", op_type_regex_whitelist="(Variable|MatMul)") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertEqual(sorted(["a1", "b", "p1"]), sorted(node_names)) def testWatchGraph_nodeNameAndOpTypeWhitelists(self): debug_utils.watch_graph( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", node_name_regex_whitelist="([a-z]+1$)", op_type_regex_whitelist="(MatMul)") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertEqual(["p1"], node_names) def testWatchGraph_tensorDTypeWhitelist(self): debug_utils.watch_graph( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", tensor_dtype_regex_whitelist=".*_ref") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertItemsEqual(["a1", "a1/Assign", "b", 
"b/Assign"], node_names) def testWatchGraph_nodeNameAndTensorDTypeWhitelists(self): debug_utils.watch_graph( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", node_name_regex_whitelist="^a.*", tensor_dtype_regex_whitelist=".*_ref") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertItemsEqual(["a1", "a1/Assign"], node_names) def testWatchGraph_nodeNameBlacklist(self): debug_utils.watch_graph_with_blacklists( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", node_name_regex_blacklist="(a1$|a1_init$|a1/.*|p1$)") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertEqual( sorted(["b_init", "b", "b/Assign", "b/read", "c", "s"]), sorted(node_names)) def testWatchGraph_opTypeBlacklist(self): debug_utils.watch_graph_with_blacklists( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", op_type_regex_blacklist="(Variable|Identity|Assign|Const)") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertEqual(sorted(["p1", "s"]), sorted(node_names)) def testWatchGraph_nodeNameAndOpTypeBlacklists(self): debug_utils.watch_graph_with_blacklists( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", node_name_regex_blacklist="p1$", op_type_regex_blacklist="(Variable|Identity|Assign|Const)") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertEqual(["s"], node_names) def testWatchGraph_tensorDTypeBlacklists(self): debug_utils.watch_graph_with_blacklists( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", tensor_dtype_regex_blacklist=".*_ref") node_names = self._verify_watches( 
self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertNotIn("a1", node_names) self.assertNotIn("a1/Assign", node_names) self.assertNotIn("b", node_names) self.assertNotIn("b/Assign", node_names) self.assertIn("s", node_names) def testWatchGraph_nodeNameAndTensorDTypeBlacklists(self): debug_utils.watch_graph_with_blacklists( self._run_options, self._graph, debug_urls="file:///tmp/tfdbg_1", node_name_regex_blacklist="^s$", tensor_dtype_regex_blacklist=".*_ref") node_names = self._verify_watches( self._run_options.debug_options.debug_tensor_watch_opts, 0, ["DebugIdentity"], ["file:///tmp/tfdbg_1"]) self.assertNotIn("a1", node_names) self.assertNotIn("a1/Assign", node_names) self.assertNotIn("b", node_names) self.assertNotIn("b/Assign", node_names) self.assertNotIn("s", node_names) if __name__ == "__main__": googletest.main()
apache-2.0
kernt/linuxtools
gnome3-shell/nautilus-scripts/System/File System Management/Quick-Burn.py
1
7960
#!/usr/bin/env python2 # Nautilus CD burning script # # Owner: Michele Campeotto <micampe@micampe.it> # http://www.micampe.it # # Licence: GNU GPL # Copyright (C) 2002 Michele Campeotto # # Dependency: GTK+ >= 2.0 # PyGTK >= 1.99.8 # Python >= 2.2 # mkisofs and cdrecord # # 20020330 - Ver 0.1 # First release import sys, os, os.path, popen2 import gtk # To configure the script, you should only need to edit these values device = '0,0,0' driver = 'mmc_cdr' options = 'driveropts=burnproof' dummy = '-dummy' # To actually burn CDs, uncomment the following line and comment out the next one #burn_command_template = 'mkisofs -gui -r -J -l -D -L -graft-points %(graft_points)s | cdrecord dev="%(device)s" %(options)s driver=%(driver)s %(dummy)s -eject -pad -tsize=%(size)ss -' burn_command_template = 'mkisofs -gui -r -J -l -D -L -o image.img -graft-points %(graft_points)s' size_command_template = 'mkisofs -r -J -l -D -L -v -print-size -graft-points %(graft_points)s' def build_mappings(paths): mappings = [] for i in paths: if os.path.isdir(i): mappings.append('"%s/=%s"' % (i.split('/')[-1], i)) else: mappings.append('"%s=%s"' % (i.split('/')[-1], i)) return mappings def burn_command(paths): global graft_points graft_points = ' '.join(build_mappings(paths)) return burn_command_template % globals() def size_command(paths): global graft_points graft_points = ' '.join(build_mappings(paths)) return size_command_template % globals() def get_image_size(paths): return int(os.popen4(size_command(paths))[1].readlines()[-1]) def burn(paths, progress): def cb(fd, cond): data = os.read(fd, 80) print data, if data.find('extents') >= 0: progress.set_fraction(1.0) progress.set_text('100%') return gtk.TRUE elif data.find('read/written') >= 0: progress.set_text('Finishing...') try: perc = int(data.split('.')[0]) progress.set_fraction(perc/100.0) progress.set_text('%d%%' % (perc,)) except (ValueError): pass command = burn_command(paths) print command pipe = popen2.Popen4(command) 
gtk.input_add(pipe.fromchild, gtk.gdk.INPUT_READ, cb) while pipe.poll() < 0: gtk.mainiteration() return pipe.poll() def response(dialog, response): sys.exit() def burn_window(paths): win = gtk.Dialog("Nautilus Quick Burner", None, gtk.DIALOG_MODAL, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)) win.connect('response', response) win.set_default_size(400, -1) hbox = gtk.HBox() hbox.set_border_width(8) hbox.set_spacing(8) win.vbox.pack_start(hbox) stock = gtk.image_new_from_stock(gtk.STOCK_DIALOG_INFO, gtk.ICON_SIZE_DIALOG) hbox.pack_start(stock, gtk.FALSE, gtk.FALSE) vbox = gtk.VBox() vbox.set_spacing(8) hbox.pack_start(vbox, gtk.TRUE, gtk.TRUE) label = gtk.Label("<b>Burning...</b>") label.set_use_markup(gtk.TRUE) label.set_alignment(0, 0.5) vbox.pack_start(label) label = gtk.Label('\n'.join(paths)) label.set_alignment(0, 0.5) vbox.pack_start(label) win.progress = gtk.ProgressBar() win.progress.set_fraction(0.0) win.progress.set_text("Initializing...") vbox.pack_start(win.progress) win.show_all() return win def help(): win = gtk.Dialog("Nautilus Quick Burner", None, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, (gtk.STOCK_OK, gtk.RESPONSE_OK)) hbox = gtk.HBox(gtk.FALSE, 8) hbox.set_border_width(8) win.vbox.pack_start(hbox) stock = gtk.image_new_from_stock(gtk.STOCK_DIALOG_ERROR, gtk.ICON_SIZE_DIALOG) hbox.pack_start(stock) label = gtk.Label("You must specify the files to be burned.") hbox.pack_start(label) win.show_all() return win def confirm(paths): win = gtk.Dialog("Nautilus Quick Burner", None, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, "_Burn 'em!", gtk.RESPONSE_OK)) hbox = gtk.HBox(gtk.FALSE, 8) hbox.set_border_width(8) win.vbox.pack_start(hbox) stock = gtk.image_new_from_stock(gtk.STOCK_DIALOG_QUESTION, gtk.ICON_SIZE_DIALOG) hbox.pack_start(stock) vbox = gtk.VBox() vbox.set_spacing(8) hbox.pack_start(vbox) label = gtk.Label("<b>Are you sure you want to burn these?</b>") label.set_use_markup(gtk.TRUE) 
label.set_alignment(0, 0.5) vbox.pack_start(label) label = gtk.Label('\n'.join(paths)) label.set_alignment(0, 0.5) vbox.pack_start(label) win.show_all() response = win.run() win.destroy() return response def error(paths): win = gtk.Dialog("Burning finished", None, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, (gtk.STOCK_OK, gtk.RESPONSE_OK)) hbox = gtk.HBox(gtk.FALSE, 8) hbox.set_border_width(8) win.vbox.pack_start(hbox) stock = gtk.image_new_from_stock(gtk.STOCK_DIALOG_ERROR, gtk.ICON_SIZE_DIALOG) hbox.pack_start(stock) vbox = gtk.VBox() vbox.set_spacing(8) hbox.pack_start(vbox) label = gtk.Label("<b>Some error occurred while building the image.</b>") label.set_use_markup(gtk.TRUE) label.set_alignment(0.0, 0.5) vbox.pack_start(label) label = gtk.Label("Run:\n<tt>%s</tt>\nfrom the commman line to see what went wrong." % size_command(paths)) label.set_use_markup(gtk.TRUE) label.set_selectable(gtk.TRUE) label.set_line_wrap(gtk.TRUE) vbox.pack_start(label) win.show_all() response = win.run() win.destroy() return response def success(): win = gtk.Dialog("Burning finished", None, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, (gtk.STOCK_OK, gtk.RESPONSE_OK)) hbox = gtk.HBox(gtk.FALSE, 8) hbox.set_border_width(8) win.vbox.pack_start(hbox) stock = gtk.image_new_from_stock(gtk.STOCK_DIALOG_INFO, gtk.ICON_SIZE_DIALOG) hbox.pack_start(stock) label = gtk.Label("Burning process successfully completed.") hbox.pack_start(label) win.show_all() response = win.run() win.destroy() return response def failed(): win = gtk.Dialog("Burning failed", None, gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT, (gtk.STOCK_OK, gtk.RESPONSE_OK)) hbox = gtk.HBox(gtk.FALSE, 8) hbox.set_border_width(8) win.vbox.pack_start(hbox) stock = gtk.image_new_from_stock(gtk.STOCK_DIALOG_WARNING, gtk.ICON_SIZE_DIALOG) hbox.pack_start(stock) label = gtk.Label("Burning process failed.") hbox.pack_start(label) win.show_all() response = win.run() win.destroy() return response def main(): global size if 
len(sys.argv) == 1: help().run() else: paths = [os.path.abspath(i) for i in sys.argv[1:]] if confirm(paths) == gtk.RESPONSE_OK: try: size = get_image_size(paths) except ValueError: error(paths) else: win = burn_window(paths) result = burn(paths, win.progress) print result win.destroy() if result == 0: success() else: failed() if __name__ == '__main__': main()
gpl-3.0
inguma/bokken
ui/generate_dot.py
4
3305
# generate_dot.py
#
# Copyright 2011 Hugo Teso <hugo.teso@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.


def generate_dot(data, root_node):
    """Build Graphviz dot source for a radial site map rooted at *root_node*.

    :param data: iterable of single-key dicts ``{branch_node: [elements]}``;
        each branch hangs off the root and its elements are chained in order.
    :param root_node: label of the central node of the graph.
    :returns: the complete dot source as a string.
    """
    prefix = '<f0>'
    dotcode = '''
    graph G {
    graph [ overlap="scale", bgcolor="#475672", concentrate="true",rankdir="LR" , root="%s"]
    node [color=azure3, fontcolor=white, fillcolor="#373D49", shape=circle, style=filled, height=0.7,width=0.7];
    "%s" [height=1, width=1, shape=circle]
    ''' % (root_node, root_node)
    for branch in data:
        # Each branch dict has exactly one key: the branch node label.
        # next(iter(...)) instead of .keys()[0] so this works on Python 3,
        # where dict views are not subscriptable.
        node = next(iter(branch))
        elements = branch[node]
        # Parse params to create clusters
        if '&amp;' in node or '&' in node or '?' in node or '&quest' in node:
            # Check if there is param name or just value: only rewrite the
            # separator into a record divider when it appears at least twice.
            if len(node.split('?')) > 2:
                node = node.replace('?', '|')
            if len(node.split('&amp;')) > 2:
                node = node.replace('&amp;', '|')
            if len(node.split('&')) > 2:
                node = node.replace('&', '|')
            dotcode += '''"%s" [label="%s", shape="record", style="rounded, filled"]\n''' % (node, node)
            dotcode += '''"%s" -- "%s" [len=1.25, color=azure3]; ''' % (root_node, node)
        else:
            # Add branch node and connect with root
            dotcode += '''"%s" [shape="doublecircle", style=filled, fillcolor="#5E82C6", height=0.9, width=0.9, URL="%s"]\n''' % (node, node)
            dotcode += '''"%s" -- "%s" [len=1.25, color=azure3];\n''' % (root_node, node)
        # Add elements to node branch, chained one after another.
        prev_element = node
        for element in elements:
            if element != '':
                # Turn query-string separators into record dividers; the
                # '<f0>' port prefix is added at most once per element.
                for sep in ('&amp;', '?', '&'):
                    if sep in element:
                        if prefix not in element:
                            element = prefix + element
                        element = element.replace(sep, '|')
                dotcode += '''"%s" [label="%s", shape="record", style="rounded, filled"]\n''' % (element, element)
                dotcode += '''"%s" -- "%s" [len=1.25, color=azure3]; ''' % (prev_element, element)
                prev_element = element
    dotcode += '\n}'
    return dotcode
gpl-2.0
stormi/weblate
weblate/trans/fonts.py
11
49811
# -*- coding: utf-8 -*- # # Copyright © 2012 - 2015 Michal Čihař <michal@cihar.com> # # This file is part of Weblate <http://weblate.org/> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ''' Font handling wrapper. ''' from weblate import appsettings from PIL import ImageFont import os.path # List of chars in base DejaVu font, otherwise we use DroidSansFallback BASE_CHARS = frozenset(( 0x0, 0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8, 0x9, 0xa, 0xb, 0xc, 0xd, 0xe, 0xf, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20, 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28, 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x3a, 0x3b, 0x3c, 0x3d, 0x3e, 0x3f, 0x40, 0x41, 0x42, 0x43, 0x44, 0x45, 0x46, 0x47, 0x48, 0x49, 0x4a, 0x4b, 0x4c, 0x4d, 0x4e, 0x4f, 0x50, 0x51, 0x52, 0x53, 0x54, 0x55, 0x56, 0x57, 0x58, 0x59, 0x5a, 0x5b, 0x5c, 0x5d, 0x5e, 0x5f, 0x60, 0x61, 0x62, 0x63, 0x64, 0x65, 0x66, 0x67, 0x68, 0x69, 0x6a, 0x6b, 0x6c, 0x6d, 0x6e, 0x6f, 0x70, 0x71, 0x72, 0x73, 0x74, 0x75, 0x76, 0x77, 0x78, 0x79, 0x7a, 0x7b, 0x7c, 0x7d, 0x7e, 0x7f, 0x80, 0x81, 0x82, 0x83, 0x84, 0x85, 0x86, 0x87, 0x88, 0x89, 0x8a, 0x8b, 0x8c, 0x8d, 0x8e, 0x8f, 0x90, 0x91, 0x92, 0x93, 0x94, 0x95, 0x96, 0x97, 0x98, 0x99, 0x9a, 0x9b, 0x9c, 0x9d, 0x9e, 0x9f, 0xa0, 0xa1, 0xa2, 0xa3, 0xa4, 0xa5, 0xa6, 0xa7, 0xa8, 0xa9, 0xaa, 0xab, 0xac, 
0xad, 0xae, 0xaf, 0xb0, 0xb1, 0xb2, 0xb3, 0xb4, 0xb5, 0xb6, 0xb7, 0xb8, 0xb9, 0xba, 0xbb, 0xbc, 0xbd, 0xbe, 0xbf, 0xc0, 0xc1, 0xc2, 0xc3, 0xc4, 0xc5, 0xc6, 0xc7, 0xc8, 0xc9, 0xca, 0xcb, 0xcc, 0xcd, 0xce, 0xcf, 0xd0, 0xd1, 0xd2, 0xd3, 0xd4, 0xd5, 0xd6, 0xd7, 0xd8, 0xd9, 0xda, 0xdb, 0xdc, 0xdd, 0xde, 0xdf, 0xe0, 0xe1, 0xe2, 0xe3, 0xe4, 0xe5, 0xe6, 0xe7, 0xe8, 0xe9, 0xea, 0xeb, 0xec, 0xed, 0xee, 0xef, 0xf0, 0xf1, 0xf2, 0xf3, 0xf4, 0xf5, 0xf6, 0xf7, 0xf8, 0xf9, 0xfa, 0xfb, 0xfc, 0xfd, 0xfe, 0xff, 0x100, 0x101, 0x102, 0x103, 0x104, 0x105, 0x106, 0x107, 0x108, 0x109, 0x10a, 0x10b, 0x10c, 0x10d, 0x10e, 0x10f, 0x110, 0x111, 0x112, 0x113, 0x114, 0x115, 0x116, 0x117, 0x118, 0x119, 0x11a, 0x11b, 0x11c, 0x11d, 0x11e, 0x11f, 0x120, 0x121, 0x122, 0x123, 0x124, 0x125, 0x126, 0x127, 0x128, 0x129, 0x12a, 0x12b, 0x12c, 0x12d, 0x12e, 0x12f, 0x130, 0x131, 0x132, 0x133, 0x134, 0x135, 0x136, 0x137, 0x138, 0x139, 0x13a, 0x13b, 0x13c, 0x13d, 0x13e, 0x13f, 0x140, 0x141, 0x142, 0x143, 0x144, 0x145, 0x146, 0x147, 0x148, 0x149, 0x14a, 0x14b, 0x14c, 0x14d, 0x14e, 0x14f, 0x150, 0x151, 0x152, 0x153, 0x154, 0x155, 0x156, 0x157, 0x158, 0x159, 0x15a, 0x15b, 0x15c, 0x15d, 0x15e, 0x15f, 0x160, 0x161, 0x162, 0x163, 0x164, 0x165, 0x166, 0x167, 0x168, 0x169, 0x16a, 0x16b, 0x16c, 0x16d, 0x16e, 0x16f, 0x170, 0x171, 0x172, 0x173, 0x174, 0x175, 0x176, 0x177, 0x178, 0x179, 0x17a, 0x17b, 0x17c, 0x17d, 0x17e, 0x17f, 0x180, 0x181, 0x182, 0x183, 0x184, 0x185, 0x186, 0x187, 0x188, 0x189, 0x18a, 0x18b, 0x18c, 0x18d, 0x18e, 0x18f, 0x190, 0x191, 0x192, 0x193, 0x194, 0x195, 0x196, 0x197, 0x198, 0x199, 0x19a, 0x19b, 0x19c, 0x19d, 0x19e, 0x19f, 0x1a0, 0x1a1, 0x1a2, 0x1a3, 0x1a4, 0x1a5, 0x1a6, 0x1a7, 0x1a8, 0x1a9, 0x1aa, 0x1ab, 0x1ac, 0x1ad, 0x1ae, 0x1af, 0x1b0, 0x1b1, 0x1b2, 0x1b3, 0x1b4, 0x1b5, 0x1b6, 0x1b7, 0x1b8, 0x1b9, 0x1ba, 0x1bb, 0x1bc, 0x1bd, 0x1be, 0x1bf, 0x1c0, 0x1c1, 0x1c2, 0x1c3, 0x1c4, 0x1c5, 0x1c6, 0x1c7, 0x1c8, 0x1c9, 0x1ca, 0x1cb, 0x1cc, 0x1cd, 0x1ce, 0x1cf, 0x1d0, 0x1d1, 0x1d2, 0x1d3, 0x1d4, 0x1d5, 
0x1d6, 0x1d7, 0x1d8, 0x1d9, 0x1da, 0x1db, 0x1dc, 0x1dd, 0x1de, 0x1df, 0x1e0, 0x1e1, 0x1e2, 0x1e3, 0x1e4, 0x1e5, 0x1e6, 0x1e7, 0x1e8, 0x1e9, 0x1ea, 0x1eb, 0x1ec, 0x1ed, 0x1ee, 0x1ef, 0x1f0, 0x1f1, 0x1f2, 0x1f3, 0x1f4, 0x1f5, 0x1f6, 0x1f7, 0x1f8, 0x1f9, 0x1fa, 0x1fb, 0x1fc, 0x1fd, 0x1fe, 0x1ff, 0x200, 0x201, 0x202, 0x203, 0x204, 0x205, 0x206, 0x207, 0x208, 0x209, 0x20a, 0x20b, 0x20c, 0x20d, 0x20e, 0x20f, 0x210, 0x211, 0x212, 0x213, 0x214, 0x215, 0x216, 0x217, 0x218, 0x219, 0x21a, 0x21b, 0x21c, 0x21d, 0x21e, 0x21f, 0x220, 0x221, 0x222, 0x223, 0x224, 0x225, 0x226, 0x227, 0x228, 0x229, 0x22a, 0x22b, 0x22c, 0x22d, 0x22e, 0x22f, 0x230, 0x231, 0x232, 0x233, 0x234, 0x235, 0x236, 0x237, 0x238, 0x239, 0x23a, 0x23b, 0x23c, 0x23d, 0x23e, 0x23f, 0x240, 0x241, 0x242, 0x243, 0x244, 0x245, 0x246, 0x247, 0x248, 0x249, 0x24a, 0x24b, 0x24c, 0x24d, 0x24e, 0x24f, 0x250, 0x251, 0x252, 0x253, 0x254, 0x255, 0x256, 0x257, 0x258, 0x259, 0x25a, 0x25b, 0x25c, 0x25d, 0x25e, 0x25f, 0x260, 0x261, 0x262, 0x263, 0x264, 0x265, 0x266, 0x267, 0x268, 0x269, 0x26a, 0x26b, 0x26c, 0x26d, 0x26e, 0x26f, 0x270, 0x271, 0x272, 0x273, 0x274, 0x275, 0x276, 0x277, 0x278, 0x279, 0x27a, 0x27b, 0x27c, 0x27d, 0x27e, 0x27f, 0x280, 0x281, 0x282, 0x283, 0x284, 0x285, 0x286, 0x287, 0x288, 0x289, 0x28a, 0x28b, 0x28c, 0x28d, 0x28e, 0x28f, 0x290, 0x291, 0x292, 0x293, 0x294, 0x295, 0x296, 0x297, 0x298, 0x299, 0x29a, 0x29b, 0x29c, 0x29d, 0x29e, 0x29f, 0x2a0, 0x2a1, 0x2a2, 0x2a3, 0x2a4, 0x2a5, 0x2a6, 0x2a7, 0x2a8, 0x2a9, 0x2aa, 0x2ab, 0x2ac, 0x2ad, 0x2ae, 0x2af, 0x2b0, 0x2b1, 0x2b2, 0x2b3, 0x2b4, 0x2b5, 0x2b6, 0x2b7, 0x2b8, 0x2b9, 0x2ba, 0x2bb, 0x2bc, 0x2bd, 0x2be, 0x2bf, 0x2c0, 0x2c1, 0x2c2, 0x2c3, 0x2c4, 0x2c5, 0x2c6, 0x2c7, 0x2c8, 0x2c9, 0x2ca, 0x2cb, 0x2cc, 0x2cd, 0x2ce, 0x2cf, 0x2d0, 0x2d1, 0x2d2, 0x2d3, 0x2d4, 0x2d5, 0x2d6, 0x2d7, 0x2d8, 0x2d9, 0x2da, 0x2db, 0x2dc, 0x2dd, 0x2de, 0x2df, 0x2e0, 0x2e1, 0x2e2, 0x2e3, 0x2e4, 0x2e5, 0x2e6, 0x2e7, 0x2e8, 0x2e9, 0x2ec, 0x2ed, 0x2ee, 0x2f3, 0x2f7, 0x300, 0x301, 0x302, 0x303, 
0x304, 0x305, 0x306, 0x307, 0x308, 0x309, 0x30a, 0x30b, 0x30c, 0x30d, 0x30e, 0x30f, 0x310, 0x311, 0x312, 0x313, 0x314, 0x315, 0x316, 0x317, 0x318, 0x319, 0x31a, 0x31b, 0x31c, 0x31d, 0x31e, 0x31f, 0x320, 0x321, 0x322, 0x323, 0x324, 0x325, 0x326, 0x327, 0x328, 0x329, 0x32a, 0x32b, 0x32c, 0x32d, 0x32e, 0x32f, 0x330, 0x331, 0x332, 0x333, 0x334, 0x335, 0x336, 0x337, 0x338, 0x339, 0x33a, 0x33b, 0x33c, 0x33d, 0x33e, 0x33f, 0x340, 0x341, 0x342, 0x343, 0x344, 0x345, 0x346, 0x347, 0x348, 0x349, 0x34a, 0x34b, 0x34c, 0x34d, 0x34e, 0x34f, 0x351, 0x352, 0x353, 0x357, 0x358, 0x35a, 0x35c, 0x35d, 0x35e, 0x35f, 0x360, 0x361, 0x362, 0x370, 0x371, 0x372, 0x373, 0x374, 0x375, 0x376, 0x377, 0x37a, 0x37b, 0x37c, 0x37d, 0x37e, 0x384, 0x385, 0x386, 0x387, 0x388, 0x389, 0x38a, 0x38c, 0x38e, 0x38f, 0x390, 0x391, 0x392, 0x393, 0x394, 0x395, 0x396, 0x397, 0x398, 0x399, 0x39a, 0x39b, 0x39c, 0x39d, 0x39e, 0x39f, 0x3a0, 0x3a1, 0x3a3, 0x3a4, 0x3a5, 0x3a6, 0x3a7, 0x3a8, 0x3a9, 0x3aa, 0x3ab, 0x3ac, 0x3ad, 0x3ae, 0x3af, 0x3b0, 0x3b1, 0x3b2, 0x3b3, 0x3b4, 0x3b5, 0x3b6, 0x3b7, 0x3b8, 0x3b9, 0x3ba, 0x3bb, 0x3bc, 0x3bd, 0x3be, 0x3bf, 0x3c0, 0x3c1, 0x3c2, 0x3c3, 0x3c4, 0x3c5, 0x3c6, 0x3c7, 0x3c8, 0x3c9, 0x3ca, 0x3cb, 0x3cc, 0x3cd, 0x3ce, 0x3cf, 0x3d0, 0x3d1, 0x3d2, 0x3d3, 0x3d4, 0x3d5, 0x3d6, 0x3d7, 0x3d8, 0x3d9, 0x3da, 0x3db, 0x3dc, 0x3dd, 0x3de, 0x3df, 0x3e0, 0x3e1, 0x3e2, 0x3e3, 0x3e4, 0x3e5, 0x3e6, 0x3e7, 0x3e8, 0x3e9, 0x3ea, 0x3eb, 0x3ec, 0x3ed, 0x3ee, 0x3ef, 0x3f0, 0x3f1, 0x3f2, 0x3f3, 0x3f4, 0x3f5, 0x3f6, 0x3f7, 0x3f8, 0x3f9, 0x3fa, 0x3fb, 0x3fc, 0x3fd, 0x3fe, 0x3ff, 0x400, 0x401, 0x402, 0x403, 0x404, 0x405, 0x406, 0x407, 0x408, 0x409, 0x40a, 0x40b, 0x40c, 0x40d, 0x40e, 0x40f, 0x410, 0x411, 0x412, 0x413, 0x414, 0x415, 0x416, 0x417, 0x418, 0x419, 0x41a, 0x41b, 0x41c, 0x41d, 0x41e, 0x41f, 0x420, 0x421, 0x422, 0x423, 0x424, 0x425, 0x426, 0x427, 0x428, 0x429, 0x42a, 0x42b, 0x42c, 0x42d, 0x42e, 0x42f, 0x430, 0x431, 0x432, 0x433, 0x434, 0x435, 0x436, 0x437, 0x438, 0x439, 0x43a, 0x43b, 0x43c, 0x43d, 
0x43e, 0x43f, 0x440, 0x441, 0x442, 0x443, 0x444, 0x445, 0x446, 0x447, 0x448, 0x449, 0x44a, 0x44b, 0x44c, 0x44d, 0x44e, 0x44f, 0x450, 0x451, 0x452, 0x453, 0x454, 0x455, 0x456, 0x457, 0x458, 0x459, 0x45a, 0x45b, 0x45c, 0x45d, 0x45e, 0x45f, 0x460, 0x461, 0x462, 0x463, 0x464, 0x465, 0x466, 0x467, 0x468, 0x469, 0x46a, 0x46b, 0x46c, 0x46d, 0x46e, 0x46f, 0x470, 0x471, 0x472, 0x473, 0x474, 0x475, 0x476, 0x477, 0x478, 0x479, 0x47a, 0x47b, 0x47c, 0x47d, 0x47e, 0x47f, 0x480, 0x481, 0x482, 0x483, 0x484, 0x485, 0x486, 0x487, 0x488, 0x489, 0x48a, 0x48b, 0x48c, 0x48d, 0x48e, 0x48f, 0x490, 0x491, 0x492, 0x493, 0x494, 0x495, 0x496, 0x497, 0x498, 0x499, 0x49a, 0x49b, 0x49c, 0x49d, 0x49e, 0x49f, 0x4a0, 0x4a1, 0x4a2, 0x4a3, 0x4a4, 0x4a5, 0x4a6, 0x4a7, 0x4a8, 0x4a9, 0x4aa, 0x4ab, 0x4ac, 0x4ad, 0x4ae, 0x4af, 0x4b0, 0x4b1, 0x4b2, 0x4b3, 0x4b4, 0x4b5, 0x4b6, 0x4b7, 0x4b8, 0x4b9, 0x4ba, 0x4bb, 0x4bc, 0x4bd, 0x4be, 0x4bf, 0x4c0, 0x4c1, 0x4c2, 0x4c3, 0x4c4, 0x4c5, 0x4c6, 0x4c7, 0x4c8, 0x4c9, 0x4ca, 0x4cb, 0x4cc, 0x4cd, 0x4ce, 0x4cf, 0x4d0, 0x4d1, 0x4d2, 0x4d3, 0x4d4, 0x4d5, 0x4d6, 0x4d7, 0x4d8, 0x4d9, 0x4da, 0x4db, 0x4dc, 0x4dd, 0x4de, 0x4df, 0x4e0, 0x4e1, 0x4e2, 0x4e3, 0x4e4, 0x4e5, 0x4e6, 0x4e7, 0x4e8, 0x4e9, 0x4ea, 0x4eb, 0x4ec, 0x4ed, 0x4ee, 0x4ef, 0x4f0, 0x4f1, 0x4f2, 0x4f3, 0x4f4, 0x4f5, 0x4f6, 0x4f7, 0x4f8, 0x4f9, 0x4fa, 0x4fb, 0x4fc, 0x4fd, 0x4fe, 0x4ff, 0x500, 0x501, 0x502, 0x503, 0x504, 0x505, 0x506, 0x507, 0x508, 0x509, 0x50a, 0x50b, 0x50c, 0x50d, 0x50e, 0x50f, 0x510, 0x511, 0x512, 0x513, 0x514, 0x515, 0x516, 0x517, 0x518, 0x519, 0x51a, 0x51b, 0x51c, 0x51d, 0x51e, 0x51f, 0x520, 0x521, 0x522, 0x523, 0x524, 0x525, 0x531, 0x532, 0x533, 0x534, 0x535, 0x536, 0x537, 0x538, 0x539, 0x53a, 0x53b, 0x53c, 0x53d, 0x53e, 0x53f, 0x540, 0x541, 0x542, 0x543, 0x544, 0x545, 0x546, 0x547, 0x548, 0x549, 0x54a, 0x54b, 0x54c, 0x54d, 0x54e, 0x54f, 0x550, 0x551, 0x552, 0x553, 0x554, 0x555, 0x556, 0x559, 0x55a, 0x55b, 0x55c, 0x55d, 0x55e, 0x55f, 0x561, 0x562, 0x563, 0x564, 0x565, 0x566, 0x567, 0x568, 
0x569, 0x56a, 0x56b, 0x56c, 0x56d, 0x56e, 0x56f, 0x570, 0x571, 0x572, 0x573, 0x574, 0x575, 0x576, 0x577, 0x578, 0x579, 0x57a, 0x57b, 0x57c, 0x57d, 0x57e, 0x57f, 0x580, 0x581, 0x582, 0x583, 0x584, 0x585, 0x586, 0x587, 0x589, 0x58a, 0x5b0, 0x5b1, 0x5b2, 0x5b3, 0x5b4, 0x5b5, 0x5b6, 0x5b7, 0x5b8, 0x5b9, 0x5ba, 0x5bb, 0x5bc, 0x5bd, 0x5be, 0x5bf, 0x5c0, 0x5c1, 0x5c2, 0x5c3, 0x5c6, 0x5c7, 0x5d0, 0x5d1, 0x5d2, 0x5d3, 0x5d4, 0x5d5, 0x5d6, 0x5d7, 0x5d8, 0x5d9, 0x5da, 0x5db, 0x5dc, 0x5dd, 0x5de, 0x5df, 0x5e0, 0x5e1, 0x5e2, 0x5e3, 0x5e4, 0x5e5, 0x5e6, 0x5e7, 0x5e8, 0x5e9, 0x5ea, 0x5f0, 0x5f1, 0x5f2, 0x5f3, 0x5f4, 0x606, 0x607, 0x609, 0x60a, 0x60c, 0x615, 0x61b, 0x61f, 0x621, 0x622, 0x623, 0x624, 0x625, 0x626, 0x627, 0x628, 0x629, 0x62a, 0x62b, 0x62c, 0x62d, 0x62e, 0x62f, 0x630, 0x631, 0x632, 0x633, 0x634, 0x635, 0x636, 0x637, 0x638, 0x639, 0x63a, 0x640, 0x641, 0x642, 0x643, 0x644, 0x645, 0x646, 0x647, 0x648, 0x649, 0x64a, 0x64b, 0x64c, 0x64d, 0x64e, 0x64f, 0x650, 0x651, 0x652, 0x653, 0x654, 0x655, 0x657, 0x65a, 0x660, 0x661, 0x662, 0x663, 0x664, 0x665, 0x666, 0x667, 0x668, 0x669, 0x66a, 0x66b, 0x66c, 0x66d, 0x66e, 0x66f, 0x670, 0x674, 0x679, 0x67a, 0x67b, 0x67c, 0x67d, 0x67e, 0x67f, 0x680, 0x681, 0x682, 0x683, 0x684, 0x685, 0x686, 0x687, 0x688, 0x689, 0x68a, 0x68b, 0x68c, 0x68d, 0x68e, 0x68f, 0x690, 0x691, 0x692, 0x693, 0x694, 0x695, 0x696, 0x697, 0x698, 0x699, 0x69a, 0x69b, 0x69c, 0x69d, 0x69e, 0x69f, 0x6a0, 0x6a1, 0x6a2, 0x6a3, 0x6a4, 0x6a5, 0x6a6, 0x6a7, 0x6a8, 0x6a9, 0x6aa, 0x6ab, 0x6ac, 0x6ad, 0x6ae, 0x6af, 0x6b0, 0x6b1, 0x6b2, 0x6b3, 0x6b4, 0x6b5, 0x6b6, 0x6b7, 0x6b8, 0x6b9, 0x6ba, 0x6bb, 0x6bc, 0x6bd, 0x6be, 0x6bf, 0x6c6, 0x6cc, 0x6ce, 0x6d5, 0x6f0, 0x6f1, 0x6f2, 0x6f3, 0x6f4, 0x6f5, 0x6f6, 0x6f7, 0x6f8, 0x6f9, 0x7c0, 0x7c1, 0x7c2, 0x7c3, 0x7c4, 0x7c5, 0x7c6, 0x7c7, 0x7c8, 0x7c9, 0x7ca, 0x7cb, 0x7cc, 0x7cd, 0x7ce, 0x7cf, 0x7d0, 0x7d1, 0x7d2, 0x7d3, 0x7d4, 0x7d5, 0x7d6, 0x7d7, 0x7d8, 0x7d9, 0x7da, 0x7db, 0x7dc, 0x7dd, 0x7de, 0x7df, 0x7e0, 0x7e1, 0x7e2, 0x7e3, 0x7e4, 
0x7e5, 0x7e6, 0x7e7, 0x7eb, 0x7ec, 0x7ed, 0x7ee, 0x7ef, 0x7f0, 0x7f1, 0x7f2, 0x7f3, 0x7f4, 0x7f5, 0x7f8, 0x7f9, 0x7fa, 0xe3f, 0xe81, 0xe82, 0xe84, 0xe87, 0xe88, 0xe8a, 0xe8d, 0xe94, 0xe95, 0xe96, 0xe97, 0xe99, 0xe9a, 0xe9b, 0xe9c, 0xe9d, 0xe9e, 0xe9f, 0xea1, 0xea2, 0xea3, 0xea5, 0xea7, 0xeaa, 0xeab, 0xead, 0xeae, 0xeaf, 0xeb0, 0xeb1, 0xeb2, 0xeb3, 0xeb4, 0xeb5, 0xeb6, 0xeb7, 0xeb8, 0xeb9, 0xebb, 0xebc, 0xebd, 0xec0, 0xec1, 0xec2, 0xec3, 0xec4, 0xec6, 0xec8, 0xec9, 0xeca, 0xecb, 0xecc, 0xecd, 0xed0, 0xed1, 0xed2, 0xed3, 0xed4, 0xed5, 0xed6, 0xed7, 0xed8, 0xed9, 0xedc, 0xedd, 0x10a0, 0x10a1, 0x10a2, 0x10a3, 0x10a4, 0x10a5, 0x10a6, 0x10a7, 0x10a8, 0x10a9, 0x10aa, 0x10ab, 0x10ac, 0x10ad, 0x10ae, 0x10af, 0x10b0, 0x10b1, 0x10b2, 0x10b3, 0x10b4, 0x10b5, 0x10b6, 0x10b7, 0x10b8, 0x10b9, 0x10ba, 0x10bb, 0x10bc, 0x10bd, 0x10be, 0x10bf, 0x10c0, 0x10c1, 0x10c2, 0x10c3, 0x10c4, 0x10c5, 0x10d0, 0x10d1, 0x10d2, 0x10d3, 0x10d4, 0x10d5, 0x10d6, 0x10d7, 0x10d8, 0x10d9, 0x10da, 0x10db, 0x10dc, 0x10dd, 0x10de, 0x10df, 0x10e0, 0x10e1, 0x10e2, 0x10e3, 0x10e4, 0x10e5, 0x10e6, 0x10e7, 0x10e8, 0x10e9, 0x10ea, 0x10eb, 0x10ec, 0x10ed, 0x10ee, 0x10ef, 0x10f0, 0x10f1, 0x10f2, 0x10f3, 0x10f4, 0x10f5, 0x10f6, 0x10f7, 0x10f8, 0x10f9, 0x10fa, 0x10fb, 0x10fc, 0x1401, 0x1402, 0x1403, 0x1404, 0x1405, 0x1406, 0x1407, 0x1409, 0x140a, 0x140b, 0x140c, 0x140d, 0x140e, 0x140f, 0x1410, 0x1411, 0x1412, 0x1413, 0x1414, 0x1415, 0x1416, 0x1417, 0x1418, 0x1419, 0x141a, 0x141b, 0x141d, 0x141e, 0x141f, 0x1420, 0x1421, 0x1422, 0x1423, 0x1424, 0x1425, 0x1426, 0x1427, 0x1428, 0x1429, 0x142a, 0x142b, 0x142c, 0x142d, 0x142e, 0x142f, 0x1430, 0x1431, 0x1432, 0x1433, 0x1434, 0x1435, 0x1437, 0x1438, 0x1439, 0x143a, 0x143b, 0x143c, 0x143d, 0x143e, 0x143f, 0x1440, 0x1441, 0x1442, 0x1443, 0x1444, 0x1445, 0x1446, 0x1447, 0x1448, 0x1449, 0x144a, 0x144c, 0x144d, 0x144e, 0x144f, 0x1450, 0x1451, 0x1452, 0x1454, 0x1455, 0x1456, 0x1457, 0x1458, 0x1459, 0x145a, 0x145b, 0x145c, 0x145d, 0x145e, 0x145f, 0x1460, 0x1461, 0x1462, 0x1463, 
0x1464, 0x1465, 0x1466, 0x1467, 0x1468, 0x1469, 0x146a, 0x146b, 0x146c, 0x146d, 0x146e, 0x146f, 0x1470, 0x1471, 0x1472, 0x1473, 0x1474, 0x1475, 0x1476, 0x1477, 0x1478, 0x1479, 0x147a, 0x147b, 0x147c, 0x147d, 0x147e, 0x147f, 0x1480, 0x1481, 0x1482, 0x1483, 0x1484, 0x1485, 0x1486, 0x1487, 0x1488, 0x1489, 0x148a, 0x148b, 0x148c, 0x148d, 0x148e, 0x148f, 0x1490, 0x1491, 0x1492, 0x1493, 0x1494, 0x1495, 0x1496, 0x1497, 0x1498, 0x1499, 0x149a, 0x149b, 0x149c, 0x149d, 0x149e, 0x149f, 0x14a0, 0x14a1, 0x14a2, 0x14a3, 0x14a4, 0x14a5, 0x14a6, 0x14a7, 0x14a8, 0x14a9, 0x14aa, 0x14ab, 0x14ac, 0x14ad, 0x14ae, 0x14af, 0x14b0, 0x14b1, 0x14b2, 0x14b3, 0x14b4, 0x14b5, 0x14b6, 0x14b7, 0x14b8, 0x14b9, 0x14ba, 0x14bb, 0x14bc, 0x14bd, 0x14c0, 0x14c1, 0x14c2, 0x14c3, 0x14c4, 0x14c5, 0x14c6, 0x14c7, 0x14c8, 0x14c9, 0x14ca, 0x14cb, 0x14cc, 0x14cd, 0x14ce, 0x14cf, 0x14d0, 0x14d1, 0x14d2, 0x14d3, 0x14d4, 0x14d5, 0x14d6, 0x14d7, 0x14d8, 0x14d9, 0x14da, 0x14db, 0x14dc, 0x14dd, 0x14de, 0x14df, 0x14e0, 0x14e1, 0x14e2, 0x14e3, 0x14e4, 0x14e5, 0x14e6, 0x14e7, 0x14e8, 0x14e9, 0x14ea, 0x14ec, 0x14ed, 0x14ee, 0x14ef, 0x14f0, 0x14f1, 0x14f2, 0x14f3, 0x14f4, 0x14f5, 0x14f6, 0x14f7, 0x14f8, 0x14f9, 0x14fa, 0x14fb, 0x14fc, 0x14fd, 0x14fe, 0x14ff, 0x1500, 0x1501, 0x1502, 0x1503, 0x1504, 0x1505, 0x1506, 0x1507, 0x1510, 0x1511, 0x1512, 0x1513, 0x1514, 0x1515, 0x1516, 0x1517, 0x1518, 0x1519, 0x151a, 0x151b, 0x151c, 0x151d, 0x151e, 0x151f, 0x1520, 0x1521, 0x1522, 0x1523, 0x1524, 0x1525, 0x1526, 0x1527, 0x1528, 0x1529, 0x152a, 0x152b, 0x152c, 0x152d, 0x152e, 0x152f, 0x1530, 0x1531, 0x1532, 0x1533, 0x1534, 0x1535, 0x1536, 0x1537, 0x1538, 0x1539, 0x153a, 0x153b, 0x153c, 0x153d, 0x153e, 0x1540, 0x1541, 0x1542, 0x1543, 0x1544, 0x1545, 0x1546, 0x1547, 0x1548, 0x1549, 0x154a, 0x154b, 0x154c, 0x154d, 0x154e, 0x154f, 0x1550, 0x1552, 0x1553, 0x1554, 0x1555, 0x1556, 0x1557, 0x1558, 0x1559, 0x155a, 0x155b, 0x155c, 0x155d, 0x155e, 0x155f, 0x1560, 0x1561, 0x1562, 0x1563, 0x1564, 0x1565, 0x1566, 0x1567, 0x1568, 0x1569, 0x156a, 
0x1574, 0x1575, 0x1576, 0x1577, 0x1578, 0x1579, 0x157a, 0x157b, 0x157c, 0x157d, 0x157e, 0x157f, 0x1580, 0x1581, 0x1582, 0x1583, 0x1584, 0x1585, 0x158a, 0x158b, 0x158c, 0x158d, 0x158e, 0x158f, 0x1590, 0x1591, 0x1592, 0x1593, 0x1594, 0x1595, 0x1596, 0x15a0, 0x15a1, 0x15a2, 0x15a3, 0x15a4, 0x15a5, 0x15a6, 0x15a7, 0x15a8, 0x15a9, 0x15aa, 0x15ab, 0x15ac, 0x15ad, 0x15ae, 0x15af, 0x15de, 0x15e1, 0x1646, 0x1647, 0x166e, 0x166f, 0x1670, 0x1671, 0x1672, 0x1673, 0x1674, 0x1675, 0x1676, 0x1680, 0x1681, 0x1682, 0x1683, 0x1684, 0x1685, 0x1686, 0x1687, 0x1688, 0x1689, 0x168a, 0x168b, 0x168c, 0x168d, 0x168e, 0x168f, 0x1690, 0x1691, 0x1692, 0x1693, 0x1694, 0x1695, 0x1696, 0x1697, 0x1698, 0x1699, 0x169a, 0x169b, 0x169c, 0x1d00, 0x1d01, 0x1d02, 0x1d03, 0x1d04, 0x1d05, 0x1d06, 0x1d07, 0x1d08, 0x1d09, 0x1d0a, 0x1d0b, 0x1d0c, 0x1d0d, 0x1d0e, 0x1d0f, 0x1d10, 0x1d11, 0x1d12, 0x1d13, 0x1d14, 0x1d16, 0x1d17, 0x1d18, 0x1d19, 0x1d1a, 0x1d1b, 0x1d1c, 0x1d1d, 0x1d1e, 0x1d1f, 0x1d20, 0x1d21, 0x1d22, 0x1d23, 0x1d26, 0x1d27, 0x1d28, 0x1d29, 0x1d2a, 0x1d2b, 0x1d2c, 0x1d2d, 0x1d2e, 0x1d30, 0x1d31, 0x1d32, 0x1d33, 0x1d34, 0x1d35, 0x1d36, 0x1d37, 0x1d38, 0x1d39, 0x1d3a, 0x1d3b, 0x1d3c, 0x1d3d, 0x1d3e, 0x1d3f, 0x1d40, 0x1d41, 0x1d42, 0x1d43, 0x1d44, 0x1d45, 0x1d46, 0x1d47, 0x1d48, 0x1d49, 0x1d4a, 0x1d4b, 0x1d4c, 0x1d4d, 0x1d4e, 0x1d4f, 0x1d50, 0x1d51, 0x1d52, 0x1d53, 0x1d54, 0x1d55, 0x1d56, 0x1d57, 0x1d58, 0x1d59, 0x1d5a, 0x1d5b, 0x1d5d, 0x1d5e, 0x1d5f, 0x1d60, 0x1d61, 0x1d62, 0x1d63, 0x1d64, 0x1d65, 0x1d66, 0x1d67, 0x1d68, 0x1d69, 0x1d6a, 0x1d77, 0x1d78, 0x1d7b, 0x1d7d, 0x1d85, 0x1d9b, 0x1d9c, 0x1d9d, 0x1d9e, 0x1d9f, 0x1da0, 0x1da1, 0x1da2, 0x1da3, 0x1da4, 0x1da5, 0x1da6, 0x1da7, 0x1da8, 0x1da9, 0x1daa, 0x1dab, 0x1dac, 0x1dad, 0x1dae, 0x1daf, 0x1db0, 0x1db1, 0x1db2, 0x1db3, 0x1db4, 0x1db5, 0x1db6, 0x1db7, 0x1db8, 0x1db9, 0x1dba, 0x1dbb, 0x1dbc, 0x1dbd, 0x1dbe, 0x1dbf, 0x1dc4, 0x1dc5, 0x1dc6, 0x1dc7, 0x1dc8, 0x1dc9, 0x1e00, 0x1e01, 0x1e02, 0x1e03, 0x1e04, 0x1e05, 0x1e06, 0x1e07, 0x1e08, 0x1e09, 0x1e0a, 
0x1e0b, 0x1e0c, 0x1e0d, 0x1e0e, 0x1e0f, 0x1e10, 0x1e11, 0x1e12, 0x1e13, 0x1e14, 0x1e15, 0x1e16, 0x1e17, 0x1e18, 0x1e19, 0x1e1a, 0x1e1b, 0x1e1c, 0x1e1d, 0x1e1e, 0x1e1f, 0x1e20, 0x1e21, 0x1e22, 0x1e23, 0x1e24, 0x1e25, 0x1e26, 0x1e27, 0x1e28, 0x1e29, 0x1e2a, 0x1e2b, 0x1e2c, 0x1e2d, 0x1e2e, 0x1e2f, 0x1e30, 0x1e31, 0x1e32, 0x1e33, 0x1e34, 0x1e35, 0x1e36, 0x1e37, 0x1e38, 0x1e39, 0x1e3a, 0x1e3b, 0x1e3c, 0x1e3d, 0x1e3e, 0x1e3f, 0x1e40, 0x1e41, 0x1e42, 0x1e43, 0x1e44, 0x1e45, 0x1e46, 0x1e47, 0x1e48, 0x1e49, 0x1e4a, 0x1e4b, 0x1e4c, 0x1e4d, 0x1e4e, 0x1e4f, 0x1e50, 0x1e51, 0x1e52, 0x1e53, 0x1e54, 0x1e55, 0x1e56, 0x1e57, 0x1e58, 0x1e59, 0x1e5a, 0x1e5b, 0x1e5c, 0x1e5d, 0x1e5e, 0x1e5f, 0x1e60, 0x1e61, 0x1e62, 0x1e63, 0x1e64, 0x1e65, 0x1e66, 0x1e67, 0x1e68, 0x1e69, 0x1e6a, 0x1e6b, 0x1e6c, 0x1e6d, 0x1e6e, 0x1e6f, 0x1e70, 0x1e71, 0x1e72, 0x1e73, 0x1e74, 0x1e75, 0x1e76, 0x1e77, 0x1e78, 0x1e79, 0x1e7a, 0x1e7b, 0x1e7c, 0x1e7d, 0x1e7e, 0x1e7f, 0x1e80, 0x1e81, 0x1e82, 0x1e83, 0x1e84, 0x1e85, 0x1e86, 0x1e87, 0x1e88, 0x1e89, 0x1e8a, 0x1e8b, 0x1e8c, 0x1e8d, 0x1e8e, 0x1e8f, 0x1e90, 0x1e91, 0x1e92, 0x1e93, 0x1e94, 0x1e95, 0x1e96, 0x1e97, 0x1e98, 0x1e99, 0x1e9a, 0x1e9b, 0x1e9c, 0x1e9d, 0x1e9e, 0x1e9f, 0x1ea0, 0x1ea1, 0x1ea2, 0x1ea3, 0x1ea4, 0x1ea5, 0x1ea6, 0x1ea7, 0x1ea8, 0x1ea9, 0x1eaa, 0x1eab, 0x1eac, 0x1ead, 0x1eae, 0x1eaf, 0x1eb0, 0x1eb1, 0x1eb2, 0x1eb3, 0x1eb4, 0x1eb5, 0x1eb6, 0x1eb7, 0x1eb8, 0x1eb9, 0x1eba, 0x1ebb, 0x1ebc, 0x1ebd, 0x1ebe, 0x1ebf, 0x1ec0, 0x1ec1, 0x1ec2, 0x1ec3, 0x1ec4, 0x1ec5, 0x1ec6, 0x1ec7, 0x1ec8, 0x1ec9, 0x1eca, 0x1ecb, 0x1ecc, 0x1ecd, 0x1ece, 0x1ecf, 0x1ed0, 0x1ed1, 0x1ed2, 0x1ed3, 0x1ed4, 0x1ed5, 0x1ed6, 0x1ed7, 0x1ed8, 0x1ed9, 0x1eda, 0x1edb, 0x1edc, 0x1edd, 0x1ede, 0x1edf, 0x1ee0, 0x1ee1, 0x1ee2, 0x1ee3, 0x1ee4, 0x1ee5, 0x1ee6, 0x1ee7, 0x1ee8, 0x1ee9, 0x1eea, 0x1eeb, 0x1eec, 0x1eed, 0x1eee, 0x1eef, 0x1ef0, 0x1ef1, 0x1ef2, 0x1ef3, 0x1ef4, 0x1ef5, 0x1ef6, 0x1ef7, 0x1ef8, 0x1ef9, 0x1efa, 0x1efb, 0x1f00, 0x1f01, 0x1f02, 0x1f03, 0x1f04, 0x1f05, 0x1f06, 0x1f07, 0x1f08, 
0x1f09, 0x1f0a, 0x1f0b, 0x1f0c, 0x1f0d, 0x1f0e, 0x1f0f, 0x1f10, 0x1f11, 0x1f12, 0x1f13, 0x1f14, 0x1f15, 0x1f18, 0x1f19, 0x1f1a, 0x1f1b, 0x1f1c, 0x1f1d, 0x1f20, 0x1f21, 0x1f22, 0x1f23, 0x1f24, 0x1f25, 0x1f26, 0x1f27, 0x1f28, 0x1f29, 0x1f2a, 0x1f2b, 0x1f2c, 0x1f2d, 0x1f2e, 0x1f2f, 0x1f30, 0x1f31, 0x1f32, 0x1f33, 0x1f34, 0x1f35, 0x1f36, 0x1f37, 0x1f38, 0x1f39, 0x1f3a, 0x1f3b, 0x1f3c, 0x1f3d, 0x1f3e, 0x1f3f, 0x1f40, 0x1f41, 0x1f42, 0x1f43, 0x1f44, 0x1f45, 0x1f48, 0x1f49, 0x1f4a, 0x1f4b, 0x1f4c, 0x1f4d, 0x1f50, 0x1f51, 0x1f52, 0x1f53, 0x1f54, 0x1f55, 0x1f56, 0x1f57, 0x1f59, 0x1f5b, 0x1f5d, 0x1f5f, 0x1f60, 0x1f61, 0x1f62, 0x1f63, 0x1f64, 0x1f65, 0x1f66, 0x1f67, 0x1f68, 0x1f69, 0x1f6a, 0x1f6b, 0x1f6c, 0x1f6d, 0x1f6e, 0x1f6f, 0x1f70, 0x1f71, 0x1f72, 0x1f73, 0x1f74, 0x1f75, 0x1f76, 0x1f77, 0x1f78, 0x1f79, 0x1f7a, 0x1f7b, 0x1f7c, 0x1f7d, 0x1f80, 0x1f81, 0x1f82, 0x1f83, 0x1f84, 0x1f85, 0x1f86, 0x1f87, 0x1f88, 0x1f89, 0x1f8a, 0x1f8b, 0x1f8c, 0x1f8d, 0x1f8e, 0x1f8f, 0x1f90, 0x1f91, 0x1f92, 0x1f93, 0x1f94, 0x1f95, 0x1f96, 0x1f97, 0x1f98, 0x1f99, 0x1f9a, 0x1f9b, 0x1f9c, 0x1f9d, 0x1f9e, 0x1f9f, 0x1fa0, 0x1fa1, 0x1fa2, 0x1fa3, 0x1fa4, 0x1fa5, 0x1fa6, 0x1fa7, 0x1fa8, 0x1fa9, 0x1faa, 0x1fab, 0x1fac, 0x1fad, 0x1fae, 0x1faf, 0x1fb0, 0x1fb1, 0x1fb2, 0x1fb3, 0x1fb4, 0x1fb6, 0x1fb7, 0x1fb8, 0x1fb9, 0x1fba, 0x1fbb, 0x1fbc, 0x1fbd, 0x1fbe, 0x1fbf, 0x1fc0, 0x1fc1, 0x1fc2, 0x1fc3, 0x1fc4, 0x1fc6, 0x1fc7, 0x1fc8, 0x1fc9, 0x1fca, 0x1fcb, 0x1fcc, 0x1fcd, 0x1fce, 0x1fcf, 0x1fd0, 0x1fd1, 0x1fd2, 0x1fd3, 0x1fd6, 0x1fd7, 0x1fd8, 0x1fd9, 0x1fda, 0x1fdb, 0x1fdd, 0x1fde, 0x1fdf, 0x1fe0, 0x1fe1, 0x1fe2, 0x1fe3, 0x1fe4, 0x1fe5, 0x1fe6, 0x1fe7, 0x1fe8, 0x1fe9, 0x1fea, 0x1feb, 0x1fec, 0x1fed, 0x1fee, 0x1fef, 0x1ff2, 0x1ff3, 0x1ff4, 0x1ff6, 0x1ff7, 0x1ff8, 0x1ff9, 0x1ffa, 0x1ffb, 0x1ffc, 0x1ffd, 0x1ffe, 0x2000, 0x2001, 0x2002, 0x2003, 0x2004, 0x2005, 0x2006, 0x2007, 0x2008, 0x2009, 0x200a, 0x200b, 0x200c, 0x200d, 0x200e, 0x200f, 0x2010, 0x2011, 0x2012, 0x2013, 0x2015, 0x2015, 0x2016, 0x2017, 0x2018, 0x2019, 
0x201a, 0x201b, 0x201c, 0x201d, 0x201e, 0x201f, 0x2020, 0x2021, 0x2022, 0x2023, 0x2024, 0x2025, 0x2026, 0x2027, 0x2028, 0x2029, 0x202a, 0x202b, 0x202c, 0x202d, 0x202e, 0x202f, 0x2030, 0x2031, 0x2032, 0x2033, 0x2034, 0x2035, 0x2036, 0x2037, 0x2038, 0x2039, 0x203a, 0x203b, 0x203c, 0x203d, 0x203e, 0x203f, 0x2040, 0x2041, 0x2042, 0x2043, 0x2044, 0x2045, 0x2046, 0x2047, 0x2048, 0x2049, 0x204a, 0x204b, 0x204c, 0x204d, 0x204e, 0x204f, 0x2050, 0x2051, 0x2052, 0x2053, 0x2054, 0x2055, 0x2056, 0x2057, 0x2058, 0x2059, 0x205a, 0x205b, 0x205c, 0x205d, 0x205e, 0x205f, 0x2060, 0x2061, 0x2062, 0x2063, 0x2064, 0x206a, 0x206b, 0x206c, 0x206d, 0x206e, 0x206f, 0x2070, 0x2071, 0x2074, 0x2075, 0x2076, 0x2077, 0x2078, 0x2079, 0x207a, 0x207b, 0x207c, 0x207d, 0x207e, 0x207f, 0x2080, 0x2081, 0x2082, 0x2083, 0x2084, 0x2085, 0x2086, 0x2087, 0x2088, 0x2089, 0x208a, 0x208b, 0x208c, 0x208d, 0x208e, 0x2090, 0x2091, 0x2092, 0x2093, 0x2094, 0x2095, 0x2096, 0x2097, 0x2098, 0x2099, 0x209a, 0x209b, 0x209c, 0x20a0, 0x20a1, 0x20a2, 0x20a3, 0x20a4, 0x20a5, 0x20a6, 0x20a7, 0x20a8, 0x20a9, 0x20aa, 0x20ab, 0x20ac, 0x20ad, 0x20ae, 0x20af, 0x20b0, 0x20b1, 0x20b2, 0x20b3, 0x20b4, 0x20b5, 0x20b8, 0x20b9, 0x20ba, 0x20d0, 0x20d1, 0x20d6, 0x20d7, 0x20db, 0x20dc, 0x20e1, 0x2100, 0x2101, 0x2102, 0x2103, 0x2104, 0x2105, 0x2106, 0x2107, 0x2108, 0x2109, 0x210b, 0x210c, 0x210d, 0x210e, 0x210f, 0x2110, 0x2111, 0x2112, 0x2113, 0x2114, 0x2115, 0x2116, 0x2117, 0x2118, 0x2119, 0x211a, 0x211b, 0x211c, 0x211d, 0x211e, 0x211f, 0x2120, 0x2121, 0x2122, 0x2123, 0x2124, 0x2125, 0x2126, 0x2127, 0x2128, 0x2129, 0x212a, 0x212b, 0x212c, 0x212d, 0x212e, 0x212f, 0x2130, 0x2131, 0x2132, 0x2133, 0x2134, 0x2135, 0x2136, 0x2137, 0x2138, 0x2139, 0x213a, 0x213b, 0x213c, 0x213d, 0x213e, 0x213f, 0x2140, 0x2141, 0x2142, 0x2143, 0x2144, 0x2145, 0x2146, 0x2147, 0x2148, 0x2149, 0x214b, 0x214e, 0x2150, 0x2151, 0x2152, 0x2153, 0x2154, 0x2155, 0x2156, 0x2157, 0x2158, 0x2159, 0x215a, 0x215b, 0x215c, 0x215d, 0x215e, 0x215f, 0x2160, 0x2161, 0x2162, 0x2163, 
0x2164, 0x2165, 0x2166, 0x2167, 0x2168, 0x2169, 0x216a, 0x216b, 0x216c, 0x216d, 0x216e, 0x216f, 0x2170, 0x2171, 0x2172, 0x2173, 0x2174, 0x2175, 0x2176, 0x2177, 0x2178, 0x2179, 0x217a, 0x217b, 0x217c, 0x217d, 0x217e, 0x217f, 0x2180, 0x2181, 0x2182, 0x2183, 0x2184, 0x2185, 0x2189, 0x2190, 0x2191, 0x2192, 0x2193, 0x2194, 0x2195, 0x2196, 0x2197, 0x2198, 0x2199, 0x219a, 0x219b, 0x219c, 0x219d, 0x219e, 0x219f, 0x21a0, 0x21a1, 0x21a2, 0x21a3, 0x21a4, 0x21a5, 0x21a6, 0x21a7, 0x21a8, 0x21a9, 0x21aa, 0x21ab, 0x21ac, 0x21ad, 0x21ae, 0x21af, 0x21b0, 0x21b1, 0x21b2, 0x21b3, 0x21b4, 0x21b5, 0x21b6, 0x21b7, 0x21b8, 0x21b9, 0x21ba, 0x21bb, 0x21bc, 0x21bd, 0x21be, 0x21bf, 0x21c0, 0x21c1, 0x21c2, 0x21c3, 0x21c4, 0x21c5, 0x21c6, 0x21c7, 0x21c8, 0x21c9, 0x21ca, 0x21cb, 0x21cc, 0x21cd, 0x21ce, 0x21cf, 0x21d0, 0x21d1, 0x21d2, 0x21d3, 0x21d4, 0x21d5, 0x21d6, 0x21d7, 0x21d8, 0x21d9, 0x21da, 0x21db, 0x21dc, 0x21dd, 0x21de, 0x21df, 0x21e0, 0x21e1, 0x21e2, 0x21e3, 0x21e4, 0x21e5, 0x21e6, 0x21e7, 0x21e8, 0x21e9, 0x21ea, 0x21eb, 0x21ec, 0x21ed, 0x21ee, 0x21ef, 0x21f0, 0x21f1, 0x21f2, 0x21f3, 0x21f4, 0x21f5, 0x21f6, 0x21f7, 0x21f8, 0x21f9, 0x21fa, 0x21fb, 0x21fc, 0x21fd, 0x21fe, 0x21ff, 0x2200, 0x2201, 0x2202, 0x2203, 0x2204, 0x2205, 0x2206, 0x2207, 0x2208, 0x2209, 0x220a, 0x220b, 0x220c, 0x220d, 0x220e, 0x220f, 0x2210, 0x2211, 0x2212, 0x2213, 0x2214, 0x2215, 0x2216, 0x2217, 0x2218, 0x2219, 0x221a, 0x221b, 0x221c, 0x221d, 0x221e, 0x221f, 0x2220, 0x2221, 0x2222, 0x2223, 0x2224, 0x2225, 0x2226, 0x2227, 0x2228, 0x2229, 0x222a, 0x222b, 0x222c, 0x222d, 0x222e, 0x222f, 0x2230, 0x2231, 0x2232, 0x2233, 0x2234, 0x2235, 0x2236, 0x2237, 0x2238, 0x2239, 0x223a, 0x223b, 0x223c, 0x223d, 0x223e, 0x223f, 0x2240, 0x2241, 0x2242, 0x2243, 0x2244, 0x2245, 0x2246, 0x2247, 0x2248, 0x2249, 0x224a, 0x224b, 0x224c, 0x224d, 0x224e, 0x224f, 0x2250, 0x2251, 0x2252, 0x2253, 0x2254, 0x2255, 0x2256, 0x2257, 0x2258, 0x2259, 0x225a, 0x225b, 0x225c, 0x225d, 0x225e, 0x225f, 0x2260, 0x2261, 0x2262, 0x2263, 0x2264, 0x2265, 0x2266, 
0x2267, 0x2268, 0x2269, 0x226a, 0x226b, 0x226c, 0x226d, 0x226e, 0x226f, 0x2270, 0x2271, 0x2272, 0x2273, 0x2274, 0x2275, 0x2276, 0x2277, 0x2278, 0x2279, 0x227a, 0x227b, 0x227c, 0x227d, 0x227e, 0x227f, 0x2280, 0x2281, 0x2282, 0x2283, 0x2284, 0x2285, 0x2286, 0x2287, 0x2288, 0x2289, 0x228a, 0x228b, 0x228c, 0x228d, 0x228e, 0x228f, 0x2290, 0x2291, 0x2292, 0x2293, 0x2294, 0x2295, 0x2296, 0x2297, 0x2298, 0x2299, 0x229a, 0x229b, 0x229c, 0x229d, 0x229e, 0x229f, 0x22a0, 0x22a1, 0x22a2, 0x22a3, 0x22a4, 0x22a5, 0x22a6, 0x22a7, 0x22a8, 0x22a9, 0x22aa, 0x22ab, 0x22ac, 0x22ad, 0x22ae, 0x22af, 0x22b0, 0x22b1, 0x22b2, 0x22b3, 0x22b4, 0x22b5, 0x22b6, 0x22b7, 0x22b8, 0x22b9, 0x22ba, 0x22bb, 0x22bc, 0x22bd, 0x22be, 0x22bf, 0x22c0, 0x22c1, 0x22c2, 0x22c3, 0x22c4, 0x22c5, 0x22c6, 0x22c7, 0x22c8, 0x22c9, 0x22ca, 0x22cb, 0x22cc, 0x22cd, 0x22ce, 0x22cf, 0x22d0, 0x22d1, 0x22d2, 0x22d3, 0x22d4, 0x22d5, 0x22d6, 0x22d7, 0x22d8, 0x22d9, 0x22da, 0x22db, 0x22dc, 0x22dd, 0x22de, 0x22df, 0x22e0, 0x22e1, 0x22e2, 0x22e3, 0x22e4, 0x22e5, 0x22e6, 0x22e7, 0x22e8, 0x22e9, 0x22ea, 0x22eb, 0x22ec, 0x22ed, 0x22ee, 0x22ef, 0x22f0, 0x22f1, 0x22f2, 0x22f3, 0x22f4, 0x22f5, 0x22f6, 0x22f7, 0x22f8, 0x22f9, 0x22fa, 0x22fb, 0x22fc, 0x22fd, 0x22fe, 0x22ff, 0x2300, 0x2301, 0x2302, 0x2303, 0x2304, 0x2305, 0x2306, 0x2307, 0x2308, 0x2309, 0x230a, 0x230b, 0x230c, 0x230d, 0x230e, 0x230f, 0x2310, 0x2311, 0x2318, 0x2319, 0x231c, 0x231d, 0x231e, 0x231f, 0x2320, 0x2321, 0x2324, 0x2325, 0x2326, 0x2327, 0x2328, 0x232b, 0x232c, 0x2373, 0x2374, 0x2375, 0x237a, 0x237d, 0x2387, 0x2394, 0x239b, 0x239c, 0x239d, 0x239e, 0x239f, 0x23a0, 0x23a1, 0x23a2, 0x23a3, 0x23a4, 0x23a5, 0x23a6, 0x23a7, 0x23a8, 0x23a9, 0x23aa, 0x23ab, 0x23ac, 0x23ad, 0x23ae, 0x23ce, 0x23cf, 0x23e3, 0x23e5, 0x23e8, 0x2422, 0x2423, 0x2460, 0x2461, 0x2462, 0x2463, 0x2464, 0x2465, 0x2466, 0x2467, 0x2468, 0x2469, 0x2500, 0x2501, 0x2502, 0x2503, 0x2504, 0x2505, 0x2506, 0x2507, 0x2508, 0x2509, 0x250a, 0x250b, 0x250c, 0x250d, 0x250e, 0x250f, 0x2510, 0x2511, 0x2512, 0x2513, 
0x2514, 0x2515, 0x2516, 0x2517, 0x2518, 0x2519, 0x251a, 0x251b, 0x251c, 0x251d, 0x251e, 0x251f, 0x2520, 0x2521, 0x2522, 0x2523, 0x2524, 0x2525, 0x2526, 0x2527, 0x2528, 0x2529, 0x252a, 0x252b, 0x252c, 0x252d, 0x252e, 0x252f, 0x2530, 0x2531, 0x2532, 0x2533, 0x2534, 0x2535, 0x2536, 0x2537, 0x2538, 0x2539, 0x253a, 0x253b, 0x253c, 0x253d, 0x253e, 0x253f, 0x2540, 0x2541, 0x2542, 0x2543, 0x2544, 0x2545, 0x2546, 0x2547, 0x2548, 0x2549, 0x254a, 0x254b, 0x254c, 0x254d, 0x254e, 0x254f, 0x2550, 0x2551, 0x2552, 0x2553, 0x2554, 0x2555, 0x2556, 0x2557, 0x2558, 0x2559, 0x255a, 0x255b, 0x255c, 0x255d, 0x255e, 0x255f, 0x2560, 0x2561, 0x2562, 0x2563, 0x2564, 0x2565, 0x2566, 0x2567, 0x2568, 0x2569, 0x256a, 0x256b, 0x256c, 0x256d, 0x256e, 0x256f, 0x2570, 0x2571, 0x2572, 0x2573, 0x2574, 0x2575, 0x2576, 0x2577, 0x2578, 0x2579, 0x257a, 0x257b, 0x257c, 0x257d, 0x257e, 0x257f, 0x2580, 0x2581, 0x2582, 0x2583, 0x2584, 0x2585, 0x2586, 0x2587, 0x2588, 0x2589, 0x258a, 0x258b, 0x258c, 0x258d, 0x258e, 0x258f, 0x2590, 0x2591, 0x2592, 0x2593, 0x2594, 0x2595, 0x2596, 0x2597, 0x2598, 0x2599, 0x259a, 0x259b, 0x259c, 0x259d, 0x259e, 0x259f, 0x25a0, 0x25a1, 0x25a2, 0x25a3, 0x25a4, 0x25a5, 0x25a6, 0x25a7, 0x25a8, 0x25a9, 0x25aa, 0x25ab, 0x25ac, 0x25ad, 0x25ae, 0x25af, 0x25b0, 0x25b1, 0x25b2, 0x25b3, 0x25b4, 0x25b5, 0x25b6, 0x25b7, 0x25b8, 0x25b9, 0x25ba, 0x25bb, 0x25bc, 0x25bd, 0x25be, 0x25bf, 0x25c0, 0x25c1, 0x25c2, 0x25c3, 0x25c4, 0x25c5, 0x25c6, 0x25c7, 0x25c8, 0x25c9, 0x25ca, 0x25cb, 0x25cc, 0x25cd, 0x25ce, 0x25cf, 0x25d0, 0x25d1, 0x25d2, 0x25d3, 0x25d4, 0x25d5, 0x25d6, 0x25d7, 0x25d8, 0x25d9, 0x25da, 0x25db, 0x25dc, 0x25dd, 0x25de, 0x25df, 0x25e0, 0x25e1, 0x25e2, 0x25e3, 0x25e4, 0x25e5, 0x25e6, 0x25e7, 0x25e8, 0x25e9, 0x25ea, 0x25eb, 0x25ec, 0x25ed, 0x25ee, 0x25ef, 0x25f0, 0x25f1, 0x25f2, 0x25f3, 0x25f4, 0x25f5, 0x25f6, 0x25f7, 0x25f8, 0x25f9, 0x25fa, 0x25fb, 0x25fc, 0x25fd, 0x25fe, 0x25ff, 0x2600, 0x2601, 0x2602, 0x2603, 0x2604, 0x2605, 0x2606, 0x2607, 0x2608, 0x2609, 0x260a, 0x260b, 0x260c, 0x260d, 
0x260e, 0x260f, 0x2610, 0x2611, 0x2612, 0x2613, 0x2614, 0x2615, 0x2616, 0x2617, 0x2618, 0x2619, 0x261a, 0x261b, 0x261c, 0x261d, 0x261e, 0x261f, 0x2620, 0x2621, 0x2622, 0x2623, 0x2624, 0x2625, 0x2626, 0x2627, 0x2628, 0x2629, 0x262a, 0x262b, 0x262c, 0x262d, 0x262e, 0x262f, 0x2630, 0x2631, 0x2632, 0x2633, 0x2634, 0x2635, 0x2636, 0x2637, 0x2638, 0x2639, 0x263a, 0x263b, 0x263c, 0x263d, 0x263e, 0x263f, 0x2640, 0x2641, 0x2642, 0x2643, 0x2644, 0x2645, 0x2646, 0x2647, 0x2648, 0x2649, 0x264a, 0x264b, 0x264c, 0x264d, 0x264e, 0x264f, 0x2650, 0x2651, 0x2652, 0x2653, 0x2654, 0x2655, 0x2656, 0x2657, 0x2658, 0x2659, 0x265a, 0x265b, 0x265c, 0x265d, 0x265e, 0x265f, 0x2660, 0x2661, 0x2662, 0x2663, 0x2664, 0x2665, 0x2666, 0x2667, 0x2668, 0x2669, 0x266a, 0x266b, 0x266c, 0x266d, 0x266e, 0x266f, 0x2670, 0x2671, 0x2672, 0x2673, 0x2674, 0x2675, 0x2676, 0x2677, 0x2678, 0x2679, 0x267a, 0x267b, 0x267c, 0x267d, 0x267e, 0x267f, 0x2680, 0x2681, 0x2682, 0x2683, 0x2684, 0x2685, 0x2686, 0x2687, 0x2688, 0x2689, 0x268a, 0x268b, 0x268c, 0x268d, 0x268e, 0x268f, 0x2690, 0x2691, 0x2692, 0x2693, 0x2694, 0x2695, 0x2696, 0x2697, 0x2698, 0x2699, 0x269a, 0x269b, 0x269c, 0x26a0, 0x26a1, 0x26a2, 0x26a3, 0x26a4, 0x26a5, 0x26a6, 0x26a7, 0x26a8, 0x26a9, 0x26aa, 0x26ab, 0x26ac, 0x26ad, 0x26ae, 0x26af, 0x26b0, 0x26b1, 0x26b2, 0x26b3, 0x26b4, 0x26b5, 0x26b6, 0x26b7, 0x26b8, 0x26c0, 0x26c1, 0x26c2, 0x26c3, 0x26e2, 0x2701, 0x2702, 0x2703, 0x2704, 0x2706, 0x2707, 0x2708, 0x2709, 0x270c, 0x270d, 0x270e, 0x270f, 0x2710, 0x2711, 0x2712, 0x2713, 0x2714, 0x2715, 0x2716, 0x2717, 0x2718, 0x2719, 0x271a, 0x271b, 0x271c, 0x271d, 0x271e, 0x271f, 0x2720, 0x2721, 0x2722, 0x2723, 0x2724, 0x2725, 0x2726, 0x2727, 0x2729, 0x272a, 0x272b, 0x272c, 0x272d, 0x272e, 0x272f, 0x2730, 0x2731, 0x2732, 0x2733, 0x2734, 0x2735, 0x2736, 0x2737, 0x2738, 0x2739, 0x273a, 0x273b, 0x273c, 0x273d, 0x273e, 0x273f, 0x2740, 0x2741, 0x2742, 0x2743, 0x2744, 0x2745, 0x2746, 0x2747, 0x2748, 0x2749, 0x274a, 0x274b, 0x274d, 0x274f, 0x2750, 0x2751, 0x2752, 0x2756, 
0x2758, 0x2759, 0x275a, 0x275b, 0x275c, 0x275d, 0x275e, 0x2761, 0x2762, 0x2763, 0x2764, 0x2765, 0x2766, 0x2767, 0x2768, 0x2769, 0x276a, 0x276b, 0x276c, 0x276d, 0x276e, 0x276f, 0x2770, 0x2771, 0x2772, 0x2773, 0x2774, 0x2775, 0x2776, 0x2777, 0x2778, 0x2779, 0x277a, 0x277b, 0x277c, 0x277d, 0x277e, 0x277f, 0x2780, 0x2781, 0x2782, 0x2783, 0x2784, 0x2785, 0x2786, 0x2787, 0x2788, 0x2789, 0x278a, 0x278b, 0x278c, 0x278d, 0x278e, 0x278f, 0x2790, 0x2791, 0x2792, 0x2793, 0x2794, 0x2798, 0x2799, 0x279a, 0x279b, 0x279c, 0x279d, 0x279e, 0x279f, 0x27a0, 0x27a1, 0x27a2, 0x27a3, 0x27a4, 0x27a5, 0x27a6, 0x27a7, 0x27a8, 0x27a9, 0x27aa, 0x27ab, 0x27ac, 0x27ad, 0x27ae, 0x27af, 0x27b1, 0x27b2, 0x27b3, 0x27b4, 0x27b5, 0x27b6, 0x27b7, 0x27b8, 0x27b9, 0x27ba, 0x27bb, 0x27bc, 0x27bd, 0x27be, 0x27c5, 0x27c6, 0x27e0, 0x27e6, 0x27e7, 0x27e8, 0x27e9, 0x27ea, 0x27eb, 0x27f0, 0x27f1, 0x27f2, 0x27f3, 0x27f4, 0x27f5, 0x27f6, 0x27f7, 0x27f8, 0x27f9, 0x27fa, 0x27fb, 0x27fc, 0x27fd, 0x27fe, 0x27ff, 0x2800, 0x2801, 0x2802, 0x2803, 0x2804, 0x2805, 0x2806, 0x2807, 0x2808, 0x2809, 0x280a, 0x280b, 0x280c, 0x280d, 0x280e, 0x280f, 0x2810, 0x2811, 0x2812, 0x2813, 0x2814, 0x2815, 0x2816, 0x2817, 0x2818, 0x2819, 0x281a, 0x281b, 0x281c, 0x281d, 0x281e, 0x281f, 0x2820, 0x2821, 0x2822, 0x2823, 0x2824, 0x2825, 0x2826, 0x2827, 0x2828, 0x2829, 0x282a, 0x282b, 0x282c, 0x282d, 0x282e, 0x282f, 0x2830, 0x2831, 0x2832, 0x2833, 0x2834, 0x2835, 0x2836, 0x2837, 0x2838, 0x2839, 0x283a, 0x283b, 0x283c, 0x283d, 0x283e, 0x283f, 0x2840, 0x2841, 0x2842, 0x2843, 0x2844, 0x2845, 0x2846, 0x2847, 0x2848, 0x2849, 0x284a, 0x284b, 0x284c, 0x284d, 0x284e, 0x284f, 0x2850, 0x2851, 0x2852, 0x2853, 0x2854, 0x2855, 0x2856, 0x2857, 0x2858, 0x2859, 0x285a, 0x285b, 0x285c, 0x285d, 0x285e, 0x285f, 0x2860, 0x2861, 0x2862, 0x2863, 0x2864, 0x2865, 0x2866, 0x2867, 0x2868, 0x2869, 0x286a, 0x286b, 0x286c, 0x286d, 0x286e, 0x286f, 0x2870, 0x2871, 0x2872, 0x2873, 0x2874, 0x2875, 0x2876, 0x2877, 0x2878, 0x2879, 0x287a, 0x287b, 0x287c, 0x287d, 0x287e, 0x287f, 
0x2880, 0x2881, 0x2882, 0x2883, 0x2884, 0x2885, 0x2886, 0x2887, 0x2888, 0x2889, 0x288a, 0x288b, 0x288c, 0x288d, 0x288e, 0x288f, 0x2890, 0x2891, 0x2892, 0x2893, 0x2894, 0x2895, 0x2896, 0x2897, 0x2898, 0x2899, 0x289a, 0x289b, 0x289c, 0x289d, 0x289e, 0x289f, 0x28a0, 0x28a1, 0x28a2, 0x28a3, 0x28a4, 0x28a5, 0x28a6, 0x28a7, 0x28a8, 0x28a9, 0x28aa, 0x28ab, 0x28ac, 0x28ad, 0x28ae, 0x28af, 0x28b0, 0x28b1, 0x28b2, 0x28b3, 0x28b4, 0x28b5, 0x28b6, 0x28b7, 0x28b8, 0x28b9, 0x28ba, 0x28bb, 0x28bc, 0x28bd, 0x28be, 0x28bf, 0x28c0, 0x28c1, 0x28c2, 0x28c3, 0x28c4, 0x28c5, 0x28c6, 0x28c7, 0x28c8, 0x28c9, 0x28ca, 0x28cb, 0x28cc, 0x28cd, 0x28ce, 0x28cf, 0x28d0, 0x28d1, 0x28d2, 0x28d3, 0x28d4, 0x28d5, 0x28d6, 0x28d7, 0x28d8, 0x28d9, 0x28da, 0x28db, 0x28dc, 0x28dd, 0x28de, 0x28df, 0x28e0, 0x28e1, 0x28e2, 0x28e3, 0x28e4, 0x28e5, 0x28e6, 0x28e7, 0x28e8, 0x28e9, 0x28ea, 0x28eb, 0x28ec, 0x28ed, 0x28ee, 0x28ef, 0x28f0, 0x28f1, 0x28f2, 0x28f3, 0x28f4, 0x28f5, 0x28f6, 0x28f7, 0x28f8, 0x28f9, 0x28fa, 0x28fb, 0x28fc, 0x28fd, 0x28fe, 0x28ff, 0x2906, 0x2907, 0x290a, 0x290b, 0x2940, 0x2941, 0x2983, 0x2984, 0x29ce, 0x29cf, 0x29d0, 0x29d1, 0x29d2, 0x29d3, 0x29d4, 0x29d5, 0x29eb, 0x29fa, 0x29fb, 0x2a00, 0x2a01, 0x2a02, 0x2a0c, 0x2a0d, 0x2a0e, 0x2a0f, 0x2a10, 0x2a11, 0x2a12, 0x2a13, 0x2a14, 0x2a15, 0x2a16, 0x2a17, 0x2a18, 0x2a19, 0x2a1a, 0x2a1b, 0x2a1c, 0x2a2f, 0x2a6a, 0x2a6b, 0x2a7d, 0x2a7e, 0x2a7f, 0x2a80, 0x2a81, 0x2a82, 0x2a83, 0x2a84, 0x2a85, 0x2a86, 0x2a87, 0x2a88, 0x2a89, 0x2a8a, 0x2a8b, 0x2a8c, 0x2a8d, 0x2a8e, 0x2a8f, 0x2a90, 0x2a91, 0x2a92, 0x2a93, 0x2a94, 0x2a95, 0x2a96, 0x2a97, 0x2a98, 0x2a99, 0x2a9a, 0x2a9b, 0x2a9c, 0x2a9d, 0x2a9e, 0x2a9f, 0x2aa0, 0x2aae, 0x2aaf, 0x2ab0, 0x2ab1, 0x2ab2, 0x2ab3, 0x2ab4, 0x2ab5, 0x2ab6, 0x2ab7, 0x2ab8, 0x2ab9, 0x2aba, 0x2af9, 0x2afa, 0x2b00, 0x2b01, 0x2b02, 0x2b03, 0x2b04, 0x2b05, 0x2b06, 0x2b07, 0x2b08, 0x2b09, 0x2b0a, 0x2b0b, 0x2b0c, 0x2b0d, 0x2b0e, 0x2b0f, 0x2b10, 0x2b11, 0x2b12, 0x2b13, 0x2b14, 0x2b15, 0x2b16, 0x2b17, 0x2b18, 0x2b19, 0x2b1a, 0x2b1f, 0x2b20, 
0x2b21, 0x2b22, 0x2b23, 0x2b24, 0x2b53, 0x2b54, 0x2c60, 0x2c61, 0x2c62, 0x2c63, 0x2c64, 0x2c65, 0x2c66, 0x2c67, 0x2c68, 0x2c69, 0x2c6a, 0x2c6b, 0x2c6c, 0x2c6d, 0x2c6e, 0x2c6f, 0x2c70, 0x2c71, 0x2c72, 0x2c73, 0x2c74, 0x2c75, 0x2c76, 0x2c77, 0x2c79, 0x2c7a, 0x2c7b, 0x2c7c, 0x2c7d, 0x2c7e, 0x2c7f, 0x2d00, 0x2d01, 0x2d02, 0x2d03, 0x2d04, 0x2d05, 0x2d06, 0x2d07, 0x2d08, 0x2d09, 0x2d0a, 0x2d0b, 0x2d0c, 0x2d0d, 0x2d0e, 0x2d0f, 0x2d10, 0x2d11, 0x2d12, 0x2d13, 0x2d14, 0x2d15, 0x2d16, 0x2d17, 0x2d18, 0x2d19, 0x2d1a, 0x2d1b, 0x2d1c, 0x2d1d, 0x2d1e, 0x2d1f, 0x2d20, 0x2d21, 0x2d22, 0x2d23, 0x2d24, 0x2d25, 0x2d30, 0x2d31, 0x2d32, 0x2d33, 0x2d34, 0x2d35, 0x2d36, 0x2d37, 0x2d38, 0x2d39, 0x2d3a, 0x2d3b, 0x2d3c, 0x2d3d, 0x2d3e, 0x2d3f, 0x2d40, 0x2d41, 0x2d42, 0x2d43, 0x2d44, 0x2d45, 0x2d46, 0x2d47, 0x2d48, 0x2d49, 0x2d4a, 0x2d4b, 0x2d4c, 0x2d4d, 0x2d4e, 0x2d4f, 0x2d50, 0x2d51, 0x2d52, 0x2d53, 0x2d54, 0x2d55, 0x2d56, 0x2d57, 0x2d58, 0x2d59, 0x2d5a, 0x2d5b, 0x2d5c, 0x2d5d, 0x2d5e, 0x2d5f, 0x2d60, 0x2d61, 0x2d62, 0x2d63, 0x2d64, 0x2d65, 0x2d6f, 0x2e18, 0x2e1e, 0x2e22, 0x2e23, 0x2e24, 0x2e25, 0x2e2e, 0x4dc0, 0x4dc1, 0x4dc2, 0x4dc3, 0x4dc4, 0x4dc5, 0x4dc6, 0x4dc7, 0x4dc8, 0x4dc9, 0x4dca, 0x4dcb, 0x4dcc, 0x4dcd, 0x4dce, 0x4dcf, 0x4dd0, 0x4dd1, 0x4dd2, 0x4dd3, 0x4dd4, 0x4dd5, 0x4dd6, 0x4dd7, 0x4dd8, 0x4dd9, 0x4dda, 0x4ddb, 0x4ddc, 0x4ddd, 0x4dde, 0x4ddf, 0x4de0, 0x4de1, 0x4de2, 0x4de3, 0x4de4, 0x4de5, 0x4de6, 0x4de7, 0x4de8, 0x4de9, 0x4dea, 0x4deb, 0x4dec, 0x4ded, 0x4dee, 0x4def, 0x4df0, 0x4df1, 0x4df2, 0x4df3, 0x4df4, 0x4df5, 0x4df6, 0x4df7, 0x4df8, 0x4df9, 0x4dfa, 0x4dfb, 0x4dfc, 0x4dfd, 0x4dfe, 0x4dff, 0xa4d0, 0xa4d1, 0xa4d2, 0xa4d3, 0xa4d4, 0xa4d5, 0xa4d6, 0xa4d7, 0xa4d8, 0xa4d9, 0xa4da, 0xa4db, 0xa4dc, 0xa4dd, 0xa4de, 0xa4df, 0xa4e0, 0xa4e1, 0xa4e2, 0xa4e3, 0xa4e4, 0xa4e5, 0xa4e6, 0xa4e7, 0xa4e8, 0xa4e9, 0xa4ea, 0xa4eb, 0xa4ec, 0xa4ed, 0xa4ee, 0xa4ef, 0xa4f0, 0xa4f1, 0xa4f2, 0xa4f3, 0xa4f4, 0xa4f5, 0xa4f6, 0xa4f7, 0xa4f8, 0xa4f9, 0xa4fa, 0xa4fb, 0xa4fc, 0xa4fd, 0xa4fe, 0xa4ff, 0xa644, 
0xa645, 0xa646, 0xa647, 0xa64c, 0xa64d, 0xa650, 0xa651, 0xa654, 0xa655, 0xa656, 0xa657, 0xa662, 0xa663, 0xa664, 0xa665, 0xa666, 0xa667, 0xa668, 0xa669, 0xa66a, 0xa66b, 0xa66c, 0xa66d, 0xa66e, 0xa68a, 0xa68b, 0xa68c, 0xa68d, 0xa694, 0xa695, 0xa708, 0xa709, 0xa70a, 0xa70b, 0xa70c, 0xa70d, 0xa70e, 0xa70f, 0xa710, 0xa711, 0xa712, 0xa713, 0xa714, 0xa715, 0xa716, 0xa71b, 0xa71c, 0xa71d, 0xa71e, 0xa71f, 0xa722, 0xa723, 0xa724, 0xa725, 0xa726, 0xa727, 0xa728, 0xa729, 0xa72a, 0xa72b, 0xa730, 0xa731, 0xa732, 0xa733, 0xa734, 0xa735, 0xa736, 0xa737, 0xa738, 0xa739, 0xa73a, 0xa73b, 0xa73c, 0xa73d, 0xa73e, 0xa73f, 0xa740, 0xa741, 0xa746, 0xa747, 0xa748, 0xa749, 0xa74a, 0xa74b, 0xa74e, 0xa74f, 0xa750, 0xa751, 0xa752, 0xa753, 0xa756, 0xa757, 0xa764, 0xa765, 0xa766, 0xa767, 0xa780, 0xa781, 0xa782, 0xa783, 0xa789, 0xa78a, 0xa78b, 0xa78c, 0xa78d, 0xa78e, 0xa790, 0xa791, 0xa7a0, 0xa7a1, 0xa7a2, 0xa7a3, 0xa7a4, 0xa7a5, 0xa7a6, 0xa7a7, 0xa7a8, 0xa7a9, 0xa7aa, 0xa7fa, 0xa7fb, 0xa7fc, 0xa7fd, 0xa7fe, 0xa7ff, 0xef00, 0xef01, 0xef02, 0xef03, 0xef04, 0xef05, 0xef06, 0xef07, 0xef08, 0xef09, 0xef0a, 0xef0b, 0xef0c, 0xef0d, 0xef0e, 0xef0f, 0xef10, 0xef11, 0xef12, 0xef13, 0xef14, 0xef15, 0xef16, 0xef17, 0xef18, 0xef19, 0xf000, 0xf001, 0xf002, 0xf003, 0xf400, 0xf401, 0xf402, 0xf403, 0xf404, 0xf405, 0xf406, 0xf407, 0xf408, 0xf409, 0xf40a, 0xf40b, 0xf40c, 0xf40d, 0xf40e, 0xf40f, 0xf410, 0xf411, 0xf412, 0xf413, 0xf414, 0xf415, 0xf416, 0xf417, 0xf418, 0xf419, 0xf41a, 0xf41b, 0xf41c, 0xf41d, 0xf41e, 0xf41f, 0xf420, 0xf421, 0xf422, 0xf423, 0xf424, 0xf425, 0xf426, 0xf428, 0xf429, 0xf42a, 0xf42b, 0xf42c, 0xf42d, 0xf42e, 0xf42f, 0xf430, 0xf431, 0xf432, 0xf433, 0xf434, 0xf435, 0xf436, 0xf437, 0xf438, 0xf439, 0xf43a, 0xf43b, 0xf43c, 0xf43d, 0xf43e, 0xf43f, 0xf440, 0xf441, 0xf6c5, 0xfb00, 0xfb01, 0xfb02, 0xfb03, 0xfb04, 0xfb05, 0xfb06, 0xfb13, 0xfb14, 0xfb15, 0xfb16, 0xfb17, 0xfb1d, 0xfb1e, 0xfb1f, 0xfb20, 0xfb21, 0xfb22, 0xfb23, 0xfb24, 0xfb25, 0xfb26, 0xfb27, 0xfb28, 0xfb29, 0xfb2a, 0xfb2b, 0xfb2c, 0xfb2d, 
0xfb2e, 0xfb2f, 0xfb30, 0xfb31, 0xfb32, 0xfb33, 0xfb34, 0xfb35, 0xfb36, 0xfb38, 0xfb39, 0xfb3a, 0xfb3b, 0xfb3c, 0xfb3e, 0xfb40, 0xfb41, 0xfb43, 0xfb44, 0xfb46, 0xfb47, 0xfb48, 0xfb49, 0xfb4a, 0xfb4b, 0xfb4c, 0xfb4d, 0xfb4e, 0xfb4f, 0xfb52, 0xfb53, 0xfb54, 0xfb55, 0xfb56, 0xfb57, 0xfb58, 0xfb59, 0xfb5a, 0xfb5b, 0xfb5c, 0xfb5d, 0xfb5e, 0xfb5f, 0xfb60, 0xfb61, 0xfb62, 0xfb63, 0xfb64, 0xfb65, 0xfb66, 0xfb67, 0xfb68, 0xfb69, 0xfb6a, 0xfb6b, 0xfb6c, 0xfb6d, 0xfb6e, 0xfb6f, 0xfb70, 0xfb71, 0xfb72, 0xfb73, 0xfb74, 0xfb75, 0xfb76, 0xfb77, 0xfb78, 0xfb79, 0xfb7a, 0xfb7b, 0xfb7c, 0xfb7d, 0xfb7e, 0xfb7f, 0xfb80, 0xfb81, 0xfb82, 0xfb83, 0xfb84, 0xfb85, 0xfb86, 0xfb87, 0xfb88, 0xfb89, 0xfb8a, 0xfb8b, 0xfb8c, 0xfb8d, 0xfb8e, 0xfb8f, 0xfb90, 0xfb91, 0xfb92, 0xfb93, 0xfb94, 0xfb95, 0xfb96, 0xfb97, 0xfb98, 0xfb99, 0xfb9a, 0xfb9b, 0xfb9c, 0xfb9d, 0xfb9e, 0xfb9f, 0xfba0, 0xfba1, 0xfba2, 0xfba3, 0xfbaa, 0xfbab, 0xfbac, 0xfbad, 0xfbd3, 0xfbd4, 0xfbd5, 0xfbd6, 0xfbd9, 0xfbda, 0xfbe8, 0xfbe9, 0xfbfc, 0xfbfd, 0xfbfe, 0xfbff, 0xfe00, 0xfe01, 0xfe02, 0xfe03, 0xfe04, 0xfe05, 0xfe06, 0xfe07, 0xfe08, 0xfe09, 0xfe0a, 0xfe0b, 0xfe0c, 0xfe0d, 0xfe0e, 0xfe0f, 0xfe20, 0xfe21, 0xfe22, 0xfe23, 0xfe70, 0xfe71, 0xfe72, 0xfe73, 0xfe74, 0xfe76, 0xfe77, 0xfe78, 0xfe79, 0xfe7a, 0xfe7b, 0xfe7c, 0xfe7d, 0xfe7e, 0xfe7f, 0xfe80, 0xfe81, 0xfe82, 0xfe83, 0xfe84, 0xfe85, 0xfe86, 0xfe87, 0xfe88, 0xfe89, 0xfe8a, 0xfe8b, 0xfe8c, 0xfe8d, 0xfe8e, 0xfe8f, 0xfe90, 0xfe91, 0xfe92, 0xfe93, 0xfe94, 0xfe95, 0xfe96, 0xfe97, 0xfe98, 0xfe99, 0xfe9a, 0xfe9b, 0xfe9c, 0xfe9d, 0xfe9e, 0xfe9f, 0xfea0, 0xfea1, 0xfea2, 0xfea3, 0xfea4, 0xfea5, 0xfea6, 0xfea7, 0xfea8, 0xfea9, 0xfeaa, 0xfeab, 0xfeac, 0xfead, 0xfeae, 0xfeaf, 0xfeb0, 0xfeb1, 0xfeb2, 0xfeb3, 0xfeb4, 0xfeb5, 0xfeb6, 0xfeb7, 0xfeb8, 0xfeb9, 0xfeba, 0xfebb, 0xfebc, 0xfebd, 0xfebe, 0xfebf, 0xfec0, 0xfec1, 0xfec2, 0xfec3, 0xfec4, 0xfec5, 0xfec6, 0xfec7, 0xfec8, 0xfec9, 0xfeca, 0xfecb, 0xfecc, 0xfecd, 0xfece, 0xfecf, 0xfed0, 0xfed1, 0xfed2, 0xfed3, 0xfed4, 0xfed5, 0xfed6, 0xfed7, 
0xfed8, 0xfed9, 0xfeda, 0xfedb, 0xfedc, 0xfedd, 0xfede, 0xfedf, 0xfee0, 0xfee1, 0xfee2, 0xfee3, 0xfee4, 0xfee5, 0xfee6, 0xfee7, 0xfee8, 0xfee9, 0xfeea, 0xfeeb, 0xfeec, 0xfeed, 0xfeee, 0xfeef, 0xfef0, 0xfef1, 0xfef2, 0xfef3, 0xfef4, 0xfef5, 0xfef6, 0xfef7, 0xfef8, 0xfef9, 0xfefa, 0xfefb, 0xfefc, 0xfeff, 0xfff9, 0xfffa, 0xfffb, 0xfffc, 0xfffd, 0x10300, 0x10301, 0x10302, 0x10303, 0x10304, 0x10305, 0x10306, 0x10307, 0x10308, 0x10309, 0x1030a, 0x1030b, 0x1030c, 0x1030d, 0x1030e, 0x1030f, 0x10310, 0x10311, 0x10312, 0x10313, 0x10314, 0x10315, 0x10316, 0x10317, 0x10318, 0x10319, 0x1031a, 0x1031b, 0x1031c, 0x1031d, 0x1031e, 0x10320, 0x10321, 0x10322, 0x10323, 0x1d300, 0x1d301, 0x1d302, 0x1d303, 0x1d304, 0x1d305, 0x1d306, 0x1d307, 0x1d308, 0x1d309, 0x1d30a, 0x1d30b, 0x1d30c, 0x1d30d, 0x1d30e, 0x1d30f, 0x1d310, 0x1d311, 0x1d312, 0x1d313, 0x1d314, 0x1d315, 0x1d316, 0x1d317, 0x1d318, 0x1d319, 0x1d31a, 0x1d31b, 0x1d31c, 0x1d31d, 0x1d31e, 0x1d31f, 0x1d320, 0x1d321, 0x1d322, 0x1d323, 0x1d324, 0x1d325, 0x1d326, 0x1d327, 0x1d328, 0x1d329, 0x1d32a, 0x1d32b, 0x1d32c, 0x1d32d, 0x1d32e, 0x1d32f, 0x1d330, 0x1d331, 0x1d332, 0x1d333, 0x1d334, 0x1d335, 0x1d336, 0x1d337, 0x1d338, 0x1d339, 0x1d33a, 0x1d33b, 0x1d33c, 0x1d33d, 0x1d33e, 0x1d33f, 0x1d340, 0x1d341, 0x1d342, 0x1d343, 0x1d344, 0x1d345, 0x1d346, 0x1d347, 0x1d348, 0x1d349, 0x1d34a, 0x1d34b, 0x1d34c, 0x1d34d, 0x1d34e, 0x1d34f, 0x1d350, 0x1d351, 0x1d352, 0x1d353, 0x1d354, 0x1d355, 0x1d356, 0x1d538, 0x1d539, 0x1d53b, 0x1d53c, 0x1d53d, 0x1d53e, 0x1d540, 0x1d541, 0x1d542, 0x1d543, 0x1d544, 0x1d546, 0x1d54a, 0x1d54b, 0x1d54c, 0x1d54d, 0x1d54e, 0x1d54f, 0x1d550, 0x1d552, 0x1d553, 0x1d554, 0x1d555, 0x1d556, 0x1d557, 0x1d558, 0x1d559, 0x1d55a, 0x1d55b, 0x1d55c, 0x1d55d, 0x1d55e, 0x1d55f, 0x1d560, 0x1d561, 0x1d562, 0x1d563, 0x1d564, 0x1d565, 0x1d566, 0x1d567, 0x1d568, 0x1d569, 0x1d56a, 0x1d56b, 0x1d5a0, 0x1d5a1, 0x1d5a2, 0x1d5a3, 0x1d5a4, 0x1d5a5, 0x1d5a6, 0x1d5a7, 0x1d5a8, 0x1d5a9, 0x1d5aa, 0x1d5ab, 0x1d5ac, 0x1d5ad, 0x1d5ae, 0x1d5af, 0x1d5b0, 
0x1d5b1, 0x1d5b2, 0x1d5b3, 0x1d5b4, 0x1d5b5, 0x1d5b6, 0x1d5b7, 0x1d5b8, 0x1d5b9, 0x1d5ba, 0x1d5bb, 0x1d5bc, 0x1d5bd, 0x1d5be, 0x1d5bf, 0x1d5c0, 0x1d5c1, 0x1d5c2, 0x1d5c3, 0x1d5c4, 0x1d5c5, 0x1d5c6, 0x1d5c7, 0x1d5c8, 0x1d5c9, 0x1d5ca, 0x1d5cb, 0x1d5cc, 0x1d5cd, 0x1d5ce, 0x1d5cf, 0x1d5d0, 0x1d5d1, 0x1d5d2, 0x1d5d3, 0x1d7d8, 0x1d7d9, 0x1d7da, 0x1d7db, 0x1d7dc, 0x1d7dd, 0x1d7de, 0x1d7df, 0x1d7e0, 0x1d7e1, 0x1d7e2, 0x1d7e3, 0x1d7e4, 0x1d7e5, 0x1d7e6, 0x1d7e7, 0x1d7e8, 0x1d7e9, 0x1d7ea, 0x1d7eb, 0x1f030, 0x1f031, 0x1f032, 0x1f033, 0x1f034, 0x1f035, 0x1f036, 0x1f037, 0x1f038, 0x1f039, 0x1f03a, 0x1f03b, 0x1f03c, 0x1f03d, 0x1f03e, 0x1f03f, 0x1f040, 0x1f041, 0x1f042, 0x1f043, 0x1f044, 0x1f045, 0x1f046, 0x1f047, 0x1f048, 0x1f049, 0x1f04a, 0x1f04b, 0x1f04c, 0x1f04d, 0x1f04e, 0x1f04f, 0x1f050, 0x1f051, 0x1f052, 0x1f053, 0x1f054, 0x1f055, 0x1f056, 0x1f057, 0x1f058, 0x1f059, 0x1f05a, 0x1f05b, 0x1f05c, 0x1f05d, 0x1f05e, 0x1f05f, 0x1f060, 0x1f061, 0x1f062, 0x1f063, 0x1f064, 0x1f065, 0x1f066, 0x1f067, 0x1f068, 0x1f069, 0x1f06a, 0x1f06b, 0x1f06c, 0x1f06d, 0x1f06e, 0x1f06f, 0x1f070, 0x1f071, 0x1f072, 0x1f073, 0x1f074, 0x1f075, 0x1f076, 0x1f077, 0x1f078, 0x1f079, 0x1f07a, 0x1f07b, 0x1f07c, 0x1f07d, 0x1f07e, 0x1f07f, 0x1f080, 0x1f081, 0x1f082, 0x1f083, 0x1f084, 0x1f085, 0x1f086, 0x1f087, 0x1f088, 0x1f089, 0x1f08a, 0x1f08b, 0x1f08c, 0x1f08d, 0x1f08e, 0x1f08f, 0x1f090, 0x1f091, 0x1f092, 0x1f093, 0x1f0a0, 0x1f0a1, 0x1f0a2, 0x1f0a3, 0x1f0a4, 0x1f0a5, 0x1f0a6, 0x1f0a7, 0x1f0a8, 0x1f0a9, 0x1f0aa, 0x1f0ab, 0x1f0ac, 0x1f0ad, 0x1f0ae, 0x1f0b1, 0x1f0b2, 0x1f0b3, 0x1f0b4, 0x1f0b5, 0x1f0b6, 0x1f0b7, 0x1f0b8, 0x1f0b9, 0x1f0ba, 0x1f0bb, 0x1f0bc, 0x1f0bd, 0x1f0be, 0x1f0c1, 0x1f0c2, 0x1f0c3, 0x1f0c4, 0x1f0c5, 0x1f0c6, 0x1f0c7, 0x1f0c8, 0x1f0c9, 0x1f0ca, 0x1f0cb, 0x1f0cc, 0x1f0cd, 0x1f0ce, 0x1f0cf, 0x1f0d1, 0x1f0d2, 0x1f0d3, 0x1f0d4, 0x1f0d5, 0x1f0d6, 0x1f0d7, 0x1f0d8, 0x1f0d9, 0x1f0da, 0x1f0db, 0x1f0dc, 0x1f0dd, 0x1f0de, 0x1f0df, 0x1f42d, 0x1f42e, 0x1f431, 0x1f435, 0x1f600, 0x1f601, 0x1f602, 0x1f603, 
0x1f604, 0x1f605, 0x1f606, 0x1f607, 0x1f608, 0x1f609, 0x1f60a, 0x1f60b, 0x1f60c, 0x1f60d, 0x1f60e, 0x1f60f, 0x1f610, 0x1f611, 0x1f612, 0x1f613, 0x1f614, 0x1f615, 0x1f616, 0x1f617, 0x1f618, 0x1f619, 0x1f61a, 0x1f61b, 0x1f61c, 0x1f61d, 0x1f61e, 0x1f61f, 0x1f620, 0x1f621, 0x1f622, 0x1f623, 0x1f625, 0x1f626, 0x1f627, 0x1f628, 0x1f629, 0x1f62a, 0x1f62b, 0x1f62d, 0x1f62e, 0x1f62f, 0x1f630, 0x1f631, 0x1f632, 0x1f633, 0x1f634, 0x1f635, 0x1f636, 0x1f637, 0x1f638, 0x1f639, 0x1f63a, 0x1f63b, 0x1f63c, 0x1f63d, 0x1f63e, 0x1f63f, 0x1f640 )) # Cache of open fonts FONT_CACHE = {} def is_base(text): ''' Checks whether text should use CJK fonts. ''' return min([ord(char) in BASE_CHARS for char in text]) def get_font(size, bold=False, base_font=True): ''' Returns PIL font object matching parameters. ''' cache_key = '%d-%s-%s' % (size, bold, base_font) if cache_key not in FONT_CACHE: if base_font: if bold: name = 'DejaVuSans-Bold.ttf' else: name = 'DejaVuSans.ttf' else: name = 'DroidSansFallback.ttf' FONT_CACHE[cache_key] = ImageFont.truetype( os.path.join(appsettings.TTF_PATH, name), size ) return FONT_CACHE[cache_key]
gpl-3.0
laubed/pyMetric
pyMetricServer/handler/metric.py
1
4050
""" Metric data handler Used to add entries to the database or fetch data from it. Only handles the requests get/post. Fetching or inserting data is done in the database module """ import time from flask import request from flask.json import jsonify from werkzeug.exceptions import abort from pyMetricServer import app from pyMetricServer.system.database import getMetric, insertMetric from pyMetricServer.system.decorators import crossdomain @app.route("/api/v1.0/metrics/get") @crossdomain(origin="*") def get_metric(): """ Used to get a list of metric data. GET Params: fromtime - specifies a min timestamp to search for totime - specifies a max timestamp to search for origin - specifies the origin field in the database to search for key - specifies the key field in the database to search for count - specifies how many rows should be returned order - specifies the field by which resulsts get ordered desc - specifies wether the results are sorted in ascending oder descending order Example: GET /api/v1.0/metrics/get?fromtime=0&totime=123000423&origin=10.0.0.9&key=cpu_usage&count=20&order=time&desc=false :return: returns all rows in metric data which matches specified criteria """ fromtime = request.args.get("fromtime", None) totime = request.args.get("totime", None) origin = request.args.get("origin", None) key = request.args.get("key", None) count = request.args.get("count", None) order = (request.args.get("order", "Time"), bool(request.args.get("desc", True))) res = getMetric(fromtime, totime, origin, key, count, order) return jsonify({ "results": res, "resultcount": len(res), "param_fromtime": fromtime, "param_totime": totime, "param_origin": origin, "param_key": key, "param_count": count, "param_order": order[0], "param_desc": order[1] }) @app.route("/api/v1.0/metrics/current") @crossdomain(origin="*") def current_metric(): """ Used to get the last entry from a specific origin and key (You can even use it without origin and key so it shows the last data pushed to the 
server) GET Params: origin - specifies the origin field in the database to search for key - specifies the key field in the database to search for Example: GET /api/v1.0/metrics/current?origin=10.0.0.9&key=cpu_usage :return: returns all rows in metric data which matches specified criteria """ fromtime = None totime = None origin = request.args.get("origin", None) key = request.args.get("key", None) count = 1 order = ("time", True) res = getMetric(fromtime, totime, origin, key, count, order) return jsonify({ "results": res, "resultcount": len(res), "param_fromtime": fromtime, "param_totime": totime, "param_origin": origin, "param_key": key, "param_count": count, "param_order": order[0], "param_desc": order[1] }) @app.route('/api/v1.0/metrics', methods=['POST']) @crossdomain(origin='*') def add_metric(): """ Used to insert a metric value into database Post Params: origin - Origin of the metric data key - Key of the metric data value - Value of the metric data time - Timestamp of the metric data (optional) :return: returns the row just inserted """ origin = request.form.get("origin", None) key = request.form.get("key", None) value = request.form.get("value", None) times = request.form.get("time", time.time()) if origin == None or key == None or value == None: abort(400) else: res = insertMetric(times, origin, key, value) return jsonify({ "results": res, "resultcount": len(res) })
mit
BackupGGCode/python-for-android
python-modules/twisted/twisted/words/protocols/irc.py
49
100168
# -*- test-case-name: twisted.words.test.test_irc -*- # Copyright (c) 2001-2010 Twisted Matrix Laboratories. # See LICENSE for details. """ Internet Relay Chat Protocol for client and server. Future Plans ============ The way the IRCClient class works here encourages people to implement IRC clients by subclassing the ephemeral protocol class, and it tends to end up with way more state than it should for an object which will be destroyed as soon as the TCP transport drops. Someone oughta do something about that, ya know? The DCC support needs to have more hooks for the client for it to be able to ask the user things like "Do you want to accept this session?" and "Transfer #2 is 67% done." and otherwise manage the DCC sessions. Test coverage needs to be better. @var MAX_COMMAND_LENGTH: The maximum length of a command, as defined by RFC 2812 section 2.3. @author: Kevin Turner @see: RFC 1459: Internet Relay Chat Protocol @see: RFC 2812: Internet Relay Chat: Client Protocol @see: U{The Client-To-Client-Protocol <http://www.irchelp.org/irchelp/rfc/ctcpspec.html>} """ import errno, os, random, re, stat, struct, sys, time, types, traceback import string, socket import warnings import textwrap from os import path from twisted.internet import reactor, protocol from twisted.persisted import styles from twisted.protocols import basic from twisted.python import log, reflect, text NUL = chr(0) CR = chr(015) NL = chr(012) LF = NL SPC = chr(040) # This includes the CRLF terminator characters. MAX_COMMAND_LENGTH = 512 CHANNEL_PREFIXES = '&#!+' class IRCBadMessage(Exception): pass class IRCPasswordMismatch(Exception): pass class IRCBadModes(ValueError): """ A malformed mode was encountered while attempting to parse a mode string. """ def parsemsg(s): """Breaks a message from an IRC server into its prefix, command, and arguments. 
""" prefix = '' trailing = [] if not s: raise IRCBadMessage("Empty line.") if s[0] == ':': prefix, s = s[1:].split(' ', 1) if s.find(' :') != -1: s, trailing = s.split(' :', 1) args = s.split() args.append(trailing) else: args = s.split() command = args.pop(0) return prefix, command, args def split(str, length=80): """ Split a string into multiple lines. Whitespace near C{str[length]} will be preferred as a breaking point. C{"\\n"} will also be used as a breaking point. @param str: The string to split. @type str: C{str} @param length: The maximum length which will be allowed for any string in the result. @type length: C{int} @return: C{list} of C{str} """ return [chunk for line in str.split('\n') for chunk in textwrap.wrap(line, length)] def _intOrDefault(value, default=None): """ Convert a value to an integer if possible. @rtype: C{int} or type of L{default} @return: An integer when C{value} can be converted to an integer, otherwise return C{default} """ if value: try: return int(value) except (TypeError, ValueError): pass return default class UnhandledCommand(RuntimeError): """ A command dispatcher could not locate an appropriate command handler. """ class _CommandDispatcherMixin(object): """ Dispatch commands to handlers based on their name. Command handler names should be of the form C{prefix_commandName}, where C{prefix} is the value specified by L{prefix}, and must accept the parameters as given to L{dispatch}. Attempting to mix this in more than once for a single class will cause strange behaviour, due to L{prefix} being overwritten. @type prefix: C{str} @ivar prefix: Command handler prefix, used to locate handler attributes """ prefix = None def dispatch(self, commandName, *args): """ Perform actual command dispatch. 
""" def _getMethodName(command): return '%s_%s' % (self.prefix, command) def _getMethod(name): return getattr(self, _getMethodName(name), None) method = _getMethod(commandName) if method is not None: return method(*args) method = _getMethod('unknown') if method is None: raise UnhandledCommand("No handler for %r could be found" % (_getMethodName(commandName),)) return method(commandName, *args) def parseModes(modes, params, paramModes=('', '')): """ Parse an IRC mode string. The mode string is parsed into two lists of mode changes (added and removed), with each mode change represented as C{(mode, param)} where mode is the mode character, and param is the parameter passed for that mode, or C{None} if no parameter is required. @type modes: C{str} @param modes: Modes string to parse. @type params: C{list} @param params: Parameters specified along with L{modes}. @type paramModes: C{(str, str)} @param paramModes: A pair of strings (C{(add, remove)}) that indicate which modes take parameters when added or removed. @returns: Two lists of mode changes, one for modes added and the other for modes removed respectively, mode changes in each list are represented as C{(mode, param)}. """ if len(modes) == 0: raise IRCBadModes('Empty mode string') if modes[0] not in '+-': raise IRCBadModes('Malformed modes string: %r' % (modes,)) changes = ([], []) direction = None count = -1 for ch in modes: if ch in '+-': if count == 0: raise IRCBadModes('Empty mode sequence: %r' % (modes,)) direction = '+-'.index(ch) count = 0 else: param = None if ch in paramModes[direction]: try: param = params.pop(0) except IndexError: raise IRCBadModes('Not enough parameters: %r' % (ch,)) changes[direction].append((ch, param)) count += 1 if len(params) > 0: raise IRCBadModes('Too many parameters: %r %r' % (modes, params)) if count == 0: raise IRCBadModes('Empty mode sequence: %r' % (modes,)) return changes class IRC(protocol.Protocol): """ Internet Relay Chat server protocol. 
""" buffer = "" hostname = None encoding = None def connectionMade(self): self.channels = [] if self.hostname is None: self.hostname = socket.getfqdn() def sendLine(self, line): if self.encoding is not None: if isinstance(line, unicode): line = line.encode(self.encoding) self.transport.write("%s%s%s" % (line, CR, LF)) def sendMessage(self, command, *parameter_list, **prefix): """ Send a line formatted as an IRC message. First argument is the command, all subsequent arguments are parameters to that command. If a prefix is desired, it may be specified with the keyword argument 'prefix'. """ if not command: raise ValueError, "IRC message requires a command." if ' ' in command or command[0] == ':': # Not the ONLY way to screw up, but provides a little # sanity checking to catch likely dumb mistakes. raise ValueError, "Somebody screwed up, 'cuz this doesn't" \ " look like a command to me: %s" % command line = string.join([command] + list(parameter_list)) if prefix.has_key('prefix'): line = ":%s %s" % (prefix['prefix'], line) self.sendLine(line) if len(parameter_list) > 15: log.msg("Message has %d parameters (RFC allows 15):\n%s" % (len(parameter_list), line)) def dataReceived(self, data): """ This hack is to support mIRC, which sends LF only, even though the RFC says CRLF. (Also, the flexibility of LineReceiver to turn "line mode" on and off was not required.) """ lines = (self.buffer + data).split(LF) # Put the (possibly empty) element after the last LF back in the # buffer self.buffer = lines.pop() for line in lines: if len(line) <= 2: # This is a blank line, at best. continue if line[-1] == CR: line = line[:-1] prefix, command, params = parsemsg(line) # mIRC is a big pile of doo-doo command = command.upper() # DEBUG: log.msg( "%s %s %s" % (prefix, command, params)) self.handleCommand(command, prefix, params) def handleCommand(self, command, prefix, params): """ Determine the function to call for the given command and call it with the given arguments. 
""" method = getattr(self, "irc_%s" % command, None) try: if method is not None: method(prefix, params) else: self.irc_unknown(prefix, command, params) except: log.deferr() def irc_unknown(self, prefix, command, params): """ Called by L{handleCommand} on a command that doesn't have a defined handler. Subclasses should override this method. """ raise NotImplementedError(command, prefix, params) # Helper methods def privmsg(self, sender, recip, message): """ Send a message to a channel or user @type sender: C{str} or C{unicode} @param sender: Who is sending this message. Should be of the form username!ident@hostmask (unless you know better!). @type recip: C{str} or C{unicode} @param recip: The recipient of this message. If a channel, it must start with a channel prefix. @type message: C{str} or C{unicode} @param message: The message being sent. """ self.sendLine(":%s PRIVMSG %s :%s" % (sender, recip, lowQuote(message))) def notice(self, sender, recip, message): """ Send a "notice" to a channel or user. Notices differ from privmsgs in that the RFC claims they are different. Robots are supposed to send notices and not respond to them. Clients typically display notices differently from privmsgs. @type sender: C{str} or C{unicode} @param sender: Who is sending this message. Should be of the form username!ident@hostmask (unless you know better!). @type recip: C{str} or C{unicode} @param recip: The recipient of this message. If a channel, it must start with a channel prefix. @type message: C{str} or C{unicode} @param message: The message being sent. """ self.sendLine(":%s NOTICE %s :%s" % (sender, recip, message)) def action(self, sender, recip, message): """ Send an action to a channel or user. @type sender: C{str} or C{unicode} @param sender: Who is sending this message. Should be of the form username!ident@hostmask (unless you know better!). @type recip: C{str} or C{unicode} @param recip: The recipient of this message. If a channel, it must start with a channel prefix. 
@type message: C{str} or C{unicode} @param message: The action being sent. """ self.sendLine(":%s ACTION %s :%s" % (sender, recip, message)) def topic(self, user, channel, topic, author=None): """ Send the topic to a user. @type user: C{str} or C{unicode} @param user: The user receiving the topic. Only their nick name, not the full hostmask. @type channel: C{str} or C{unicode} @param channel: The channel for which this is the topic. @type topic: C{str} or C{unicode} or C{None} @param topic: The topic string, unquoted, or None if there is no topic. @type author: C{str} or C{unicode} @param author: If the topic is being changed, the full username and hostmask of the person changing it. """ if author is None: if topic is None: self.sendLine(':%s %s %s %s :%s' % ( self.hostname, RPL_NOTOPIC, user, channel, 'No topic is set.')) else: self.sendLine(":%s %s %s %s :%s" % ( self.hostname, RPL_TOPIC, user, channel, lowQuote(topic))) else: self.sendLine(":%s TOPIC %s :%s" % (author, channel, lowQuote(topic))) def topicAuthor(self, user, channel, author, date): """ Send the author of and time at which a topic was set for the given channel. This sends a 333 reply message, which is not part of the IRC RFC. @type user: C{str} or C{unicode} @param user: The user receiving the topic. Only their nick name, not the full hostmask. @type channel: C{str} or C{unicode} @param channel: The channel for which this information is relevant. @type author: C{str} or C{unicode} @param author: The nickname (without hostmask) of the user who last set the topic. @type date: C{int} @param date: A POSIX timestamp (number of seconds since the epoch) at which the topic was last set. """ self.sendLine(':%s %d %s %s %s %d' % ( self.hostname, 333, user, channel, author, date)) def names(self, user, channel, names): """ Send the names of a channel's participants to a user. @type user: C{str} or C{unicode} @param user: The user receiving the name list. Only their nick name, not the full hostmask. 
@type channel: C{str} or C{unicode} @param channel: The channel for which this is the namelist. @type names: C{list} of C{str} or C{unicode} @param names: The names to send. """ # XXX If unicode is given, these limits are not quite correct prefixLength = len(channel) + len(user) + 10 namesLength = 512 - prefixLength L = [] count = 0 for n in names: if count + len(n) + 1 > namesLength: self.sendLine(":%s %s %s = %s :%s" % ( self.hostname, RPL_NAMREPLY, user, channel, ' '.join(L))) L = [n] count = len(n) else: L.append(n) count += len(n) + 1 if L: self.sendLine(":%s %s %s = %s :%s" % ( self.hostname, RPL_NAMREPLY, user, channel, ' '.join(L))) self.sendLine(":%s %s %s %s :End of /NAMES list" % ( self.hostname, RPL_ENDOFNAMES, user, channel)) def who(self, user, channel, memberInfo): """ Send a list of users participating in a channel. @type user: C{str} or C{unicode} @param user: The user receiving this member information. Only their nick name, not the full hostmask. @type channel: C{str} or C{unicode} @param channel: The channel for which this is the member information. @type memberInfo: C{list} of C{tuples} @param memberInfo: For each member of the given channel, a 7-tuple containing their username, their hostmask, the server to which they are connected, their nickname, the letter "H" or "G" (standing for "Here" or "Gone"), the hopcount from C{user} to this member, and this member's real name. """ for info in memberInfo: (username, hostmask, server, nickname, flag, hops, realName) = info assert flag in ("H", "G") self.sendLine(":%s %s %s %s %s %s %s %s %s :%d %s" % ( self.hostname, RPL_WHOREPLY, user, channel, username, hostmask, server, nickname, flag, hops, realName)) self.sendLine(":%s %s %s %s :End of /WHO list." % ( self.hostname, RPL_ENDOFWHO, user, channel)) def whois(self, user, nick, username, hostname, realName, server, serverInfo, oper, idle, signOn, channels): """ Send information about the state of a particular user. 
@type user: C{str} or C{unicode} @param user: The user receiving this information. Only their nick name, not the full hostmask. @type nick: C{str} or C{unicode} @param nick: The nickname of the user this information describes. @type username: C{str} or C{unicode} @param username: The user's username (eg, ident response) @type hostname: C{str} @param hostname: The user's hostmask @type realName: C{str} or C{unicode} @param realName: The user's real name @type server: C{str} or C{unicode} @param server: The name of the server to which the user is connected @type serverInfo: C{str} or C{unicode} @param serverInfo: A descriptive string about that server @type oper: C{bool} @param oper: Indicates whether the user is an IRC operator @type idle: C{int} @param idle: The number of seconds since the user last sent a message @type signOn: C{int} @param signOn: A POSIX timestamp (number of seconds since the epoch) indicating the time the user signed on @type channels: C{list} of C{str} or C{unicode} @param channels: A list of the channels which the user is participating in """ self.sendLine(":%s %s %s %s %s %s * :%s" % ( self.hostname, RPL_WHOISUSER, user, nick, username, hostname, realName)) self.sendLine(":%s %s %s %s %s :%s" % ( self.hostname, RPL_WHOISSERVER, user, nick, server, serverInfo)) if oper: self.sendLine(":%s %s %s %s :is an IRC operator" % ( self.hostname, RPL_WHOISOPERATOR, user, nick)) self.sendLine(":%s %s %s %s %d %d :seconds idle, signon time" % ( self.hostname, RPL_WHOISIDLE, user, nick, idle, signOn)) self.sendLine(":%s %s %s %s :%s" % ( self.hostname, RPL_WHOISCHANNELS, user, nick, ' '.join(channels))) self.sendLine(":%s %s %s %s :End of WHOIS list." % ( self.hostname, RPL_ENDOFWHOIS, user, nick)) def join(self, who, where): """ Send a join message. @type who: C{str} or C{unicode} @param who: The name of the user joining. Should be of the form username!ident@hostmask (unless you know better!). 
@type where: C{str} or C{unicode} @param where: The channel the user is joining. """ self.sendLine(":%s JOIN %s" % (who, where)) def part(self, who, where, reason=None): """ Send a part message. @type who: C{str} or C{unicode} @param who: The name of the user joining. Should be of the form username!ident@hostmask (unless you know better!). @type where: C{str} or C{unicode} @param where: The channel the user is joining. @type reason: C{str} or C{unicode} @param reason: A string describing the misery which caused this poor soul to depart. """ if reason: self.sendLine(":%s PART %s :%s" % (who, where, reason)) else: self.sendLine(":%s PART %s" % (who, where)) def channelMode(self, user, channel, mode, *args): """ Send information about the mode of a channel. @type user: C{str} or C{unicode} @param user: The user receiving the name list. Only their nick name, not the full hostmask. @type channel: C{str} or C{unicode} @param channel: The channel for which this is the namelist. @type mode: C{str} @param mode: A string describing this channel's modes. @param args: Any additional arguments required by the modes. """ self.sendLine(":%s %s %s %s %s %s" % ( self.hostname, RPL_CHANNELMODEIS, user, channel, mode, ' '.join(args))) class ServerSupportedFeatures(_CommandDispatcherMixin): """ Handle ISUPPORT messages. Feature names match those in the ISUPPORT RFC draft identically. Information regarding the specifics of ISUPPORT was gleaned from <http://www.irc.org/tech_docs/draft-brocklesby-irc-isupport-03.txt>. """ prefix = 'isupport' def __init__(self): self._features = { 'CHANNELLEN': 200, 'CHANTYPES': tuple('#&'), 'MODES': 3, 'NICKLEN': 9, 'PREFIX': self._parsePrefixParam('(ovh)@+%'), # The ISUPPORT draft explicitly says that there is no default for # CHANMODES, but we're defaulting it here to handle the case where # the IRC server doesn't send us any ISUPPORT information, since # IRCClient.getChannelModeParams relies on this value. 
'CHANMODES': self._parseChanModesParam(['b', '', 'lk'])} def _splitParamArgs(cls, params, valueProcessor=None): """ Split ISUPPORT parameter arguments. Values can optionally be processed by C{valueProcessor}. For example:: >>> ServerSupportedFeatures._splitParamArgs(['A:1', 'B:2']) (('A', '1'), ('B', '2')) @type params: C{iterable} of C{str} @type valueProcessor: C{callable} taking {str} @param valueProcessor: Callable to process argument values, or C{None} to perform no processing @rtype: C{list} of C{(str, object)} @return: Sequence of C{(name, processedValue)} """ if valueProcessor is None: valueProcessor = lambda x: x def _parse(): for param in params: if ':' not in param: param += ':' a, b = param.split(':', 1) yield a, valueProcessor(b) return list(_parse()) _splitParamArgs = classmethod(_splitParamArgs) def _unescapeParamValue(cls, value): """ Unescape an ISUPPORT parameter. The only form of supported escape is C{\\xHH}, where HH must be a valid 2-digit hexadecimal number. @rtype: C{str} """ def _unescape(): parts = value.split('\\x') # The first part can never be preceeded by the escape. yield parts.pop(0) for s in parts: octet, rest = s[:2], s[2:] try: octet = int(octet, 16) except ValueError: raise ValueError('Invalid hex octet: %r' % (octet,)) yield chr(octet) + rest if '\\x' not in value: return value return ''.join(_unescape()) _unescapeParamValue = classmethod(_unescapeParamValue) def _splitParam(cls, param): """ Split an ISUPPORT parameter. @type param: C{str} @rtype: C{(str, list)} @return C{(key, arguments)} """ if '=' not in param: param += '=' key, value = param.split('=', 1) return key, map(cls._unescapeParamValue, value.split(',')) _splitParam = classmethod(_splitParam) def _parsePrefixParam(cls, prefix): """ Parse the ISUPPORT "PREFIX" parameter. The order in which the parameter arguments appear is significant, the earlier a mode appears the more privileges it gives. 
@rtype: C{dict} mapping C{str} to C{(str, int)} @return: A dictionary mapping a mode character to a two-tuple of C({symbol, priority)}, the lower a priority (the lowest being C{0}) the more privileges it gives """ if not prefix: return None if prefix[0] != '(' and ')' not in prefix: raise ValueError('Malformed PREFIX parameter') modes, symbols = prefix.split(')', 1) symbols = zip(symbols, xrange(len(symbols))) modes = modes[1:] return dict(zip(modes, symbols)) _parsePrefixParam = classmethod(_parsePrefixParam) def _parseChanModesParam(self, params): """ Parse the ISUPPORT "CHANMODES" parameter. See L{isupport_CHANMODES} for a detailed explanation of this parameter. """ names = ('addressModes', 'param', 'setParam', 'noParam') if len(params) > len(names): raise ValueError( 'Expecting a maximum of %d channel mode parameters, got %d' % ( len(names), len(params))) items = map(lambda key, value: (key, value or ''), names, params) return dict(items) _parseChanModesParam = classmethod(_parseChanModesParam) def getFeature(self, feature, default=None): """ Get a server supported feature's value. A feature with the value C{None} is equivalent to the feature being unsupported. @type feature: C{str} @param feature: Feature name @type default: C{object} @param default: The value to default to, assuming that C{feature} is not supported @return: Feature value """ return self._features.get(feature, default) def hasFeature(self, feature): """ Determine whether a feature is supported or not. @rtype: C{bool} """ return self.getFeature(feature) is not None def parse(self, params): """ Parse ISUPPORT parameters. If an unknown parameter is encountered, it is simply added to the dictionary, keyed by its name, as a tuple of the parameters provided. 
@type params: C{iterable} of C{str} @param params: Iterable of ISUPPORT parameters to parse """ for param in params: key, value = self._splitParam(param) if key.startswith('-'): self._features.pop(key[1:], None) else: self._features[key] = self.dispatch(key, value) def isupport_unknown(self, command, params): """ Unknown ISUPPORT parameter. """ return tuple(params) def isupport_CHANLIMIT(self, params): """ The maximum number of each channel type a user may join. """ return self._splitParamArgs(params, _intOrDefault) def isupport_CHANMODES(self, params): """ Available channel modes. There are 4 categories of channel mode:: addressModes - Modes that add or remove an address to or from a list, these modes always take a parameter. param - Modes that change a setting on a channel, these modes always take a parameter. setParam - Modes that change a setting on a channel, these modes only take a parameter when being set. noParam - Modes that change a setting on a channel, these modes never take a parameter. """ try: return self._parseChanModesParam(params) except ValueError: return self.getFeature('CHANMODES') def isupport_CHANNELLEN(self, params): """ Maximum length of a channel name a client may create. """ return _intOrDefault(params[0], self.getFeature('CHANNELLEN')) def isupport_CHANTYPES(self, params): """ Valid channel prefixes. """ return tuple(params[0]) def isupport_EXCEPTS(self, params): """ Mode character for "ban exceptions". The presence of this parameter indicates that the server supports this functionality. """ return params[0] or 'e' def isupport_IDCHAN(self, params): """ Safe channel identifiers. The presence of this parameter indicates that the server supports this functionality. """ return self._splitParamArgs(params) def isupport_INVEX(self, params): """ Mode character for "invite exceptions". The presence of this parameter indicates that the server supports this functionality. 
""" return params[0] or 'I' def isupport_KICKLEN(self, params): """ Maximum length of a kick message a client may provide. """ return _intOrDefault(params[0]) def isupport_MAXLIST(self, params): """ Maximum number of "list modes" a client may set on a channel at once. List modes are identified by the "addressModes" key in CHANMODES. """ return self._splitParamArgs(params, _intOrDefault) def isupport_MODES(self, params): """ Maximum number of modes accepting parameters that may be sent, by a client, in a single MODE command. """ return _intOrDefault(params[0]) def isupport_NETWORK(self, params): """ IRC network name. """ return params[0] def isupport_NICKLEN(self, params): """ Maximum length of a nickname the client may use. """ return _intOrDefault(params[0], self.getFeature('NICKLEN')) def isupport_PREFIX(self, params): """ Mapping of channel modes that clients may have to status flags. """ try: return self._parsePrefixParam(params[0]) except ValueError: return self.getFeature('PREFIX') def isupport_SAFELIST(self, params): """ Flag indicating that a client may request a LIST without being disconnected due to the large amount of data generated. """ return True def isupport_STATUSMSG(self, params): """ The server supports sending messages to only to clients on a channel with a specific status. """ return params[0] def isupport_TARGMAX(self, params): """ Maximum number of targets allowable for commands that accept multiple targets. """ return dict(self._splitParamArgs(params, _intOrDefault)) def isupport_TOPICLEN(self, params): """ Maximum length of a topic that may be set. """ return _intOrDefault(params[0]) class IRCClient(basic.LineReceiver): """Internet Relay Chat client protocol, with sprinkles. In addition to providing an interface for an IRC client protocol, this class also contains reasonable implementations of many common CTCP methods. TODO ==== - Limit the length of messages sent (because the IRC server probably does). 
- Add flood protection/rate limiting for my CTCP replies. - NickServ cooperation. (a mix-in?) - Heartbeat. The transport may die in such a way that it does not realize it is dead until it is written to. Sending something (like "PING this.irc-host.net") during idle peroids would alleviate that. If you're concerned with the stability of the host as well as that of the transport, you might care to watch for the corresponding PONG. @ivar nickname: Nickname the client will use. @ivar password: Password used to log on to the server. May be C{None}. @ivar realname: Supplied to the server during login as the "Real name" or "ircname". May be C{None}. @ivar username: Supplied to the server during login as the "User name". May be C{None} @ivar userinfo: Sent in reply to a C{USERINFO} CTCP query. If C{None}, no USERINFO reply will be sent. "This is used to transmit a string which is settable by the user (and never should be set by the client)." @ivar fingerReply: Sent in reply to a C{FINGER} CTCP query. If C{None}, no FINGER reply will be sent. @type fingerReply: Callable or String @ivar versionName: CTCP VERSION reply, client name. If C{None}, no VERSION reply will be sent. @type versionName: C{str}, or None. @ivar versionNum: CTCP VERSION reply, client version. @type versionNum: C{str}, or None. @ivar versionEnv: CTCP VERSION reply, environment the client is running in. @type versionEnv: C{str}, or None. @ivar sourceURL: CTCP SOURCE reply, a URL where the source code of this client may be found. If C{None}, no SOURCE reply will be sent. @ivar lineRate: Minimum delay between lines sent to the server. If C{None}, no delay will be imposed. @type lineRate: Number of Seconds. @ivar motd: Either L{None} or, between receipt of I{RPL_MOTDSTART} and I{RPL_ENDOFMOTD}, a L{list} of L{str}, each of which is the content of an I{RPL_MOTD} message. 
@ivar erroneousNickFallback: Default nickname assigned when an unregistered client triggers an C{ERR_ERRONEUSNICKNAME} while trying to register with an illegal nickname. @type erroneousNickFallback: C{str} @ivar _registered: Whether or not the user is registered. It becomes True once a welcome has been received from the server. @type _registered: C{bool} @ivar _attemptedNick: The nickname that will try to get registered. It may change if it is illegal or already taken. L{nickname} becomes the L{_attemptedNick} that is successfully registered. @type _attemptedNick: C{str} @type supported: L{ServerSupportedFeatures} @ivar supported: Available ISUPPORT features on the server """ motd = None nickname = 'irc' password = None realname = None username = None ### Responses to various CTCP queries. userinfo = None # fingerReply is a callable returning a string, or a str()able object. fingerReply = None versionName = None versionNum = None versionEnv = None sourceURL = "http://twistedmatrix.com/downloads/" dcc_destdir = '.' dcc_sessions = None # If this is false, no attempt will be made to identify # ourself to the server. performLogin = 1 lineRate = None _queue = None _queueEmptying = None delimiter = '\n' # '\r\n' will also work (see dataReceived) __pychecker__ = 'unusednames=params,prefix,channel' _registered = False _attemptedNick = '' erroneousNickFallback = 'defaultnick' def _reallySendLine(self, line): return basic.LineReceiver.sendLine(self, lowQuote(line) + '\r') def sendLine(self, line): if self.lineRate is None: self._reallySendLine(line) else: self._queue.append(line) if not self._queueEmptying: self._sendLine() def _sendLine(self): if self._queue: self._reallySendLine(self._queue.pop(0)) self._queueEmptying = reactor.callLater(self.lineRate, self._sendLine) else: self._queueEmptying = None ### Interface level client->user output methods ### ### You'll want to override these. 
### Methods relating to the server itself def created(self, when): """Called with creation date information about the server, usually at logon. @type when: C{str} @param when: A string describing when the server was created, probably. """ def yourHost(self, info): """Called with daemon information about the server, usually at logon. @type info: C{str} @param when: A string describing what software the server is running, probably. """ def myInfo(self, servername, version, umodes, cmodes): """Called with information about the server, usually at logon. @type servername: C{str} @param servername: The hostname of this server. @type version: C{str} @param version: A description of what software this server runs. @type umodes: C{str} @param umodes: All the available user modes. @type cmodes: C{str} @param cmodes: All the available channel modes. """ def luserClient(self, info): """Called with information about the number of connections, usually at logon. @type info: C{str} @param info: A description of the number of clients and servers connected to the network, probably. """ def bounce(self, info): """Called with information about where the client should reconnect. @type info: C{str} @param info: A plaintext description of the address that should be connected to. """ def isupport(self, options): """Called with various information about what the server supports. @type options: C{list} of C{str} @param options: Descriptions of features or limits of the server, possibly in the form "NAME=VALUE". """ def luserChannels(self, channels): """Called with the number of channels existant on the server. @type channels: C{int} """ def luserOp(self, ops): """Called with the number of ops logged on to the server. @type ops: C{int} """ def luserMe(self, info): """Called with information about the server connected to. @type info: C{str} @param info: A plaintext string describing the number of users and servers connected to this server. 
""" ### Methods involving me directly def privmsg(self, user, channel, message): """Called when I have a message from a user to me or a channel. """ pass def joined(self, channel): """ Called when I finish joining a channel. channel has the starting character (C{'#'}, C{'&'}, C{'!'}, or C{'+'}) intact. """ def left(self, channel): """ Called when I have left a channel. channel has the starting character (C{'#'}, C{'&'}, C{'!'}, or C{'+'}) intact. """ def noticed(self, user, channel, message): """ Called when I have a notice from a user to me or a channel. If the client makes any automated replies, it must not do so in response to a NOTICE message, per the RFC:: The difference between NOTICE and PRIVMSG is that automatic replies MUST NEVER be sent in response to a NOTICE message. [...] The object of this rule is to avoid loops between clients automatically sending something in response to something it received. """ def modeChanged(self, user, channel, set, modes, args): """Called when users or channel's modes are changed. @type user: C{str} @param user: The user and hostmask which instigated this change. @type channel: C{str} @param channel: The channel where the modes are changed. If args is empty the channel for which the modes are changing. If the changes are at server level it could be equal to C{user}. @type set: C{bool} or C{int} @param set: True if the mode(s) is being added, False if it is being removed. If some modes are added and others removed at the same time this function will be called twice, the first time with all the added modes, the second with the removed ones. (To change this behaviour override the irc_MODE method) @type modes: C{str} @param modes: The mode or modes which are being changed. @type args: C{tuple} @param args: Any additional information required for the mode change. """ def pong(self, user, secs): """Called with the results of a CTCP PING query. """ pass def signedOn(self): """Called after sucessfully signing on to the server. 
""" pass def kickedFrom(self, channel, kicker, message): """Called when I am kicked from a channel. """ pass def nickChanged(self, nick): """Called when my nick has been changed. """ self.nickname = nick ### Things I observe other people doing in a channel. def userJoined(self, user, channel): """Called when I see another user joining a channel. """ pass def userLeft(self, user, channel): """Called when I see another user leaving a channel. """ pass def userQuit(self, user, quitMessage): """Called when I see another user disconnect from the network. """ pass def userKicked(self, kickee, channel, kicker, message): """Called when I observe someone else being kicked from a channel. """ pass def action(self, user, channel, data): """Called when I see a user perform an ACTION on a channel. """ pass def topicUpdated(self, user, channel, newTopic): """In channel, user changed the topic to newTopic. Also called when first joining a channel. """ pass def userRenamed(self, oldname, newname): """A user changed their name from oldname to newname. """ pass ### Information from the server. def receivedMOTD(self, motd): """I received a message-of-the-day banner from the server. motd is a list of strings, where each string was sent as a seperate message from the server. To display, you might want to use:: '\\n'.join(motd) to get a nicely formatted string. """ pass ### user input commands, client->server ### Your client will want to invoke these. def join(self, channel, key=None): """ Join a channel. @type channel: C{str} @param channel: The name of the channel to join. If it has no prefix, C{'#'} will be prepended to it. @type key: C{str} @param key: If specified, the key used to join the channel. """ if channel[0] not in CHANNEL_PREFIXES: channel = '#' + channel if key: self.sendLine("JOIN %s %s" % (channel, key)) else: self.sendLine("JOIN %s" % (channel,)) def leave(self, channel, reason=None): """ Leave a channel. 
@type channel: C{str} @param channel: The name of the channel to leave. If it has no prefix, C{'#'} will be prepended to it. @type reason: C{str} @param reason: If given, the reason for leaving. """ if channel[0] not in CHANNEL_PREFIXES: channel = '#' + channel if reason: self.sendLine("PART %s :%s" % (channel, reason)) else: self.sendLine("PART %s" % (channel,)) def kick(self, channel, user, reason=None): """ Attempt to kick a user from a channel. @type channel: C{str} @param channel: The name of the channel to kick the user from. If it has no prefix, C{'#'} will be prepended to it. @type user: C{str} @param user: The nick of the user to kick. @type reason: C{str} @param reason: If given, the reason for kicking the user. """ if channel[0] not in CHANNEL_PREFIXES: channel = '#' + channel if reason: self.sendLine("KICK %s %s :%s" % (channel, user, reason)) else: self.sendLine("KICK %s %s" % (channel, user)) part = leave def topic(self, channel, topic=None): """ Attempt to set the topic of the given channel, or ask what it is. If topic is None, then I sent a topic query instead of trying to set the topic. The server should respond with a TOPIC message containing the current topic of the given channel. @type channel: C{str} @param channel: The name of the channel to change the topic on. If it has no prefix, C{'#'} will be prepended to it. @type topic: C{str} @param topic: If specified, what to set the topic to. """ # << TOPIC #xtestx :fff if channel[0] not in CHANNEL_PREFIXES: channel = '#' + channel if topic != None: self.sendLine("TOPIC %s :%s" % (channel, topic)) else: self.sendLine("TOPIC %s" % (channel,)) def mode(self, chan, set, modes, limit = None, user = None, mask = None): """ Change the modes on a user or channel. The C{limit}, C{user}, and C{mask} parameters are mutually exclusive. @type chan: C{str} @param chan: The name of the channel to operate on. @type set: C{bool} @param set: True to give the user or channel permissions and False to remove them. 
@type modes: C{str} @param modes: The mode flags to set on the user or channel. @type limit: C{int} @param limit: In conjuction with the C{'l'} mode flag, limits the number of users on the channel. @type user: C{str} @param user: The user to change the mode on. @type mask: C{str} @param mask: In conjuction with the C{'b'} mode flag, sets a mask of users to be banned from the channel. """ if set: line = 'MODE %s +%s' % (chan, modes) else: line = 'MODE %s -%s' % (chan, modes) if limit is not None: line = '%s %d' % (line, limit) elif user is not None: line = '%s %s' % (line, user) elif mask is not None: line = '%s %s' % (line, mask) self.sendLine(line) def say(self, channel, message, length = None): """ Send a message to a channel @type channel: C{str} @param channel: The channel to say the message on. If it has no prefix, C{'#'} will be prepended to it. @type message: C{str} @param message: The message to say. @type length: C{int} @param length: The maximum number of octets to send at a time. This has the effect of turning a single call to C{msg()} into multiple commands to the server. This is useful when long messages may be sent that would otherwise cause the server to kick us off or silently truncate the text we are sending. If None is passed, the entire message is always send in one command. """ if channel[0] not in CHANNEL_PREFIXES: channel = '#' + channel self.msg(channel, message, length) def msg(self, user, message, length=MAX_COMMAND_LENGTH): """ Send a message to a user or channel. The message will be split into multiple commands to the server if: - The message contains any newline characters - Any span between newline characters is longer than the given line-length. @param user: The username or channel name to which to direct the message. @type user: C{str} @param message: The text to send. @type message: C{str} @param length: The maximum number of octets to send in a single command, including the IRC protocol framing. 
If not supplied, defaults to L{MAX_COMMAND_LENGTH}. @type length: C{int} """ fmt = "PRIVMSG %s :%%s" % (user,) # NOTE: minimumLength really equals len(fmt) - 2 (for '%s') + 2 # (for the line-terminating CRLF) minimumLength = len(fmt) if length <= minimumLength: raise ValueError("Maximum length must exceed %d for message " "to %s" % (minimumLength, user)) for line in split(message, length - minimumLength): self.sendLine(fmt % (line,)) def notice(self, user, message): """ Send a notice to a user. Notices are like normal message, but should never get automated replies. @type user: C{str} @param user: The user to send a notice to. @type message: C{str} @param message: The contents of the notice to send. """ self.sendLine("NOTICE %s :%s" % (user, message)) def away(self, message=''): """ Mark this client as away. @type message: C{str} @param message: If specified, the away message. """ self.sendLine("AWAY :%s" % message) def back(self): """ Clear the away status. """ # An empty away marks us as back self.away() def whois(self, nickname, server=None): """ Retrieve user information about the given nick name. @type nickname: C{str} @param nickname: The nick name about which to retrieve information. @since: 8.2 """ if server is None: self.sendLine('WHOIS ' + nickname) else: self.sendLine('WHOIS %s %s' % (server, nickname)) def register(self, nickname, hostname='foo', servername='bar'): """ Login to the server. @type nickname: C{str} @param nickname: The nickname to register. @type hostname: C{str} @param hostname: If specified, the hostname to logon as. @type servername: C{str} @param servername: If specified, the servername to logon as. """ if self.password is not None: self.sendLine("PASS %s" % self.password) self.setNick(nickname) if self.username is None: self.username = nickname self.sendLine("USER %s %s %s :%s" % (self.username, hostname, servername, self.realname)) def setNick(self, nickname): """ Set this client's nickname. 
@type nickname: C{str} @param nickname: The nickname to change to. """ self._attemptedNick = nickname self.sendLine("NICK %s" % nickname) def quit(self, message = ''): """ Disconnect from the server @type message: C{str} @param message: If specified, the message to give when quitting the server. """ self.sendLine("QUIT :%s" % message) ### user input commands, client->client def describe(self, channel, action): """ Strike a pose. @type channel: C{str} @param channel: The name of the channel to have an action on. If it has no prefix, it is sent to the user of that name. @type action: C{str} @param action: The action to preform. @since: 9.0 """ self.ctcpMakeQuery(channel, [('ACTION', action)]) def me(self, channel, action): """ Strike a pose. This function is deprecated since Twisted 9.0. Use describe(). @type channel: C{str} @param channel: The name of the channel to have an action on. If it has no prefix, C{'#'} will be prepended to it. @type action: C{str} @param action: The action to preform. """ warnings.warn("me() is deprecated since Twisted 9.0. Use IRCClient.describe().", DeprecationWarning, stacklevel=2) if channel[0] not in CHANNEL_PREFIXES: channel = '#' + channel self.describe(channel, action) _pings = None _MAX_PINGRING = 12 def ping(self, user, text = None): """ Measure round-trip delay to another IRC client. """ if self._pings is None: self._pings = {} if text is None: chars = string.letters + string.digits + string.punctuation key = ''.join([random.choice(chars) for i in range(12)]) else: key = str(text) self._pings[(user, key)] = time.time() self.ctcpMakeQuery(user, [('PING', key)]) if len(self._pings) > self._MAX_PINGRING: # Remove some of the oldest entries. 
byValue = [(v, k) for (k, v) in self._pings.items()] byValue.sort() excess = self._MAX_PINGRING - len(self._pings) for i in xrange(excess): del self._pings[byValue[i][1]] def dccSend(self, user, file): if type(file) == types.StringType: file = open(file, 'r') size = fileSize(file) name = getattr(file, "name", "file@%s" % (id(file),)) factory = DccSendFactory(file) port = reactor.listenTCP(0, factory, 1) raise NotImplementedError,( "XXX!!! Help! I need to bind a socket, have it listen, and tell me its address. " "(and stop accepting once we've made a single connection.)") my_address = struct.pack("!I", my_address) args = ['SEND', name, my_address, str(port)] if not (size is None): args.append(size) args = string.join(args, ' ') self.ctcpMakeQuery(user, [('DCC', args)]) def dccResume(self, user, fileName, port, resumePos): """Send a DCC RESUME request to another user.""" self.ctcpMakeQuery(user, [ ('DCC', ['RESUME', fileName, port, resumePos])]) def dccAcceptResume(self, user, fileName, port, resumePos): """Send a DCC ACCEPT response to clients who have requested a resume. """ self.ctcpMakeQuery(user, [ ('DCC', ['ACCEPT', fileName, port, resumePos])]) ### server->client messages ### You might want to fiddle with these, ### but it is safe to leave them alone. def irc_ERR_NICKNAMEINUSE(self, prefix, params): """ Called when we try to register or change to a nickname that is already taken. """ self._attemptedNick = self.alterCollidedNick(self._attemptedNick) self.setNick(self._attemptedNick) def alterCollidedNick(self, nickname): """ Generate an altered version of a nickname that caused a collision in an effort to create an unused related name for subsequent registration. @param nickname: The nickname a user is attempting to register. @type nickname: C{str} @returns: A string that is in some way different from the nickname. 
@rtype: C{str} """ return nickname + '_' def irc_ERR_ERRONEUSNICKNAME(self, prefix, params): """ Called when we try to register or change to an illegal nickname. The server should send this reply when the nickname contains any disallowed characters. The bot will stall, waiting for RPL_WELCOME, if we don't handle this during sign-on. @note: The method uses the spelling I{erroneus}, as it appears in the RFC, section 6.1. """ if not self._registered: self.setNick(self.erroneousNickFallback) def irc_ERR_PASSWDMISMATCH(self, prefix, params): """ Called when the login was incorrect. """ raise IRCPasswordMismatch("Password Incorrect.") def irc_RPL_WELCOME(self, prefix, params): """ Called when we have received the welcome from the server. """ self._registered = True self.nickname = self._attemptedNick self.signedOn() def irc_JOIN(self, prefix, params): """ Called when a user joins a channel. """ nick = string.split(prefix,'!')[0] channel = params[-1] if nick == self.nickname: self.joined(channel) else: self.userJoined(nick, channel) def irc_PART(self, prefix, params): """ Called when a user leaves a channel. """ nick = string.split(prefix,'!')[0] channel = params[0] if nick == self.nickname: self.left(channel) else: self.userLeft(nick, channel) def irc_QUIT(self, prefix, params): """ Called when a user has quit. """ nick = string.split(prefix,'!')[0] self.userQuit(nick, params[0]) def irc_MODE(self, user, params): """ Parse a server mode change message. """ channel, modes, args = params[0], params[1], params[2:] if modes[0] not in '-+': modes = '+' + modes if channel == self.nickname: # This is a mode change to our individual user, not a channel mode # that involves us. 
paramModes = self.getUserModeParams() else: paramModes = self.getChannelModeParams() try: added, removed = parseModes(modes, args, paramModes) except IRCBadModes: log.err(None, 'An error occured while parsing the following ' 'MODE message: MODE %s' % (' '.join(params),)) else: if added: modes, params = zip(*added) self.modeChanged(user, channel, True, ''.join(modes), params) if removed: modes, params = zip(*removed) self.modeChanged(user, channel, False, ''.join(modes), params) def irc_PING(self, prefix, params): """ Called when some has pinged us. """ self.sendLine("PONG %s" % params[-1]) def irc_PRIVMSG(self, prefix, params): """ Called when we get a message. """ user = prefix channel = params[0] message = params[-1] if not message: return # don't raise an exception if some idiot sends us a blank message if message[0]==X_DELIM: m = ctcpExtract(message) if m['extended']: self.ctcpQuery(user, channel, m['extended']) if not m['normal']: return message = string.join(m['normal'], ' ') self.privmsg(user, channel, message) def irc_NOTICE(self, prefix, params): """ Called when a user gets a notice. """ user = prefix channel = params[0] message = params[-1] if message[0]==X_DELIM: m = ctcpExtract(message) if m['extended']: self.ctcpReply(user, channel, m['extended']) if not m['normal']: return message = string.join(m['normal'], ' ') self.noticed(user, channel, message) def irc_NICK(self, prefix, params): """ Called when a user changes their nickname. """ nick = string.split(prefix,'!', 1)[0] if nick == self.nickname: self.nickChanged(params[0]) else: self.userRenamed(nick, params[0]) def irc_KICK(self, prefix, params): """ Called when a user is kicked from a channel. """ kicker = string.split(prefix,'!')[0] channel = params[0] kicked = params[1] message = params[-1] if string.lower(kicked) == string.lower(self.nickname): # Yikes! 
self.kickedFrom(channel, kicker, message) else: self.userKicked(kicked, channel, kicker, message) def irc_TOPIC(self, prefix, params): """ Someone in the channel set the topic. """ user = string.split(prefix, '!')[0] channel = params[0] newtopic = params[1] self.topicUpdated(user, channel, newtopic) def irc_RPL_TOPIC(self, prefix, params): """ Called when the topic for a channel is initially reported or when it subsequently changes. """ user = string.split(prefix, '!')[0] channel = params[1] newtopic = params[2] self.topicUpdated(user, channel, newtopic) def irc_RPL_NOTOPIC(self, prefix, params): user = string.split(prefix, '!')[0] channel = params[1] newtopic = "" self.topicUpdated(user, channel, newtopic) def irc_RPL_MOTDSTART(self, prefix, params): if params[-1].startswith("- "): params[-1] = params[-1][2:] self.motd = [params[-1]] def irc_RPL_MOTD(self, prefix, params): if params[-1].startswith("- "): params[-1] = params[-1][2:] if self.motd is None: self.motd = [] self.motd.append(params[-1]) def irc_RPL_ENDOFMOTD(self, prefix, params): """ I{RPL_ENDOFMOTD} indicates the end of the message of the day messages. Deliver the accumulated lines to C{receivedMOTD}. """ motd = self.motd self.motd = None self.receivedMOTD(motd) def irc_RPL_CREATED(self, prefix, params): self.created(params[1]) def irc_RPL_YOURHOST(self, prefix, params): self.yourHost(params[1]) def irc_RPL_MYINFO(self, prefix, params): info = params[1].split(None, 3) while len(info) < 4: info.append(None) self.myInfo(*info) def irc_RPL_BOUNCE(self, prefix, params): self.bounce(params[1]) def irc_RPL_ISUPPORT(self, prefix, params): args = params[1:-1] # Several ISUPPORT messages, in no particular order, may be sent # to the client at any given point in time (usually only on connect, # though.) For this reason, ServerSupportedFeatures.parse is intended # to mutate the supported feature list. 
self.supported.parse(args) self.isupport(args) def irc_RPL_LUSERCLIENT(self, prefix, params): self.luserClient(params[1]) def irc_RPL_LUSEROP(self, prefix, params): try: self.luserOp(int(params[1])) except ValueError: pass def irc_RPL_LUSERCHANNELS(self, prefix, params): try: self.luserChannels(int(params[1])) except ValueError: pass def irc_RPL_LUSERME(self, prefix, params): self.luserMe(params[1]) def irc_unknown(self, prefix, command, params): pass ### Receiving a CTCP query from another party ### It is safe to leave these alone. def ctcpQuery(self, user, channel, messages): """Dispatch method for any CTCP queries received. """ for m in messages: method = getattr(self, "ctcpQuery_%s" % m[0], None) if method: method(user, channel, m[1]) else: self.ctcpUnknownQuery(user, channel, m[0], m[1]) def ctcpQuery_ACTION(self, user, channel, data): self.action(user, channel, data) def ctcpQuery_PING(self, user, channel, data): nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [("PING", data)]) def ctcpQuery_FINGER(self, user, channel, data): if data is not None: self.quirkyMessage("Why did %s send '%s' with a FINGER query?" % (user, data)) if not self.fingerReply: return if callable(self.fingerReply): reply = self.fingerReply() else: reply = str(self.fingerReply) nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('FINGER', reply)]) def ctcpQuery_VERSION(self, user, channel, data): if data is not None: self.quirkyMessage("Why did %s send '%s' with a VERSION query?" % (user, data)) if self.versionName: nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('VERSION', '%s:%s:%s' % (self.versionName, self.versionNum or '', self.versionEnv or ''))]) def ctcpQuery_SOURCE(self, user, channel, data): if data is not None: self.quirkyMessage("Why did %s send '%s' with a SOURCE query?" 
% (user, data)) if self.sourceURL: nick = string.split(user,"!")[0] # The CTCP document (Zeuge, Rollo, Mesander 1994) says that SOURCE # replies should be responded to with the location of an anonymous # FTP server in host:directory:file format. I'm taking the liberty # of bringing it into the 21st century by sending a URL instead. self.ctcpMakeReply(nick, [('SOURCE', self.sourceURL), ('SOURCE', None)]) def ctcpQuery_USERINFO(self, user, channel, data): if data is not None: self.quirkyMessage("Why did %s send '%s' with a USERINFO query?" % (user, data)) if self.userinfo: nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('USERINFO', self.userinfo)]) def ctcpQuery_CLIENTINFO(self, user, channel, data): """A master index of what CTCP tags this client knows. If no arguments are provided, respond with a list of known tags. If an argument is provided, provide human-readable help on the usage of that tag. """ nick = string.split(user,"!")[0] if not data: # XXX: prefixedMethodNames gets methods from my *class*, # but it's entirely possible that this *instance* has more # methods. names = reflect.prefixedMethodNames(self.__class__, 'ctcpQuery_') self.ctcpMakeReply(nick, [('CLIENTINFO', string.join(names, ' '))]) else: args = string.split(data) method = getattr(self, 'ctcpQuery_%s' % (args[0],), None) if not method: self.ctcpMakeReply(nick, [('ERRMSG', "CLIENTINFO %s :" "Unknown query '%s'" % (data, args[0]))]) return doc = getattr(method, '__doc__', '') self.ctcpMakeReply(nick, [('CLIENTINFO', doc)]) def ctcpQuery_ERRMSG(self, user, channel, data): # Yeah, this seems strange, but that's what the spec says to do # when faced with an ERRMSG query (not a reply). nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('ERRMSG', "%s :No error has occoured." % data)]) def ctcpQuery_TIME(self, user, channel, data): if data is not None: self.quirkyMessage("Why did %s send '%s' with a TIME query?" 
% (user, data)) nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('TIME', ':%s' % time.asctime(time.localtime(time.time())))]) def ctcpQuery_DCC(self, user, channel, data): """Initiate a Direct Client Connection """ if not data: return dcctype = data.split(None, 1)[0].upper() handler = getattr(self, "dcc_" + dcctype, None) if handler: if self.dcc_sessions is None: self.dcc_sessions = [] data = data[len(dcctype)+1:] handler(user, channel, data) else: nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('ERRMSG', "DCC %s :Unknown DCC type '%s'" % (data, dcctype))]) self.quirkyMessage("%s offered unknown DCC type %s" % (user, dcctype)) def dcc_SEND(self, user, channel, data): # Use splitQuoted for those who send files with spaces in the names. data = text.splitQuoted(data) if len(data) < 3: raise IRCBadMessage, "malformed DCC SEND request: %r" % (data,) (filename, address, port) = data[:3] address = dccParseAddress(address) try: port = int(port) except ValueError: raise IRCBadMessage, "Indecipherable port %r" % (port,) size = -1 if len(data) >= 4: try: size = int(data[3]) except ValueError: pass # XXX Should we bother passing this data? 
self.dccDoSend(user, address, port, filename, size, data) def dcc_ACCEPT(self, user, channel, data): data = text.splitQuoted(data) if len(data) < 3: raise IRCBadMessage, "malformed DCC SEND ACCEPT request: %r" % (data,) (filename, port, resumePos) = data[:3] try: port = int(port) resumePos = int(resumePos) except ValueError: return self.dccDoAcceptResume(user, filename, port, resumePos) def dcc_RESUME(self, user, channel, data): data = text.splitQuoted(data) if len(data) < 3: raise IRCBadMessage, "malformed DCC SEND RESUME request: %r" % (data,) (filename, port, resumePos) = data[:3] try: port = int(port) resumePos = int(resumePos) except ValueError: return self.dccDoResume(user, filename, port, resumePos) def dcc_CHAT(self, user, channel, data): data = text.splitQuoted(data) if len(data) < 3: raise IRCBadMessage, "malformed DCC CHAT request: %r" % (data,) (filename, address, port) = data[:3] address = dccParseAddress(address) try: port = int(port) except ValueError: raise IRCBadMessage, "Indecipherable port %r" % (port,) self.dccDoChat(user, channel, address, port, data) ### The dccDo methods are the slightly higher-level siblings of ### common dcc_ methods; the arguments have been parsed for them. def dccDoSend(self, user, address, port, fileName, size, data): """Called when I receive a DCC SEND offer from a client. By default, I do nothing here.""" ## filename = path.basename(arg) ## protocol = DccFileReceive(filename, size, ## (user,channel,data),self.dcc_destdir) ## reactor.clientTCP(address, port, protocol) ## self.dcc_sessions.append(protocol) pass def dccDoResume(self, user, file, port, resumePos): """Called when a client is trying to resume an offered file via DCC send. It should be either replied to with a DCC ACCEPT or ignored (default).""" pass def dccDoAcceptResume(self, user, file, port, resumePos): """Called when a client has verified and accepted a DCC resume request made by us. 
By default it will do nothing.""" pass def dccDoChat(self, user, channel, address, port, data): pass #factory = DccChatFactory(self, queryData=(user, channel, data)) #reactor.connectTCP(address, port, factory) #self.dcc_sessions.append(factory) #def ctcpQuery_SED(self, user, data): # """Simple Encryption Doodoo # # Feel free to implement this, but no specification is available. # """ # raise NotImplementedError def ctcpUnknownQuery(self, user, channel, tag, data): nick = string.split(user,"!")[0] self.ctcpMakeReply(nick, [('ERRMSG', "%s %s: Unknown query '%s'" % (tag, data, tag))]) log.msg("Unknown CTCP query from %s: %s %s\n" % (user, tag, data)) def ctcpMakeReply(self, user, messages): """ Send one or more C{extended messages} as a CTCP reply. @type messages: a list of extended messages. An extended message is a (tag, data) tuple, where 'data' may be C{None}. """ self.notice(user, ctcpStringify(messages)) ### client CTCP query commands def ctcpMakeQuery(self, user, messages): """ Send one or more C{extended messages} as a CTCP query. @type messages: a list of extended messages. An extended message is a (tag, data) tuple, where 'data' may be C{None}. """ self.msg(user, ctcpStringify(messages)) ### Receiving a response to a CTCP query (presumably to one we made) ### You may want to add methods here, or override UnknownReply. def ctcpReply(self, user, channel, messages): """ Dispatch method for any CTCP replies received. 
""" for m in messages: method = getattr(self, "ctcpReply_%s" % m[0], None) if method: method(user, channel, m[1]) else: self.ctcpUnknownReply(user, channel, m[0], m[1]) def ctcpReply_PING(self, user, channel, data): nick = user.split('!', 1)[0] if (not self._pings) or (not self._pings.has_key((nick, data))): raise IRCBadMessage,\ "Bogus PING response from %s: %s" % (user, data) t0 = self._pings[(nick, data)] self.pong(user, time.time() - t0) def ctcpUnknownReply(self, user, channel, tag, data): """Called when a fitting ctcpReply_ method is not found. XXX: If the client makes arbitrary CTCP queries, this method should probably show the responses to them instead of treating them as anomolies. """ log.msg("Unknown CTCP reply from %s: %s %s\n" % (user, tag, data)) ### Error handlers ### You may override these with something more appropriate to your UI. def badMessage(self, line, excType, excValue, tb): """When I get a message that's so broken I can't use it. """ log.msg(line) log.msg(string.join(traceback.format_exception(excType, excValue, tb),'')) def quirkyMessage(self, s): """This is called when I receive a message which is peculiar, but not wholly indecipherable. """ log.msg(s + '\n') ### Protocool methods def connectionMade(self): self.supported = ServerSupportedFeatures() self._queue = [] if self.performLogin: self.register(self.nickname) def dataReceived(self, data): basic.LineReceiver.dataReceived(self, data.replace('\r', '')) def lineReceived(self, line): line = lowDequote(line) try: prefix, command, params = parsemsg(line) if numeric_to_symbolic.has_key(command): command = numeric_to_symbolic[command] self.handleCommand(command, prefix, params) except IRCBadMessage: self.badMessage(line, *sys.exc_info()) def getUserModeParams(self): """ Get user modes that require parameters for correct parsing. @rtype: C{[str, str]} @return C{[add, remove]} """ return ['', ''] def getChannelModeParams(self): """ Get channel modes that require parameters for correct parsing. 
@rtype: C{[str, str]} @return C{[add, remove]} """ # PREFIX modes are treated as "type B" CHANMODES, they always take # parameter. params = ['', ''] prefixes = self.supported.getFeature('PREFIX', {}) params[0] = params[1] = ''.join(prefixes.iterkeys()) chanmodes = self.supported.getFeature('CHANMODES') if chanmodes is not None: params[0] += chanmodes.get('addressModes', '') params[0] += chanmodes.get('param', '') params[1] = params[0] params[0] += chanmodes.get('setParam', '') return params def handleCommand(self, command, prefix, params): """Determine the function to call for the given command and call it with the given arguments. """ method = getattr(self, "irc_%s" % command, None) try: if method is not None: method(prefix, params) else: self.irc_unknown(prefix, command, params) except: log.deferr() def __getstate__(self): dct = self.__dict__.copy() dct['dcc_sessions'] = None dct['_pings'] = None return dct def dccParseAddress(address): if '.' in address: pass else: try: address = long(address) except ValueError: raise IRCBadMessage,\ "Indecipherable address %r" % (address,) else: address = ( (address >> 24) & 0xFF, (address >> 16) & 0xFF, (address >> 8) & 0xFF, address & 0xFF, ) address = '.'.join(map(str,address)) return address class DccFileReceiveBasic(protocol.Protocol, styles.Ephemeral): """Bare protocol to receive a Direct Client Connection SEND stream. This does enough to keep the other guy talking, but you'll want to extend my dataReceived method to *do* something with the data I get. """ bytesReceived = 0 def __init__(self, resumeOffset=0): self.bytesReceived = resumeOffset self.resume = (resumeOffset != 0) def dataReceived(self, data): """Called when data is received. Warning: This just acknowledges to the remote host that the data has been received; it doesn't *do* anything with the data, so you'll want to override this. 
""" self.bytesReceived = self.bytesReceived + len(data) self.transport.write(struct.pack('!i', self.bytesReceived)) class DccSendProtocol(protocol.Protocol, styles.Ephemeral): """Protocol for an outgoing Direct Client Connection SEND. """ blocksize = 1024 file = None bytesSent = 0 completed = 0 connected = 0 def __init__(self, file): if type(file) is types.StringType: self.file = open(file, 'r') def connectionMade(self): self.connected = 1 self.sendBlock() def dataReceived(self, data): # XXX: Do we need to check to see if len(data) != fmtsize? bytesShesGot = struct.unpack("!I", data) if bytesShesGot < self.bytesSent: # Wait for her. # XXX? Add some checks to see if we've stalled out? return elif bytesShesGot > self.bytesSent: # self.transport.log("DCC SEND %s: She says she has %d bytes " # "but I've only sent %d. I'm stopping " # "this screwy transfer." # % (self.file, # bytesShesGot, self.bytesSent)) self.transport.loseConnection() return self.sendBlock() def sendBlock(self): block = self.file.read(self.blocksize) if block: self.transport.write(block) self.bytesSent = self.bytesSent + len(block) else: # Nothing more to send, transfer complete. self.transport.loseConnection() self.completed = 1 def connectionLost(self, reason): self.connected = 0 if hasattr(self.file, "close"): self.file.close() class DccSendFactory(protocol.Factory): protocol = DccSendProtocol def __init__(self, file): self.file = file def buildProtocol(self, connection): p = self.protocol(self.file) p.factory = self return p def fileSize(file): """I'll try my damndest to determine the size of this file object. 
""" size = None if hasattr(file, "fileno"): fileno = file.fileno() try: stat_ = os.fstat(fileno) size = stat_[stat.ST_SIZE] except: pass else: return size if hasattr(file, "name") and path.exists(file.name): try: size = path.getsize(file.name) except: pass else: return size if hasattr(file, "seek") and hasattr(file, "tell"): try: try: file.seek(0, 2) size = file.tell() finally: file.seek(0, 0) except: pass else: return size return size class DccChat(basic.LineReceiver, styles.Ephemeral): """Direct Client Connection protocol type CHAT. DCC CHAT is really just your run o' the mill basic.LineReceiver protocol. This class only varies from that slightly, accepting either LF or CR LF for a line delimeter for incoming messages while always using CR LF for outgoing. The lineReceived method implemented here uses the DCC connection's 'client' attribute (provided upon construction) to deliver incoming lines from the DCC chat via IRCClient's normal privmsg interface. That's something of a spoof, which you may well want to override. """ queryData = None delimiter = CR + NL client = None remoteParty = None buffer = "" def __init__(self, client, queryData=None): """Initialize a new DCC CHAT session. queryData is a 3-tuple of (fromUser, targetUserOrChannel, data) as received by the CTCP query. (To be honest, fromUser is the only thing that's currently used here. targetUserOrChannel is potentially useful, while the 'data' argument is soley for informational purposes.) 
""" self.client = client if queryData: self.queryData = queryData self.remoteParty = self.queryData[0] def dataReceived(self, data): self.buffer = self.buffer + data lines = string.split(self.buffer, LF) # Put the (possibly empty) element after the last LF back in the # buffer self.buffer = lines.pop() for line in lines: if line[-1] == CR: line = line[:-1] self.lineReceived(line) def lineReceived(self, line): log.msg("DCC CHAT<%s> %s" % (self.remoteParty, line)) self.client.privmsg(self.remoteParty, self.client.nickname, line) class DccChatFactory(protocol.ClientFactory): protocol = DccChat noisy = 0 def __init__(self, client, queryData): self.client = client self.queryData = queryData def buildProtocol(self, addr): p = self.protocol(client=self.client, queryData=self.queryData) p.factory = self def clientConnectionFailed(self, unused_connector, unused_reason): self.client.dcc_sessions.remove(self) def clientConnectionLost(self, unused_connector, unused_reason): self.client.dcc_sessions.remove(self) def dccDescribe(data): """Given the data chunk from a DCC query, return a descriptive string. """ orig_data = data data = string.split(data) if len(data) < 4: return orig_data (dcctype, arg, address, port) = data[:4] if '.' in address: pass else: try: address = long(address) except ValueError: pass else: address = ( (address >> 24) & 0xFF, (address >> 16) & 0xFF, (address >> 8) & 0xFF, address & 0xFF, ) # The mapping to 'int' is to get rid of those accursed # "L"s which python 1.5.2 puts on the end of longs. 
address = string.join(map(str,map(int,address)), ".") if dcctype == 'SEND': filename = arg size_txt = '' if len(data) >= 5: try: size = int(data[4]) size_txt = ' of size %d bytes' % (size,) except ValueError: pass dcc_text = ("SEND for file '%s'%s at host %s, port %s" % (filename, size_txt, address, port)) elif dcctype == 'CHAT': dcc_text = ("CHAT for host %s, port %s" % (address, port)) else: dcc_text = orig_data return dcc_text class DccFileReceive(DccFileReceiveBasic): """Higher-level coverage for getting a file from DCC SEND. I allow you to change the file's name and destination directory. I won't overwrite an existing file unless I've been told it's okay to do so. If passed the resumeOffset keyword argument I will attempt to resume the file from that amount of bytes. XXX: I need to let the client know when I am finished. XXX: I need to decide how to keep a progress indicator updated. XXX: Client needs a way to tell me "Do not finish until I say so." XXX: I need to make sure the client understands if the file cannot be written. """ filename = 'dcc' fileSize = -1 destDir = '.' overwrite = 0 fromUser = None queryData = None def __init__(self, filename, fileSize=-1, queryData=None, destDir='.', resumeOffset=0): DccFileReceiveBasic.__init__(self, resumeOffset=resumeOffset) self.filename = filename self.destDir = destDir self.fileSize = fileSize if queryData: self.queryData = queryData self.fromUser = self.queryData[0] def set_directory(self, directory): """Set the directory where the downloaded file will be placed. May raise OSError if the supplied directory path is not suitable. 
""" if not path.exists(directory): raise OSError(errno.ENOENT, "You see no directory there.", directory) if not path.isdir(directory): raise OSError(errno.ENOTDIR, "You cannot put a file into " "something which is not a directory.", directory) if not os.access(directory, os.X_OK | os.W_OK): raise OSError(errno.EACCES, "This directory is too hard to write in to.", directory) self.destDir = directory def set_filename(self, filename): """Change the name of the file being transferred. This replaces the file name provided by the sender. """ self.filename = filename def set_overwrite(self, boolean): """May I overwrite existing files? """ self.overwrite = boolean # Protocol-level methods. def connectionMade(self): dst = path.abspath(path.join(self.destDir,self.filename)) exists = path.exists(dst) if self.resume and exists: # I have been told I want to resume, and a file already # exists - Here we go self.file = open(dst, 'ab') log.msg("Attempting to resume %s - starting from %d bytes" % (self.file, self.file.tell())) elif self.overwrite or not exists: self.file = open(dst, 'wb') else: raise OSError(errno.EEXIST, "There's a file in the way. " "Perhaps that's why you cannot open it.", dst) def dataReceived(self, data): self.file.write(data) DccFileReceiveBasic.dataReceived(self, data) # XXX: update a progress indicator here? def connectionLost(self, reason): """When the connection is lost, I close the file. """ self.connected = 0 logmsg = ("%s closed." % (self,)) if self.fileSize > 0: logmsg = ("%s %d/%d bytes received" % (logmsg, self.bytesReceived, self.fileSize)) if self.bytesReceived == self.fileSize: pass # Hooray! 
elif self.bytesReceived < self.fileSize: logmsg = ("%s (Warning: %d bytes short)" % (logmsg, self.fileSize - self.bytesReceived)) else: logmsg = ("%s (file larger than expected)" % (logmsg,)) else: logmsg = ("%s %d bytes received" % (logmsg, self.bytesReceived)) if hasattr(self, 'file'): logmsg = "%s and written to %s.\n" % (logmsg, self.file.name) if hasattr(self.file, 'close'): self.file.close() # self.transport.log(logmsg) def __str__(self): if not self.connected: return "<Unconnected DccFileReceive object at %x>" % (id(self),) from_ = self.transport.getPeer() if self.fromUser: from_ = "%s (%s)" % (self.fromUser, from_) s = ("DCC transfer of '%s' from %s" % (self.filename, from_)) return s def __repr__(self): s = ("<%s at %x: GET %s>" % (self.__class__, id(self), self.filename)) return s # CTCP constants and helper functions X_DELIM = chr(001) def ctcpExtract(message): """Extract CTCP data from a string. Returns a dictionary with two items: - C{'extended'}: a list of CTCP (tag, data) tuples - C{'normal'}: a list of strings which were not inside a CTCP delimeter """ extended_messages = [] normal_messages = [] retval = {'extended': extended_messages, 'normal': normal_messages } messages = string.split(message, X_DELIM) odd = 0 # X1 extended data X2 nomal data X3 extended data X4 normal... 
while messages: if odd: extended_messages.append(messages.pop(0)) else: normal_messages.append(messages.pop(0)) odd = not odd extended_messages[:] = filter(None, extended_messages) normal_messages[:] = filter(None, normal_messages) extended_messages[:] = map(ctcpDequote, extended_messages) for i in xrange(len(extended_messages)): m = string.split(extended_messages[i], SPC, 1) tag = m[0] if len(m) > 1: data = m[1] else: data = None extended_messages[i] = (tag, data) return retval # CTCP escaping M_QUOTE= chr(020) mQuoteTable = { NUL: M_QUOTE + '0', NL: M_QUOTE + 'n', CR: M_QUOTE + 'r', M_QUOTE: M_QUOTE + M_QUOTE } mDequoteTable = {} for k, v in mQuoteTable.items(): mDequoteTable[v[-1]] = k del k, v mEscape_re = re.compile('%s.' % (re.escape(M_QUOTE),), re.DOTALL) def lowQuote(s): for c in (M_QUOTE, NUL, NL, CR): s = string.replace(s, c, mQuoteTable[c]) return s def lowDequote(s): def sub(matchobj, mDequoteTable=mDequoteTable): s = matchobj.group()[1] try: s = mDequoteTable[s] except KeyError: s = s return s return mEscape_re.sub(sub, s) X_QUOTE = '\\' xQuoteTable = { X_DELIM: X_QUOTE + 'a', X_QUOTE: X_QUOTE + X_QUOTE } xDequoteTable = {} for k, v in xQuoteTable.items(): xDequoteTable[v[-1]] = k xEscape_re = re.compile('%s.' % (re.escape(X_QUOTE),), re.DOTALL) def ctcpQuote(s): for c in (X_QUOTE, X_DELIM): s = string.replace(s, c, xQuoteTable[c]) return s def ctcpDequote(s): def sub(matchobj, xDequoteTable=xDequoteTable): s = matchobj.group()[1] try: s = xDequoteTable[s] except KeyError: s = s return s return xEscape_re.sub(sub, s) def ctcpStringify(messages): """ @type messages: a list of extended messages. An extended message is a (tag, data) tuple, where 'data' may be C{None}, a string, or a list of strings to be joined with whitespace. @returns: String """ coded_messages = [] for (tag, data) in messages: if data: if not isinstance(data, types.StringType): try: # data as list-of-strings data = " ".join(map(str, data)) except TypeError: # No? 
Then use it's %s representation. pass m = "%s %s" % (tag, data) else: m = str(tag) m = ctcpQuote(m) m = "%s%s%s" % (X_DELIM, m, X_DELIM) coded_messages.append(m) line = string.join(coded_messages, '') return line # Constants (from RFC 2812) RPL_WELCOME = '001' RPL_YOURHOST = '002' RPL_CREATED = '003' RPL_MYINFO = '004' RPL_ISUPPORT = '005' RPL_BOUNCE = '010' RPL_USERHOST = '302' RPL_ISON = '303' RPL_AWAY = '301' RPL_UNAWAY = '305' RPL_NOWAWAY = '306' RPL_WHOISUSER = '311' RPL_WHOISSERVER = '312' RPL_WHOISOPERATOR = '313' RPL_WHOISIDLE = '317' RPL_ENDOFWHOIS = '318' RPL_WHOISCHANNELS = '319' RPL_WHOWASUSER = '314' RPL_ENDOFWHOWAS = '369' RPL_LISTSTART = '321' RPL_LIST = '322' RPL_LISTEND = '323' RPL_UNIQOPIS = '325' RPL_CHANNELMODEIS = '324' RPL_NOTOPIC = '331' RPL_TOPIC = '332' RPL_INVITING = '341' RPL_SUMMONING = '342' RPL_INVITELIST = '346' RPL_ENDOFINVITELIST = '347' RPL_EXCEPTLIST = '348' RPL_ENDOFEXCEPTLIST = '349' RPL_VERSION = '351' RPL_WHOREPLY = '352' RPL_ENDOFWHO = '315' RPL_NAMREPLY = '353' RPL_ENDOFNAMES = '366' RPL_LINKS = '364' RPL_ENDOFLINKS = '365' RPL_BANLIST = '367' RPL_ENDOFBANLIST = '368' RPL_INFO = '371' RPL_ENDOFINFO = '374' RPL_MOTDSTART = '375' RPL_MOTD = '372' RPL_ENDOFMOTD = '376' RPL_YOUREOPER = '381' RPL_REHASHING = '382' RPL_YOURESERVICE = '383' RPL_TIME = '391' RPL_USERSSTART = '392' RPL_USERS = '393' RPL_ENDOFUSERS = '394' RPL_NOUSERS = '395' RPL_TRACELINK = '200' RPL_TRACECONNECTING = '201' RPL_TRACEHANDSHAKE = '202' RPL_TRACEUNKNOWN = '203' RPL_TRACEOPERATOR = '204' RPL_TRACEUSER = '205' RPL_TRACESERVER = '206' RPL_TRACESERVICE = '207' RPL_TRACENEWTYPE = '208' RPL_TRACECLASS = '209' RPL_TRACERECONNECT = '210' RPL_TRACELOG = '261' RPL_TRACEEND = '262' RPL_STATSLINKINFO = '211' RPL_STATSCOMMANDS = '212' RPL_ENDOFSTATS = '219' RPL_STATSUPTIME = '242' RPL_STATSOLINE = '243' RPL_UMODEIS = '221' RPL_SERVLIST = '234' RPL_SERVLISTEND = '235' RPL_LUSERCLIENT = '251' RPL_LUSEROP = '252' RPL_LUSERUNKNOWN = '253' RPL_LUSERCHANNELS = '254' 
RPL_LUSERME = '255' RPL_ADMINME = '256' RPL_ADMINLOC = '257' RPL_ADMINLOC = '258' RPL_ADMINEMAIL = '259' RPL_TRYAGAIN = '263' ERR_NOSUCHNICK = '401' ERR_NOSUCHSERVER = '402' ERR_NOSUCHCHANNEL = '403' ERR_CANNOTSENDTOCHAN = '404' ERR_TOOMANYCHANNELS = '405' ERR_WASNOSUCHNICK = '406' ERR_TOOMANYTARGETS = '407' ERR_NOSUCHSERVICE = '408' ERR_NOORIGIN = '409' ERR_NORECIPIENT = '411' ERR_NOTEXTTOSEND = '412' ERR_NOTOPLEVEL = '413' ERR_WILDTOPLEVEL = '414' ERR_BADMASK = '415' ERR_UNKNOWNCOMMAND = '421' ERR_NOMOTD = '422' ERR_NOADMININFO = '423' ERR_FILEERROR = '424' ERR_NONICKNAMEGIVEN = '431' ERR_ERRONEUSNICKNAME = '432' ERR_NICKNAMEINUSE = '433' ERR_NICKCOLLISION = '436' ERR_UNAVAILRESOURCE = '437' ERR_USERNOTINCHANNEL = '441' ERR_NOTONCHANNEL = '442' ERR_USERONCHANNEL = '443' ERR_NOLOGIN = '444' ERR_SUMMONDISABLED = '445' ERR_USERSDISABLED = '446' ERR_NOTREGISTERED = '451' ERR_NEEDMOREPARAMS = '461' ERR_ALREADYREGISTRED = '462' ERR_NOPERMFORHOST = '463' ERR_PASSWDMISMATCH = '464' ERR_YOUREBANNEDCREEP = '465' ERR_YOUWILLBEBANNED = '466' ERR_KEYSET = '467' ERR_CHANNELISFULL = '471' ERR_UNKNOWNMODE = '472' ERR_INVITEONLYCHAN = '473' ERR_BANNEDFROMCHAN = '474' ERR_BADCHANNELKEY = '475' ERR_BADCHANMASK = '476' ERR_NOCHANMODES = '477' ERR_BANLISTFULL = '478' ERR_NOPRIVILEGES = '481' ERR_CHANOPRIVSNEEDED = '482' ERR_CANTKILLSERVER = '483' ERR_RESTRICTED = '484' ERR_UNIQOPPRIVSNEEDED = '485' ERR_NOOPERHOST = '491' ERR_NOSERVICEHOST = '492' ERR_UMODEUNKNOWNFLAG = '501' ERR_USERSDONTMATCH = '502' # And hey, as long as the strings are already intern'd... 
# Mapping from symbolic reply/error names (RFC 2812) to their numeric codes,
# kept as strings because IRC transmits them as three-digit ASCII fields.
#
# BUG FIX: the original dict contained the key "RPL_ADMINLOC" twice
# ('257' then '258'); Python keeps only the last occurrence, so numeric
# 257 was unreachable and absent from numeric_to_symbolic.  RFC 2812
# names them RPL_ADMINLOC1 (257) and RPL_ADMINLOC2 (258).  The legacy
# "RPL_ADMINLOC" key is kept, listed last, so it still maps numeric
# '258' in the reverse table exactly as before.
symbolic_to_numeric = {
    "RPL_WELCOME": '001',
    "RPL_YOURHOST": '002',
    "RPL_CREATED": '003',
    "RPL_MYINFO": '004',
    "RPL_ISUPPORT": '005',
    "RPL_BOUNCE": '010',
    "RPL_USERHOST": '302',
    "RPL_ISON": '303',
    "RPL_AWAY": '301',
    "RPL_UNAWAY": '305',
    "RPL_NOWAWAY": '306',
    "RPL_WHOISUSER": '311',
    "RPL_WHOISSERVER": '312',
    "RPL_WHOISOPERATOR": '313',
    "RPL_WHOISIDLE": '317',
    "RPL_ENDOFWHOIS": '318',
    "RPL_WHOISCHANNELS": '319',
    "RPL_WHOWASUSER": '314',
    "RPL_ENDOFWHOWAS": '369',
    "RPL_LISTSTART": '321',
    "RPL_LIST": '322',
    "RPL_LISTEND": '323',
    "RPL_UNIQOPIS": '325',
    "RPL_CHANNELMODEIS": '324',
    "RPL_NOTOPIC": '331',
    "RPL_TOPIC": '332',
    "RPL_INVITING": '341',
    "RPL_SUMMONING": '342',
    "RPL_INVITELIST": '346',
    "RPL_ENDOFINVITELIST": '347',
    "RPL_EXCEPTLIST": '348',
    "RPL_ENDOFEXCEPTLIST": '349',
    "RPL_VERSION": '351',
    "RPL_WHOREPLY": '352',
    "RPL_ENDOFWHO": '315',
    "RPL_NAMREPLY": '353',
    "RPL_ENDOFNAMES": '366',
    "RPL_LINKS": '364',
    "RPL_ENDOFLINKS": '365',
    "RPL_BANLIST": '367',
    "RPL_ENDOFBANLIST": '368',
    "RPL_INFO": '371',
    "RPL_ENDOFINFO": '374',
    "RPL_MOTDSTART": '375',
    "RPL_MOTD": '372',
    "RPL_ENDOFMOTD": '376',
    "RPL_YOUREOPER": '381',
    "RPL_REHASHING": '382',
    "RPL_YOURESERVICE": '383',
    "RPL_TIME": '391',
    "RPL_USERSSTART": '392',
    "RPL_USERS": '393',
    "RPL_ENDOFUSERS": '394',
    "RPL_NOUSERS": '395',
    "RPL_TRACELINK": '200',
    "RPL_TRACECONNECTING": '201',
    "RPL_TRACEHANDSHAKE": '202',
    "RPL_TRACEUNKNOWN": '203',
    "RPL_TRACEOPERATOR": '204',
    "RPL_TRACEUSER": '205',
    "RPL_TRACESERVER": '206',
    "RPL_TRACESERVICE": '207',
    "RPL_TRACENEWTYPE": '208',
    "RPL_TRACECLASS": '209',
    "RPL_TRACERECONNECT": '210',
    "RPL_TRACELOG": '261',
    "RPL_TRACEEND": '262',
    "RPL_STATSLINKINFO": '211',
    "RPL_STATSCOMMANDS": '212',
    "RPL_ENDOFSTATS": '219',
    "RPL_STATSUPTIME": '242',
    "RPL_STATSOLINE": '243',
    "RPL_UMODEIS": '221',
    "RPL_SERVLIST": '234',
    "RPL_SERVLISTEND": '235',
    "RPL_LUSERCLIENT": '251',
    "RPL_LUSEROP": '252',
    "RPL_LUSERUNKNOWN": '253',
    "RPL_LUSERCHANNELS": '254',
    "RPL_LUSERME": '255',
    "RPL_ADMINME": '256',
    "RPL_ADMINLOC1": '257',
    "RPL_ADMINLOC2": '258',
    "RPL_ADMINLOC": '258',  # deprecated alias; kept last so '258' still
                            # reverse-maps to it, as it did before the fix
    "RPL_ADMINEMAIL": '259',
    "RPL_TRYAGAIN": '263',
    "ERR_NOSUCHNICK": '401',
    "ERR_NOSUCHSERVER": '402',
    "ERR_NOSUCHCHANNEL": '403',
    "ERR_CANNOTSENDTOCHAN": '404',
    "ERR_TOOMANYCHANNELS": '405',
    "ERR_WASNOSUCHNICK": '406',
    "ERR_TOOMANYTARGETS": '407',
    "ERR_NOSUCHSERVICE": '408',
    "ERR_NOORIGIN": '409',
    "ERR_NORECIPIENT": '411',
    "ERR_NOTEXTTOSEND": '412',
    "ERR_NOTOPLEVEL": '413',
    "ERR_WILDTOPLEVEL": '414',
    "ERR_BADMASK": '415',
    "ERR_UNKNOWNCOMMAND": '421',
    "ERR_NOMOTD": '422',
    "ERR_NOADMININFO": '423',
    "ERR_FILEERROR": '424',
    "ERR_NONICKNAMEGIVEN": '431',
    "ERR_ERRONEUSNICKNAME": '432',
    "ERR_NICKNAMEINUSE": '433',
    "ERR_NICKCOLLISION": '436',
    "ERR_UNAVAILRESOURCE": '437',
    "ERR_USERNOTINCHANNEL": '441',
    "ERR_NOTONCHANNEL": '442',
    "ERR_USERONCHANNEL": '443',
    "ERR_NOLOGIN": '444',
    "ERR_SUMMONDISABLED": '445',
    "ERR_USERSDISABLED": '446',
    "ERR_NOTREGISTERED": '451',
    "ERR_NEEDMOREPARAMS": '461',
    "ERR_ALREADYREGISTRED": '462',
    "ERR_NOPERMFORHOST": '463',
    "ERR_PASSWDMISMATCH": '464',
    "ERR_YOUREBANNEDCREEP": '465',
    "ERR_YOUWILLBEBANNED": '466',
    "ERR_KEYSET": '467',
    "ERR_CHANNELISFULL": '471',
    "ERR_UNKNOWNMODE": '472',
    "ERR_INVITEONLYCHAN": '473',
    "ERR_BANNEDFROMCHAN": '474',
    "ERR_BADCHANNELKEY": '475',
    "ERR_BADCHANMASK": '476',
    "ERR_NOCHANMODES": '477',
    "ERR_BANLISTFULL": '478',
    "ERR_NOPRIVILEGES": '481',
    "ERR_CHANOPRIVSNEEDED": '482',
    "ERR_CANTKILLSERVER": '483',
    "ERR_RESTRICTED": '484',
    "ERR_UNIQOPPRIVSNEEDED": '485',
    "ERR_NOOPERHOST": '491',
    "ERR_NOSERVICEHOST": '492',
    "ERR_UMODEUNKNOWNFLAG": '501',
    "ERR_USERSDONTMATCH": '502',
}

# Reverse lookup: numeric code -> symbolic name.  Later entries win on
# duplicate numerics, so '258' resolves to the legacy "RPL_ADMINLOC" name,
# preserving the pre-fix behavior for existing callers.
numeric_to_symbolic = {}
for k, v in symbolic_to_numeric.items():
    numeric_to_symbolic[v] = k
apache-2.0
Confkins/CloudComputing
.local/share/heroku/cli/lib/npm/node_modules/node-gyp/gyp/pylib/gyp/generator/xcode.py
1363
58344
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# GYP generator backend that emits Xcode .xcodeproj project bundles.
# NOTE: this module is written for Python 2 (old except syntax, iteritems,
# octal literals appear later in the file).

import filecmp
import gyp.common
import gyp.xcodeproj_file
import gyp.xcode_ninja
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile


# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific.  The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files.  The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific.  INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'

# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'

_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'

# Default variable expansions consumed by the common GYP input machinery;
# the values are Xcode build-setting references resolved at build time.
generator_default_variables = {
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'STATIC_LIB_PREFIX': 'lib',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
  'SHARED_LIB_SUFFIX': '.dylib',
  # INTERMEDIATE_DIR is a place for targets to build up intermediate products.
  # It is specific to each build environment.  It is only guaranteed to exist
  # and be constant within the context of a project, corresponding to a single
  # input file.  Some build environments may allow their intermediate directory
  # to be shared on a wider scale, but this is not guaranteed.
  'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
  'OS': 'mac',
  'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
  'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
  'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
  'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
  'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
  'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
  'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
  'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
  'CONFIGURATION_NAME': '$(CONFIGURATION)',
}

# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  # 'mac_framework_dirs', input already handles _dirs endings.
]

# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
  'ios_app_extension',
  'ios_watch_app',
  'ios_watchkit_extension',
  'mac_bundle',
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
  'mac_xctest_bundle',
  'xcode_create_dependents_test_runner',
]

# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
  'mac_bundle_resources',
  'mac_framework_headers',
  'mac_framework_private_headers',
]

# Populated by CalculateGeneratorInputInfo (defined later in this file).
generator_filelist_paths = None

# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
  '$(SDKROOT)/usr/lib',
  '$(SDKROOT)/usr/local/lib',
])


def CreateXCConfigurationList(configuration_names):
  """Builds an XCConfigurationList holding one XCBuildConfiguration per name.

  Falls back to a single 'Default' configuration when no names are supplied;
  the first name in the list becomes the default configuration.
  """
  xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
  if len(configuration_names) == 0:
    configuration_names = ['Default']
  for configuration_name in configuration_names:
    xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
        'name': configuration_name})
    xccl.AppendProperty('buildConfigurations', xcbc)
  xccl.SetProperty('defaultConfigurationName', configuration_names[0])
  return xccl


class XcodeProject(object):
  """Wraps one generated .xcodeproj bundle for a single input .gyp file.

  Owns the PBXProject/XCProjectFile pair and the on-disk .xcodeproj
  directory, and finalizes/writes the project file.
  """

  def __init__(self, gyp_path, path, build_file_dict):
    # gyp_path: path to the source .gyp file; path: the .xcodeproj dir to
    # create; build_file_dict: the parsed build-file dictionary.
    self.gyp_path = gyp_path
    self.path = path
    self.project = gyp.xcodeproj_file.PBXProject(path=path)
    projectDirPath = gyp.common.RelativePath(
                         os.path.dirname(os.path.abspath(self.gyp_path)),
                         os.path.dirname(path) or '.')
    self.project.SetProperty('projectDirPath', projectDirPath)
    self.project_file = \
        gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
    self.build_file_dict = build_file_dict

    # TODO(mark): add destructor that cleans up self.path if created_dir is
    # True and things didn't complete successfully.  Or do something even
    # better with "try"?
    # created_dir records whether *we* made the directory, so Write() knows
    # whether it is safe to remove it on failure.
    self.created_dir = False
    try:
      os.makedirs(self.path)
      self.created_dir = True
    except OSError, e:
      # An already-existing directory is fine; anything else is fatal.
      if e.errno != errno.EEXIST:
        raise

  def Finalize1(self, xcode_targets, serialize_all_tests):
    """First finalization pass: configurations, target ordering, All target.

    xcode_targets maps qualified GYP target names to XCTarget objects;
    serialize_all_tests forces 'run_as' test targets to run one at a time.
    """
    # Collect a list of all of the build configuration names used by the
    # various targets in the file.  It is very heavily advised to keep each
    # target in an entire project (even across multiple project files) using
    # the same set of configuration names.
    configurations = []
    for xct in self.project.GetProperty('targets'):
      xccl = xct.GetProperty('buildConfigurationList')
      xcbcs = xccl.GetProperty('buildConfigurations')
      for xcbc in xcbcs:
        name = xcbc.GetProperty('name')
        if name not in configurations:
          configurations.append(name)

    # Replace the XCConfigurationList attached to the PBXProject object with
    # a new one specifying all of the configuration names used by the various
    # targets.
    try:
      xccl = CreateXCConfigurationList(configurations)
      self.project.SetProperty('buildConfigurationList', xccl)
    except:
      # NOTE(review): bare except is kept to preserve behavior; it annotates
      # the failure with the gyp file name before re-raising.
      sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
      raise

    # The need for this setting is explained above where _intermediate_var is
    # defined.  The comments below about wanting to avoid project-wide build
    # settings apply here too, but this needs to be set on a project-wide basis
    # so that files relative to the _intermediate_var setting can be displayed
    # properly in the Xcode UI.
    #
    # Note that for configuration-relative files such as anything relative to
    # _intermediate_var, for the purposes of UI tree view display, Xcode will
    # only resolve the configuration name once, when the project file is
    # opened.  If the active build configuration is changed, the project file
    # must be closed and reopened if it is desired for the tree view to update.
    # This is filed as Apple radar 6588391.
    xccl.SetBuildSetting(_intermediate_var,
                         '$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
    xccl.SetBuildSetting(_shared_intermediate_var,
                         '$(SYMROOT)/DerivedSources/$(CONFIGURATION)')

    # Set user-specified project-wide build settings and config files.  This
    # is intended to be used very sparingly.  Really, almost everything should
    # go into target-specific build settings sections.  The project-wide
    # settings are only intended to be used in cases where Xcode attempts to
    # resolve variable references in a project context as opposed to a target
    # context, such as when resolving sourceTree references while building up
    # the tree tree view for UI display.
    # Any values set globally are applied to all configurations, then any
    # per-configuration values are applied.
    for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
      xccl.SetBuildSetting(xck, xcv)
    if 'xcode_config_file' in self.build_file_dict:
      config_ref = self.project.AddOrGetFileInRootGroup(
          self.build_file_dict['xcode_config_file'])
      xccl.SetBaseConfiguration(config_ref)
    build_file_configurations = self.build_file_dict.get('configurations', {})
    if build_file_configurations:
      for config_name in configurations:
        build_file_configuration_named = \
            build_file_configurations.get(config_name, {})
        if build_file_configuration_named:
          xcc = xccl.ConfigurationNamed(config_name)
          for xck, xcv in build_file_configuration_named.get('xcode_settings',
                                                             {}).iteritems():
            xcc.SetBuildSetting(xck, xcv)
          if 'xcode_config_file' in build_file_configuration_named:
            config_ref = self.project.AddOrGetFileInRootGroup(
                build_file_configurations[config_name]['xcode_config_file'])
            xcc.SetBaseConfiguration(config_ref)

    # Sort the targets based on how they appeared in the input.
    # TODO(mark): Like a lot of other things here, this assumes internal
    # knowledge of PBXProject - in this case, of its "targets" property.

    # ordinary_targets are ordinary targets that are already in the project
    # file.  run_test_targets are the targets that run unittests and should be
    # used for the Run All Tests target.  support_targets are the action/rule
    # targets used by GYP file targets, just kept for the assert check.
    ordinary_targets = []
    run_test_targets = []
    support_targets = []

    # targets is full list of targets in the project.
    targets = []

    # Does the input define its own "all" target?
    has_custom_all = False

    # targets_for_all is the list of ordinary_targets that should be listed
    # in this project's "All" target.  It includes each non_runtest_target
    # that does not have suppress_wildcard set.
    targets_for_all = []

    for target in self.build_file_dict['targets']:
      target_name = target['target_name']
      toolset = target['toolset']
      qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
                                                    toolset)
      xcode_target = xcode_targets[qualified_target]
      # Make sure that the target being added to the sorted list is already in
      # the unsorted list.
      assert xcode_target in self.project._properties['targets']
      targets.append(xcode_target)
      ordinary_targets.append(xcode_target)
      if xcode_target.support_target:
        support_targets.append(xcode_target.support_target)
        targets.append(xcode_target.support_target)
      if not int(target.get('suppress_wildcard', False)):
        targets_for_all.append(xcode_target)
      if target_name.lower() == 'all':
        has_custom_all = True;

      # If this target has a 'run_as' attribute, add its target to the
      # targets, and add it to the test targets.
      if target.get('run_as'):
        # Make a target to run something.  It should have one
        # dependency, the parent xcode target.
        xccl = CreateXCConfigurationList(configurations)
        run_target = gyp.xcodeproj_file.PBXAggregateTarget({
              'name':                   'Run ' + target_name,
              'productName':            xcode_target.GetProperty('productName'),
              'buildConfigurationList': xccl,
            },
            parent=self.project)
        run_target.AddDependency(xcode_target)

        # Build a shell script that cd's into the working directory, exports
        # the requested environment, then exec's the run_as action.
        command = target['run_as']
        script = ''
        if command.get('working_directory'):
          script = script + 'cd "%s"\n' % \
                   gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                       command.get('working_directory'))

        if command.get('environment'):
          script = script + "\n".join(
            ['export %s="%s"' %
             (key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
             for (key, val) in command.get('environment').iteritems()]) + "\n"

        # Some test end up using sockets, files on disk, etc. and can get
        # confused if more then one test runs at a time.  The generator
        # flag 'xcode_serialize_all_test_runs' controls the forcing of all
        # tests serially.  It defaults to True.  To get serial runs this
        # little bit of python does the same as the linux flock utility to
        # make sure only one runs at a time.
        command_prefix = ''
        if serialize_all_tests:
          command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """

        # If we were unable to exec for some reason, we want to exit
        # with an error, and fixup variable references to be shell
        # syntax instead of xcode syntax.
        script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
                 gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
                     gyp.common.EncodePOSIXShellList(command.get('action')))

        ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
              'shellScript': script,
              'showEnvVarsInLog': 0,
            })
        run_target.AppendProperty('buildPhases', ssbp)

        # Add the run target to the project file.
        targets.append(run_target)
        run_test_targets.append(run_target)
        xcode_target.test_runner = run_target

    # Make sure that the list of targets being replaced is the same length as
    # the one replacing it, but allow for the added test runner targets.
    assert len(self.project._properties['targets']) == \
      len(ordinary_targets) + len(support_targets)
    self.project._properties['targets'] = targets

    # Get rid of unnecessary levels of depth in groups like the Source group.
    self.project.RootGroupsTakeOverOnlyChildren(True)

    # Sort the groups nicely.  Do this after sorting the targets, because the
    # Products group is sorted based on the order of the targets.
    self.project.SortGroups()

    # Create an "All" target if there's more than one target in this project
    # file and the project didn't define its own "All" target.  Put a generated
    # "All" target first so that people opening up the project for the first
    # time will build everything by default.
    if len(targets_for_all) > 1 and not has_custom_all:
      xccl = CreateXCConfigurationList(configurations)
      all_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name':                   'All',
          },
          parent=self.project)

      for target in targets_for_all:
        all_target.AddDependency(target)

      # TODO(mark): This is evil because it relies on internal knowledge of
      # PBXProject._properties.  It's important to get the "All" target first,
      # though.
      self.project._properties['targets'].insert(0, all_target)

    # The same, but for run_test_targets.
    if len(run_test_targets) > 1:
      xccl = CreateXCConfigurationList(configurations)
      run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
          {
            'buildConfigurationList': xccl,
            'name':                   'Run All Tests',
          },
          parent=self.project)
      for run_test_target in run_test_targets:
        run_all_tests_target.AddDependency(run_test_target)

      # Insert after the "All" target, which must exist if there is more than
      # one run_test_target.
      self.project._properties['targets'].insert(1, run_all_tests_target)

  def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
    """Second finalization pass: dependent test runners and cross-project refs.

    Must run after Finalize1 has sorted ALL project files, because updating
    references to other projects depends on target ordering in remote files.
    """
    # Finalize2 needs to happen in a separate step because the process of
    # updating references to other projects depends on the ordering of targets
    # within remote project files.  Finalize1 is responsible for sorting duty,
    # and once all project files are sorted, Finalize2 can come in and update
    # these references.

    # To support making a "test runner" target that will run all the tests
    # that are direct dependents of any given target, we look for
    # xcode_create_dependents_test_runner being set on an Aggregate target,
    # and generate a second target that will run the tests runners found under
    # the marked target.
    for bf_tgt in self.build_file_dict['targets']:
      if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
        tgt_name = bf_tgt['target_name']
        toolset = bf_tgt['toolset']
        qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
                                                      tgt_name, toolset)
        xcode_target = xcode_targets[qualified_target]
        if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
          # Collect all the run test targets.
          all_run_tests = []
          pbxtds = xcode_target.GetProperty('dependencies')
          for pbxtd in pbxtds:
            pbxcip = pbxtd.GetProperty('targetProxy')
            dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
            # test_runner is attached to dependents by Finalize1's run_as
            # handling; only those targets contribute here.
            if hasattr(dependency_xct, 'test_runner'):
              all_run_tests.append(dependency_xct.test_runner)

          # Directly depend on all the runners as they depend on the target
          # that builds them.
          if len(all_run_tests) > 0:
            run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
                  'name':        'Run %s Tests' % tgt_name,
                  'productName': tgt_name,
                },
                parent=self.project)
            for run_test_target in all_run_tests:
              run_all_target.AddDependency(run_test_target)

            # Insert the test runner after the related target.
            idx = self.project._properties['targets'].index(xcode_target)
            self.project._properties['targets'].insert(idx + 1, run_all_target)

    # Update all references to other projects, to make sure that the lists of
    # remote products are complete.  Otherwise, Xcode will fill them in when
    # it opens the project file, which will result in unnecessary diffs.
    # TODO(mark): This is evil because it relies on internal knowledge of
    # PBXProject._other_pbxprojects.
    for other_pbxproject in self.project._other_pbxprojects.keys():
      self.project.AddOrGetProjectReference(other_pbxproject)

    self.project.SortRemoteProductReferences()

    # Give everything an ID.
    self.project_file.ComputeIDs()

    # Make sure that no two objects in the project file have the same ID.  If
    # multiple objects wind up with the same ID, upon loading the file, Xcode
    # will only recognize one object (the last one in the file?) and the
    # results are unpredictable.
    self.project_file.EnsureNoIDCollisions()

  def Write(self):
    """Writes the finalized project.pbxproj atomically into self.path."""
    # Write the project file to a temporary location first.  Xcode watches for
    # changes to the project file and presents a UI sheet offering to reload
    # the project when it does change.  However, in some cases, especially when
    # multiple projects are open or when Xcode is busy, things don't work so
    # seamlessly.  Sometimes, Xcode is able to detect that a project file has
    # changed but can't unload it because something else is referencing it.
    # To mitigate this problem, and to avoid even having Xcode present the UI
    # sheet when an open project is rewritten for inconsequential changes, the
    # project file is written to a temporary file in the xcodeproj directory
    # first.  The new temporary file is then compared to the existing project
    # file, if any.  If they differ, the new file replaces the old; otherwise,
    # the new project file is simply deleted.  Xcode properly detects a file
    # being renamed over an open project file as a change and so it remains
    # able to present the "project file changed" sheet under this system.
    # Writing to a temporary file first also avoids the possible problem of
    # Xcode rereading an incomplete project file.
    (output_fd, new_pbxproj_path) = \
        tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
                         dir=self.path)

    try:
      output_file = os.fdopen(output_fd, 'wb')
      self.project_file.Print(output_file)
      output_file.close()

      pbxproj_path = os.path.join(self.path, 'project.pbxproj')

      same = False
      try:
        same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
      except OSError, e:
        # A missing old project file just means "not the same".
        if e.errno != errno.ENOENT:
          raise

      if same:
        # The new file is identical to the old one, just get rid of the new
        # one.
        os.unlink(new_pbxproj_path)
      else:
        # The new file is different from the old one, or there is no old one.
        # Rename the new file to the permanent name.
        #
        # tempfile.mkstemp uses an overly restrictive mode, resulting in a
        # file that can only be read by the owner, regardless of the umask.
        # There's no reason to not respect the umask here, which means that
        # an extra hoop is required to fetch it and reset the new file's mode.
        #
        # No way to get the umask without setting a new one?  Set a safe one
        # and then set it back to the old value.
        umask = os.umask(077)
        os.umask(umask)
        os.chmod(new_pbxproj_path, 0666 & ~umask)
        os.rename(new_pbxproj_path, pbxproj_path)
    except Exception:
      # Don't leave turds behind.  In fact, if this code was responsible for
      # creating the xcodeproj directory, get rid of that too.
      os.unlink(new_pbxproj_path)
      if self.created_dir:
        shutil.rmtree(self.path, True)
      raise


def AddSourceToTarget(source, type, pbxp, xct):
  """Routes a source file into the target's Sources or Frameworks phase.

  Files that are neither compilable sources nor linkable libraries are added
  to the project's root group without joining any build phase.
  """
  # TODO(mark): Perhaps source_extensions and library_extensions can be made a
  # little bit fancier.
  source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']

  # .o is conceptually more of a "source" than a "library," but Xcode thinks
  # of "sources" as things to compile and "libraries" (or "frameworks") as
  # things to link with.  Adding an object file to an Xcode target's frameworks
  # phase works properly.
  library_extensions = ['a', 'dylib', 'framework', 'o']

  basename = posixpath.basename(source)
  (root, ext) = posixpath.splitext(basename)
  if ext:
    ext = ext[1:].lower()

  if ext in source_extensions and type != 'none':
    xct.SourcesPhase().AddFile(source)
  elif ext in library_extensions and type != 'none':
    xct.FrameworksPhase().AddFile(source)
  else:
    # Files that aren't added to a sources or frameworks build phase can still
    # go into the project file, just not as part of a build phase.
    pbxp.AddOrGetFileInRootGroup(source)


def AddResourceToTarget(resource, pbxp, xct):
  """Adds a file to the target's Copy Bundle Resources phase."""
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  xct.ResourcesPhase().AddFile(resource)


def AddHeaderToTarget(header, pbxp, xct, is_public):
  """Adds a header to the target's Headers phase as Public or Private."""
  # TODO(mark): Combine with AddSourceToTarget above?  Or just inline this call
  # where it's used.
  settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
  xct.HeadersPhase().AddFile(header, settings)


_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
  """Expands Xcode-style $(VARIABLES) in string per the expansions dict.

  In some rare cases, it is appropriate to expand Xcode variables when a
  project file is generated.  For any substring $(VAR) in string, if VAR is a
  key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
  Any $(VAR) substring in string for which VAR is not a key in the expansions
  dict will remain in the returned string.
  """

  matches = _xcode_variable_re.findall(string)
  # NOTE(review): findall returns a (possibly empty) list, never None, so
  # this guard cannot fire; kept unchanged to preserve behavior.
  if matches == None:
    return string

  # Reversing lets replacements proceed from the end of the string so earlier
  # match offsets stay valid.
  matches.reverse()
  for match in matches:
    (to_replace, variable) = match
    if not variable in expansions:
      continue

    replacement = expansions[variable]
    string = re.sub(re.escape(to_replace), replacement, string)

  return string


_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
  """We must escape the defines that we give to XCode so that it knows not to
  split on spaces and to respect backslash and quote literals.
However, we must not quote the define, or Xcode will incorrectly intepret variables especially $(inherited).""" return re.sub(_xcode_define_re, r'\\\1', s) def PerformBuild(data, configurations, params): options = params['options'] for build_file, build_file_dict in data.iteritems(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' if options.generator_output: xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) for config in configurations: arguments = ['xcodebuild', '-project', xcodeproj_path] arguments += ['-configuration', config] print "Building [%s]: %s" % (config, arguments) subprocess.check_call(arguments) def CalculateGeneratorInputInfo(params): toplevel = params['options'].toplevel_dir if params.get('flavor') == 'ninja': generator_dir = os.path.relpath(params['options'].generator_output or '.') output_dir = params.get('generator_flags', {}).get('output_dir', 'out') output_dir = os.path.normpath(os.path.join(generator_dir, output_dir)) qualified_out_dir = os.path.normpath(os.path.join( toplevel, output_dir, 'gypfiles-xcode-ninja')) else: output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild')) qualified_out_dir = os.path.normpath(os.path.join( toplevel, output_dir, 'gypfiles')) global generator_filelist_paths generator_filelist_paths = { 'toplevel': toplevel, 'qualified_out_dir': qualified_out_dir, } def GenerateOutput(target_list, target_dicts, data, params): # Optionally configure each spec to use ninja as the external builder. 
ninja_wrapper = params.get('flavor') == 'ninja' if ninja_wrapper: (target_list, target_dicts, data) = \ gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params) options = params['options'] generator_flags = params.get('generator_flags', {}) parallel_builds = generator_flags.get('xcode_parallel_builds', True) serialize_all_tests = \ generator_flags.get('xcode_serialize_all_test_runs', True) upgrade_check_project_version = \ generator_flags.get('xcode_upgrade_check_project_version', None) # Format upgrade_check_project_version with leading zeros as needed. if upgrade_check_project_version: upgrade_check_project_version = str(upgrade_check_project_version) while len(upgrade_check_project_version) < 4: upgrade_check_project_version = '0' + upgrade_check_project_version skip_excluded_files = \ not generator_flags.get('xcode_list_excluded_files', True) xcode_projects = {} for build_file, build_file_dict in data.iteritems(): (build_file_root, build_file_ext) = os.path.splitext(build_file) if build_file_ext != '.gyp': continue xcodeproj_path = build_file_root + options.suffix + '.xcodeproj' if options.generator_output: xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path) xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict) xcode_projects[build_file] = xcp pbxp = xcp.project # Set project-level attributes from multiple options project_attributes = {}; if parallel_builds: project_attributes['BuildIndependentTargetsInParallel'] = 'YES' if upgrade_check_project_version: project_attributes['LastUpgradeCheck'] = upgrade_check_project_version project_attributes['LastTestingUpgradeCheck'] = \ upgrade_check_project_version project_attributes['LastSwiftUpdateCheck'] = \ upgrade_check_project_version pbxp.SetProperty('attributes', project_attributes) # Add gyp/gypi files to project if not generator_flags.get('standalone'): main_group = pbxp.GetProperty('mainGroup') build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'}) 
main_group.AppendChild(build_group) for included_file in build_file_dict['included_files']: build_group.AddOrGetFileByPath(included_file, False) xcode_targets = {} xcode_target_to_target_dict = {} for qualified_target in target_list: [build_file, target_name, toolset] = \ gyp.common.ParseQualifiedTarget(qualified_target) spec = target_dicts[qualified_target] if spec['toolset'] != 'target': raise Exception( 'Multiple toolsets not supported in xcode build (target %s)' % qualified_target) configuration_names = [spec['default_configuration']] for configuration_name in sorted(spec['configurations'].keys()): if configuration_name not in configuration_names: configuration_names.append(configuration_name) xcp = xcode_projects[build_file] pbxp = xcp.project # Set up the configurations for the target according to the list of names # supplied. xccl = CreateXCConfigurationList(configuration_names) # Create an XCTarget subclass object for the target. The type with # "+bundle" appended will be used if the target has "mac_bundle" set. # loadable_modules not in a mac_bundle are mapped to # com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets # to create a single-file mh_bundle. 
_types = { 'executable': 'com.apple.product-type.tool', 'loadable_module': 'com.googlecode.gyp.xcode.bundle', 'shared_library': 'com.apple.product-type.library.dynamic', 'static_library': 'com.apple.product-type.library.static', 'mac_kernel_extension': 'com.apple.product-type.kernel-extension', 'executable+bundle': 'com.apple.product-type.application', 'loadable_module+bundle': 'com.apple.product-type.bundle', 'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test', 'shared_library+bundle': 'com.apple.product-type.framework', 'executable+extension+bundle': 'com.apple.product-type.app-extension', 'executable+watch+extension+bundle': 'com.apple.product-type.watchkit-extension', 'executable+watch+bundle': 'com.apple.product-type.application.watchapp', 'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension', } target_properties = { 'buildConfigurationList': xccl, 'name': target_name, } type = spec['type'] is_xctest = int(spec.get('mac_xctest_bundle', 0)) is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest is_app_extension = int(spec.get('ios_app_extension', 0)) is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0)) is_watch_app = int(spec.get('ios_watch_app', 0)) if type != 'none': type_bundle_key = type if is_xctest: type_bundle_key += '+xctest' assert type == 'loadable_module', ( 'mac_xctest_bundle targets must have type loadable_module ' '(target %s)' % target_name) elif is_app_extension: assert is_bundle, ('ios_app_extension flag requires mac_bundle ' '(target %s)' % target_name) type_bundle_key += '+extension+bundle' elif is_watchkit_extension: assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle ' '(target %s)' % target_name) type_bundle_key += '+watch+extension+bundle' elif is_watch_app: assert is_bundle, ('ios_watch_app flag requires mac_bundle ' '(target %s)' % target_name) type_bundle_key += '+watch+bundle' elif is_bundle: type_bundle_key += '+bundle' xctarget_type = 
gyp.xcodeproj_file.PBXNativeTarget try: target_properties['productType'] = _types[type_bundle_key] except KeyError, e: gyp.common.ExceptionAppend(e, "-- unknown product type while " "writing target %s" % target_name) raise else: xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget assert not is_bundle, ( 'mac_bundle targets cannot have type none (target "%s")' % target_name) assert not is_xctest, ( 'mac_xctest_bundle targets cannot have type none (target "%s")' % target_name) target_product_name = spec.get('product_name') if target_product_name is not None: target_properties['productName'] = target_product_name xct = xctarget_type(target_properties, parent=pbxp, force_outdir=spec.get('product_dir'), force_prefix=spec.get('product_prefix'), force_extension=spec.get('product_extension')) pbxp.AppendProperty('targets', xct) xcode_targets[qualified_target] = xct xcode_target_to_target_dict[xct] = spec spec_actions = spec.get('actions', []) spec_rules = spec.get('rules', []) # Xcode has some "issues" with checking dependencies for the "Compile # sources" step with any source files/headers generated by actions/rules. # To work around this, if a target is building anything directly (not # type "none"), then a second target is used to run the GYP actions/rules # and is made a dependency of this target. This way the work is done # before the dependency checks for what should be recompiled. support_xct = None # The Xcode "issues" don't affect xcode-ninja builds, since the dependency # logic all happens in ninja. Don't bother creating the extra targets in # that case. 
if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper: support_xccl = CreateXCConfigurationList(configuration_names); support_target_suffix = generator_flags.get( 'support_target_suffix', ' Support') support_target_properties = { 'buildConfigurationList': support_xccl, 'name': target_name + support_target_suffix, } if target_product_name: support_target_properties['productName'] = \ target_product_name + ' Support' support_xct = \ gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties, parent=pbxp) pbxp.AppendProperty('targets', support_xct) xct.AddDependency(support_xct) # Hang the support target off the main target so it can be tested/found # by the generator during Finalize. xct.support_target = support_xct prebuild_index = 0 # Add custom shell script phases for "actions" sections. for action in spec_actions: # There's no need to write anything into the script to ensure that the # output directories already exist, because Xcode will look at the # declared outputs and automatically ensure that they exist for us. # Do we have a message to print when this action runs? message = action.get('message') if message: message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message) else: message = '' # Turn the list into a string that can be passed to a shell. action_string = gyp.common.EncodePOSIXShellList(action['action']) # Convert Xcode-type variable references to sh-compatible environment # variable references. message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message) action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax( action_string) script = '' # Include the optional message if message_sh: script += message_sh + '\n' # Be sure the script runs in exec, and that if exec fails, the script # exits signalling an error. 
script += 'exec ' + action_string_sh + '\nexit 1\n' ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'inputPaths': action['inputs'], 'name': 'Action "' + action['action_name'] + '"', 'outputPaths': action['outputs'], 'shellScript': script, 'showEnvVarsInLog': 0, }) if support_xct: support_xct.AppendProperty('buildPhases', ssbp) else: # TODO(mark): this assumes too much knowledge of the internals of # xcodeproj_file; some of these smarts should move into xcodeproj_file # itself. xct._properties['buildPhases'].insert(prebuild_index, ssbp) prebuild_index = prebuild_index + 1 # TODO(mark): Should verify that at most one of these is specified. if int(action.get('process_outputs_as_sources', False)): for output in action['outputs']: AddSourceToTarget(output, type, pbxp, xct) if int(action.get('process_outputs_as_mac_bundle_resources', False)): for output in action['outputs']: AddResourceToTarget(output, pbxp, xct) # tgt_mac_bundle_resources holds the list of bundle resources so # the rule processing can check against it. if is_bundle: tgt_mac_bundle_resources = spec.get('mac_bundle_resources', []) else: tgt_mac_bundle_resources = [] # Add custom shell script phases driving "make" for "rules" sections. # # Xcode's built-in rule support is almost powerful enough to use directly, # but there are a few significant deficiencies that render them unusable. # There are workarounds for some of its inadequacies, but in aggregate, # the workarounds added complexity to the generator, and some workarounds # actually require input files to be crafted more carefully than I'd like. # Consequently, until Xcode rules are made more capable, "rules" input # sections will be handled in Xcode output by shell script build phases # performed prior to the compilation phase. # # The following problems with Xcode rules were found. The numbers are # Apple radar IDs. 
I hope that these shortcomings are addressed, I really # liked having the rules handled directly in Xcode during the period that # I was prototyping this. # # 6588600 Xcode compiles custom script rule outputs too soon, compilation # fails. This occurs when rule outputs from distinct inputs are # interdependent. The only workaround is to put rules and their # inputs in a separate target from the one that compiles the rule # outputs. This requires input file cooperation and it means that # process_outputs_as_sources is unusable. # 6584932 Need to declare that custom rule outputs should be excluded from # compilation. A possible workaround is to lie to Xcode about a # rule's output, giving it a dummy file it doesn't know how to # compile. The rule action script would need to touch the dummy. # 6584839 I need a way to declare additional inputs to a custom rule. # A possible workaround is a shell script phase prior to # compilation that touches a rule's primary input files if any # would-be additional inputs are newer than the output. Modifying # the source tree - even just modification times - feels dirty. # 6564240 Xcode "custom script" build rules always dump all environment # variables. This is a low-prioroty problem and is not a # show-stopper. rules_by_ext = {} for rule in spec_rules: rules_by_ext[rule['extension']] = rule # First, some definitions: # # A "rule source" is a file that was listed in a target's "sources" # list and will have a rule applied to it on the basis of matching the # rule's "extensions" attribute. Rule sources are direct inputs to # rules. # # Rule definitions may specify additional inputs in their "inputs" # attribute. These additional inputs are used for dependency tracking # purposes. # # A "concrete output" is a rule output with input-dependent variables # resolved. 
For example, given a rule with: # 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'], # if the target's "sources" list contained "one.ext" and "two.ext", # the "concrete output" for rule input "two.ext" would be "two.cc". If # a rule specifies multiple outputs, each input file that the rule is # applied to will have the same number of concrete outputs. # # If any concrete outputs are outdated or missing relative to their # corresponding rule_source or to any specified additional input, the # rule action must be performed to generate the concrete outputs. # concrete_outputs_by_rule_source will have an item at the same index # as the rule['rule_sources'] that it corresponds to. Each item is a # list of all of the concrete outputs for the rule_source. concrete_outputs_by_rule_source = [] # concrete_outputs_all is a flat list of all concrete outputs that this # rule is able to produce, given the known set of input files # (rule_sources) that apply to it. concrete_outputs_all = [] # messages & actions are keyed by the same indices as rule['rule_sources'] # and concrete_outputs_by_rule_source. They contain the message and # action to perform after resolving input-dependent variables. The # message is optional, in which case None is stored for each rule source. messages = [] actions = [] for rule_source in rule.get('rule_sources', []): rule_source_dirname, rule_source_basename = \ posixpath.split(rule_source) (rule_source_root, rule_source_ext) = \ posixpath.splitext(rule_source_basename) # These are the same variable names that Xcode uses for its own native # rule support. Because Xcode's rule engine is not being used, they # need to be expanded as they are written to the makefile. 
rule_input_dict = { 'INPUT_FILE_BASE': rule_source_root, 'INPUT_FILE_SUFFIX': rule_source_ext, 'INPUT_FILE_NAME': rule_source_basename, 'INPUT_FILE_PATH': rule_source, 'INPUT_FILE_DIRNAME': rule_source_dirname, } concrete_outputs_for_this_rule_source = [] for output in rule.get('outputs', []): # Fortunately, Xcode and make both use $(VAR) format for their # variables, so the expansion is the only transformation necessary. # Any remaning $(VAR)-type variables in the string can be given # directly to make, which will pick up the correct settings from # what Xcode puts into the environment. concrete_output = ExpandXcodeVariables(output, rule_input_dict) concrete_outputs_for_this_rule_source.append(concrete_output) # Add all concrete outputs to the project. pbxp.AddOrGetFileInRootGroup(concrete_output) concrete_outputs_by_rule_source.append( \ concrete_outputs_for_this_rule_source) concrete_outputs_all.extend(concrete_outputs_for_this_rule_source) # TODO(mark): Should verify that at most one of these is specified. if int(rule.get('process_outputs_as_sources', False)): for output in concrete_outputs_for_this_rule_source: AddSourceToTarget(output, type, pbxp, xct) # If the file came from the mac_bundle_resources list or if the rule # is marked to process outputs as bundle resource, do so. was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources if was_mac_bundle_resource or \ int(rule.get('process_outputs_as_mac_bundle_resources', False)): for output in concrete_outputs_for_this_rule_source: AddResourceToTarget(output, pbxp, xct) # Do we have a message to print when this rule runs? message = rule.get('message') if message: message = gyp.common.EncodePOSIXShellArgument(message) message = ExpandXcodeVariables(message, rule_input_dict) messages.append(message) # Turn the list into a string that can be passed to a shell. 
action_string = gyp.common.EncodePOSIXShellList(rule['action']) action = ExpandXcodeVariables(action_string, rule_input_dict) actions.append(action) if len(concrete_outputs_all) > 0: # TODO(mark): There's a possibilty for collision here. Consider # target "t" rule "A_r" and target "t_A" rule "r". makefile_name = '%s.make' % re.sub( '[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name'])) makefile_path = os.path.join(xcode_projects[build_file].path, makefile_name) # TODO(mark): try/close? Write to a temporary file and swap it only # if it's got changes? makefile = open(makefile_path, 'wb') # make will build the first target in the makefile by default. By # convention, it's called "all". List all (or at least one) # concrete output for each rule source as a prerequisite of the "all" # target. makefile.write('all: \\\n') for concrete_output_index in \ xrange(0, len(concrete_outputs_by_rule_source)): # Only list the first (index [0]) concrete output of each input # in the "all" target. Otherwise, a parallel make (-j > 1) would # attempt to process each input multiple times simultaneously. # Otherwise, "all" could just contain the entire list of # concrete_outputs_all. concrete_output = \ concrete_outputs_by_rule_source[concrete_output_index][0] if concrete_output_index == len(concrete_outputs_by_rule_source) - 1: eol = '' else: eol = ' \\' makefile.write(' %s%s\n' % (concrete_output, eol)) for (rule_source, concrete_outputs, message, action) in \ zip(rule['rule_sources'], concrete_outputs_by_rule_source, messages, actions): makefile.write('\n') # Add a rule that declares it can build each concrete output of a # rule source. Collect the names of the directories that are # required. 
concrete_output_dirs = [] for concrete_output_index in xrange(0, len(concrete_outputs)): concrete_output = concrete_outputs[concrete_output_index] if concrete_output_index == 0: bol = '' else: bol = ' ' makefile.write('%s%s \\\n' % (bol, concrete_output)) concrete_output_dir = posixpath.dirname(concrete_output) if (concrete_output_dir and concrete_output_dir not in concrete_output_dirs): concrete_output_dirs.append(concrete_output_dir) makefile.write(' : \\\n') # The prerequisites for this rule are the rule source itself and # the set of additional rule inputs, if any. prerequisites = [rule_source] prerequisites.extend(rule.get('inputs', [])) for prerequisite_index in xrange(0, len(prerequisites)): prerequisite = prerequisites[prerequisite_index] if prerequisite_index == len(prerequisites) - 1: eol = '' else: eol = ' \\' makefile.write(' %s%s\n' % (prerequisite, eol)) # Make sure that output directories exist before executing the rule # action. if len(concrete_output_dirs) > 0: makefile.write('\t@mkdir -p "%s"\n' % '" "'.join(concrete_output_dirs)) # The rule message and action have already had the necessary variable # substitutions performed. if message: # Mark it with note: so Xcode picks it up in build output. makefile.write('\t@echo note: %s\n' % message) makefile.write('\t%s\n' % action) makefile.close() # It might be nice to ensure that needed output directories exist # here rather than in each target in the Makefile, but that wouldn't # work if there ever was a concrete output that had an input-dependent # variable anywhere other than in the leaf position. # Don't declare any inputPaths or outputPaths. If they're present, # Xcode will provide a slight optimization by only running the script # phase if any output is missing or outdated relative to any input. # Unfortunately, it will also assume that all outputs are touched by # the script, and if the outputs serve as files in a compilation # phase, they will be unconditionally rebuilt. 
Since make might not # rebuild everything that could be declared here as an output, this # extra compilation activity is unnecessary. With inputPaths and # outputPaths not supplied, make will always be called, but it knows # enough to not do anything when everything is up-to-date. # To help speed things up, pass -j COUNT to make so it does some work # in parallel. Don't use ncpus because Xcode will build ncpus targets # in parallel and if each target happens to have a rules step, there # would be ncpus^2 things going. With a machine that has 2 quad-core # Xeons, a build can quickly run out of processes based on # scheduling/other tasks, and randomly failing builds are no good. script = \ """JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)" if [ "${JOB_COUNT}" -gt 4 ]; then JOB_COUNT=4 fi exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}" exit 1 """ % makefile_name ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'name': 'Rule "' + rule['rule_name'] + '"', 'shellScript': script, 'showEnvVarsInLog': 0, }) if support_xct: support_xct.AppendProperty('buildPhases', ssbp) else: # TODO(mark): this assumes too much knowledge of the internals of # xcodeproj_file; some of these smarts should move into xcodeproj_file # itself. xct._properties['buildPhases'].insert(prebuild_index, ssbp) prebuild_index = prebuild_index + 1 # Extra rule inputs also go into the project file. Concrete outputs were # already added when they were computed. groups = ['inputs', 'inputs_excluded'] if skip_excluded_files: groups = [x for x in groups if not x.endswith('_excluded')] for group in groups: for item in rule.get(group, []): pbxp.AddOrGetFileInRootGroup(item) # Add "sources". for source in spec.get('sources', []): (source_root, source_extension) = posixpath.splitext(source) if source_extension[1:] not in rules_by_ext: # AddSourceToTarget will add the file to a root group if it's not # already there. 
AddSourceToTarget(source, type, pbxp, xct) else: pbxp.AddOrGetFileInRootGroup(source) # Add "mac_bundle_resources" and "mac_framework_private_headers" if # it's a bundle of any type. if is_bundle: for resource in tgt_mac_bundle_resources: (resource_root, resource_extension) = posixpath.splitext(resource) if resource_extension[1:] not in rules_by_ext: AddResourceToTarget(resource, pbxp, xct) else: pbxp.AddOrGetFileInRootGroup(resource) for header in spec.get('mac_framework_private_headers', []): AddHeaderToTarget(header, pbxp, xct, False) # Add "mac_framework_headers". These can be valid for both frameworks # and static libraries. if is_bundle or type == 'static_library': for header in spec.get('mac_framework_headers', []): AddHeaderToTarget(header, pbxp, xct, True) # Add "copies". pbxcp_dict = {} for copy_group in spec.get('copies', []): dest = copy_group['destination'] if dest[0] not in ('/', '$'): # Relative paths are relative to $(SRCROOT). dest = '$(SRCROOT)/' + dest code_sign = int(copy_group.get('xcode_code_sign', 0)) settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]; # Coalesce multiple "copies" sections in the same target with the same # "destination" property into the same PBXCopyFilesBuildPhase, otherwise # they'll wind up with ID collisions. pbxcp = pbxcp_dict.get(dest, None) if pbxcp is None: pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({ 'name': 'Copy to ' + copy_group['destination'] }, parent=xct) pbxcp.SetDestination(dest) # TODO(mark): The usual comment about this knowing too much about # gyp.xcodeproj_file internals applies. xct._properties['buildPhases'].insert(prebuild_index, pbxcp) pbxcp_dict[dest] = pbxcp for file in copy_group['files']: pbxcp.AddFile(file, settings) # Excluded files can also go into the project file. 
if not skip_excluded_files: for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers', 'mac_framework_private_headers']: excluded_key = key + '_excluded' for item in spec.get(excluded_key, []): pbxp.AddOrGetFileInRootGroup(item) # So can "inputs" and "outputs" sections of "actions" groups. groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded'] if skip_excluded_files: groups = [x for x in groups if not x.endswith('_excluded')] for action in spec.get('actions', []): for group in groups: for item in action.get(group, []): # Exclude anything in BUILT_PRODUCTS_DIR. They're products, not # sources. if not item.startswith('$(BUILT_PRODUCTS_DIR)/'): pbxp.AddOrGetFileInRootGroup(item) for postbuild in spec.get('postbuilds', []): action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action']) script = 'exec ' + action_string_sh + '\nexit 1\n' # Make the postbuild step depend on the output of ld or ar from this # target. Apparently putting the script step after the link step isn't # sufficient to ensure proper ordering in all cases. With an input # declared but no outputs, the script step should run every time, as # desired. ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({ 'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'], 'name': 'Postbuild "' + postbuild['postbuild_name'] + '"', 'shellScript': script, 'showEnvVarsInLog': 0, }) xct.AppendProperty('buildPhases', ssbp) # Add dependencies before libraries, because adding a dependency may imply # adding a library. It's preferable to keep dependencies listed first # during a link phase so that they can override symbols that would # otherwise be provided by libraries, which will usually include system # libraries. On some systems, ld is finicky and even requires the # libraries to be ordered in such a way that unresolved symbols in # earlier-listed libraries may only be resolved by later-listed libraries. 
# The Mac linker doesn't work that way, but other platforms do, and so # their linker invocations need to be constructed in this way. There's # no compelling reason for Xcode's linker invocations to differ. if 'dependencies' in spec: for dependency in spec['dependencies']: xct.AddDependency(xcode_targets[dependency]) # The support project also gets the dependencies (in case they are # needed for the actions/rules to work). if support_xct: support_xct.AddDependency(xcode_targets[dependency]) if 'libraries' in spec: for library in spec['libraries']: xct.FrameworksPhase().AddFile(library) # Add the library's directory to LIBRARY_SEARCH_PATHS if necessary. # I wish Xcode handled this automatically. library_dir = posixpath.dirname(library) if library_dir not in xcode_standard_library_dirs and ( not xct.HasBuildSetting(_library_search_paths_var) or library_dir not in xct.GetBuildSetting(_library_search_paths_var)): xct.AppendBuildSetting(_library_search_paths_var, library_dir) for configuration_name in configuration_names: configuration = spec['configurations'][configuration_name] xcbc = xct.ConfigurationNamed(configuration_name) for include_dir in configuration.get('mac_framework_dirs', []): xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir) for include_dir in configuration.get('include_dirs', []): xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir) for library_dir in configuration.get('library_dirs', []): if library_dir not in xcode_standard_library_dirs and ( not xcbc.HasBuildSetting(_library_search_paths_var) or library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)): xcbc.AppendBuildSetting(_library_search_paths_var, library_dir) if 'defines' in configuration: for define in configuration['defines']: set_define = EscapeXcodeDefine(define) xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define) if 'xcode_settings' in configuration: for xck, xcv in configuration['xcode_settings'].iteritems(): xcbc.SetBuildSetting(xck, xcv) if 
'xcode_config_file' in configuration: config_ref = pbxp.AddOrGetFileInRootGroup( configuration['xcode_config_file']) xcbc.SetBaseConfiguration(config_ref) build_files = [] for build_file, build_file_dict in data.iteritems(): if build_file.endswith('.gyp'): build_files.append(build_file) for build_file in build_files: xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests) for build_file in build_files: xcode_projects[build_file].Finalize2(xcode_targets, xcode_target_to_target_dict) for build_file in build_files: xcode_projects[build_file].Write()
mit
chouseknecht/ansible
lib/ansible/compat/selectors/_selectors2.py
79
23517
# This file is from the selectors2.py package. It backports the PSF Licensed # selectors module from the Python-3.5 stdlib to older versions of Python. # The author, Seth Michael Larson, dual licenses his modifications under the # PSF License and MIT License: # https://github.com/SethMichaelLarson/selectors2#license # # Copyright (c) 2016 Seth Michael Larson # # PSF License (see licenses/PSF-license.txt or https://opensource.org/licenses/Python-2.0) # MIT License (see licenses/MIT-license.txt or https://opensource.org/licenses/MIT) # # Backport of selectors.py from Python 3.5+ to support Python < 3.4 # Also has the behavior specified in PEP 475 which is to retry syscalls # in the case of an EINTR error. This module is required because selectors34 # does not follow this behavior and instead returns that no file descriptor # events have occurred rather than retry the syscall. The decision to drop # support for select.devpoll is made to maintain 100% test coverage. import errno import math import select import socket import sys import time from collections import namedtuple from ansible.module_utils.common._collections_compat import Mapping try: monotonic = time.monotonic except (AttributeError, ImportError): # Python 3.3< monotonic = time.time __author__ = 'Seth Michael Larson' __email__ = 'sethmichaellarson@protonmail.com' __version__ = '1.1.1' __license__ = 'MIT' __all__ = [ 'EVENT_READ', 'EVENT_WRITE', 'SelectorError', 'SelectorKey', 'DefaultSelector' ] EVENT_READ = (1 << 0) EVENT_WRITE = (1 << 1) HAS_SELECT = True # Variable that shows whether the platform has a selector. _SYSCALL_SENTINEL = object() # Sentinel in case a system call returns None. class SelectorError(Exception): def __init__(self, errcode): super(SelectorError, self).__init__() self.errno = errcode def __repr__(self): return "<SelectorError errno={0}>".format(self.errno) def __str__(self): return self.__repr__() def _fileobj_to_fd(fileobj): """ Return a file descriptor from a file object. 
If given an integer will simply return that integer back. """ if isinstance(fileobj, int): fd = fileobj else: try: fd = int(fileobj.fileno()) except (AttributeError, TypeError, ValueError): raise ValueError("Invalid file object: {0!r}".format(fileobj)) if fd < 0: raise ValueError("Invalid file descriptor: {0}".format(fd)) return fd # Python 3.5 uses a more direct route to wrap system calls to increase speed. if sys.version_info >= (3, 5): def _syscall_wrapper(func, _, *args, **kwargs): """ This is the short-circuit version of the below logic because in Python 3.5+ all selectors restart system calls. """ try: return func(*args, **kwargs) except (OSError, IOError, select.error) as e: errcode = None if hasattr(e, "errno"): errcode = e.errno elif hasattr(e, "args"): errcode = e.args[0] raise SelectorError(errcode) else: def _syscall_wrapper(func, recalc_timeout, *args, **kwargs): """ Wrapper function for syscalls that could fail due to EINTR. All functions should be retried if there is time left in the timeout in accordance with PEP 475. """ timeout = kwargs.get("timeout", None) if timeout is None: expires = None recalc_timeout = False else: timeout = float(timeout) if timeout < 0.0: # Timeout less than 0 treated as no timeout. expires = None else: expires = monotonic() + timeout args = list(args) if recalc_timeout and "timeout" not in kwargs: raise ValueError( "Timeout must be in args or kwargs to be recalculated") result = _SYSCALL_SENTINEL while result is _SYSCALL_SENTINEL: try: result = func(*args, **kwargs) # OSError is thrown by select.select # IOError is thrown by select.epoll.poll # select.error is thrown by select.poll.poll # Aren't we thankful for Python 3.x rework for exceptions? except (OSError, IOError, select.error) as e: # select.error wasn't a subclass of OSError in the past. errcode = None if hasattr(e, "errno"): errcode = e.errno elif hasattr(e, "args"): errcode = e.args[0] # Also test for the Windows equivalent of EINTR. 
is_interrupt = (errcode == errno.EINTR or (hasattr(errno, "WSAEINTR") and errcode == errno.WSAEINTR)) if is_interrupt: if expires is not None: current_time = monotonic() if current_time > expires: raise OSError(errno.ETIMEDOUT) if recalc_timeout: if "timeout" in kwargs: kwargs["timeout"] = expires - current_time continue if errcode: raise SelectorError(errcode) else: raise return result SelectorKey = namedtuple('SelectorKey', ['fileobj', 'fd', 'events', 'data']) class _SelectorMapping(Mapping): """ Mapping of file objects to selector keys """ def __init__(self, selector): self._selector = selector def __len__(self): return len(self._selector._fd_to_key) def __getitem__(self, fileobj): try: fd = self._selector._fileobj_lookup(fileobj) return self._selector._fd_to_key[fd] except KeyError: raise KeyError("{0!r} is not registered.".format(fileobj)) def __iter__(self): return iter(self._selector._fd_to_key) class BaseSelector(object): """ Abstract Selector class A selector supports registering file objects to be monitored for specific I/O events. A file object is a file descriptor or any object with a `fileno()` method. An arbitrary object can be attached to the file object which can be used for example to store context info, a callback, etc. A selector can use various implementations (select(), poll(), epoll(), and kqueue()) depending on the platform. The 'DefaultSelector' class uses the most efficient implementation for the current platform. """ def __init__(self): # Maps file descriptors to keys. self._fd_to_key = {} # Read-only mapping returned by get_map() self._map = _SelectorMapping(self) def _fileobj_lookup(self, fileobj): """ Return a file descriptor from a file object. This wraps _fileobj_to_fd() to do an exhaustive search in case the object is invalid but we still have it in our map. Used by unregister() so we can unregister an object that was previously registered even if it is closed. 
It is also used by _SelectorMapping """ try: return _fileobj_to_fd(fileobj) except ValueError: # Search through all our mapped keys. for key in self._fd_to_key.values(): if key.fileobj is fileobj: return key.fd # Raise ValueError after all. raise def register(self, fileobj, events, data=None): """ Register a file object for a set of events to monitor. """ if (not events) or (events & ~(EVENT_READ | EVENT_WRITE)): raise ValueError("Invalid events: {0!r}".format(events)) key = SelectorKey(fileobj, self._fileobj_lookup(fileobj), events, data) if key.fd in self._fd_to_key: raise KeyError("{0!r} (FD {1}) is already registered" .format(fileobj, key.fd)) self._fd_to_key[key.fd] = key return key def unregister(self, fileobj): """ Unregister a file object from being monitored. """ try: key = self._fd_to_key.pop(self._fileobj_lookup(fileobj)) except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) # Getting the fileno of a closed socket on Windows errors with EBADF. except socket.error as err: if err.errno != errno.EBADF: raise else: for key in self._fd_to_key.values(): if key.fileobj is fileobj: self._fd_to_key.pop(key.fd) break else: raise KeyError("{0!r} is not registered".format(fileobj)) return key def modify(self, fileobj, events, data=None): """ Change a registered file object monitored events and data. """ # NOTE: Some subclasses optimize this operation even further. try: key = self._fd_to_key[self._fileobj_lookup(fileobj)] except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) if events != key.events: self.unregister(fileobj) key = self.register(fileobj, events, data) elif data != key.data: # Use a shortcut to update the data. key = key._replace(data=data) self._fd_to_key[key.fd] = key return key def select(self, timeout=None): """ Perform the actual selection until some monitored file objects are ready or the timeout expires. """ raise NotImplementedError() def close(self): """ Close the selector. 
This must be called to ensure that all underlying resources are freed. """ self._fd_to_key.clear() self._map = None def get_key(self, fileobj): """ Return the key associated with a registered file object. """ mapping = self.get_map() if mapping is None: raise RuntimeError("Selector is closed") try: return mapping[fileobj] except KeyError: raise KeyError("{0!r} is not registered".format(fileobj)) def get_map(self): """ Return a mapping of file objects to selector keys """ return self._map def _key_from_fd(self, fd): """ Return the key associated to a given file descriptor Return None if it is not found. """ try: return self._fd_to_key[fd] except KeyError: return None def __enter__(self): return self def __exit__(self, *args): self.close() # Almost all platforms have select.select() if hasattr(select, "select"): class SelectSelector(BaseSelector): """ Select-based selector. """ def __init__(self): super(SelectSelector, self).__init__() self._readers = set() self._writers = set() def register(self, fileobj, events, data=None): key = super(SelectSelector, self).register(fileobj, events, data) if events & EVENT_READ: self._readers.add(key.fd) if events & EVENT_WRITE: self._writers.add(key.fd) return key def unregister(self, fileobj): key = super(SelectSelector, self).unregister(fileobj) self._readers.discard(key.fd) self._writers.discard(key.fd) return key def _select(self, r, w, timeout=None): """ Wrapper for select.select because timeout is a positional arg """ return select.select(r, w, [], timeout) def select(self, timeout=None): # Selecting on empty lists on Windows errors out. 
        # NOTE(review): this chunk begins inside SelectSelector.select(); the
        # enclosing ``def select(self, timeout=None):`` line lies above this view.
        # select() on empty fd lists errors out on some platforms (e.g. Windows),
        # so short-circuit when nothing is registered.
        if not len(self._readers) and not len(self._writers):
            return []

        # Negative timeouts mean "poll once" (clamp to 0.0); None blocks forever.
        timeout = None if timeout is None else max(timeout, 0.0)
        ready = []
        # _syscall_wrapper is defined earlier in this module; presumably it
        # retries the syscall on EINTR -- the True flag asks it to recompute
        # the remaining timeout between retries. TODO confirm against the
        # wrapper's definition above this chunk.
        r, w, _ = _syscall_wrapper(self._select, True, self._readers,
                                   self._writers, timeout=timeout)
        r = set(r)
        w = set(w)
        for fd in r | w:
            events = 0
            if fd in r:
                events |= EVENT_READ
            if fd in w:
                events |= EVENT_WRITE

            key = self._key_from_fd(fd)
            if key:
                # Report only the events the caller registered interest in.
                ready.append((key, events & key.events))
        return ready

__all__.append('SelectSelector')


if hasattr(select, "poll"):
    class PollSelector(BaseSelector):
        """ Poll-based selector. """
        def __init__(self):
            super(PollSelector, self).__init__()
            self._poll = select.poll()

        def register(self, fileobj, events, data=None):
            # Translate the portable EVENT_* mask into poll()'s POLLIN/POLLOUT.
            key = super(PollSelector, self).register(fileobj, events, data)
            event_mask = 0
            if events & EVENT_READ:
                event_mask |= select.POLLIN
            if events & EVENT_WRITE:
                event_mask |= select.POLLOUT
            self._poll.register(key.fd, event_mask)
            return key

        def unregister(self, fileobj):
            key = super(PollSelector, self).unregister(fileobj)
            self._poll.unregister(key.fd)
            return key

        def _wrap_poll(self, timeout=None):
            """ Wrapper function for select.poll.poll() so that
            _syscall_wrapper can work with only seconds. """
            if timeout is not None:
                if timeout <= 0:
                    timeout = 0
                else:
                    # select.poll.poll() has a resolution of 1 millisecond,
                    # round away from zero to wait *at least* timeout seconds.
                    timeout = math.ceil(timeout * 1e3)

            result = self._poll.poll(timeout)
            return result

        def select(self, timeout=None):
            ready = []
            fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
            for fd, event_mask in fd_events:
                events = 0
                # Anything other than POLLIN (POLLOUT, POLLERR, POLLHUP, ...)
                # is surfaced as writable, and anything other than POLLOUT as
                # readable, so callers notice errored fds -- same inversion
                # trick as CPython's own selectors module.
                if event_mask & ~select.POLLIN:
                    events |= EVENT_WRITE
                if event_mask & ~select.POLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))

            return ready

    __all__.append('PollSelector')


if hasattr(select, "epoll"):
    class EpollSelector(BaseSelector):
        """ Epoll-based selector. """
        def __init__(self):
            super(EpollSelector, self).__init__()
            self._epoll = select.epoll()

        def fileno(self):
            # Expose the epoll fd itself so this selector can be nested.
            return self._epoll.fileno()

        def register(self, fileobj, events, data=None):
            key = super(EpollSelector, self).register(fileobj, events, data)
            events_mask = 0
            if events & EVENT_READ:
                events_mask |= select.EPOLLIN
            if events & EVENT_WRITE:
                events_mask |= select.EPOLLOUT
            # epoll_ctl(ADD) does not block, hence the False (no timeout
            # recalculation) flag.
            _syscall_wrapper(self._epoll.register, False, key.fd, events_mask)
            return key

        def unregister(self, fileobj):
            key = super(EpollSelector, self).unregister(fileobj)
            try:
                _syscall_wrapper(self._epoll.unregister, False, key.fd)
            except SelectorError:
                # This can occur when the fd was closed since registry.
                pass
            return key

        def select(self, timeout=None):
            if timeout is not None:
                if timeout <= 0:
                    timeout = 0.0
                else:
                    # select.epoll.poll() has a resolution of 1 millisecond
                    # but luckily takes seconds so we don't need a wrapper
                    # like PollSelector. Just for better rounding.
                    timeout = math.ceil(timeout * 1e3) * 1e-3
                timeout = float(timeout)
            else:
                timeout = -1.0  # epoll.poll() must have a float.

            # We always want at least 1 to ensure that select can be called
            # with no file descriptors registered. Otherwise will fail.
            max_events = max(len(self._fd_to_key), 1)

            ready = []
            fd_events = _syscall_wrapper(self._epoll.poll, True,
                                         timeout=timeout,
                                         maxevents=max_events)
            for fd, event_mask in fd_events:
                events = 0
                # Same "anything but IN means writable / anything but OUT
                # means readable" inversion as PollSelector.select() above.
                if event_mask & ~select.EPOLLIN:
                    events |= EVENT_WRITE
                if event_mask & ~select.EPOLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))

            return ready

        def close(self):
            # Close the epoll fd first, then drop the registered-key maps.
            self._epoll.close()
            super(EpollSelector, self).close()

    __all__.append('EpollSelector')


if hasattr(select, "devpoll"):
    class DevpollSelector(BaseSelector):
        """Solaris /dev/poll selector."""
        def __init__(self):
            super(DevpollSelector, self).__init__()
            self._devpoll = select.devpoll()

        def fileno(self):
            return self._devpoll.fileno()

        def register(self, fileobj, events, data=None):
            # /dev/poll shares poll()'s POLLIN/POLLOUT event constants.
            key = super(DevpollSelector, self).register(fileobj, events, data)
            poll_events = 0
            if events & EVENT_READ:
                poll_events |= select.POLLIN
            if events & EVENT_WRITE:
                poll_events |= select.POLLOUT
            self._devpoll.register(key.fd, poll_events)
            return key

        def unregister(self, fileobj):
            key = super(DevpollSelector, self).unregister(fileobj)
            self._devpoll.unregister(key.fd)
            return key

        def _wrap_poll(self, timeout=None):
            """ Wrapper function for select.poll.poll() so that
            _syscall_wrapper can work with only seconds. """
            if timeout is not None:
                if timeout <= 0:
                    timeout = 0
                else:
                    # select.devpoll.poll() has a resolution of 1 millisecond,
                    # round away from zero to wait *at least* timeout seconds.
                    timeout = math.ceil(timeout * 1e3)

            result = self._devpoll.poll(timeout)
            return result

        def select(self, timeout=None):
            ready = []
            fd_events = _syscall_wrapper(self._wrap_poll, True, timeout=timeout)
            for fd, event_mask in fd_events:
                events = 0
                # Same event-mask inversion as PollSelector.select() above.
                if event_mask & ~select.POLLIN:
                    events |= EVENT_WRITE
                if event_mask & ~select.POLLOUT:
                    events |= EVENT_READ

                key = self._key_from_fd(fd)
                if key:
                    ready.append((key, events & key.events))

            return ready

        def close(self):
            self._devpoll.close()
            super(DevpollSelector, self).close()

    __all__.append('DevpollSelector')


if hasattr(select, "kqueue"):
    class KqueueSelector(BaseSelector):
        """ Kqueue / Kevent-based selector. """
        def __init__(self):
            super(KqueueSelector, self).__init__()
            self._kqueue = select.kqueue()

        def fileno(self):
            return self._kqueue.fileno()

        def register(self, fileobj, events, data=None):
            # kqueue uses one kevent per (fd, filter) pair, so read and write
            # interest are registered as two separate EV_ADD changes.
            key = super(KqueueSelector, self).register(fileobj, events, data)
            if events & EVENT_READ:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_READ,
                                       select.KQ_EV_ADD)

                _syscall_wrapper(self._wrap_control, False, [kevent], 0, 0)

            if events & EVENT_WRITE:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_WRITE,
                                       select.KQ_EV_ADD)

                _syscall_wrapper(self._wrap_control, False, [kevent], 0, 0)

            return key

        def unregister(self, fileobj):
            key = super(KqueueSelector, self).unregister(fileobj)
            if key.events & EVENT_READ:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_READ,
                                       select.KQ_EV_DELETE)
                try:
                    _syscall_wrapper(self._wrap_control, False, [kevent], 0, 0)
                except SelectorError:
                    # Deleting a kevent for an already-closed fd fails;
                    # ignore, matching EpollSelector.unregister() above.
                    pass
            if key.events & EVENT_WRITE:
                kevent = select.kevent(key.fd,
                                       select.KQ_FILTER_WRITE,
                                       select.KQ_EV_DELETE)
                try:
                    _syscall_wrapper(self._wrap_control, False, [kevent], 0, 0)
                except SelectorError:
                    pass

            return key

        def select(self, timeout=None):
            if timeout is not None:
                timeout = max(timeout, 0)

            # Each fd may have registered up to two kevents (read + write).
            max_events = len(self._fd_to_key) * 2
            ready_fds = {}

            kevent_list = _syscall_wrapper(self._wrap_control, True,
                                           None, max_events, timeout=timeout)

            for kevent in kevent_list:
                fd = kevent.ident
                event_mask = kevent.filter
                events = 0
                if event_mask == select.KQ_FILTER_READ:
                    events |= EVENT_READ
                if event_mask == select.KQ_FILTER_WRITE:
                    events |= EVENT_WRITE

                key = self._key_from_fd(fd)
                if key:
                    if key.fd not in ready_fds:
                        ready_fds[key.fd] = (key, events & key.events)
                    else:
                        # An fd that is both readable and writable arrives as
                        # two kevents; merge them into one (key, events) entry.
                        old_events = ready_fds[key.fd][1]
                        ready_fds[key.fd] = (key, (events | old_events) & key.events)

            return list(ready_fds.values())

        def close(self):
            self._kqueue.close()
            super(KqueueSelector, self).close()

        def _wrap_control(self, changelist, max_events, timeout):
            # Thin indirection so _syscall_wrapper has a plain callable with
            # (changelist, max_events, timeout) positional arguments.
            return self._kqueue.control(changelist, max_events, timeout)

    __all__.append('KqueueSelector')

# Choose the best implementation, roughly:
# kqueue == epoll == devpoll > poll > select.
# select() also can't accept a FD > FD_SETSIZE (usually around 1024)
if 'KqueueSelector' in globals():  # Platform-specific: Mac OS and BSD
    DefaultSelector = KqueueSelector
elif 'DevpollSelector' in globals():
    DefaultSelector = DevpollSelector
elif 'EpollSelector' in globals():  # Platform-specific: Linux
    DefaultSelector = EpollSelector
elif 'PollSelector' in globals():  # Platform-specific: Linux
    DefaultSelector = PollSelector
elif 'SelectSelector' in globals():  # Platform-specific: Windows
    DefaultSelector = SelectSelector
else:  # Platform-specific: AppEngine
    def no_selector(_):
        raise ValueError("Platform does not have a selector")
    DefaultSelector = no_selector
    HAS_SELECT = False
gpl-3.0
myerpengine/odoo
addons/hr_recruitment/hr_recruitment.py
10
33246
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from datetime import datetime from openerp.osv import fields, osv from openerp.tools.translate import _ AVAILABLE_PRIORITIES = [ ('0', 'Bad'), ('1', 'Below Average'), ('2', 'Average'), ('3', 'Good'), ('4', 'Excellent') ] class hr_recruitment_source(osv.osv): """ Sources of HR Recruitment """ _name = "hr.recruitment.source" _description = "Source of Applicants" _columns = { 'name': fields.char('Source Name', size=64, required=True, translate=True), } class hr_recruitment_stage(osv.osv): """ Stage of HR Recruitment """ _name = "hr.recruitment.stage" _description = "Stage of Recruitment" _order = 'sequence' _columns = { 'name': fields.char('Name', size=64, required=True, translate=True), 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of stages."), 'department_id':fields.many2one('hr.department', 'Specific to a Department', help="Stages of the recruitment process may be different per department. 
If this stage is common to all departments, keep this field empty."), 'requirements': fields.text('Requirements'), 'template_id': fields.many2one('email.template', 'Use template', help="If set, a message is posted on the applicant using the template when the applicant is set to the stage."), 'fold': fields.boolean('Folded in Kanban View', help='This stage is folded in the kanban view when' 'there are no records in that stage to display.'), } _defaults = { 'sequence': 1, } class hr_recruitment_degree(osv.osv): """ Degree of HR Recruitment """ _name = "hr.recruitment.degree" _description = "Degree of Recruitment" _columns = { 'name': fields.char('Name', size=64, required=True, translate=True), 'sequence': fields.integer('Sequence', help="Gives the sequence order when displaying a list of degrees."), } _defaults = { 'sequence': 1, } _sql_constraints = [ ('name_uniq', 'unique (name)', 'The name of the Degree of Recruitment must be unique!') ] class hr_applicant(osv.Model): _name = "hr.applicant" _description = "Applicant" _order = "id desc" _inherit = ['mail.thread', 'ir.needaction_mixin'] _track = { 'stage_id': { # this is only an heuristics; depending on your particular stage configuration it may not match all 'new' stages 'hr_recruitment.mt_applicant_new': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id.sequence <= 1, 'hr_recruitment.mt_applicant_stage_changed': lambda self, cr, uid, obj, ctx=None: obj.stage_id and obj.stage_id.sequence > 1, }, } _mail_mass_mailing = _('Applicants') def _get_default_department_id(self, cr, uid, context=None): """ Gives default department by checking if present in the context """ return (self._resolve_department_id_from_context(cr, uid, context=context) or False) def _get_default_stage_id(self, cr, uid, context=None): """ Gives default stage_id """ department_id = self._get_default_department_id(cr, uid, context=context) return self.stage_find(cr, uid, [], department_id, [('fold', '=', False)], context=context) def 
_resolve_department_id_from_context(self, cr, uid, context=None): """ Returns ID of department based on the value of 'default_department_id' context key, or None if it cannot be resolved to a single department. """ if context is None: context = {} if type(context.get('default_department_id')) in (int, long): return context.get('default_department_id') if isinstance(context.get('default_department_id'), basestring): department_name = context['default_department_id'] department_ids = self.pool.get('hr.department').name_search(cr, uid, name=department_name, context=context) if len(department_ids) == 1: return int(department_ids[0][0]) return None def _read_group_stage_ids(self, cr, uid, ids, domain, read_group_order=None, access_rights_uid=None, context=None): access_rights_uid = access_rights_uid or uid stage_obj = self.pool.get('hr.recruitment.stage') order = stage_obj._order # lame hack to allow reverting search, should just work in the trivial case if read_group_order == 'stage_id desc': order = "%s desc" % order # retrieve section_id from the context and write the domain # - ('id', 'in', 'ids'): add columns that should be present # - OR ('department_id', '=', False), ('fold', '=', False): add default columns that are not folded # - OR ('department_id', 'in', department_id), ('fold', '=', False) if department_id: add department columns that are not folded department_id = self._resolve_department_id_from_context(cr, uid, context=context) search_domain = [] if department_id: search_domain += ['|', ('department_id', '=', department_id)] search_domain += ['|', ('id', 'in', ids), ('department_id', '=', False)] stage_ids = stage_obj._search(cr, uid, search_domain, order=order, access_rights_uid=access_rights_uid, context=context) result = stage_obj.name_get(cr, access_rights_uid, stage_ids, context=context) # restore order of the search result.sort(lambda x,y: cmp(stage_ids.index(x[0]), stage_ids.index(y[0]))) fold = {} for stage in stage_obj.browse(cr, 
access_rights_uid, stage_ids, context=context): fold[stage.id] = stage.fold or False return result, fold def _compute_day(self, cr, uid, ids, fields, args, context=None): """ @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of Openday’s IDs @return: difference between current date and log date @param context: A standard dictionary for contextual values """ res = {} for issue in self.browse(cr, uid, ids, context=context): for field in fields: res[issue.id] = {} duration = 0 ans = False hours = 0 if field in ['day_open']: if issue.date_open: date_create = datetime.strptime(issue.create_date, "%Y-%m-%d %H:%M:%S") date_open = datetime.strptime(issue.date_open, "%Y-%m-%d %H:%M:%S") ans = date_open - date_create elif field in ['day_close']: if issue.date_closed: date_create = datetime.strptime(issue.create_date, "%Y-%m-%d %H:%M:%S") date_close = datetime.strptime(issue.date_closed, "%Y-%m-%d %H:%M:%S") ans = date_close - date_create if ans: duration = float(ans.days) res[issue.id][field] = abs(float(duration)) return res def _get_attachment_number(self, cr, uid, ids, fields, args, context=None): res = dict.fromkeys(ids, 0) for app_id in ids: res[app_id] = self.pool['ir.attachment'].search_count(cr, uid, [('res_model', '=', 'hr.applicant'), ('res_id', '=', app_id)], context=context) return res _columns = { 'name': fields.char('Subject / Application Name', size=128, required=True), 'active': fields.boolean('Active', help="If the active field is set to false, it will allow you to hide the case without removing it."), 'description': fields.text('Description'), 'email_from': fields.char('Email', size=128, help="These people will receive email."), 'email_cc': fields.text('Watchers Emails', size=252, help="These email addresses will be added to the CC field of all inbound and outbound emails for this record before being sent. 
Separate multiple email addresses with a comma"), 'probability': fields.float('Probability'), 'partner_id': fields.many2one('res.partner', 'Contact'), 'create_date': fields.datetime('Creation Date', readonly=True, select=True), 'write_date': fields.datetime('Update Date', readonly=True), 'stage_id': fields.many2one ('hr.recruitment.stage', 'Stage', track_visibility='onchange', domain="['|', ('department_id', '=', department_id), ('department_id', '=', False)]"), 'last_stage_id': fields.many2one('hr.recruitment.stage', 'Last Stage', help='Stage of the applicant before being in the current stage. Used for lost cases analysis.'), 'categ_ids': fields.many2many('hr.applicant_category', string='Tags'), 'company_id': fields.many2one('res.company', 'Company'), 'user_id': fields.many2one('res.users', 'Responsible', track_visibility='onchange'), 'date_closed': fields.datetime('Closed', readonly=True, select=True), 'date_open': fields.datetime('Assigned', readonly=True, select=True), 'date_last_stage_update': fields.datetime('Last Stage Update', select=True), 'date_action': fields.date('Next Action Date'), 'title_action': fields.char('Next Action', size=64), 'priority': fields.selection(AVAILABLE_PRIORITIES, 'Appreciation'), 'job_id': fields.many2one('hr.job', 'Applied Job'), 'salary_proposed_extra': fields.char('Proposed Salary Extra', size=100, help="Salary Proposed by the Organisation, extra advantages"), 'salary_expected_extra': fields.char('Expected Salary Extra', size=100, help="Salary Expected by Applicant, extra advantages"), 'salary_proposed': fields.float('Proposed Salary', help="Salary Proposed by the Organisation"), 'salary_expected': fields.float('Expected Salary', help="Salary Expected by Applicant"), 'availability': fields.integer('Availability', help="The number of days in which the applicant will be available to start working"), 'partner_name': fields.char("Applicant's Name", size=64), 'partner_phone': fields.char('Phone', size=32), 'partner_mobile': 
fields.char('Mobile', size=32), 'type_id': fields.many2one('hr.recruitment.degree', 'Degree'), 'department_id': fields.many2one('hr.department', 'Department'), 'survey': fields.related('job_id', 'survey_id', type='many2one', relation='survey.survey', string='Survey'), 'response_id': fields.many2one('survey.user_input', "Response", ondelete='set null', oldname="response"), 'reference': fields.char('Referred By', size=128), 'source_id': fields.many2one('hr.recruitment.source', 'Source'), 'day_open': fields.function(_compute_day, string='Days to Open', \ multi='day_open', type="float", store=True), 'day_close': fields.function(_compute_day, string='Days to Close', \ multi='day_close', type="float", store=True), 'color': fields.integer('Color Index'), 'emp_id': fields.many2one('hr.employee', string='Employee', help='Employee linked to the applicant.'), 'user_email': fields.related('user_id', 'email', type='char', string='User Email', readonly=True), 'attachment_number': fields.function(_get_attachment_number, string='Number of Attachments', type="integer"), } _defaults = { 'active': lambda *a: 1, 'user_id': lambda s, cr, uid, c: uid, 'stage_id': lambda s, cr, uid, c: s._get_default_stage_id(cr, uid, c), 'department_id': lambda s, cr, uid, c: s._get_default_department_id(cr, uid, c), 'company_id': lambda s, cr, uid, c: s.pool.get('res.company')._company_default_get(cr, uid, 'hr.applicant', context=c), 'color': 0, 'date_last_stage_update': fields.datetime.now, } _group_by_full = { 'stage_id': _read_group_stage_ids } def onchange_job(self, cr, uid, ids, job_id=False, context=None): department_id = False if job_id: job_record = self.pool.get('hr.job').browse(cr, uid, job_id, context=context) department_id = job_record and job_record.department_id and job_record.department_id.id or False user_id = job_record and job_record.user_id and job_record.user_id.id or False return {'value': {'department_id': department_id, 'user_id': user_id}} def onchange_department_id(self, cr, 
uid, ids, department_id=False, stage_id=False, context=None): if not stage_id: stage_id = self.stage_find(cr, uid, [], department_id, [('fold', '=', False)], context=context) return {'value': {'stage_id': stage_id}} def onchange_partner_id(self, cr, uid, ids, partner_id, context=None): data = {'partner_phone': False, 'partner_mobile': False, 'email_from': False} if partner_id: addr = self.pool.get('res.partner').browse(cr, uid, partner_id, context) data.update({'partner_phone': addr.phone, 'partner_mobile': addr.mobile, 'email_from': addr.email}) return {'value': data} def stage_find(self, cr, uid, cases, section_id, domain=[], order='sequence', context=None): """ Override of the base.stage method Parameter of the stage search taken from the lead: - department_id: if set, stages must belong to this section or be a default case """ if isinstance(cases, (int, long)): cases = self.browse(cr, uid, cases, context=context) # collect all section_ids department_ids = [] if section_id: department_ids.append(section_id) for case in cases: if case.department_id: department_ids.append(case.department_id.id) # OR all section_ids and OR with case_default search_domain = [] if department_ids: search_domain += ['|', ('department_id', 'in', department_ids)] search_domain.append(('department_id', '=', False)) # AND with the domain in parameter search_domain += list(domain) # perform search, return the first found stage_ids = self.pool.get('hr.recruitment.stage').search(cr, uid, search_domain, order=order, context=context) if stage_ids: return stage_ids[0] return False def action_makeMeeting(self, cr, uid, ids, context=None): """ This opens Meeting's calendar view to schedule meeting on current applicant @return: Dictionary value for created Meeting view """ applicant = self.browse(cr, uid, ids[0], context) applicant_ids = [] if applicant.partner_id: applicant_ids.append(applicant.partner_id.id) if applicant.department_id and applicant.department_id.manager_id and 
applicant.department_id.manager_id.user_id and applicant.department_id.manager_id.user_id.partner_id: applicant_ids.append(applicant.department_id.manager_id.user_id.partner_id.id) category = self.pool.get('ir.model.data').get_object(cr, uid, 'hr_recruitment', 'categ_meet_interview', context) res = self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'calendar', 'action_calendar_event', context) res['context'] = { 'default_partner_ids': applicant_ids, 'default_user_id': uid, 'default_name': applicant.name, 'default_categ_ids': category and [category.id] or False, } return res def action_start_survey(self, cr, uid, ids, context=None): context = context if context else {} applicant = self.browse(cr, uid, ids, context=context)[0] survey_obj = self.pool.get('survey.survey') response_obj = self.pool.get('survey.user_input') # create a response and link it to this applicant if not applicant.response_id: response_id = response_obj.create(cr, uid, {'survey_id': applicant.survey.id, 'partner_id': applicant.partner_id.id}, context=context) self.write(cr, uid, ids[0], {'response_id': response_id}, context=context) else: response_id = applicant.response_id.id # grab the token of the response and start surveying response = response_obj.browse(cr, uid, response_id, context=context) context.update({'survey_token': response.token}) return survey_obj.action_start_survey(cr, uid, [applicant.survey.id], context=context) def action_print_survey(self, cr, uid, ids, context=None): """ If response is available then print this response otherwise print survey form (print template of the survey) """ context = context if context else {} applicant = self.browse(cr, uid, ids, context=context)[0] survey_obj = self.pool.get('survey.survey') response_obj = self.pool.get('survey.user_input') if not applicant.response_id: return survey_obj.action_print_survey(cr, uid, [applicant.survey.id], context=context) else: response = response_obj.browse(cr, uid, applicant.response_id.id, 
context=context) context.update({'survey_token': response.token}) return survey_obj.action_print_survey(cr, uid, [applicant.survey.id], context=context) def action_get_attachment_tree_view(self, cr, uid, ids, context=None): model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'action_attachment') action = self.pool.get(model).read(cr, uid, action_id, context=context) action['context'] = {'default_res_model': self._name, 'default_res_id': ids[0]} action['domain'] = str(['&', ('res_model', '=', self._name), ('res_id', 'in', ids)]) return action def message_get_suggested_recipients(self, cr, uid, ids, context=None): recipients = super(hr_applicant, self).message_get_suggested_recipients(cr, uid, ids, context=context) for applicant in self.browse(cr, uid, ids, context=context): if applicant.partner_id: self._message_add_suggested_recipient(cr, uid, recipients, applicant, partner=applicant.partner_id, reason=_('Contact')) elif applicant.email_from: self._message_add_suggested_recipient(cr, uid, recipients, applicant, email=applicant.email_from, reason=_('Contact Email')) return recipients def message_new(self, cr, uid, msg, custom_values=None, context=None): """ Overrides mail_thread message_new that is called by the mailgateway through message_process. This override updates the document according to the email. 
""" if custom_values is None: custom_values = {} val = msg.get('from').split('<')[0] defaults = { 'name': msg.get('subject') or _("No Subject"), 'partner_name': val, 'email_from': msg.get('from'), 'email_cc': msg.get('cc'), 'user_id': False, 'partner_id': msg.get('author_id', False), } if msg.get('priority'): defaults['priority'] = msg.get('priority') defaults.update(custom_values) return super(hr_applicant, self).message_new(cr, uid, msg, custom_values=defaults, context=context) def create(self, cr, uid, vals, context=None): if context is None: context = {} context['mail_create_nolog'] = True if vals.get('department_id') and not context.get('default_department_id'): context['default_department_id'] = vals.get('department_id') if vals.get('job_id') or context.get('default_job_id'): job_id = vals.get('job_id') or context.get('default_job_id') vals.update(self.onchange_job(cr, uid, [], job_id, context=context)['value']) obj_id = super(hr_applicant, self).create(cr, uid, vals, context=context) applicant = self.browse(cr, uid, obj_id, context=context) if applicant.job_id: name = applicant.partner_name if applicant.partner_name else applicant.name self.pool['hr.job'].message_post( cr, uid, [applicant.job_id.id], body=_('New application from %s') % name, subtype="hr_recruitment.mt_job_applicant_new", context=context) return obj_id def write(self, cr, uid, ids, vals, context=None): if isinstance(ids, (int, long)): ids = [ids] res = True # user_id change: update date_open if vals.get('user_id'): vals['date_open'] = fields.datetime.now() # stage_id: track last stage before update if 'stage_id' in vals: vals['date_last_stage_update'] = fields.datetime.now() for applicant in self.browse(cr, uid, ids, context=None): vals['last_stage_id'] = applicant.stage_id.id res = super(hr_applicant, self).write(cr, uid, [applicant.id], vals, context=context) else: res = super(hr_applicant, self).write(cr, uid, ids, vals, context=context) # post processing: if job changed, post a message on 
the job if vals.get('job_id'): for applicant in self.browse(cr, uid, ids, context=None): name = applicant.partner_name if applicant.partner_name else applicant.name self.pool['hr.job'].message_post( cr, uid, [vals['job_id']], body=_('New application from %s') % name, subtype="hr_recruitment.mt_job_applicant_new", context=context) # post processing: if stage changed, post a message in the chatter if vals.get('stage_id'): stage = self.pool['hr.recruitment.stage'].browse(cr, uid, vals['stage_id'], context=context) if stage.template_id: # TDENOTE: probably factorize me in a message_post_with_template generic method FIXME compose_ctx = dict(context, active_ids=ids) compose_id = self.pool['mail.compose.message'].create( cr, uid, { 'model': self._name, 'composition_mode': 'mass_mail', 'template_id': stage.template_id.id, 'same_thread': True, 'post': True, 'notify': True, }, context=compose_ctx) self.pool['mail.compose.message'].write( cr, uid, [compose_id], self.pool['mail.compose.message'].onchange_template_id( cr, uid, [compose_id], stage.template_id.id, 'mass_mail', self._name, False, context=compose_ctx)['value'], context=compose_ctx) self.pool['mail.compose.message'].send_mail(cr, uid, [compose_id], context=compose_ctx) return res def create_employee_from_applicant(self, cr, uid, ids, context=None): """ Create an hr.employee from the hr.applicants """ if context is None: context = {} hr_employee = self.pool.get('hr.employee') model_data = self.pool.get('ir.model.data') act_window = self.pool.get('ir.actions.act_window') emp_id = False for applicant in self.browse(cr, uid, ids, context=context): address_id = contact_name = False if applicant.partner_id: address_id = self.pool.get('res.partner').address_get(cr, uid, [applicant.partner_id.id], ['contact'])['contact'] contact_name = self.pool.get('res.partner').name_get(cr, uid, [applicant.partner_id.id])[0][1] if applicant.job_id and (applicant.partner_name or contact_name): 
applicant.job_id.write({'no_of_hired_employee': applicant.job_id.no_of_hired_employee + 1}, context=context) create_ctx = dict(context, mail_broadcast=True) emp_id = hr_employee.create(cr, uid, {'name': applicant.partner_name or contact_name, 'job_id': applicant.job_id.id, 'address_home_id': address_id, 'department_id': applicant.department_id.id or False, 'address_id': applicant.company_id and applicant.company_id.partner_id and applicant.company_id.partner_id.id or False, 'work_email': applicant.department_id and applicant.department_id.company_id and applicant.department_id.company_id.email or False, 'work_phone': applicant.department_id and applicant.department_id.company_id and applicant.department_id.company_id.phone or False, }, context=create_ctx) self.write(cr, uid, [applicant.id], {'emp_id': emp_id}, context=context) self.pool['hr.job'].message_post( cr, uid, [applicant.job_id.id], body=_('New Employee %s Hired') % applicant.partner_name if applicant.partner_name else applicant.name, subtype="hr_recruitment.mt_job_applicant_hired", context=context) else: raise osv.except_osv(_('Warning!'), _('You must define an Applied Job and a Contact Name for this applicant.')) action_model, action_id = model_data.get_object_reference(cr, uid, 'hr', 'open_view_employee_list') dict_act_window = act_window.read(cr, uid, action_id, []) if emp_id: dict_act_window['res_id'] = emp_id dict_act_window['view_mode'] = 'form,tree' return dict_act_window def get_empty_list_help(self, cr, uid, help, context=None): context['empty_list_help_model'] = 'hr.job' context['empty_list_help_id'] = context.get('default_job_id', None) context['empty_list_help_document_name'] = _("job applicants") return super(hr_applicant, self).get_empty_list_help(cr, uid, help, context=context) class hr_job(osv.osv): _inherit = "hr.job" _name = "hr.job" _inherits = {'mail.alias': 'alias_id'} def _get_attached_docs(self, cr, uid, ids, field_name, arg, context=None): res = {} attachment_obj = 
self.pool.get('ir.attachment') for job_id in ids: applicant_ids = self.pool.get('hr.applicant').search(cr, uid, [('job_id', '=', job_id)], context=context) res[job_id] = attachment_obj.search( cr, uid, [ '|', '&', ('res_model', '=', 'hr.job'), ('res_id', '=', job_id), '&', ('res_model', '=', 'hr.applicant'), ('res_id', 'in', applicant_ids) ], context=context) return res def _count_all(self, cr, uid, ids, field_name, arg, context=None): Applicant = self.pool['hr.applicant'] return { job_id: { 'application_count': Applicant.search_count(cr,uid, [('job_id', '=', job_id)], context=context), 'documents_count': len(self._get_attached_docs(cr, uid, [job_id], field_name, arg, context=context)[job_id]) } for job_id in ids } _columns = { 'survey_id': fields.many2one('survey.survey', 'Interview Form', help="Choose an interview form for this job position and you will be able to print/answer this interview from all applicants who apply for this job"), 'alias_id': fields.many2one('mail.alias', 'Alias', ondelete="restrict", required=True, help="Email alias for this job position. 
New emails will automatically " "create new applicants for this job position."), 'address_id': fields.many2one('res.partner', 'Job Location', help="Address where employees are working"), 'application_ids': fields.one2many('hr.applicant', 'job_id', 'Applications'), 'application_count': fields.function(_count_all, type='integer', string='Applications', multi=True), 'manager_id': fields.related('department_id', 'manager_id', type='many2one', string='Department Manager', relation='hr.employee', readonly=True, store=True), 'document_ids': fields.function(_get_attached_docs, type='one2many', relation='ir.attachment', string='Applications'), 'documents_count': fields.function(_count_all, type='integer', string='Documents', multi=True), 'user_id': fields.many2one('res.users', 'Recruitment Responsible', track_visibility='onchange'), 'color': fields.integer('Color Index'), } def _address_get(self, cr, uid, context=None): user = self.pool.get('res.users').browse(cr, uid, uid, context=context) return user.company_id.partner_id.id _defaults = { 'address_id': _address_get } def _auto_init(self, cr, context=None): """Installation hook to create aliases for all jobs and avoid constraint errors.""" return self.pool.get('mail.alias').migrate_to_alias(cr, self._name, self._table, super(hr_job, self)._auto_init, 'hr.applicant', self._columns['alias_id'], 'name', alias_prefix='job+', alias_defaults={'job_id': 'id'}, context=context) def create(self, cr, uid, vals, context=None): alias_context = dict(context, alias_model_name='hr.applicant', alias_parent_model_name=self._name) job_id = super(hr_job, self).create(cr, uid, vals, context=alias_context) job = self.browse(cr, uid, job_id, context=context) self.pool.get('mail.alias').write(cr, uid, [job.alias_id.id], {'alias_parent_thread_id': job_id, "alias_defaults": {'job_id': job_id}}, context) return job_id def unlink(self, cr, uid, ids, context=None): # Cascade-delete mail aliases as well, as they should not exist without the job 
position. mail_alias = self.pool.get('mail.alias') alias_ids = [job.alias_id.id for job in self.browse(cr, uid, ids, context=context) if job.alias_id] res = super(hr_job, self).unlink(cr, uid, ids, context=context) mail_alias.unlink(cr, uid, alias_ids, context=context) return res def action_print_survey(self, cr, uid, ids, context=None): job = self.browse(cr, uid, ids, context=context)[0] survey_id = job.survey_id.id return self.pool.get('survey.survey').action_print_survey(cr, uid, [survey_id], context=context) def action_get_attachment_tree_view(self, cr, uid, ids, context=None): #open attachments of job and related applicantions. model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'action_attachment') action = self.pool.get(model).read(cr, uid, action_id, context=context) applicant_ids = self.pool.get('hr.applicant').search(cr, uid, [('job_id', 'in', ids)], context=context) action['context'] = {'default_res_model': self._name, 'default_res_id': ids[0]} action['domain'] = str(['|', '&', ('res_model', '=', 'hr.job'), ('res_id', 'in', ids), '&', ('res_model', '=', 'hr.applicant'), ('res_id', 'in', applicant_ids)]) return action def action_set_no_of_recruitment(self, cr, uid, id, value, context=None): return self.write(cr, uid, [id], {'no_of_recruitment': value}, context=context) class applicant_category(osv.osv): """ Category of applicant """ _name = "hr.applicant_category" _description = "Category of applicant" _columns = { 'name': fields.char('Name', size=64, required=True, translate=True), } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
piskvorky/pattern
pattern/web/pdf/utils.py
28
8540
#!/usr/bin/env python2 """ Miscellaneous Routines. """ import struct from sys import maxint as INF ## Matrix operations ## MATRIX_IDENTITY = (1, 0, 0, 1, 0, 0) def mult_matrix((a1,b1,c1,d1,e1,f1), (a0,b0,c0,d0,e0,f0)): """Returns the multiplication of two matrices.""" return (a0*a1+c0*b1, b0*a1+d0*b1, a0*c1+c0*d1, b0*c1+d0*d1, a0*e1+c0*f1+e0, b0*e1+d0*f1+f0) def translate_matrix((a,b,c,d,e,f), (x,y)): """Translates a matrix by (x,y).""" return (a,b,c,d,x*a+y*c+e,x*b+y*d+f) def apply_matrix_pt((a,b,c,d,e,f), (x,y)): """Applies a matrix to a point.""" return (a*x+c*y+e, b*x+d*y+f) def apply_matrix_norm((a,b,c,d,e,f), (p,q)): """Equivalent to apply_matrix_pt(M, (p,q)) - apply_matrix_pt(M, (0,0))""" return (a*p+c*q, b*p+d*q) ## Utility functions ## # uniq def uniq(objs): """Eliminates duplicated elements.""" done = set() for obj in objs: if obj in done: continue done.add(obj) yield obj return # csort def csort(objs, key=lambda x:x): """Order-preserving sorting function.""" idxs = dict( (obj,i) for (i,obj) in enumerate(objs) ) return sorted(objs, key=lambda obj: (key(obj), idxs[obj])) # fsplit def fsplit(pred, objs): """Split a list into two classes according to the predicate.""" t = [] f = [] for obj in objs: if pred(obj): t.append(obj) else: f.append(obj) return (t,f) # drange def drange(v0, v1, d): """Returns a discrete range.""" assert v0 < v1 return xrange(int(v0)/d, int(v1+d-1)/d) # get_bound def get_bound(pts): """Compute a minimal rectangle that covers all the points.""" (x0, y0, x1, y1) = (INF, INF, -INF, -INF) for (x,y) in pts: x0 = min(x0, x) y0 = min(y0, y) x1 = max(x1, x) y1 = max(y1, y) return (x0,y0,x1,y1) # pick def pick(seq, func, maxobj=None): """Picks the object obj where func(obj) has the highest value.""" maxscore = None for obj in seq: score = func(obj) if maxscore is None or maxscore < score: (maxscore,maxobj) = (score,obj) return maxobj # choplist def choplist(n, seq): """Groups every n elements of the list.""" r = [] for x in seq: r.append(x) if 
len(r) == n: yield tuple(r) r = [] return # nunpack def nunpack(s, default=0): """Unpacks 1 to 4 byte integers (big endian).""" l = len(s) if not l: return default elif l == 1: return ord(s) elif l == 2: return struct.unpack('>H', s)[0] elif l == 3: return struct.unpack('>L', '\x00'+s)[0] elif l == 4: return struct.unpack('>L', s)[0] else: raise TypeError('invalid length: %d' % l) # decode_text PDFDocEncoding = ''.join( unichr(x) for x in ( 0x0000, 0x0001, 0x0002, 0x0003, 0x0004, 0x0005, 0x0006, 0x0007, 0x0008, 0x0009, 0x000a, 0x000b, 0x000c, 0x000d, 0x000e, 0x000f, 0x0010, 0x0011, 0x0012, 0x0013, 0x0014, 0x0015, 0x0017, 0x0017, 0x02d8, 0x02c7, 0x02c6, 0x02d9, 0x02dd, 0x02db, 0x02da, 0x02dc, 0x0020, 0x0021, 0x0022, 0x0023, 0x0024, 0x0025, 0x0026, 0x0027, 0x0028, 0x0029, 0x002a, 0x002b, 0x002c, 0x002d, 0x002e, 0x002f, 0x0030, 0x0031, 0x0032, 0x0033, 0x0034, 0x0035, 0x0036, 0x0037, 0x0038, 0x0039, 0x003a, 0x003b, 0x003c, 0x003d, 0x003e, 0x003f, 0x0040, 0x0041, 0x0042, 0x0043, 0x0044, 0x0045, 0x0046, 0x0047, 0x0048, 0x0049, 0x004a, 0x004b, 0x004c, 0x004d, 0x004e, 0x004f, 0x0050, 0x0051, 0x0052, 0x0053, 0x0054, 0x0055, 0x0056, 0x0057, 0x0058, 0x0059, 0x005a, 0x005b, 0x005c, 0x005d, 0x005e, 0x005f, 0x0060, 0x0061, 0x0062, 0x0063, 0x0064, 0x0065, 0x0066, 0x0067, 0x0068, 0x0069, 0x006a, 0x006b, 0x006c, 0x006d, 0x006e, 0x006f, 0x0070, 0x0071, 0x0072, 0x0073, 0x0074, 0x0075, 0x0076, 0x0077, 0x0078, 0x0079, 0x007a, 0x007b, 0x007c, 0x007d, 0x007e, 0x0000, 0x2022, 0x2020, 0x2021, 0x2026, 0x2014, 0x2013, 0x0192, 0x2044, 0x2039, 0x203a, 0x2212, 0x2030, 0x201e, 0x201c, 0x201d, 0x2018, 0x2019, 0x201a, 0x2122, 0xfb01, 0xfb02, 0x0141, 0x0152, 0x0160, 0x0178, 0x017d, 0x0131, 0x0142, 0x0153, 0x0161, 0x017e, 0x0000, 0x20ac, 0x00a1, 0x00a2, 0x00a3, 0x00a4, 0x00a5, 0x00a6, 0x00a7, 0x00a8, 0x00a9, 0x00aa, 0x00ab, 0x00ac, 0x0000, 0x00ae, 0x00af, 0x00b0, 0x00b1, 0x00b2, 0x00b3, 0x00b4, 0x00b5, 0x00b6, 0x00b7, 0x00b8, 0x00b9, 0x00ba, 0x00bb, 0x00bc, 0x00bd, 0x00be, 0x00bf, 0x00c0, 0x00c1, 
0x00c2, 0x00c3, 0x00c4, 0x00c5, 0x00c6, 0x00c7, 0x00c8, 0x00c9, 0x00ca, 0x00cb, 0x00cc, 0x00cd, 0x00ce, 0x00cf, 0x00d0, 0x00d1, 0x00d2, 0x00d3, 0x00d4, 0x00d5, 0x00d6, 0x00d7, 0x00d8, 0x00d9, 0x00da, 0x00db, 0x00dc, 0x00dd, 0x00de, 0x00df, 0x00e0, 0x00e1, 0x00e2, 0x00e3, 0x00e4, 0x00e5, 0x00e6, 0x00e7, 0x00e8, 0x00e9, 0x00ea, 0x00eb, 0x00ec, 0x00ed, 0x00ee, 0x00ef, 0x00f0, 0x00f1, 0x00f2, 0x00f3, 0x00f4, 0x00f5, 0x00f6, 0x00f7, 0x00f8, 0x00f9, 0x00fa, 0x00fb, 0x00fc, 0x00fd, 0x00fe, 0x00ff, )) def decode_text(s): """Decodes a PDFDocEncoding string to Unicode.""" if s.startswith('\xfe\xff'): return unicode(s[2:], 'utf-16be', 'ignore') else: return ''.join( PDFDocEncoding[ord(c)] for c in s ) # enc def enc(x, codec='ascii'): """Encodes a string for SGML/XML/HTML""" x = x.replace('&','&amp;').replace('>','&gt;').replace('<','&lt;').replace('"','&quot;') return x.encode(codec, 'xmlcharrefreplace') def bbox2str((x0,y0,x1,y1)): return '%.3f,%.3f,%.3f,%.3f' % (x0, y0, x1, y1) def matrix2str((a,b,c,d,e,f)): return '[%.2f,%.2f,%.2f,%.2f, (%.2f,%.2f)]' % (a,b,c,d,e,f) ## ObjIdRange ## class ObjIdRange(object): "A utility class to represent a range of object IDs." def __init__(self, start, nobjs): self.start = start self.nobjs = nobjs return def __repr__(self): return '<ObjIdRange: %d-%d>' % (self.get_start_id(), self.get_end_id()) def get_start_id(self): return self.start def get_end_id(self): return self.start + self.nobjs - 1 def get_nobjs(self): return self.nobjs ## Plane ## ## A set-like data structure for objects placed on a plane. ## Can efficiently find objects in a certain rectangular area. ## It maintains two parallel lists of objects, each of ## which is sorted by its x or y coordinate. 
## class Plane(object): def __init__(self, objs=None, gridsize=50): self._objs = [] self._grid = {} self.gridsize = gridsize if objs is not None: for obj in objs: self.add(obj) return def __repr__(self): return ('<Plane objs=%r>' % list(self)) def __iter__(self): return iter(self._objs) def __len__(self): return len(self._objs) def __contains__(self, obj): return obj in self._objs def _getrange(self, (x0,y0,x1,y1)): for y in drange(y0, y1, self.gridsize): for x in drange(x0, x1, self.gridsize): yield (x,y) return # add(obj): place an object. def add(self, obj): for k in self._getrange((obj.x0, obj.y0, obj.x1, obj.y1)): if k not in self._grid: r = [] self._grid[k] = r else: r = self._grid[k] r.append(obj) self._objs.append(obj) return # remove(obj): displace an object. def remove(self, obj): for k in self._getrange((obj.x0, obj.y0, obj.x1, obj.y1)): try: self._grid[k].remove(obj) except (KeyError, ValueError): pass self._objs.remove(obj) return # find(): finds objects that are in a certain area. def find(self, (x0,y0,x1,y1)): done = set() for k in self._getrange((x0,y0,x1,y1)): if k not in self._grid: continue for obj in self._grid[k]: if obj in done: continue done.add(obj) if (obj.x1 <= x0 or x1 <= obj.x0 or obj.y1 <= y0 or y1 <= obj.y0): continue yield obj return # create_bmp def create_bmp(data, bits, width, height): info = struct.pack('<IiiHHIIIIII', 40, width, height, 1, bits, 0, len(data), 0, 0, 0, 0) assert len(info) == 40, len(info) header = struct.pack('<ccIHHI', 'B', 'M', 14+40+len(data), 0, 0, 14+40) assert len(header) == 14, len(header) # XXX re-rasterize every line return header+info+data
bsd-3-clause
0x46616c6b/ansible
lib/ansible/modules/network/nxos/nxos_overlay_global.py
11
8837
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}

DOCUMENTATION = '''
---
module: nxos_overlay_global
version_added: "2.2"
short_description: Configures anycast gateway MAC of the switch.
description:
    - Configures anycast gateway MAC of the switch.
author: Gabriele Gerbino (@GGabriele)
notes:
    - Default restores params default value
    - Supported MAC address format are "E.E.E", "EE-EE-EE-EE-EE-EE",
      "EE:EE:EE:EE:EE:EE" and "EEEE.EEEE.EEEE"
options:
    anycast_gateway_mac:
        description:
            - Anycast gateway mac of the switch.
        required: true
        default: null
'''

EXAMPLES = '''
- nxos_overlay_global:
    anycast_gateway_mac: "b.b.b"
    username: "{{ un }}"
    password: "{{ pwd }}"
    host: "{{ inventory_hostname }}"
'''

RETURN = '''
proposed:
    description: k/v pairs of parameters passed into module
    returned: verbose mode
    type: dict
    sample: {"asn": "65535", "router_id": "1.1.1.1", "vrf": "test"}
existing:
    description: k/v pairs of existing BGP configuration
    returned: verbose mode
    type: dict
    sample: {"asn": "65535", "bestpath_always_compare_med": false,
            "bestpath_aspath_multipath_relax": false,
            "bestpath_compare_neighborid": false,
            "bestpath_compare_routerid": false,
            "bestpath_cost_community_ignore": false,
            "bestpath_med_confed": false,
            "bestpath_med_missing_as_worst": false,
            "bestpath_med_non_deterministic": false, "cluster_id": "",
            "confederation_id": "", "confederation_peers": "",
            "graceful_restart": true, "graceful_restart_helper": false,
            "graceful_restart_timers_restart": "120",
            "graceful_restart_timers_stalepath_time": "300", "local_as": "",
            "log_neighbor_changes": false, "maxas_limit": "",
            "neighbor_down_fib_accelerate": false, "reconnect_interval": "60",
            "router_id": "11.11.11.11", "suppress_fib_pending": false,
            "timer_bestpath_limit": "", "timer_bgp_hold": "180",
            "timer_bgp_keepalive": "60", "vrf": "test"}
end_state:
    description: k/v pairs of BGP configuration after module execution
    returned: verbose mode
    type: dict
    sample: {"asn": "65535", "bestpath_always_compare_med": false,
            "bestpath_aspath_multipath_relax": false,
            "bestpath_compare_neighborid": false,
            "bestpath_compare_routerid": false,
            "bestpath_cost_community_ignore": false,
            "bestpath_med_confed": false,
            "bestpath_med_missing_as_worst": false,
            "bestpath_med_non_deterministic": false, "cluster_id": "",
            "confederation_id": "", "confederation_peers": "",
            "graceful_restart": true, "graceful_restart_helper": false,
            "graceful_restart_timers_restart": "120",
            "graceful_restart_timers_stalepath_time": "300", "local_as": "",
            "log_neighbor_changes": false, "maxas_limit": "",
            "neighbor_down_fib_accelerate": false, "reconnect_interval": "60",
            "router_id": "1.1.1.1", "suppress_fib_pending": false,
            "timer_bestpath_limit": "", "timer_bgp_hold": "180",
            "timer_bgp_keepalive": "60", "vrf": "test"}
updates:
    description: commands sent to the device
    returned: always
    type: list
    sample: ["router bgp 65535", "vrf test", "router-id 1.1.1.1"]
changed:
    description: check to see if a change was made on the device
    returned: always
    type: boolean
    sample: true
'''

import re

from ansible.module_utils.nxos import get_config, load_config
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.netcfg import CustomNetworkConfig

# Maps module parameter names to the NX-OS CLI command they configure.
PARAM_TO_COMMAND_KEYMAP = {
    'anycast_gateway_mac': 'fabric forwarding anycast-gateway-mac',
}


def invoke(name, *args, **kwargs):
    """Call the module-level function *name* with the given arguments,
    returning None silently when no such function exists."""
    func = globals().get(name)
    if func:
        return func(*args, **kwargs)


def get_value(arg, config, module):
    """Extract the configured value of *arg* from the running config text,
    or '' when the corresponding command is absent."""
    REGEX = re.compile(r'(?:{0}\s)(?P<value>.*)$'.format(PARAM_TO_COMMAND_KEYMAP[arg]), re.M)
    value = ''
    if PARAM_TO_COMMAND_KEYMAP[arg] in config:
        value = REGEX.search(config).group('value')
    return value


def get_existing(module, args):
    """Return a dict of the current on-device values for every arg."""
    existing = {}
    config = str(get_config(module))
    for arg in args:
        existing[arg] = get_value(arg, config, module)
    return existing


def apply_key_map(key_map, table):
    """Rename the keys of *table* according to *key_map*; entries whose key
    has no mapping are dropped."""
    new_dict = {}
    for key, value in table.items():
        new_key = key_map.get(key)
        if new_key:
            new_dict[new_key] = value
    return new_dict


def get_commands(module, existing, proposed, candidate):
    """Compute the CLI commands needed to move from *existing* to *proposed*
    and add them to *candidate*.  The literal value 'default' removes the
    currently configured command instead of setting one."""
    commands = list()
    proposed_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, proposed)
    existing_commands = apply_key_map(PARAM_TO_COMMAND_KEYMAP, existing)

    for key, value in proposed_commands.items():
        if value == 'default':
            existing_value = existing_commands.get(key)
            if existing_value:
                commands.append('no {0} {1}'.format(key, existing_value))
        else:
            if 'anycast-gateway-mac' in key:
                value = normalize_mac(value, module)
            command = '{0} {1}'.format(key, value)
            commands.append(command)
    if commands:
        candidate.add(commands, parents=[])


def normalize_mac(proposed_mac, module):
    """Validate *proposed_mac* in any of the supported notations
    (E.E.E / EEEE.EEEE.EEEE, EE-EE-..., EE:EE:...) and return it in the
    canonical NX-OS form 'XXXX.XXXX.XXXX' (upper case).

    Calls module.fail_json() on malformed input.
    """
    try:
        if '-' in proposed_mac:
            splitted_mac = proposed_mac.split('-')
            if len(splitted_mac) != 6:
                raise ValueError

            for octect in splitted_mac:
                if len(octect) != 2:
                    raise ValueError

        elif '.' in proposed_mac:
            splitted_mac = []
            splitted_dot_mac = proposed_mac.split('.')
            if len(splitted_dot_mac) != 3:
                raise ValueError

            for octect in splitted_dot_mac:
                if len(octect) > 4:
                    raise ValueError
                else:
                    # Bugfix: each dotted group must be zero-padded to exactly
                    # four hex digits.  The previous ``octect.zfill(padding + 1)``
                    # (with padding = 4 - len) left 2- and 3-digit groups short,
                    # corrupting the regrouped MAC below.
                    splitted_mac.append(octect.zfill(4))

        elif ':' in proposed_mac:
            splitted_mac = proposed_mac.split(':')
            if len(splitted_mac) != 6:
                raise ValueError

            for octect in splitted_mac:
                if len(octect) != 2:
                    raise ValueError
        else:
            raise ValueError
    except ValueError:
        module.fail_json(msg='Invalid MAC address format',
                         proposed_mac=proposed_mac)

    joined_mac = ''.join(splitted_mac)
    # Regroup the 12 hex digits into three 4-digit, dot-separated groups.
    mac = [joined_mac[i:i+4] for i in range(0, len(joined_mac), 4)]
    return '.'.join(mac).upper()


def main():
    argument_spec = dict(
        anycast_gateway_mac=dict(required=True, type='str'),
        m_facts=dict(required=False, default=False, type='bool'),
    )
    argument_spec.update(nxos_argument_spec)
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    warnings = list()
    check_args(module, warnings)

    args = [
        'anycast_gateway_mac'
    ]

    existing = invoke('get_existing', module, args)
    end_state = existing
    proposed = dict((k, v) for k, v in module.params.items()
                    if v is not None and k in args)

    result = {}
    candidate = CustomNetworkConfig(indent=3)
    invoke('get_commands', module, existing, proposed, candidate)

    if not module.check_mode:
        load_config(module, candidate)

    if module._verbosity > 0:
        end_state = invoke('get_existing', module, args)
        result['end_state'] = end_state
        result['existing'] = existing
        result['proposed'] = proposed

    # Bugfix: report the collected warning messages; this previously set the
    # literal True, which is not a valid warnings payload.
    # NOTE(review): result never includes a 'changed' key, so the module
    # always reports changed=false — confirm whether that is intentional.
    result['warnings'] = warnings

    module.exit_json(**result)


if __name__ == '__main__':
    main()
gpl-3.0
MyNameIsMeerkat/pyREtic
Downloaded_Runtimes/Python-default/Lib/opcode.py
94
5210
""" opcode module - potentially shared between dis and other modules which operate on bytecodes (e.g. peephole optimizers). """ __all__ = ["cmp_op", "hasconst", "hasname", "hasjrel", "hasjabs", "haslocal", "hascompare", "hasfree", "opname", "opmap", "HAVE_ARGUMENT", "EXTENDED_ARG"] cmp_op = ('<', '<=', '==', '!=', '>', '>=', 'in', 'not in', 'is', 'is not', 'exception match', 'BAD') hasconst = [] hasname = [] hasjrel = [] hasjabs = [] haslocal = [] hascompare = [] hasfree = [] opmap = {} opname = [''] * 256 for op in range(256): opname[op] = '<%r>' % (op,) del op def def_op(name, op): opname[op] = name opmap[name] = op def name_op(name, op): def_op(name, op) hasname.append(op) def jrel_op(name, op): def_op(name, op) hasjrel.append(op) def jabs_op(name, op): def_op(name, op) hasjabs.append(op) # Instruction opcodes for compiled code # Blank lines correspond to available opcodes def_op('STOP_CODE', 0) def_op('POP_TOP', 1) def_op('ROT_TWO', 2) def_op('ROT_THREE', 3) def_op('DUP_TOP', 4) def_op('ROT_FOUR', 5) def_op('NOP', 9) def_op('UNARY_POSITIVE', 10) def_op('UNARY_NEGATIVE', 11) def_op('UNARY_NOT', 12) def_op('UNARY_CONVERT', 13) def_op('UNARY_INVERT', 15) def_op('LIST_APPEND', 18) def_op('BINARY_POWER', 19) def_op('BINARY_MULTIPLY', 20) def_op('BINARY_DIVIDE', 21) def_op('BINARY_MODULO', 22) def_op('BINARY_ADD', 23) def_op('BINARY_SUBTRACT', 24) def_op('BINARY_SUBSCR', 25) def_op('BINARY_FLOOR_DIVIDE', 26) def_op('BINARY_TRUE_DIVIDE', 27) def_op('INPLACE_FLOOR_DIVIDE', 28) def_op('INPLACE_TRUE_DIVIDE', 29) def_op('SLICE+0', 30) def_op('SLICE+1', 31) def_op('SLICE+2', 32) def_op('SLICE+3', 33) def_op('STORE_SLICE+0', 40) def_op('STORE_SLICE+1', 41) def_op('STORE_SLICE+2', 42) def_op('STORE_SLICE+3', 43) def_op('DELETE_SLICE+0', 50) def_op('DELETE_SLICE+1', 51) def_op('DELETE_SLICE+2', 52) def_op('DELETE_SLICE+3', 53) def_op('INPLACE_ADD', 55) def_op('INPLACE_SUBTRACT', 56) def_op('INPLACE_MULTIPLY', 57) def_op('INPLACE_DIVIDE', 58) def_op('INPLACE_MODULO', 59) 
def_op('STORE_SUBSCR', 60) def_op('DELETE_SUBSCR', 61) def_op('BINARY_LSHIFT', 62) def_op('BINARY_RSHIFT', 63) def_op('BINARY_AND', 64) def_op('BINARY_XOR', 65) def_op('BINARY_OR', 66) def_op('INPLACE_POWER', 67) def_op('GET_ITER', 68) def_op('PRINT_EXPR', 70) def_op('PRINT_ITEM', 71) def_op('PRINT_NEWLINE', 72) def_op('PRINT_ITEM_TO', 73) def_op('PRINT_NEWLINE_TO', 74) def_op('INPLACE_LSHIFT', 75) def_op('INPLACE_RSHIFT', 76) def_op('INPLACE_AND', 77) def_op('INPLACE_XOR', 78) def_op('INPLACE_OR', 79) def_op('BREAK_LOOP', 80) def_op('WITH_CLEANUP', 81) def_op('LOAD_LOCALS', 82) def_op('RETURN_VALUE', 83) def_op('IMPORT_STAR', 84) def_op('EXEC_STMT', 85) def_op('YIELD_VALUE', 86) def_op('POP_BLOCK', 87) def_op('END_FINALLY', 88) def_op('BUILD_CLASS', 89) HAVE_ARGUMENT = 90 # Opcodes from here have an argument: name_op('STORE_NAME', 90) # Index in name list name_op('DELETE_NAME', 91) # "" def_op('UNPACK_SEQUENCE', 92) # Number of tuple items jrel_op('FOR_ITER', 93) name_op('STORE_ATTR', 95) # Index in name list name_op('DELETE_ATTR', 96) # "" name_op('STORE_GLOBAL', 97) # "" name_op('DELETE_GLOBAL', 98) # "" def_op('DUP_TOPX', 99) # number of items to duplicate def_op('LOAD_CONST', 100) # Index in const list hasconst.append(100) name_op('LOAD_NAME', 101) # Index in name list def_op('BUILD_TUPLE', 102) # Number of tuple items def_op('BUILD_LIST', 103) # Number of list items def_op('BUILD_MAP', 104) # Always zero for now name_op('LOAD_ATTR', 105) # Index in name list def_op('COMPARE_OP', 106) # Comparison operator hascompare.append(106) name_op('IMPORT_NAME', 107) # Index in name list name_op('IMPORT_FROM', 108) # Index in name list jrel_op('JUMP_FORWARD', 110) # Number of bytes to skip jrel_op('JUMP_IF_FALSE', 111) # "" jrel_op('JUMP_IF_TRUE', 112) # "" jabs_op('JUMP_ABSOLUTE', 113) # Target byte offset from beginning of code name_op('LOAD_GLOBAL', 116) # Index in name list jabs_op('CONTINUE_LOOP', 119) # Target address jrel_op('SETUP_LOOP', 120) # Distance to target 
address jrel_op('SETUP_EXCEPT', 121) # "" jrel_op('SETUP_FINALLY', 122) # "" def_op('LOAD_FAST', 124) # Local variable number haslocal.append(124) def_op('STORE_FAST', 125) # Local variable number haslocal.append(125) def_op('DELETE_FAST', 126) # Local variable number haslocal.append(126) def_op('RAISE_VARARGS', 130) # Number of raise arguments (1, 2, or 3) def_op('CALL_FUNCTION', 131) # #args + (#kwargs << 8) def_op('MAKE_FUNCTION', 132) # Number of args with default values def_op('BUILD_SLICE', 133) # Number of items def_op('MAKE_CLOSURE', 134) def_op('LOAD_CLOSURE', 135) hasfree.append(135) def_op('LOAD_DEREF', 136) hasfree.append(136) def_op('STORE_DEREF', 137) hasfree.append(137) def_op('CALL_FUNCTION_VAR', 140) # #args + (#kwargs << 8) def_op('CALL_FUNCTION_KW', 141) # #args + (#kwargs << 8) def_op('CALL_FUNCTION_VAR_KW', 142) # #args + (#kwargs << 8) def_op('EXTENDED_ARG', 143) EXTENDED_ARG = 143 del def_op, name_op, jrel_op, jabs_op
gpl-3.0
pahaz/prospector
prospector/tools/mccabe/__init__.py
3
2717
from __future__ import absolute_import import ast from mccabe import PathGraphingAstVisitor from prospector.message import Location, Message from prospector.tools.base import ToolBase __all__ = ( 'McCabeTool', ) class McCabeTool(ToolBase): def __init__(self, *args, **kwargs): super(McCabeTool, self).__init__(*args, **kwargs) self.ignore_codes = () self.max_complexity = 10 def configure(self, prospector_config, _): self.ignore_codes = prospector_config.get_disabled_messages('mccabe') options = prospector_config.tool_options('mccabe') if 'max-complexity' in options: self.max_complexity = options['max-complexity'] return None def run(self, found_files): messages = [] for code_file in found_files.iter_module_paths(): try: tree = ast.parse( open(code_file, 'r').read(), filename=code_file, ) except (SyntaxError, TypeError): location = Location( path=code_file, module=None, function=None, line=1, character=0, ) message = Message( source='mccabe', code='MC0000', location=location, message='Could not parse file', ) messages.append(message) continue visitor = PathGraphingAstVisitor() visitor.preorder(tree, visitor) for graph in visitor.graphs.values(): complexity = graph.complexity() if complexity > self.max_complexity: location = Location( path=code_file, module=None, function=graph.entity, line=graph.lineno, character=0, absolute_path=True ) message = Message( source='mccabe', code='MC0001', location=location, message='%s is too complex (%s)' % ( graph.entity, complexity, ), ) messages.append(message) return self.filter_messages(messages) def filter_messages(self, messages): return [ message for message in messages if message.code not in self.ignore_codes ]
gpl-2.0
metacloud/molecule
test/unit/dependency/test_ansible_galaxy.py
1
6911
#  Copyright (c) 2015-2018 Cisco Systems, Inc.
#
#  Permission is hereby granted, free of charge, to any person obtaining a copy
#  of this software and associated documentation files (the "Software"), to
#  deal in the Software without restriction, including without limitation the
#  rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
#  sell copies of the Software, and to permit persons to whom the Software is
#  furnished to do so, subject to the following conditions:
#
#  The above copyright notice and this permission notice shall be included in
#  all copies or substantial portions of the Software.
#
#  THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
#  FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
#  DEALINGS IN THE SOFTWARE.

# Unit tests for molecule.dependency.ansible_galaxy.AnsibleGalaxy.
# Fixtures prefixed with '_' follow the molecule test convention for
# private/parametrization fixtures.

import os

import pytest
import sh

from molecule import config
from molecule.dependency import ansible_galaxy


# Patches AnsibleGalaxy._has_requirements_file to pretend a requirements
# file exists; individual tests flip return_value as needed.
@pytest.fixture
def _patched_ansible_galaxy_has_requirements_file(mocker):
    m = mocker.patch(('molecule.dependency.ansible_galaxy.'
                      'AnsibleGalaxy._has_requirements_file'))
    m.return_value = True

    return m


# Config fragment injected via the parametrized `config_instance` fixture.
@pytest.fixture
def _dependency_section_data():
    return {
        'dependency': {
            'name': 'galaxy',
            'options': {
                'foo': 'bar',
                'v': True,
            },
            'env': {
                'FOO': 'bar',
            }
        }
    }


# NOTE(retr0h): The use of the `patched_config_validate` fixture, disables
# config.Config._validate from executing.  Thus preventing odd side-effects
# throughout patched.assert_called unit tests.
@pytest.fixture
def _instance(_dependency_section_data, patched_config_validate,
              config_instance):
    return ansible_galaxy.AnsibleGalaxy(config_instance)


@pytest.fixture
def role_file(_instance):
    # Expected path of the scenario's requirements.yml.
    return os.path.join(_instance._config.scenario.directory,
                        'requirements.yml')


@pytest.fixture
def roles_path(_instance):
    # Expected install target for galaxy roles inside the ephemeral dir.
    return os.path.join(_instance._config.scenario.ephemeral_directory,
                        'roles')


def test_config_private_member(_instance):
    assert isinstance(_instance._config, config.Config)


def test_default_options_property(_instance, role_file, roles_path):
    x = {'role-file': role_file, 'roles-path': roles_path, 'force': True}

    assert x == _instance.default_options


def test_default_env_property(_instance):
    env = _instance.default_env

    assert 'MOLECULE_FILE' in env
    assert 'MOLECULE_INVENTORY_FILE' in env
    assert 'MOLECULE_SCENARIO_DIRECTORY' in env
    assert 'MOLECULE_INSTANCE_CONFIG' in env


def test_name_property(_instance):
    assert 'galaxy' == _instance.name


def test_enabled_property(_instance):
    assert _instance.enabled


# User-supplied options merge over the defaults.
@pytest.mark.parametrize(
    'config_instance', ['_dependency_section_data'], indirect=True)
def test_options_property(_instance, role_file, roles_path):
    x = {
        'force': True,
        'role-file': role_file,
        'roles-path': roles_path,
        'foo': 'bar',
        'v': True,
    }

    assert x == _instance.options


# With --debug, the short 'v' flag is promoted to 'vvv'.
@pytest.mark.parametrize(
    'config_instance', ['_dependency_section_data'], indirect=True)
def test_options_property_handles_cli_args(role_file, roles_path, _instance):
    _instance._config.args = {'debug': True}
    x = {
        'force': True,
        'role-file': role_file,
        'roles-path': roles_path,
        'foo': 'bar',
        'vvv': True,
    }

    assert x == _instance.options


@pytest.mark.parametrize(
    'config_instance', ['_dependency_section_data'], indirect=True)
def test_env_property(_instance):
    assert 'bar' == _instance.env['FOO']


# bake() should assemble the ansible-galaxy command line from the options.
@pytest.mark.parametrize(
    'config_instance', ['_dependency_section_data'], indirect=True)
def test_bake(_instance, role_file, roles_path):
    _instance.bake()
    x = [
        str(sh.ansible_galaxy), 'install', '--role-file={}'.format(role_file),
        '--roles-path={}'.format(roles_path), '--force', '--foo=bar', '-v'
    ]
    result = str(_instance._sh_command).split()

    assert sorted(x) == sorted(result)


def test_execute(patched_run_command,
                 _patched_ansible_galaxy_has_requirements_file,
                 patched_logger_success, _instance):
    _instance._sh_command = 'patched-command'
    _instance.execute()

    role_directory = os.path.join(_instance._config.scenario.directory,
                                  _instance.options['roles-path'])
    assert os.path.isdir(role_directory)

    patched_run_command.assert_called_once_with('patched-command', debug=False)

    msg = 'Dependency completed successfully.'
    patched_logger_success.assert_called_once_with(msg)


def test_execute_does_not_execute_when_disabled(
        patched_run_command, patched_logger_warn, _instance):
    _instance._config.config['dependency']['enabled'] = False
    _instance.execute()

    assert not patched_run_command.called

    msg = 'Skipping, dependency is disabled.'
    patched_logger_warn.assert_called_once_with(msg)


def test_execute_does_not_execute_when_no_requirements_file(
        patched_run_command, _patched_ansible_galaxy_has_requirements_file,
        patched_logger_warn, _instance):
    _patched_ansible_galaxy_has_requirements_file.return_value = False
    _instance.execute()

    assert not patched_run_command.called

    msg = 'Skipping, missing the requirements file.'
    patched_logger_warn.assert_called_once_with(msg)


# execute() with no pre-baked command must bake one itself.
def test_execute_bakes(patched_run_command, _instance, role_file,
                       _patched_ansible_galaxy_has_requirements_file,
                       roles_path):
    _instance.execute()

    assert _instance._sh_command is not None

    assert 1 == patched_run_command.call_count


def test_executes_catches_and_exits_return_code(
        patched_run_command, _patched_ansible_galaxy_has_requirements_file,
        _instance):
    patched_run_command.side_effect = sh.ErrorReturnCode_1(
        sh.ansible_galaxy, b'', b'')
    with pytest.raises(SystemExit) as e:
        _instance.execute()

    assert 1 == e.value.code


def test_setup(_instance):
    role_directory = os.path.join(_instance._config.scenario.directory,
                                  _instance.options['roles-path'])
    assert not os.path.isdir(role_directory)

    _instance._setup()

    assert os.path.isdir(role_directory)


def test_role_file(role_file, _instance):
    assert role_file == _instance._role_file()


def test_has_requirements_file(_instance):
    assert not _instance._has_requirements_file()
mit
hryamzik/ansible
lib/ansible/modules/network/nxos/nxos_linkagg.py
5
12502
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2017, Ansible by Red Hat, inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = """ --- module: nxos_linkagg extends_documentation_fragment: nxos version_added: "2.5" short_description: Manage link aggregation groups on Cisco NXOS devices. description: - This module provides declarative management of link aggregation groups on Cisco NXOS devices. author: - Trishna Guha (@trishnaguha) notes: - Tested against NXOSv 7.0(3)I5(1). - C(state=absent) removes the portchannel config and interface if it already exists. If members to be removed are not explicitly passed, all existing members (if any), are removed. - Members must be a list. - LACP needs to be enabled first if active/passive modes are used. options: group: description: - Channel-group number for the port-channel Link aggregation group. required: true mode: description: - Mode for the link aggregation group. choices: [ active, 'on', passive ] default: 'on' min_links: description: - Minimum number of ports required up before bringing up the link aggregation group. members: description: - List of interfaces that will be managed in the link aggregation group. force: description: - When true it forces link aggregation group members to match what is declared in the members param. This can be used to remove members. type: bool default: 'no' aggregate: description: List of link aggregation definitions. state: description: - State of the link aggregation group. default: present choices: ['present','absent'] purge: description: - Purge links not defined in the I(aggregate) parameter. 
type: bool default: 'no' """ EXAMPLES = """ - name: create link aggregation group nxos_linkagg: group: 99 state: present - name: delete link aggregation group nxos_linkagg: group: 99 state: absent - name: set link aggregation group to members nxos_linkagg: group: 10 min_links: 3 mode: active members: - Ethernet1/2 - Ethernet1/4 - name: remove link aggregation group from Ethernet1/2 nxos_linkagg: group: 10 min_links: 3 mode: active members: - Ethernet1/4 - name: Create aggregate of linkagg definitions nxos_linkagg: aggregate: - { group: 3 } - { group: 100, min_links: 3 } - name: Remove aggregate of linkagg definitions nxos_linkagg: aggregate: - { group: 3 } - { group: 100, min_links: 3 } state: absent """ RETURN = """ commands: description: The list of configuration mode commands to send to the device returned: always, except for the platforms that use Netconf transport to manage the device. type: list sample: - interface port-channel 30 - lacp min-links 5 - interface Ethernet2/1 - channel-group 30 mode active - no interface port-channel 30 """ import re from copy import deepcopy from ansible.module_utils.network.nxos.nxos import get_config, load_config, run_commands from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec from ansible.module_utils.network.nxos.nxos import normalize_interface from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.network.common.utils import remove_default_spec def search_obj_in_list(group, lst): for o in lst: if o['group'] == group: return o def map_obj_to_commands(updates, module): commands = list() want, have = updates purge = module.params['purge'] force = module.params['force'] for w in want: group = w['group'] mode = w['mode'] min_links = w['min_links'] members = w.get('members') or [] state = w['state'] del w['state'] obj_in_have = search_obj_in_list(group, have) if state == 'absent': if obj_in_have: members_to_remove = list(set(obj_in_have['members']) - set(members)) if 
members_to_remove: for m in members_to_remove: commands.append('interface {0}'.format(m)) commands.append('no channel-group {0}'.format(obj_in_have['group'])) commands.append('exit') commands.append('no interface port-channel {0}'.format(group)) elif state == 'present': if not obj_in_have: commands.append('interface port-channel {0}'.format(group)) if min_links != 'None': commands.append('lacp min-links {0}'.format(min_links)) commands.append('exit') if members: for m in members: commands.append('interface {0}'.format(m)) if force: commands.append('channel-group {0} force mode {1}'.format(group, mode)) else: commands.append('channel-group {0} mode {1}'.format(group, mode)) else: if members: if not obj_in_have['members']: for m in members: commands.append('interface port-channel {0}'.format(group)) commands.append('exit') commands.append('interface {0}'.format(m)) if force: commands.append('channel-group {0} force mode {1}'.format(group, mode)) else: commands.append('channel-group {0} mode {1}'.format(group, mode)) elif set(members) != set(obj_in_have['members']): missing_members = list(set(members) - set(obj_in_have['members'])) for m in missing_members: commands.append('interface port-channel {0}'.format(group)) commands.append('exit') commands.append('interface {0}'.format(m)) if force: commands.append('channel-group {0} force mode {1}'.format(group, mode)) else: commands.append('channel-group {0} mode {1}'.format(group, mode)) superfluous_members = list(set(obj_in_have['members']) - set(members)) for m in superfluous_members: commands.append('interface port-channel {0}'.format(group)) commands.append('exit') commands.append('interface {0}'.format(m)) commands.append('no channel-group {0}'.format(group)) if purge: for h in have: obj_in_want = search_obj_in_list(h['group'], want) if not obj_in_want: commands.append('no interface port-channel {0}'.format(h['group'])) return commands def map_params_to_obj(module): obj = [] aggregate = module.params.get('aggregate') 
def parse_members(output, group):
    """Return the member interfaces of port-channel *group*.

    output: parsed JSON payload of 'show port-channel summary | json'.
    group:  port-channel group id, as a string.

    Returns an empty list when the group is not present in the output
    (previously this raised UnboundLocalError because ``members`` was
    only assigned inside the matching branch).
    """
    # Initialize up front so a group with no match falls through cleanly.
    members = list()
    channels = output['TABLE_channel']['ROW_channel']
    if isinstance(channels, list):
        # Multiple port-channels: find the row for the requested group.
        for channel in channels:
            if channel['group'] == group:
                members = get_members(channel)
    elif isinstance(channels, dict):
        # A single port-channel is returned as a bare dict.
        if channels['group'] == group:
            members = get_members(channels)
    return members
def map_config_to_obj(module):
    """Collect the device's current port-channel state as a list of dicts.

    Runs 'show port-channel summary | json' and turns each channel row
    into an object via parse_channel_options(). Returns an empty list
    when the command yields nothing usable.
    """
    result = list()
    output = run_commands(module, ['show port-channel summary | json'])[0]
    if not output:
        return list()
    try:
        rows = output['TABLE_channel']['ROW_channel']
    except (TypeError, KeyError):
        # No channel table in the payload (or payload is not a dict).
        return result
    if not rows:
        return result
    # A single configured port-channel comes back as a bare dict;
    # normalize it so both shapes take the same path.
    if isinstance(rows, dict):
        rows = [rows]
    if isinstance(rows, list):
        result = [parse_channel_options(module, output, row) for row in rows]
    return result
__name__ == '__main__': main()
gpl-3.0
moio/spacewalk
backend/server/importlib/channelImport.py
3
14657
# # Copyright (c) 2008--2012 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # # Channel import process # from importLib import Import, InvalidArchError, \ InvalidChannelError, InvalidChannelFamilyError, MissingParentChannelError from spacewalk.common.rhnConfig import CFG from spacewalk.satellite_tools.syncLib import log class ChannelImport(Import): def __init__(self, batch, backend): Import.__init__(self, batch, backend) self.arches = {} self.families = {} self.dists = {} self.will_commit = 1 self.releases = {} self.channels = {} self.checksum_types = {} def preprocess(self): # Processes the batch to a form more suitable for database # operations for channel in self.batch: self.__processChannel(channel) def __processChannel(self, channel): # Processes a package arch = channel['channel_arch'] if not self.arches.has_key(arch): self.arches[arch] = None for family in channel['families']: self.families[family['label']] = None # Dists if channel.has_key('dists') and channel['dists'] is not None: for dist in channel['dists']: self.arches[dist['channel_arch']] = None # Product Names if channel.has_key('release') and channel['release'] is not None: for release in channel['release']: self.arches[release['channel_arch']] = None if not channel.has_key('receiving_updates') or channel['receiving_updates'] is None: channel['receiving_updates'] = 'N' # Yum repo checksum type if (channel['checksum_type'] and channel['checksum_type'] not in 
    def fix(self):
        """Resolve collected labels to database ids, then rewrite the batch.

        Fills in the arch / channel-family / checksum-type lookup maps
        built by preprocess(), then post-processes every channel so it
        carries database ids instead of labels.
        """
        self.backend.lookupChannelArches(self.arches)
        self.backend.lookupChannelFamilies(self.families)
        self.backend.lookupChecksumTypes(self.checksum_types)
        # Fix
        for channel in self.batch:
            self.__postprocessChannel(channel)

    def __postprocessChannel(self, channel):
        """Rewrite one channel dict in place, replacing labels with db ids.

        Marks the channel ignored and raises InvalidArchError or
        InvalidChannelFamilyError when a referenced arch or channel
        family could not be resolved by fix().
        """
        if channel.ignored:
            # Already rejected earlier; leave it alone.
            return
        arch = channel['channel_arch']
        if self.arches[arch] is None:
            # Mark it as ignored
            channel.ignored = 1
            raise InvalidArchError(arch,
                "Unsupported channel arch %s" % arch)
        channel['channel_arch_id'] = self.arches[arch]
        if channel['checksum_type']:
            channel['checksum_type_id'] = self.checksum_types[channel['checksum_type']]
        else:
            channel['checksum_type_id'] = None
        if channel.has_key('product_name'):
            channel['product_name_id'] = self.backend.lookupProductNames(
                channel['product_name'])
        families = []
        for family in channel['families']:
            # Link back the channel to families
            channel_family_id = self.families[family['label']]
            if channel_family_id is None:
                # Still cant get the id: unknown channel family
                raise InvalidChannelFamilyError(family['label'])
            families.append({
                'channel_family_id' : self.families[family['label']]
            })
        channel['families'] = families
        # Dists
        self.__postprocessChannelMaps(channel, 'dists')
        # release
        self.__postprocessChannelMaps(channel, 'release')
as ignored channel.ignored = 1 raise InvalidArchError(arch, "Unsupported channel arch %s" % arch) dict['channel_arch_id'] = self.arches[arch] def submit(self): parentChannels = {} # Split the batch into null and non-null parent channels nullParentBatch = [] nonNullParentBatch = [] channel_trusts = [] for channel in self.batch: if channel.ignored: continue if (channel.has_key('trust_list') and channel['trust_list']): self.backend.clearChannelTrusts(channel['label']) for trust in channel['trust_list']: if (channel.has_key('org_id') and channel['org_id'] and self.backend.orgTrustExists( channel['org_id'], trust['org_trust_id'])): channel_trusts.append( {'channel-label': channel['label'], 'org-id': trust['org_trust_id']}) parent = channel['parent_channel'] if not parent: nullParentBatch.append(channel) continue nonNullParentBatch.append(channel) # And save the parent channel's label in a hash too parentChannels[parent] = None # Process the easy case of null parent channels try: self.backend.processChannels(nullParentBatch, True) except: self.backend.rollback() raise # Find the parent channels ids for channel in nullParentBatch: if channel.ignored: continue label = channel['label'] if not parentChannels.has_key(label): # This channel is not a parent channel to anybody continue parentChannels[label] = channel.id # Build an extra hash for the channels with unknown ids unknownChannels = {} for k, v in parentChannels.items(): if v is None: unknownChannels[k] = None # And look them up self.backend.lookupChannels(unknownChannels) # Copy the ids back into parentChannels, to make life easier missingParents = [] for k, v in unknownChannels.items(): if v is None: missingParents.append(k) else: parentChannels[k] = v['id'] if missingParents: raise MissingParentChannelError(missingParents, "Invalid import (this parent needs to be imported?) 
%s" % missingParents) # Fix up the parent channels for channel in nonNullParentBatch: parent = channel['parent_channel'] if not parentChannels.has_key(parent): # Unknown parent channel channel.ignored = 1 continue # Replace the label with the id channel['parent_channel'] = parentChannels[parent] # And process these channels too try: self.backend.processChannels(nonNullParentBatch, False) except: self.backend.rollback() raise # Process the channel trusts if len(channel_trusts) > 0: self.backend.processChannelTrusts(channel_trusts) # Finally go back and add the products, if any for channel in self.batch: if channel.ignored: continue if (channel.has_key('channel_product') and channel['channel_product']) \ or (channel.has_key('product_name') and channel['product_name']): self.backend.processChannelProduct(channel) # Sometimes we may want to turn commits off if self.will_commit: self.backend.commit() class ChannelFamilyImport(Import): def preprocess(self): if CFG.ISS_PARENT: # Filter out private channel families from ISS syncs self.__filterCustomChannelFamilies() # We have to look up the channels for this channel family first self.channels = {} for cf in self.batch: for c in cf['channels']: self.channels[c] = None def fix(self): self.backend.lookupChannels(self.channels) for cf in self.batch: channel_ids = cf['channel_ids'] = [] for c in cf['channels']: chash = self.channels[c] if chash is None: # Skip continue cid = chash['id'] channel_ids.append(cid) def submit(self): try: self.backend.processChannelFamilies(self.batch) self.backend.processChannelFamilyMembers(self.batch) # self.backend.processVirtSubLevel(self.batch) self.backend.processChannelFamilyVirtSubLevel(self.batch) except: self.backend.rollback() raise self.backend.commit() def __filterCustomChannelFamilies(self): """Filter out private channel families from ISS syncs. WebUI creates these for us at the org creation time. 
""" new_batch = [] for cf in self.batch: if not cf['label'].startswith("private-channel-family"): new_batch.append(cf) self.batch = new_batch class ChannelFamilyPermissionsImport(Import): def __init__(self, batch, backend): Import.__init__(self, batch, backend) self.channel_families = {} self.will_commit = 1 def preprocess(self): for cf in self.batch: self.channel_families[cf['channel_family']] = None def fix(self): # Look up the _only_ org in the system org_id = self.backend.lookupOrg() self.backend.lookupChannelFamilies(self.channel_families) for cf in self.batch: cf_name = cf['channel_family'] if self.channel_families[cf_name] is None: # Unsupported channel family raise InvalidChannelFamilyError(cf_name) cf['channel_family_id'] = self.channel_families[cf_name] if cf['org_id'] == -1: # Make it our own org_id cf['org_id'] = org_id def submit(self): try: self.backend.processChannelFamilyPermissions(self.batch) except: self.backend.rollback() raise if self.will_commit: self.backend.commit() class DistChannelMapImport(Import): def __init__(self, batch, backend): Import.__init__(self, batch, backend) self.arches = {} self.channels = {} def preprocess(self): # Processes the batch to a form more suitable for database # operations for dcm in self.batch: self.arches[dcm['arch']] = None self.channels[dcm['channel']] = None def fix(self): # Look up arches and channels self.backend.lookupChannelArches(self.arches) self.backend.lookupChannels(self.channels) for dcm in self.batch: arch = self.arches[dcm['arch']] if arch is None: # Invalid arch dcm.ignored = 1 raise InvalidArchError(dcm['arch'], "Invalid dist_channel_map arch %s" % dcm['arch']) channel = self.channels[dcm['channel']] if channel is None: dcm.ignored = 1 raise InvalidChannelError(dcm['channel'], "Invalid dist_channel_map channel %s" % dcm['channel']) dcm['arch'] = arch dcm['channel_id'] = channel['id'] dcm['org_id'] = None def submit(self): try: self.backend.processDistChannelMap(self.batch) except: 
self.backend.rollback() raise self.backend.commit() # for testing only if __name__ == '__main__': import sys from spacewalk.server import rhnSQL from backendOracle import OracleBackend from importLib import Collection, ChannelFamily, DistChannelMap backend = OracleBackend() if 1: batch = Collection() dcms = [ { 'os' : 'Red Hat Linux', 'release' : '7.2', 'arch' : 'i386', 'channel' : 'redhat-linux-i386-7.2', }, { 'os' : 'Red Hat Linux', 'release' : '6.2', 'arch' : 'i386', 'channel' : 'redhat-linux-i386-6.2', }, ] for dcm in dcms: x = DistChannelMap() x.populate(dcm) batch.append(x) rhnSQL.initDB() backend.init() dcmimp = DistChannelMapImport(batch, backend) dcmimp.run() sys.exit(0) if 0: batch = Collection() families = [ { 'name' : 'Cisco Linux', 'label' : 'cisco', 'product_url' : 'http://www.redhat.com/products/ADSFASDFASDF', }, { 'name' : 'Misa Linux', 'label' : 'misa', 'product_url' : 'http://people.redhat.com/misa/ASDFASDFASDF', }, ] for fam in families: cf = ChannelFamily() cf.populate(fam) batch.append(cf) rhnSQL.initDB() backend.init() cfimp = ChannelFamilyImport(batch, backend) cfimp.run() sys.exit(0)
gpl-2.0
markyzq/linux-3.14
tools/perf/scripts/python/netdev-times.py
11271
15048
def diff_msec(src, dst):
    """Return the interval from src to dst, converting nanoseconds to msec."""
    delta_nsec = dst - src
    return delta_nsec / 1000000.0
diff_msec(hunk['xmit_t'], hunk['free_t'])) # Format for displaying rx packet processing PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)" PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)" PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)" PF_JOINT= " |" PF_WJOINT= " | |" PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)" PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)" PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)" PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)" PF_CONS_SKB= " | consume_skb(+%.3fmsec)" # Display a process of received packets and interrputs associated with # a NET_RX softirq def print_receive(hunk): show_hunk = 0 irq_list = hunk['irq_list'] cpu = irq_list[0]['cpu'] base_t = irq_list[0]['irq_ent_t'] # check if this hunk should be showed if dev != 0: for i in range(len(irq_list)): if irq_list[i]['name'].find(dev) >= 0: show_hunk = 1 break else: show_hunk = 1 if show_hunk == 0: return print "%d.%06dsec cpu=%d" % \ (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu) for i in range(len(irq_list)): print PF_IRQ_ENTRY % \ (diff_msec(base_t, irq_list[i]['irq_ent_t']), irq_list[i]['irq'], irq_list[i]['name']) print PF_JOINT irq_event_list = irq_list[i]['event_list'] for j in range(len(irq_event_list)): irq_event = irq_event_list[j] if irq_event['event'] == 'netif_rx': print PF_NET_RX % \ (diff_msec(base_t, irq_event['time']), irq_event['skbaddr']) print PF_JOINT print PF_SOFT_ENTRY % \ diff_msec(base_t, hunk['sirq_ent_t']) print PF_JOINT event_list = hunk['event_list'] for i in range(len(event_list)): event = event_list[i] if event['event_name'] == 'napi_poll': print PF_NAPI_POLL % \ (diff_msec(base_t, event['event_t']), event['dev']) if i == len(event_list) - 1: print "" else: print PF_JOINT else: print PF_NET_RECV % \ (diff_msec(base_t, event['event_t']), event['skbaddr'], event['len']) if 'comm' in event.keys(): print PF_WJOINT print PF_CPY_DGRAM % \ (diff_msec(base_t, event['comm_t']), event['pid'], event['comm']) elif 'handle' in event.keys(): 
def trace_begin():
    """Parse the script options from sys.argv and set the display flags.

    Recognized options: 'tx', 'rx', 'dev=<name>' and 'debug'. When
    neither direction is requested explicitly, both are shown.
    """
    global show_tx
    global show_rx
    global dev
    global debug
    # Skip argv[0] (the script name) and inspect every option.
    for arg in sys.argv[1:]:
        if arg == 'tx':
            show_tx = 1
        elif arg == 'rx':
            show_rx = 1
        elif arg.startswith('dev='):
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    # No direction given: default to showing both charts.
    if show_tx == 0 and show_rx == 0:
        show_tx = 1
        show_rx = 1
"debug buffer status" print "----------------------------" print "xmit Qdisc:remain:%d overflow:%d" % \ (len(tx_queue_list), of_count_tx_queue_list) print "xmit netdevice:remain:%d overflow:%d" % \ (len(tx_xmit_list), of_count_tx_xmit_list) print "receive:remain:%d overflow:%d" % \ (len(rx_skb_list), of_count_rx_skb_list) # called from perf, when it finds a correspoinding event def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec): if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX": return event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec) all_event_list.append(event_info) def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm, irq, irq_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, irq_name) all_event_list.append(event_info) def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret) all_event_list.append(event_info) def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, napi, dev_name) all_event_list.append(event_info) def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr, skblen, dev_name) all_event_list.append(event_info) def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr, skblen, dev_name): event_info = 
# The functions below are registered as perf tracepoint callbacks: perf
# invokes each one while replaying the matching trace event.  They only
# timestamp the raw arguments and queue them on all_event_list so that
# trace_end() can process everything in chronological order.

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    # Packet handed to the qdisc layer (dev_queue_xmit).
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    # Packet passed to the device driver; rc is the driver's return code.
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc, dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    # skb freed (dropped or end of life); location identifies the caller.
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    # skb consumed normally (successful transmission path).
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    # Received data copied to user space.
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)

def handle_irq_handler_entry(event_info):
    """Hardware irq entry: push a new irq record on this cpu's irq stack."""
    # NOTE: 'time' shadows the stdlib module name here; it is the event
    # timestamp in nanoseconds.
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
    """Record a NET_RX softirq raise against the irq on top of the cpu stack."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
       or len(irq_dic[cpu]) == 0:
        # No irq in flight on this cpu; nothing to attach the raise to.
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_irq_softirq_entry(event_info):
    """NET_RX softirq starts: open a fresh per-cpu receive context."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}

def handle_irq_softirq_exit(event_info):
    """NET_RX softirq ends: fold this cpu's irqs and rx events into one hunk."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        # Nothing useful was recorded during this softirq; drop it.
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information related to a NET_RX softirq
    receive_hunk_list.append(rec_data)

def handle_napi_poll(event_info):
    """napi_poll ran inside the current NET_RX softirq: record it."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)
def handle_netif_receive_skb(event_info):
    """Record a received skb inside the current NET_RX softirq context.

    The record is also pushed on rx_skb_list so that a later
    skb_copy_datagram_iovec / kfree_skb event can be matched to it;
    the list is bounded by buffer_budget (overflow is counted).
    """
    global of_count_rx_skb_list
    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            # Budget exceeded: evict the oldest entry and count it.
            rx_skb_list.pop()
            of_count_rx_skb_list += 1

def handle_net_dev_queue(event_info):
    """Track a packet entering the qdisc; kept for matching the xmit event."""
    global of_count_tx_queue_list
    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    if len(tx_queue_list) > buffer_budget:
        # Budget exceeded: evict the oldest entry and count it.
        tx_queue_list.pop()
        of_count_tx_queue_list += 1

def handle_net_dev_xmit(event_info):
    """Move a queued packet to the xmit list once the driver accepted it."""
    global of_count_tx_xmit_list
    (name, context, cpu, time, pid, comm,
     skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    # Budget exceeded: evict the oldest entry and count it.
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return
def handle_skb_copy_datagram_iovec(event_info):
    """Mark the matching received skb as delivered to user space.

    Finds the pending rx record with the same skb address, stamps it
    with the consuming process and delivery time, and retires it from
    the pending list.
    """
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for idx, rec_data in enumerate(rx_skb_list):
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            # Matched: drop it from the pending list and stop searching.
            del rx_skb_list[idx]
            return
gpl-2.0
gx1997/chrome-loongson
tools/sync-webkit-git.py
94
7648
#!/usr/bin/env python # Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Update third_party/WebKit using git. Under the assumption third_party/WebKit is a clone of git.webkit.org, we can use git commands to make it match the version requested by DEPS. See http://code.google.com/p/chromium/wiki/UsingWebKitGit for details on how to use this. """ import logging import optparse import os import re import subprocess import sys import urllib def RunGit(command): """Run a git subcommand, returning its output.""" # On Windows, use shell=True to get PATH interpretation. command = ['git'] + command logging.info(' '.join(command)) shell = (os.name == 'nt') proc = subprocess.Popen(command, shell=shell, stdout=subprocess.PIPE) out = proc.communicate()[0].strip() logging.info('Returned "%s"' % out) return out def GetOverrideShortBranchName(): """Returns the user-configured override branch name, if any.""" override_config_name = 'chromium.sync-branch' return RunGit(['config', '--get', override_config_name]) def GetGClientBranchName(): """Returns the name of the magic branch that lets us know that DEPS is managing the update cycle.""" # Is there an override branch specified? override_branch_name = GetOverrideShortBranchName() if not override_branch_name: return 'refs/heads/gclient' # No override, so return the default branch. # Verify that the branch from config exists. ref_branch = 'refs/heads/' + override_branch_name current_head = RunGit(['show-ref', '--hash', ref_branch]) if current_head: return ref_branch # Inform the user about the problem and how to fix it. print ("The specified override branch ('%s') doesn't appear to exist." % override_branch_name) print "Please fix your git config value '%s'." 
% overide_config_name sys.exit(1) def GetWebKitRev(): """Extract the 'webkit_revision' variable out of DEPS.""" locals = {'Var': lambda _: locals["vars"][_], 'From': lambda *args: None} execfile('DEPS', {}, locals) return locals['vars']['webkit_revision'] def GetWebKitRevFromTarball(version): """Extract the 'webkit_revision' variable out of tarball DEPS.""" deps_url = "http://src.chromium.org/svn/releases/" + version + "/DEPS" f = urllib.urlopen(deps_url) s = f.read() m = re.search('(?<=/Source@)\w+', s) return m.group(0) def FindSVNRev(branch_name, target_rev): """Map an SVN revision to a git hash. Like 'git svn find-rev' but without the git-svn bits.""" # We iterate through the commit log looking for "git-svn-id" lines, # which contain the SVN revision of that commit. We can stop once # we've found our target (or hit a revision number lower than what # we're looking for, indicating not found). target_rev = int(target_rev) # regexp matching the "commit" line from the log. commit_re = re.compile(r'^commit ([a-f\d]{40})$') # regexp matching the git-svn line from the log. git_svn_re = re.compile(r'^\s+git-svn-id: [^@]+@(\d+) ') if not branch_name: branch_name = 'origin/master' cmd = ['git', 'log', '--no-color', '--first-parent', '--pretty=medium', branch_name] logging.info(' '.join(cmd)) log = subprocess.Popen(cmd, shell=(os.name == 'nt'), stdout=subprocess.PIPE) # Track whether we saw a revision *later* than the one we're seeking. saw_later = False for line in log.stdout: match = commit_re.match(line) if match: commit = match.group(1) continue match = git_svn_re.match(line) if match: rev = int(match.group(1)) if rev <= target_rev: log.stdout.close() # Break pipe. if rev < target_rev: if not saw_later: return None # Can't be sure whether this rev is ok. print ("WARNING: r%d not found, so using next nearest earlier r%d" % (target_rev, rev)) return commit else: saw_later = True print "Error: reached end of log without finding commit info." 
print "Something has likely gone horribly wrong." return None def GetRemote(): branch = GetOverrideShortBranchName() if not branch: branch = 'gclient' remote = RunGit(['config', '--get', 'branch.' + branch + '.remote']) if remote: return remote return 'origin' def UpdateGClientBranch(branch_name, webkit_rev, magic_gclient_branch): """Update the magic gclient branch to point at |webkit_rev|. Returns: true if the branch didn't need changes.""" target = FindSVNRev(branch_name, webkit_rev) if not target: print "r%s not available; fetching." % webkit_rev subprocess.check_call(['git', 'fetch', GetRemote()], shell=(os.name == 'nt')) target = FindSVNRev(branch_name, webkit_rev) if not target: print "ERROR: Couldn't map r%s to a git revision." % webkit_rev sys.exit(1) current = RunGit(['show-ref', '--hash', magic_gclient_branch]) if current == target: return False # No change necessary. subprocess.check_call(['git', 'update-ref', '-m', 'gclient sync', magic_gclient_branch, target], shell=(os.name == 'nt')) return True def UpdateCurrentCheckoutIfAppropriate(magic_gclient_branch): """Reset the current gclient branch if that's what we have checked out.""" branch = RunGit(['symbolic-ref', '-q', 'HEAD']) if branch != magic_gclient_branch: print "We have now updated the 'gclient' branch, but third_party/WebKit" print "has some other branch ('%s') checked out." % branch print "Run 'git checkout gclient' under third_party/WebKit if you want" print "to switch it to the version requested by DEPS." return 1 if subprocess.call(['git', 'diff-index', '--exit-code', '--shortstat', 'HEAD'], shell=(os.name == 'nt')): print "Resetting tree state to new revision." 
subprocess.check_call(['git', 'reset', '--hard'], shell=(os.name == 'nt')) def main(): parser = optparse.OptionParser() parser.add_option('-v', '--verbose', action='store_true') parser.add_option('-r', '--revision', help="switch to desired revision") parser.add_option('-t', '--tarball', help="switch to desired tarball release") parser.add_option('-b', '--branch', help="branch name that gclient generate") options, args = parser.parse_args() if options.verbose: logging.basicConfig(level=logging.INFO) if not os.path.exists('third_party/WebKit/.git'): if os.path.exists('third_party/WebKit'): print "ERROR: third_party/WebKit appears to not be under git control." else: print "ERROR: third_party/WebKit could not be found." print "Did you run this script from the right directory?" print "See http://code.google.com/p/chromium/wiki/UsingWebKitGit for" print "setup instructions." return 1 if options.revision: webkit_rev = options.revision if options.tarball: print "WARNING: --revision is given, so ignore --tarball" else: if options.tarball: webkit_rev = GetWebKitRevFromTarball(options.tarball) else: webkit_rev = GetWebKitRev() print 'Desired revision: r%s.' % webkit_rev os.chdir('third_party/WebKit') magic_gclient_branch = GetGClientBranchName() changed = UpdateGClientBranch(options.branch, webkit_rev, magic_gclient_branch) if changed: return UpdateCurrentCheckoutIfAppropriate(magic_gclient_branch) else: print "Already on correct revision." return 0 if __name__ == '__main__': sys.exit(main())
bsd-3-clause
hfp/tensorflow-xsmm
tensorflow/python/ops/linalg/linear_operator_test_util.py
5
29205
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Utilities for testing `LinearOperator` and sub-classes.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import abc import numpy as np import six from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.framework import random_seed from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import tensor_util from tensorflow.python.ops import array_ops from tensorflow.python.ops import linalg_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import random_ops from tensorflow.python.ops.linalg import linalg_impl as linalg from tensorflow.python.ops.linalg import linear_operator_util from tensorflow.python.platform import test class OperatorBuildInfo(object): """Object encoding expected shape for a test. Encodes the expected shape of a matrix for a test. Also allows additional metadata for the test harness. """ def __init__(self, shape, **kwargs): self.shape = shape self.__dict__.update(kwargs) @six.add_metaclass(abc.ABCMeta) # pylint: disable=no-init class LinearOperatorDerivedClassTest(test.TestCase): """Tests for derived classes. 
Subclasses should implement every abstractmethod, and this will enable all test methods to work. """ # Absolute/relative tolerance for tests. _atol = { dtypes.float16: 1e-3, dtypes.float32: 1e-6, dtypes.float64: 1e-12, dtypes.complex64: 1e-6, dtypes.complex128: 1e-12 } _rtol = { dtypes.float16: 1e-3, dtypes.float32: 1e-6, dtypes.float64: 1e-12, dtypes.complex64: 1e-6, dtypes.complex128: 1e-12 } def assertAC(self, x, y): """Derived classes can set _atol, _rtol to get different tolerance.""" dtype = dtypes.as_dtype(x.dtype) atol = self._atol[dtype] rtol = self._rtol[dtype] self.assertAllClose(x, y, atol=atol, rtol=rtol) @property def _adjoint_options(self): return [False, True] @property def _adjoint_arg_options(self): return [False, True] @property def _dtypes_to_test(self): # TODO(langmore) Test tf.float16 once tf.matrix_solve works in 16bit. return [dtypes.float32, dtypes.float64, dtypes.complex64, dtypes.complex128] @property def _use_placeholder_options(self): return [False, True] @abc.abstractproperty def _operator_build_infos(self): """Returns list of OperatorBuildInfo, encapsulating the shape to test.""" raise NotImplementedError("operator_build_infos has not been implemented.") @abc.abstractmethod def _operator_and_matrix( self, build_info, dtype, use_placeholder, ensure_self_adjoint_and_pd=False): """Build a batch matrix and an Operator that should have similar behavior. Every operator acts like a (batch) matrix. This method returns both together, and is used by tests. Args: build_info: `OperatorBuildInfo`, encoding shape information about the operator. dtype: Numpy dtype. Data type of returned array/operator. use_placeholder: Python bool. If True, initialize the operator with a placeholder of undefined shape and correct dtype. ensure_self_adjoint_and_pd: If `True`, construct this operator to be Hermitian Positive Definite, as well as ensuring the hints `is_positive_definite` and `is_self_adjoint` are set. 
This is useful for testing methods such as `cholesky`. Returns: operator: `LinearOperator` subclass instance. mat: `Tensor` representing operator. """ # Create a matrix as a numpy array with desired shape/dtype. # Create a LinearOperator that should have the same behavior as the matrix. raise NotImplementedError("Not implemented yet.") @abc.abstractmethod def _make_rhs(self, operator, adjoint, with_batch=True): """Make a rhs appropriate for calling operator.solve(rhs). Args: operator: A `LinearOperator` adjoint: Python `bool`. If `True`, we are making a 'rhs' value for the adjoint operator. with_batch: Python `bool`. If `True`, create `rhs` with the same batch shape as operator, and otherwise create a matrix without any batch shape. Returns: A `Tensor` """ raise NotImplementedError("_make_rhs is not defined.") @abc.abstractmethod def _make_x(self, operator, adjoint, with_batch=True): """Make an 'x' appropriate for calling operator.matmul(x). Args: operator: A `LinearOperator` adjoint: Python `bool`. If `True`, we are making an 'x' value for the adjoint operator. with_batch: Python `bool`. If `True`, create `x` with the same batch shape as operator, and otherwise create a matrix without any batch shape. Returns: A `Tensor` """ raise NotImplementedError("_make_x is not defined.") @property def _tests_to_skip(self): """List of test names to skip.""" # Subclasses should over-ride if they want to skip some tests. # To skip "test_foo", add "foo" to this list. return [] def _skip_if_tests_to_skip_contains(self, test_name): """If self._tests_to_skip contains test_name, raise SkipTest exception. See tests below for usage. Args: test_name: String name corresponding to a test. Raises: SkipTest Exception, if test_name is in self._tests_to_skip. 
""" if test_name in self._tests_to_skip: self.skipTest( "{} skipped because it was added to self._tests_to_skip.".format( test_name)) def test_to_dense(self): self._skip_if_tests_to_skip_contains("to_dense") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) op_dense = operator.to_dense() if not use_placeholder: self.assertAllEqual(build_info.shape, op_dense.get_shape()) op_dense_v, mat_v = sess.run([op_dense, mat]) self.assertAC(op_dense_v, mat_v) def test_det(self): self._skip_if_tests_to_skip_contains("det") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) op_det = operator.determinant() if not use_placeholder: self.assertAllEqual(build_info.shape[:-2], op_det.get_shape()) op_det_v, mat_det_v = sess.run( [op_det, linalg_ops.matrix_determinant(mat)]) self.assertAC(op_det_v, mat_det_v) def test_log_abs_det(self): self._skip_if_tests_to_skip_contains("log_abs_det") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) op_log_abs_det = operator.log_abs_determinant() _, mat_log_abs_det = linalg.slogdet(mat) if not use_placeholder: self.assertAllEqual( build_info.shape[:-2], op_log_abs_det.get_shape()) op_log_abs_det_v, mat_log_abs_det_v = sess.run( 
[op_log_abs_det, mat_log_abs_det]) self.assertAC(op_log_abs_det_v, mat_log_abs_det_v) def _test_matmul(self, with_batch): for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: # If batch dimensions are omitted, but there are # no batch dimensions for the linear operator, then # skip the test case. This is already checked with # with_batch=True. if not with_batch and len(build_info.shape) <= 2: continue for dtype in self._dtypes_to_test: for adjoint in self._adjoint_options: for adjoint_arg in self._adjoint_arg_options: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) x = self._make_x( operator, adjoint=adjoint, with_batch=with_batch) # If adjoint_arg, compute A X^H^H = A X. if adjoint_arg: op_matmul = operator.matmul( linalg.adjoint(x), adjoint=adjoint, adjoint_arg=adjoint_arg) else: op_matmul = operator.matmul(x, adjoint=adjoint) mat_matmul = linear_operator_util.matmul_with_broadcast( mat, x, adjoint_a=adjoint) if not use_placeholder: self.assertAllEqual(op_matmul.get_shape(), mat_matmul.get_shape()) op_matmul_v, mat_matmul_v = sess.run( [op_matmul, mat_matmul]) self.assertAC(op_matmul_v, mat_matmul_v) def test_matmul(self): self._skip_if_tests_to_skip_contains("matmul") self._test_matmul(with_batch=True) def test_matmul_with_broadcast(self): self._skip_if_tests_to_skip_contains("matmul_with_broadcast") self._test_matmul(with_batch=False) def test_cholesky(self): self._skip_if_tests_to_skip_contains("cholesky") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.test_session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder, ensure_self_adjoint_and_pd=True) op_chol = 
operator.cholesky().to_dense() mat_chol = linalg_ops.cholesky(mat) op_chol_v, mat_chol_v = sess.run([op_chol, mat_chol]) self.assertAC(mat_chol_v, op_chol_v) def _test_solve(self, with_batch): for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: # If batch dimensions are omitted, but there are # no batch dimensions for the linear operator, then # skip the test case. This is already checked with # with_batch=True. if not with_batch and len(build_info.shape) <= 2: continue for dtype in self._dtypes_to_test: for adjoint in self._adjoint_options: for adjoint_arg in self._adjoint_arg_options: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) rhs = self._make_rhs( operator, adjoint=adjoint, with_batch=with_batch) # If adjoint_arg, solve A X = (rhs^H)^H = rhs. if adjoint_arg: op_solve = operator.solve( linalg.adjoint(rhs), adjoint=adjoint, adjoint_arg=adjoint_arg) else: op_solve = operator.solve( rhs, adjoint=adjoint, adjoint_arg=adjoint_arg) mat_solve = linear_operator_util.matrix_solve_with_broadcast( mat, rhs, adjoint=adjoint) if not use_placeholder: self.assertAllEqual(op_solve.get_shape(), mat_solve.get_shape()) op_solve_v, mat_solve_v = sess.run([op_solve, mat_solve]) self.assertAC(op_solve_v, mat_solve_v) def test_solve(self): self._skip_if_tests_to_skip_contains("solve") self._test_solve(with_batch=True) def test_solve_with_broadcast(self): self._skip_if_tests_to_skip_contains("solve_with_broadcast") self._test_solve(with_batch=False) def test_trace(self): self._skip_if_tests_to_skip_contains("trace") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, 
dtype, use_placeholder=use_placeholder) op_trace = operator.trace() mat_trace = math_ops.trace(mat) if not use_placeholder: self.assertAllEqual(op_trace.get_shape(), mat_trace.get_shape()) op_trace_v, mat_trace_v = sess.run([op_trace, mat_trace]) self.assertAC(op_trace_v, mat_trace_v) def test_add_to_tensor(self): self._skip_if_tests_to_skip_contains("add_to_tensor") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) op_plus_2mat = operator.add_to_tensor(2 * mat) if not use_placeholder: self.assertAllEqual(build_info.shape, op_plus_2mat.get_shape()) op_plus_2mat_v, mat_v = sess.run([op_plus_2mat, mat]) self.assertAC(op_plus_2mat_v, 3 * mat_v) def test_diag_part(self): self._skip_if_tests_to_skip_contains("diag_part") for use_placeholder in self._use_placeholder_options: for build_info in self._operator_build_infos: for dtype in self._dtypes_to_test: with self.session(graph=ops.Graph()) as sess: sess.graph.seed = random_seed.DEFAULT_GRAPH_SEED operator, mat = self._operator_and_matrix( build_info, dtype, use_placeholder=use_placeholder) op_diag_part = operator.diag_part() mat_diag_part = array_ops.matrix_diag_part(mat) if not use_placeholder: self.assertAllEqual(mat_diag_part.get_shape(), op_diag_part.get_shape()) op_diag_part_, mat_diag_part_ = sess.run( [op_diag_part, mat_diag_part]) self.assertAC(op_diag_part_, mat_diag_part_) @six.add_metaclass(abc.ABCMeta) class SquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest): """Base test class appropriate for square operators. Sub-classes must still define all abstractmethods from LinearOperatorDerivedClassTest that are not defined here. 
""" @property def _operator_build_infos(self): build_info = OperatorBuildInfo # non-batch operators (n, n) and batch operators. return [ build_info((0, 0)), build_info((1, 1)), build_info((1, 3, 3)), build_info((3, 4, 4)), build_info((2, 1, 4, 4))] def _make_rhs(self, operator, adjoint, with_batch=True): # This operator is square, so rhs and x will have same shape. # adjoint value makes no difference because the operator shape doesn't # change since it is square, but be pedantic. return self._make_x(operator, adjoint=not adjoint, with_batch=with_batch) def _make_x(self, operator, adjoint, with_batch=True): # Value of adjoint makes no difference because the operator is square. # Return the number of systems to solve, R, equal to 1 or 2. r = self._get_num_systems(operator) # If operator.shape = [B1,...,Bb, N, N] this returns a random matrix of # shape [B1,...,Bb, N, R], R = 1 or 2. if operator.shape.is_fully_defined(): batch_shape = operator.batch_shape.as_list() n = operator.domain_dimension.value if with_batch: x_shape = batch_shape + [n, r] else: x_shape = [n, r] else: batch_shape = operator.batch_shape_tensor() n = operator.domain_dimension_tensor() if with_batch: x_shape = array_ops.concat((batch_shape, [n, r]), 0) else: x_shape = [n, r] return random_normal(x_shape, dtype=operator.dtype) def _get_num_systems(self, operator): """Get some number, either 1 or 2, depending on operator.""" if operator.tensor_rank is None or operator.tensor_rank % 2: return 1 else: return 2 @six.add_metaclass(abc.ABCMeta) class NonSquareLinearOperatorDerivedClassTest(LinearOperatorDerivedClassTest): """Base test class appropriate for generic rectangular operators. Square shapes are never tested by this class, so if you want to test your operator with a square shape, create two test classes, the other subclassing SquareLinearOperatorFullMatrixTest. Sub-classes must still define all abstractmethods from LinearOperatorDerivedClassTest that are not defined here. 
""" @property def _tests_to_skip(self): """List of test names to skip.""" return ["cholesky", "solve", "solve_with_broadcast", "det", "log_abs_det"] @property def _operator_build_infos(self): build_info = OperatorBuildInfo # non-batch operators (n, n) and batch operators. return [ build_info((2, 1)), build_info((1, 2)), build_info((1, 3, 2)), build_info((3, 3, 4)), build_info((2, 1, 2, 4))] def _make_rhs(self, operator, adjoint, with_batch=True): # TODO(langmore) Add once we're testing solve_ls. raise NotImplementedError( "_make_rhs not implemented because we don't test solve") def _make_x(self, operator, adjoint, with_batch=True): # Return the number of systems for the argument 'x' for .matmul(x) r = self._get_num_systems(operator) # If operator.shape = [B1,...,Bb, M, N] this returns a random matrix of # shape [B1,...,Bb, N, R], R = 1 or 2. if operator.shape.is_fully_defined(): batch_shape = operator.batch_shape.as_list() if adjoint: n = operator.range_dimension.value else: n = operator.domain_dimension.value if with_batch: x_shape = batch_shape + [n, r] else: x_shape = [n, r] else: batch_shape = operator.batch_shape_tensor() if adjoint: n = operator.range_dimension_tensor() else: n = operator.domain_dimension_tensor() if with_batch: x_shape = array_ops.concat((batch_shape, [n, r]), 0) else: x_shape = [n, r] return random_normal(x_shape, dtype=operator.dtype) def _get_num_systems(self, operator): """Get some number, either 1 or 2, depending on operator.""" if operator.tensor_rank is None or operator.tensor_rank % 2: return 1 else: return 2 def random_positive_definite_matrix(shape, dtype, force_well_conditioned=False): """[batch] positive definite matrix. Args: shape: `TensorShape` or Python list. Shape of the returned matrix. dtype: `TensorFlow` `dtype` or Python dtype. force_well_conditioned: Python bool. If `True`, returned matrix has eigenvalues with modulus in `(1, 4)`. Otherwise, eigenvalues are chi-squared random variables. 
Returns: `Tensor` with desired shape and dtype. """ dtype = dtypes.as_dtype(dtype) if not tensor_util.is_tensor(shape): shape = tensor_shape.TensorShape(shape) # Matrix must be square. shape[-1].assert_is_compatible_with(shape[-2]) with ops.name_scope("random_positive_definite_matrix"): tril = random_tril_matrix( shape, dtype, force_well_conditioned=force_well_conditioned) return math_ops.matmul(tril, tril, adjoint_b=True) def random_tril_matrix(shape, dtype, force_well_conditioned=False, remove_upper=True): """[batch] lower triangular matrix. Args: shape: `TensorShape` or Python `list`. Shape of the returned matrix. dtype: `TensorFlow` `dtype` or Python dtype force_well_conditioned: Python `bool`. If `True`, returned matrix will have eigenvalues with modulus in `(1, 2)`. Otherwise, eigenvalues are unit normal random variables. remove_upper: Python `bool`. If `True`, zero out the strictly upper triangle. If `False`, the lower triangle of returned matrix will have desired properties, but will not have the strictly upper triangle zero'd out. Returns: `Tensor` with desired shape and dtype. """ with ops.name_scope("random_tril_matrix"): # Totally random matrix. Has no nice properties. tril = random_normal(shape, dtype=dtype) if remove_upper: tril = array_ops.matrix_band_part(tril, -1, 0) # Create a diagonal with entries having modulus in [1, 2]. if force_well_conditioned: maxval = ops.convert_to_tensor(np.sqrt(2.), dtype=dtype.real_dtype) diag = random_sign_uniform( shape[:-1], dtype=dtype, minval=1., maxval=maxval) tril = array_ops.matrix_set_diag(tril, diag) return tril def random_normal(shape, mean=0.0, stddev=1.0, dtype=dtypes.float32, seed=None): """Tensor with (possibly complex) Gaussian entries. Samples are distributed like ``` N(mean, stddev^2), if dtype is real, X + iY, where X, Y ~ N(mean, stddev^2) if dtype is complex. ``` Args: shape: `TensorShape` or Python list. Shape of the returned tensor. mean: `Tensor` giving mean of normal to sample from. 
stddev: `Tensor` giving stdev of normal to sample from. dtype: `TensorFlow` `dtype` or numpy dtype seed: Python integer seed for the RNG. Returns: `Tensor` with desired shape and dtype. """ dtype = dtypes.as_dtype(dtype) with ops.name_scope("random_normal"): samples = random_ops.random_normal( shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed) if dtype.is_complex: if seed is not None: seed += 1234 more_samples = random_ops.random_normal( shape, mean=mean, stddev=stddev, dtype=dtype.real_dtype, seed=seed) samples = math_ops.complex(samples, more_samples) return samples def random_uniform(shape, minval=None, maxval=None, dtype=dtypes.float32, seed=None): """Tensor with (possibly complex) Uniform entries. Samples are distributed like ``` Uniform[minval, maxval], if dtype is real, X + iY, where X, Y ~ Uniform[minval, maxval], if dtype is complex. ``` Args: shape: `TensorShape` or Python list. Shape of the returned tensor. minval: `0-D` `Tensor` giving the minimum values. maxval: `0-D` `Tensor` giving the maximum values. dtype: `TensorFlow` `dtype` or Python dtype seed: Python integer seed for the RNG. Returns: `Tensor` with desired shape and dtype. """ dtype = dtypes.as_dtype(dtype) with ops.name_scope("random_uniform"): samples = random_ops.random_uniform( shape, dtype=dtype.real_dtype, minval=minval, maxval=maxval, seed=seed) if dtype.is_complex: if seed is not None: seed += 12345 more_samples = random_ops.random_uniform( shape, dtype=dtype.real_dtype, minval=minval, maxval=maxval, seed=seed) samples = math_ops.complex(samples, more_samples) return samples def random_sign_uniform(shape, minval=None, maxval=None, dtype=dtypes.float32, seed=None): """Tensor with (possibly complex) random entries from a "sign Uniform". 
Letting `Z` be a random variable equal to `-1` and `1` with equal probability, Samples from this `Op` are distributed like ``` Z * X, where X ~ Uniform[minval, maxval], if dtype is real, Z * (X + iY), where X, Y ~ Uniform[minval, maxval], if dtype is complex. ``` Args: shape: `TensorShape` or Python list. Shape of the returned tensor. minval: `0-D` `Tensor` giving the minimum values. maxval: `0-D` `Tensor` giving the maximum values. dtype: `TensorFlow` `dtype` or Python dtype seed: Python integer seed for the RNG. Returns: `Tensor` with desired shape and dtype. """ dtype = dtypes.as_dtype(dtype) with ops.name_scope("random_sign_uniform"): unsigned_samples = random_uniform( shape, minval=minval, maxval=maxval, dtype=dtype, seed=seed) if seed is not None: seed += 12 signs = math_ops.sign( random_ops.random_uniform(shape, minval=-1., maxval=1., seed=seed)) return unsigned_samples * math_ops.cast(signs, unsigned_samples.dtype) def random_normal_correlated_columns(shape, mean=0.0, stddev=1.0, dtype=dtypes.float32, eps=1e-4, seed=None): """Batch matrix with (possibly complex) Gaussian entries and correlated cols. Returns random batch matrix `A` with specified element-wise `mean`, `stddev`, living close to an embedded hyperplane. Suppose `shape[-2:] = (M, N)`. If `M < N`, `A` is a random `M x N` [batch] matrix with iid Gaussian entries. If `M >= N`, then the colums of `A` will be made almost dependent as follows: ``` L = random normal N x N-1 matrix, mean = 0, stddev = 1 / sqrt(N - 1) B = random normal M x N-1 matrix, mean = 0, stddev = stddev. G = (L B^H)^H, a random normal M x N matrix, living on N-1 dim hyperplane E = a random normal M x N matrix, mean = 0, stddev = eps mu = a constant M x N matrix, equal to the argument "mean" A = G + E + mu ``` Args: shape: Python list of integers. Shape of the returned tensor. Must be at least length two. mean: `Tensor` giving mean of normal to sample from. stddev: `Tensor` giving stdev of normal to sample from. 
dtype: `TensorFlow` `dtype` or numpy dtype eps: Distance each column is perturbed from the low-dimensional subspace. seed: Python integer seed for the RNG. Returns: `Tensor` with desired shape and dtype. Raises: ValueError: If `shape` is not at least length 2. """ dtype = dtypes.as_dtype(dtype) if len(shape) < 2: raise ValueError( "Argument shape must be at least length 2. Found: %s" % shape) # Shape is the final shape, e.g. [..., M, N] shape = list(shape) batch_shape = shape[:-2] m, n = shape[-2:] # If there is only one column, "they" are by definition correlated. if n < 2 or n < m: return random_normal( shape, mean=mean, stddev=stddev, dtype=dtype, seed=seed) # Shape of the matrix with only n - 1 columns that we will embed in higher # dimensional space. smaller_shape = batch_shape + [m, n - 1] # Shape of the embedding matrix, mapping batch matrices # from [..., N-1, M] to [..., N, M] embedding_mat_shape = batch_shape + [n, n - 1] # This stddev for the embedding_mat ensures final result has correct stddev. stddev_mat = 1 / np.sqrt(n - 1) with ops.name_scope("random_normal_correlated_columns"): smaller_mat = random_normal( smaller_shape, mean=0.0, stddev=stddev_mat, dtype=dtype, seed=seed) if seed is not None: seed += 1287 embedding_mat = random_normal(embedding_mat_shape, dtype=dtype, seed=seed) embedded_t = math_ops.matmul(embedding_mat, smaller_mat, transpose_b=True) embedded = array_ops.matrix_transpose(embedded_t) mean_mat = array_ops.ones_like(embedded) * mean return embedded + random_normal(shape, stddev=eps, dtype=dtype) + mean_mat
apache-2.0
miminar/openshift-ansible
roles/lib_openshift/src/class/oc_process.py
18
6672
# pylint: skip-file # flake8: noqa # pylint: disable=too-many-instance-attributes class OCProcess(OpenShiftCLI): ''' Class to wrap the oc command line tools ''' # pylint allows 5. we need 6 # pylint: disable=too-many-arguments def __init__(self, namespace, tname=None, params=None, create=False, kubeconfig='/etc/origin/master/admin.kubeconfig', tdata=None, verbose=False): ''' Constructor for OpenshiftOC ''' super(OCProcess, self).__init__(namespace, kubeconfig=kubeconfig, verbose=verbose) self.name = tname self.data = tdata self.params = params self.create = create self._template = None @property def template(self): '''template property''' if self._template is None: results = self._process(self.name, False, self.params, self.data) if results['returncode'] != 0: raise OpenShiftCLIError('Error processing template [%s]: %s' %(self.name, results)) self._template = results['results']['items'] return self._template def get(self): '''get the template''' results = self._get('template', self.name) if results['returncode'] != 0: # Does the template exist?? if 'not found' in results['stderr']: results['returncode'] = 0 results['exists'] = False results['results'] = [] return results def delete(self, obj): '''delete a resource''' return self._delete(obj['kind'], obj['metadata']['name']) def create_obj(self, obj): '''create a resource''' return self._create_from_content(obj['metadata']['name'], obj) def process(self, create=None): '''process a template''' do_create = False if create != None: do_create = create else: do_create = self.create return self._process(self.name, do_create, self.params, self.data) def exists(self): '''return whether the template exists''' # Always return true if we're being passed template data if self.data: return True t_results = self._get('template', self.name) if t_results['returncode'] != 0: # Does the template exist?? if 'not found' in t_results['stderr']: return False else: raise OpenShiftCLIError('Something went wrong. 
%s' % t_results) return True def needs_update(self): '''attempt to process the template and return it for comparison with oc objects''' obj_results = [] for obj in self.template: # build a list of types to skip skip = [] if obj['kind'] == 'ServiceAccount': skip.extend(['secrets', 'imagePullSecrets']) if obj['kind'] == 'BuildConfig': skip.extend(['lastTriggeredImageID']) if obj['kind'] == 'ImageStream': skip.extend(['generation']) if obj['kind'] == 'DeploymentConfig': skip.extend(['lastTriggeredImage']) # fetch the current object curr_obj_results = self._get(obj['kind'], obj['metadata']['name']) if curr_obj_results['returncode'] != 0: # Does the template exist?? if 'not found' in curr_obj_results['stderr']: obj_results.append((obj, True)) continue # check the generated object against the existing object if not Utils.check_def_equal(obj, curr_obj_results['results'][0], skip_keys=skip): obj_results.append((obj, True)) continue obj_results.append((obj, False)) return obj_results # pylint: disable=too-many-return-statements @staticmethod def run_ansible(params, check_mode): '''run the oc_process module''' ocprocess = OCProcess(params['namespace'], params['template_name'], params['params'], params['create'], kubeconfig=params['kubeconfig'], tdata=params['content'], verbose=params['debug']) state = params['state'] api_rval = ocprocess.get() if state == 'list': if api_rval['returncode'] != 0: return {"failed": True, "msg" : api_rval} return {"changed" : False, "results": api_rval, "state": state} elif state == 'present': if check_mode and params['create']: return {"changed": True, 'msg': "CHECK_MODE: Would have processed template."} if not ocprocess.exists() or not params['reconcile']: #FIXME: this code will never get run in a way that succeeds when # module.params['reconcile'] is true. Because oc_process doesn't # create the actual template, the check of ocprocess.exists() # is meaningless. 
Either it's already here and this code # won't be run, or this code will fail because there is no # template available for oc process to use. Have we conflated # the template's existence with the existence of the objects # it describes? # Create it here api_rval = ocprocess.process() if api_rval['returncode'] != 0: return {"failed": True, "msg": api_rval} if params['create']: return {"changed": True, "results": api_rval, "state": state} return {"changed": False, "results": api_rval, "state": state} # verify results update = False rval = [] all_results = ocprocess.needs_update() for obj, status in all_results: if status: ocprocess.delete(obj) results = ocprocess.create_obj(obj) results['kind'] = obj['kind'] rval.append(results) update = True if not update: return {"changed": update, "results": api_rval, "state": state} for cmd in rval: if cmd['returncode'] != 0: return {"failed": True, "changed": update, "msg": rval, "state": state} return {"changed": update, "results": rval, "state": state}
apache-2.0
eriser/marsyas
src/marsyas_python/additive.py
4
1039
import marsyas import json from marsyas_util import * net = marsyas.system_from_script(""" Network : Series { + public frequency = 440.0 + public g0 = 1.0 + public g1 = 0.1 + public g2 = 0.2 + public g3 = 0.1 israte = 44100.0 -> Fanout { -> f0: Series {-> sin1: SineSource {frequency = /frequency} -> amp1: Gain {gain = /g0}} -> f1: Series {-> sin2: SineSource {frequency = (2 * /frequency)} -> amp2: Gain {gain = /g1}} -> f2: Series {-> sin3: SineSource {frequency = (3 * /frequency)} -> amp2: Gain {gain = /g2}} -> f3: Series {-> sin4: SineSource {frequency = (4 * /frequency)} -> amp2: Gain {gain = /g3}} } -> sum: Sum -> out: AudioSink } """) net.updControl("/Series/Network/AudioSink/out/mrs_bool/initAudio", marsyas.MarControlPtr.from_bool(True)) k = 1 while (1): k = k+1 net.tick() if (k==4): waveform = control2array(net, "Sum/sum/mrs_realvec/processedData"); marplot(waveform) show()
gpl-2.0
ProfessionalIT/professionalit-webiste
sdk/google_appengine/lib/django-1.4/django/core/files/move.py
403
2931
""" Move a file in the safest way possible:: >>> from django.core.files.move import file_move_safe >>> file_move_safe("/tmp/old_file", "/tmp/new_file") """ import os from django.core.files import locks try: from shutil import copystat except ImportError: import stat def copystat(src, dst): """Copy all stat info (mode bits, atime and mtime) from src to dst""" st = os.stat(src) mode = stat.S_IMODE(st.st_mode) if hasattr(os, 'utime'): os.utime(dst, (st.st_atime, st.st_mtime)) if hasattr(os, 'chmod'): os.chmod(dst, mode) __all__ = ['file_move_safe'] def _samefile(src, dst): # Macintosh, Unix. if hasattr(os.path,'samefile'): try: return os.path.samefile(src, dst) except OSError: return False # All other platforms: check for same pathname. return (os.path.normcase(os.path.abspath(src)) == os.path.normcase(os.path.abspath(dst))) def file_move_safe(old_file_name, new_file_name, chunk_size = 1024*64, allow_overwrite=False): """ Moves a file from one location to another in the safest way possible. First, tries ``os.rename``, which is simple but will break across filesystems. If that fails, streams manually from one file to another in pure Python. If the destination file exists and ``allow_overwrite`` is ``False``, this function will throw an ``IOError``. """ # There's no reason to move if we don't have to. 
if _samefile(old_file_name, new_file_name): return try: os.rename(old_file_name, new_file_name) return except OSError: # This will happen with os.rename if moving to another filesystem # or when moving opened files on certain operating systems pass # first open the old file, so that it won't go away old_file = open(old_file_name, 'rb') try: # now open the new file, not forgetting allow_overwrite fd = os.open(new_file_name, os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0) | (not allow_overwrite and os.O_EXCL or 0)) try: locks.lock(fd, locks.LOCK_EX) current_chunk = None while current_chunk != '': current_chunk = old_file.read(chunk_size) os.write(fd, current_chunk) finally: locks.unlock(fd) os.close(fd) finally: old_file.close() copystat(old_file_name, new_file_name) try: os.remove(old_file_name) except OSError, e: # Certain operating systems (Cygwin and Windows) # fail when deleting opened files, ignore it. (For the # systems where this happens, temporary files will be auto-deleted # on close anyway.) if getattr(e, 'winerror', 0) != 32 and getattr(e, 'errno', 0) != 13: raise
lgpl-3.0
SamiHiltunen/invenio-workflows
tests/demo_package/workflows/demo_workflow_error.py
6
1057
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2014, 2015 CERN. # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, Inc., # 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Implement a workflow for testing.""" from invenio_workflows.tasks.sample_tasks import task_b, add_data, generate_error class demo_workflow_error(object): """Test workflow for unit-tests.""" workflow = [ task_b, task_b, add_data(15), generate_error ]
gpl-2.0
openpgh/askpgh
askbot/migrations/0155_remove_unused_internal_tags.py
16
34298
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import DataMigration from django.db import models class Migration(DataMigration): def forwards(self, orm): "Write your forwards methods here." print 'Deleting unused tags added for internal bookkeeping...' tags = orm['askbot.Tag'].objects.filter(name__startswith='_internal_') count = tags.count() if count > 0: tags.delete() print '%d tags formatted _internal_X' % count else: print 'None found' def backwards(self, orm): "Write your backwards methods here." models = { 'askbot.activity': { 'Meta': {'object_name': 'Activity', 'db_table': "u'activity'"}, 'active_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'activity_type': ('django.db.models.fields.SmallIntegerField', [], {}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_auditted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True'}), 'receiving_users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'received_activity'", 'symmetrical': 'False', 'to': "orm['auth.User']"}), 'recipients': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'incoming_activity'", 'symmetrical': 'False', 'through': "orm['askbot.ActivityAuditStatus']", 'to': "orm['auth.User']"}), 'summary': ('django.db.models.fields.TextField', [], {'default': "''"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'askbot.activityauditstatus': { 'Meta': {'unique_together': "(('user', 'activity'),)", 'object_name': 'ActivityAuditStatus'}, 'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': 
"orm['askbot.Activity']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'askbot.anonymousanswer': { 'Meta': {'object_name': 'AnonymousAnswer'}, 'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), 'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'anonymous_answers'", 'to': "orm['askbot.Post']"}), 'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}), 'text': ('django.db.models.fields.TextField', [], {}), 'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'askbot.anonymousquestion': { 'Meta': {'object_name': 'AnonymousQuestion'}, 'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ip_addr': ('django.db.models.fields.IPAddressField', [], {'max_length': '15'}), 'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'session_key': ('django.db.models.fields.CharField', [], {'max_length': '40'}), 'summary': ('django.db.models.fields.CharField', [], {'max_length': '180'}), 'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}), 'text': ('django.db.models.fields.TextField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'max_length': 
'300'}), 'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}) }, 'askbot.askwidget': { 'Meta': {'object_name': 'AskWidget'}, 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'include_text_field': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'inner_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'outer_style': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Tag']", 'null': 'True', 'blank': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'askbot.award': { 'Meta': {'object_name': 'Award', 'db_table': "u'award'"}, 'awarded_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'badge': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_badge'", 'to': "orm['askbot.BadgeData']"}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'notified': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'object_id': ('django.db.models.fields.PositiveIntegerField', [], {}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'award_user'", 'to': "orm['auth.User']"}) }, 'askbot.badgedata': { 'Meta': {'ordering': "('slug',)", 'object_name': 'BadgeData'}, 'awarded_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'awarded_to': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'badges'", 'symmetrical': 'False', 'through': "orm['askbot.Award']", 'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'slug': 
('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}) }, 'askbot.draftanswer': { 'Meta': {'object_name': 'DraftAnswer'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_answers'", 'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'text': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'thread': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'draft_answers'", 'to': "orm['askbot.Thread']"}) }, 'askbot.draftquestion': { 'Meta': {'object_name': 'DraftQuestion'}, 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125', 'null': 'True'}), 'text': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '300', 'null': 'True'}) }, 'askbot.emailfeedsetting': { 'Meta': {'unique_together': "(('subscriber', 'feed_type'),)", 'object_name': 'EmailFeedSetting'}, 'added_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}), 'feed_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}), 'frequency': ('django.db.models.fields.CharField', [], {'default': "'n'", 'max_length': '8'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'reported_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}), 'subscriber': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notification_subscriptions'", 'to': "orm['auth.User']"}) }, 'askbot.favoritequestion': { 'Meta': {'object_name': 'FavoriteQuestion', 'db_table': "u'favorite_question'"}, 'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('django.db.models.fields.AutoField', [], 
{'primary_key': 'True'}), 'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_favorite_questions'", 'to': "orm['auth.User']"}) }, 'askbot.group': { 'Meta': {'object_name': 'Group', '_ormbases': ['auth.Group']}, 'description': ('django.db.models.fields.related.OneToOneField', [], {'blank': 'True', 'related_name': "'described_group'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}), 'group_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.Group']", 'unique': 'True', 'primary_key': 'True'}), 'is_vip': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'logo_url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True'}), 'moderate_answers_to_enquirers': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'moderate_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'openness': ('django.db.models.fields.SmallIntegerField', [], {'default': '2'}), 'preapproved_email_domains': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}), 'preapproved_emails': ('django.db.models.fields.TextField', [], {'default': "''", 'null': 'True', 'blank': 'True'}) }, 'askbot.groupmembership': { 'Meta': {'object_name': 'GroupMembership', '_ormbases': ['auth.AuthUserGroups']}, 'authusergroups_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.AuthUserGroups']", 'unique': 'True', 'primary_key': 'True'}), 'level': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}) }, 'askbot.markedtag': { 'Meta': {'object_name': 'MarkedTag'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'reason': ('django.db.models.fields.CharField', [], {'max_length': '16'}), 'tag': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'user_selections'", 'to': 
"orm['askbot.Tag']"}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'tag_selections'", 'to': "orm['auth.User']"}) }, 'askbot.post': { 'Meta': {'object_name': 'Post'}, 'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'posts'", 'to': "orm['auth.User']"}), 'comment_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'deleted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_posts'", 'null': 'True', 'to': "orm['auth.User']"}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_posts'", 'symmetrical': 'False', 'through': "orm['askbot.PostToGroup']", 'to': "orm['askbot.Group']"}), 'html': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_edited_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'last_edited_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'last_edited_posts'", 'null': 'True', 'to': "orm['auth.User']"}), 'locked': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'locked_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'locked_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'locked_posts'", 'null': 'True', 'to': "orm['auth.User']"}), 
'offensive_flag_count': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'old_answer_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'old_comment_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'old_question_id': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'unique': 'True', 'null': 'True', 'blank': 'True'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'comments'", 'null': 'True', 'to': "orm['askbot.Post']"}), 'points': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_column': "'score'"}), 'post_type': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}), 'summary': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'text': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'thread': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'posts'", 'null': 'True', 'blank': 'True', 'to': "orm['askbot.Thread']"}), 'vote_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'vote_up_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'wiki': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'wikified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}) }, 'askbot.postflagreason': { 'Meta': {'object_name': 'PostFlagReason'}, 'added_at': ('django.db.models.fields.DateTimeField', [], {}), 'author': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}), 'details': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'post_reject_reasons'", 'to': "orm['askbot.Post']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'title': 
('django.db.models.fields.CharField', [], {'max_length': '128'}) }, 'askbot.postrevision': { 'Meta': {'ordering': "('-revision',)", 'unique_together': "(('post', 'revision'),)", 'object_name': 'PostRevision'}, 'approved': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}), 'approved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'approved_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'postrevisions'", 'to': "orm['auth.User']"}), 'by_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'email_address': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_anonymous': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'revisions'", 'null': 'True', 'to': "orm['askbot.Post']"}), 'revised_at': ('django.db.models.fields.DateTimeField', [], {}), 'revision': ('django.db.models.fields.PositiveIntegerField', [], {}), 'summary': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}), 'tagnames': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '125', 'blank': 'True'}), 'text': ('django.db.models.fields.TextField', [], {}), 'title': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '300', 'blank': 'True'}) }, 'askbot.posttogroup': { 'Meta': {'unique_together': "(('post', 'group'),)", 'object_name': 'PostToGroup', 'db_table': "'askbot_post_groups'"}, 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'post': 
('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']"}) }, 'askbot.questionview': { 'Meta': {'object_name': 'QuestionView'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'question': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'viewed'", 'to': "orm['askbot.Post']"}), 'when': ('django.db.models.fields.DateTimeField', [], {}), 'who': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'question_views'", 'to': "orm['auth.User']"}) }, 'askbot.questionwidget': { 'Meta': {'object_name': 'QuestionWidget'}, 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'order_by': ('django.db.models.fields.CharField', [], {'default': "'-added_at'", 'max_length': '18'}), 'question_number': ('django.db.models.fields.PositiveIntegerField', [], {'default': '7'}), 'search_query': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'null': 'True', 'blank': 'True'}), 'style': ('django.db.models.fields.TextField', [], {'default': '"\\n@import url(\'http://fonts.googleapis.com/css?family=Yanone+Kaffeesatz:300,400,700\');\\nbody {\\n overflow: hidden;\\n}\\n\\n#container {\\n width: 200px;\\n height: 350px;\\n}\\nul {\\n list-style: none;\\n padding: 5px;\\n margin: 5px;\\n}\\nli {\\n border-bottom: #CCC 1px solid;\\n padding-bottom: 5px;\\n padding-top: 5px;\\n}\\nli:last-child {\\n border: none;\\n}\\na {\\n text-decoration: none;\\n color: #464646;\\n font-family: \'Yanone Kaffeesatz\', sans-serif;\\n font-size: 15px;\\n}\\n"', 'blank': 'True'}), 'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '50'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, 'askbot.replyaddress': { 'Meta': {'object_name': 'ReplyAddress'}, 'address': ('django.db.models.fields.CharField', [], {'unique': 
'True', 'max_length': '25'}), 'allowed_from_email': ('django.db.models.fields.EmailField', [], {'max_length': '150'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'reply_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}), 'reply_action': ('django.db.models.fields.CharField', [], {'default': "'auto_answer_or_comment'", 'max_length': '32'}), 'response_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'edit_addresses'", 'null': 'True', 'to': "orm['askbot.Post']"}), 'used_at': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'askbot.repute': { 'Meta': {'object_name': 'Repute', 'db_table': "u'repute'"}, 'comment': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'negative': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'positive': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'question': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Post']", 'null': 'True', 'blank': 'True'}), 'reputation': ('django.db.models.fields.IntegerField', [], {'default': '1'}), 'reputation_type': ('django.db.models.fields.SmallIntegerField', [], {}), 'reputed_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'askbot.tag': { 'Meta': {'ordering': "('-used_count', 'name')", 'object_name': 'Tag', 'db_table': "u'tag'"}, 'created_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'created_tags'", 'to': "orm['auth.User']"}), 'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'deleted_at': 
('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'deleted_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'deleted_tags'", 'null': 'True', 'to': "orm['auth.User']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'status': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), 'suggested_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'suggested_tags'", 'symmetrical': 'False', 'to': "orm['auth.User']"}), 'tag_wiki': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'described_tag'", 'unique': 'True', 'null': 'True', 'to': "orm['askbot.Post']"}), 'used_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, 'askbot.thread': { 'Meta': {'object_name': 'Thread'}, 'accepted_answer': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'+'", 'null': 'True', 'to': "orm['askbot.Post']"}), 'added_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'answer_accepted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'answer_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'approved': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}), 'close_reason': ('django.db.models.fields.SmallIntegerField', [], {'null': 'True', 'blank': 'True'}), 'closed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'closed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}), 'closed_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}), 'favorited_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': 
"'unused_favorite_threads'", 'symmetrical': 'False', 'through': "orm['askbot.FavoriteQuestion']", 'to': "orm['auth.User']"}), 'favourite_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'followed_by': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'followed_threads'", 'symmetrical': 'False', 'to': "orm['auth.User']"}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'group_threads'", 'symmetrical': 'False', 'through': "orm['askbot.ThreadToGroup']", 'to': "orm['askbot.Group']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'last_activity_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_activity_by': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'unused_last_active_in_threads'", 'to': "orm['auth.User']"}), 'points': ('django.db.models.fields.IntegerField', [], {'default': '0', 'db_column': "'score'"}), 'tagnames': ('django.db.models.fields.CharField', [], {'max_length': '125'}), 'tags': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'threads'", 'symmetrical': 'False', 'to': "orm['askbot.Tag']"}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '300'}), 'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, 'askbot.threadtogroup': { 'Meta': {'unique_together': "(('thread', 'group'),)", 'object_name': 'ThreadToGroup', 'db_table': "'askbot_thread_groups'"}, 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Group']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'thread': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['askbot.Thread']"}), 'visibility': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}) }, 'askbot.vote': { 'Meta': {'unique_together': "(('user', 'voted_post'),)", 'object_name': 'Vote', 'db_table': 
"u'vote'"}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['auth.User']"}), 'vote': ('django.db.models.fields.SmallIntegerField', [], {}), 'voted_at': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'voted_post': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'votes'", 'to': "orm['askbot.Post']"}) }, 'auth.authusergroups': { 'Meta': {'unique_together': "(('group', 'user'),)", 'object_name': 'AuthUserGroups', 'db_table': "'auth_user_groups'", 'managed': 'False'}, 'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.Group']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}) }, 'auth.group': { 'Meta': {'object_name': 'Group'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, 'auth.permission': { 'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'auth.user': { 'Meta': {'object_name': 'User'}, 'about': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'avatar_type': ('django.db.models.fields.CharField', [], {'default': "'n'", 
'max_length': '1'}), 'bronze': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'consecutive_days_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'country': ('django_countries.fields.CountryField', [], {'max_length': '2', 'blank': 'True'}), 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'date_of_birth': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}), 'display_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'email_isvalid': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'email_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'email_signature': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'email_tag_filter_strategy': ('django.db.models.fields.SmallIntegerField', [], {'default': '1'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'gold': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'gravatar': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'ignored_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'interesting_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_fake': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': 
('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'location': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'new_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'questions_per_page': ('django.db.models.fields.SmallIntegerField', [], {'default': '10'}), 'real_name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'}), 'reputation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}), 'seen_response_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'show_country': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'show_marked_tags': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'silver': ('django.db.models.fields.SmallIntegerField', [], {'default': '0'}), 'status': ('django.db.models.fields.CharField', [], {'default': "'w'", 'max_length': '2'}), 'subscribed_tags': ('django.db.models.fields.TextField', [], {'blank': 'True'}), 'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}), 'website': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}) }, 'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) } } complete_apps = ['askbot'] symmetrical = True
gpl-3.0
tutumcloud/urllib3
test/test_connectionpool.py
12
8683
import unittest

from urllib3.connectionpool import (
    connection_from_url,
    HTTPConnection,
    HTTPConnectionPool,
)
from urllib3.util.timeout import Timeout
from urllib3.packages.ssl_match_hostname import CertificateError
from urllib3.exceptions import (
    ClosedPoolError,
    EmptyPoolError,
    HostChangedError,
    LocationValueError,
    MaxRetryError,
    ProtocolError,
    SSLError,
)

from socket import error as SocketError
from ssl import SSLError as BaseSSLError

try:  # Python 3
    from queue import Empty
    from http.client import HTTPException
except ImportError:  # Python 2
    from Queue import Empty
    from httplib import HTTPException


class TestConnectionPool(unittest.TestCase):
    """
    Pure unit tests for the ConnectionPool machinery: nothing in this
    suite opens a socket or performs a real network request.
    """

    def test_same_host(self):
        # (pool URL, request URL) pairs that must be considered the same
        # host, including implicit/explicit default-port equivalence.
        matching = [
            ('http://google.com/', '/'),
            ('http://google.com/', 'http://google.com/'),
            ('http://google.com/', 'http://google.com'),
            ('http://google.com/', 'http://google.com/abra/cadabra'),
            ('http://google.com:42/', 'http://google.com:42/abracadabra'),
            # Comparison using default ports
            ('http://google.com:80/', 'http://google.com/abracadabra'),
            ('http://google.com/', 'http://google.com:80/abracadabra'),
            ('https://google.com:443/', 'https://google.com/abracadabra'),
            ('https://google.com/', 'https://google.com:443/abracadabra'),
        ]
        for pool_url, target in matching:
            pool = connection_from_url(pool_url)
            self.assertTrue(pool.is_same_host(target),
                            "%s =? %s" % (pool_url, target))

        # Pairs differing in scheme, hostname or (non-)default port that
        # must never be treated as the same host — checked both ways.
        mismatching = [
            ('https://google.com/', 'http://google.com/'),
            ('http://google.com/', 'https://google.com/'),
            ('http://yahoo.com/', 'http://google.com/'),
            ('http://google.com:42', 'https://google.com/abracadabra'),
            ('http://google.com', 'https://google.net/'),
            # Comparison with default ports
            ('http://google.com:42', 'http://google.com'),
            ('https://google.com:42', 'https://google.com'),
            ('http://google.com:443', 'http://google.com'),
            ('https://google.com:80', 'https://google.com'),
            ('http://google.com:443', 'https://google.com'),
            ('https://google.com:80', 'http://google.com'),
            ('https://google.com:443', 'http://google.com'),
            ('http://google.com:80', 'https://google.com'),
        ]
        for pool_url, target in mismatching:
            pool = connection_from_url(pool_url)
            self.assertFalse(pool.is_same_host(target),
                             "%s =? %s" % (pool_url, target))
            pool = connection_from_url(target)
            self.assertFalse(pool.is_same_host(pool_url),
                             "%s =? %s" % (target, pool_url))

    def test_max_connections(self):
        pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)
        pool._get_conn(timeout=0.01)

        # The single connection is checked out, so both a direct checkout
        # and a full request must time out with EmptyPoolError.
        self.assertRaises(EmptyPoolError, pool._get_conn, timeout=0.01)
        self.assertRaises(EmptyPoolError, pool.request, 'GET', '/',
                          pool_timeout=0.01)

        self.assertEqual(pool.num_connections, 1)

    def test_pool_edgecases(self):
        pool = HTTPConnectionPool(host='localhost', maxsize=1, block=False)
        kept = pool._get_conn()
        overflow = pool._get_conn()  # fresh connection because block=False

        pool._put_conn(kept)
        pool._put_conn(overflow)  # beyond maxsize, so it is discarded

        self.assertEqual(kept, pool._get_conn())
        self.assertNotEqual(overflow, pool._get_conn())

        self.assertEqual(pool.num_connections, 3)

    def test_exception_str(self):
        err = EmptyPoolError(HTTPConnectionPool(host='localhost'), "Test.")
        self.assertEqual(
            str(err),
            "HTTPConnectionPool(host='localhost', port=None): Test.")

    def test_retry_exception_str(self):
        self.assertEqual(
            str(MaxRetryError(HTTPConnectionPool(host='localhost'),
                              "Test.", None)),
            "HTTPConnectionPool(host='localhost', port=None): "
            "Max retries exceeded with url: Test. (Caused by None)")

        err = SocketError("Test")
        # %r is used on the cause because socket.error has been an alias of
        # OSError since Python 3.3 and stringifies accordingly.
        self.assertEqual(
            str(MaxRetryError(HTTPConnectionPool(host='localhost'),
                              "Test.", err)),
            "HTTPConnectionPool(host='localhost', port=None): "
            "Max retries exceeded with url: Test. "
            "(Caused by %r)" % err)

    def test_pool_size(self):
        POOL_SIZE = 1
        pool = HTTPConnectionPool(host='localhost', maxsize=POOL_SIZE,
                                  block=True)

        def _raise(ex):
            raise ex()

        def _test(exception, expect):
            pool._make_request = lambda *args, **kwargs: _raise(exception)
            self.assertRaises(expect, pool.request, 'GET', '/')
            # Whatever was raised, the connection goes back into the pool.
            self.assertEqual(pool.pool.qsize(), POOL_SIZE)

        _test(Empty, EmptyPoolError)
        _test(BaseSSLError, SSLError)
        _test(CertificateError, SSLError)

        # HTTPException triggers a retry; the retry fails as well, so the
        # pool must raise MaxRetryError rather than EmptyPoolError.
        # See: https://github.com/shazow/urllib3/issues/76
        pool._make_request = lambda *args, **kwargs: _raise(HTTPException)
        self.assertRaises(MaxRetryError, pool.request, 'GET', '/',
                          retries=1, pool_timeout=0.01)
        self.assertEqual(pool.pool.qsize(), POOL_SIZE)

    def test_assert_same_host(self):
        pool = connection_from_url('http://google.com:80')
        self.assertRaises(HostChangedError, pool.request, 'GET',
                          'http://yahoo.com:80', assert_same_host=True)

    def test_pool_close(self):
        pool = connection_from_url('http://google.com:80')

        # Populate with some connections
        conn1 = pool._get_conn()
        conn2 = pool._get_conn()
        conn3 = pool._get_conn()
        pool._put_conn(conn1)
        pool._put_conn(conn2)

        old_pool_queue = pool.pool
        pool.close()
        self.assertEqual(pool.pool, None)

        self.assertRaises(ClosedPoolError, pool._get_conn)
        pool._put_conn(conn3)
        self.assertRaises(ClosedPoolError, pool._get_conn)
        self.assertRaises(Empty, old_pool_queue.get, block=False)

    def test_pool_timeouts(self):
        pool = HTTPConnectionPool(host='localhost')
        conn = pool._new_conn()
        self.assertEqual(conn.__class__, HTTPConnection)
        self.assertEqual(pool.timeout.__class__, Timeout)
        self.assertEqual(pool.timeout._read, Timeout.DEFAULT_TIMEOUT)
        self.assertEqual(pool.timeout._connect, Timeout.DEFAULT_TIMEOUT)
        self.assertEqual(pool.timeout.total, None)

        pool = HTTPConnectionPool(host='localhost', timeout=3)
        self.assertEqual(pool.timeout._read, 3)
        self.assertEqual(pool.timeout._connect, 3)
        self.assertEqual(pool.timeout.total, None)

    def test_no_host(self):
        self.assertRaises(LocationValueError, HTTPConnectionPool, None)

    def test_contextmanager(self):
        with connection_from_url('http://google.com:80') as pool:
            # Populate with some connections
            conn1 = pool._get_conn()
            conn2 = pool._get_conn()
            conn3 = pool._get_conn()
            pool._put_conn(conn1)
            pool._put_conn(conn2)
            old_pool_queue = pool.pool

        # Leaving the with-block closes the pool.
        self.assertEqual(pool.pool, None)
        self.assertRaises(ClosedPoolError, pool._get_conn)
        pool._put_conn(conn3)
        self.assertRaises(ClosedPoolError, pool._get_conn)
        self.assertRaises(Empty, old_pool_queue.get, block=False)

    def test_absolute_url(self):
        pool = connection_from_url('http://google.com:80')
        self.assertEqual('http://google.com:80/path?query=foo',
                         pool._absolute_url('path?query=foo'))

    def test_ca_certs_default_cert_required(self):
        with connection_from_url('https://google.com:80',
                                 ca_certs='/etc/ssl/certs/custom.pem') as pool:
            conn = pool._get_conn()
            self.assertEqual(conn.cert_reqs, 'CERT_REQUIRED')


if __name__ == '__main__':
    unittest.main()
mit
kingvuplus/boom
lib/python/Components/Converter/RdsInfo.py
2
1971
# Embedded file name: /usr/lib/enigma2/python/Components/Converter/RdsInfo.py from enigma import iRdsDecoder, iPlayableService from Components.Converter.Converter import Converter from Components.Element import cached class RdsInfo(Converter, object): RASS_INTERACTIVE_AVAILABLE = 0 RTP_TEXT_CHANGED = 1 RADIO_TEXT_CHANGED = 2 def __init__(self, type): Converter.__init__(self, type) self.type, self.interesting_events = {'RadioText': (self.RADIO_TEXT_CHANGED, (iPlayableService.evUpdatedRadioText,)), 'RtpText': (self.RTP_TEXT_CHANGED, (iPlayableService.evUpdatedRtpText,)), 'RasInteractiveAvailable': (self.RASS_INTERACTIVE_AVAILABLE, (iPlayableService.evUpdatedRassInteractivePicMask,))}[type] @cached def getText(self): decoder = self.source.decoder text = '' if decoder: if self.type == self.RADIO_TEXT_CHANGED: text = decoder.getText(iRdsDecoder.RadioText) elif self.type == self.RTP_TEXT_CHANGED: text = decoder.getText(iRdsDecoder.RtpText) else: print 'unknown RdsInfo Converter type', self.type return text text = property(getText) @cached def getBoolean(self): decoder = self.source.decoder if self.type == self.RASS_INTERACTIVE_AVAILABLE: mask = decoder and decoder.getRassInteractiveMask() return mask and mask[0] & 1 and True or False if self.type == self.RADIO_TEXT_CHANGED: return len(decoder.getText(iRdsDecoder.RadioText)) and True or False if self.type == self.RTP_TEXT_CHANGED: return len(decoder.getText(iRdsDecoder.RtpText)) and True or False boolean = property(getBoolean) def changed(self, what): if what[0] != self.CHANGED_SPECIFIC or what[1] in self.interesting_events: Converter.changed(self, what)
gpl-2.0
doublsky/MLProfile
prof_blas.py
1
9103
"""
Profile all bench_list.

Drives a set of BLAS benchmarks through one of several measurement tools
(/usr/bin/time, oprofile's operf, or an Intel Pin trace tool) and collects
the results into per-benchmark CSV files.
"""
import subprocess as sp
import pandas as pd
import argparse
from util import *
import socket

# opreport invocation shared by all perf runs: per-symbol report with counts.
rpt_cmd = "opreport -l -n".split()


def trim_func_param(infile, outfile):
    # Strip parameter lists from C++ symbol names: keep only the part of
    # each line before the first '(' so kernels match by bare name.
    with open(infile, "r") as inf, open(outfile, "w") as outf:
        for line in inf:
            remainder = line.split("(")[0]
            outf.write(remainder + "\n")


def process_rpt(rpt, results_df, idx):
    """Parse one opreport output file into row ``idx`` of ``results_df``.

    Depends on module-level ``args`` (for --kexclude) and ``kernel_list``.
    Returns the next free row index.
    """
    # Read the whitespace-separated report into a dataframe.
    rpt_df = pd.read_table(rpt, delim_whitespace=True, header=None,
                           index_col=False,
                           names=["samples", "percent", "image_name", "symbol_name"])

    # Select kernels / exclude kernels.
    if args.kexclude:
        for kernel in kernel_list:
            rpt_df = rpt_df[~(rpt_df["symbol_name"].str.contains(kernel))]

    # Copy the surviving kernels' sample percentages into the results row.
    for _, row in rpt_df.iterrows():
        if args.kexclude:
            results_df.set_value(idx, row["symbol_name"], row["percent"])
        else:
            if row["symbol_name"] in kernel_list:
                results_df.set_value(idx, row["symbol_name"], row["percent"])

    # Move to next record.
    return idx + 1


def test_bench(args):
    """Dry-run every benchmark configuration (3 s timeout each)."""
    with open(args.blist, "r") as bench_list:
        for bench in bench_list:
            if bench.startswith("#"):  # allow commenting in benchmark list
                continue
            # BUG FIX: ``benchfile`` was referenced before assignment here
            # (every other subcommand derives it from ``bench`` first),
            # which raised NameError on the first benchmark.
            benchfile = "benchmark/" + bench.rstrip()
            test_cmd = ["timeout", "-k", "3", "3", "python", benchfile]
            config_file = get_config_file(benchfile, args.tool)
            # NOTE(review): opening args.output with "w" per benchmark
            # truncates earlier results; kept as-is to preserve behavior.
            with open(config_file, "r") as config_list, open(args.output, "w") as outfile:
                for config in config_list:
                    maybe_create_dataset(config)
                    sp.call(test_cmd + config.split(),
                            stdout=outfile, stderr=outfile)


def perf_bench(args):
    """Profile each benchmark configuration with operf and collect opreport data."""
    with open(args.blist, "r") as bench_list:
        for bench in bench_list:
            if bench.startswith("#"):  # allow commenting in benchmark list
                continue
            benchfile = "benchmark/" + bench.rstrip()
            perf_cmd = ["operf", "--event=CPU_CLK_UNHALTED:3000000",
                        "python", benchfile]
            results_df = pd.DataFrame()
            idx = 0
            with open(get_config_file(benchfile, "perf"), "r") as config_list:
                for config in config_list:
                    maybe_create_dataset(config)
                    try:
                        sp.check_call(perf_cmd + config.split())
                        sp.check_call(rpt_cmd + ["-o", "/tmp/blasrpt.tmp"])
                        trim_func_param("/tmp/blasrpt.tmp",
                                        "/tmp/blasrpt_trimmed.tmp")
                        idx = process_rpt("/tmp/blasrpt_trimmed.tmp",
                                          results_df, idx)
                    finally:
                        # Export whatever was collected so far, even if a
                        # profiling step failed mid-way.
                        results_file = benchfile.replace("bench_", "perf_")
                        results_file = results_file.replace(".py", ".csv")
                        results_df.to_csv(results_file, index=False)


def time_bench(args):
    """Run each benchmark configuration under /usr/bin/time (appending output)."""
    with open(args.blist, "r") as bench_list:
        for bench in bench_list:
            if bench.startswith("#"):  # allow commenting in benchmark list
                continue
            benchfile = "benchmark/" + bench.rstrip()
            time_output = benchfile.replace(".py", ".time")
            cmd = ["/usr/bin/time", "-a", "-o", time_output, "python"] + [benchfile]
            # For each configuration.
            with open(get_config_file(benchfile, "time"), "r") as config_file:
                for config in config_file:
                    maybe_create_dataset(config)
                    sp.check_call(cmd + config.split())


def trace2csv(csvfile, count, comm_mat):
    """Append one use-case's normalized communication matrix to ``csvfile``."""
    total = 0
    for key, value in comm_mat.iteritems():
        total += value
    with open(csvfile, "a") as results:
        for key, value in comm_mat.iteritems():
            results.write("{},{},{},{}\n".format(count, key[0], key[1],
                                                 float(value) / total))


def accumulate_comm_mat(partial_comm_mat, comm_mat):
    """Fold a normalized partial communication matrix into ``comm_mat``."""
    total = 0
    for key, value in partial_comm_mat.iteritems():
        total += value
    for key, value in partial_comm_mat.iteritems():
        if key in comm_mat:
            comm_mat[key] += float(partial_comm_mat[key]) / total
        else:
            comm_mat[key] = float(partial_comm_mat[key]) / total


def pin_bench(args):
    """Trace memory references of each benchmark configuration with Pin."""
    # Force numpy to run in single thread.
    os.environ["OMP_NUM_THREADS"] = "1"

    # Get pin root.
    pin_home = os.environ["PIN_ROOT"]
    pin_cmd = [pin_home + "/pin", "-t", "pintools/obj-intel64/procatrace.so"]

    if not os.path.exists(args.outdir):
        os.makedirs(args.outdir)

    with open(args.blist, "r") as bench_list:
        for bench in bench_list:
            if bench.startswith("#"):  # allow commenting in benchmark list
                continue
            bench = bench.rstrip()
            benchfile = "benchmark/" + bench
            config_file = get_config_file(benchfile, "pin")
            count = 0
            outfile = benchfile.replace(".py", "_pin.csv")
            if os.path.exists(outfile):
                os.remove(outfile)
            with open(outfile, "w") as f:
                f.write("use case,producer,consumer,comm weight\n")
            with open(config_file, 'r') as config_list:
                for configs in config_list:
                    tracefile = bench.replace(".py", "_config" + str(count) + ".trace")
                    tracefile = os.path.join(args.outdir, tracefile)
                    # Skip profiling if the trace already exists.
                    if not os.path.exists(tracefile):
                        # Create dataset if not exist.
                        maybe_create_dataset(configs)
                        full_cmd = list(pin_cmd)
                        full_cmd += ["-output", tracefile, "--", "python", benchfile]
                        full_cmd += configs.split()
                        try:
                            sp.check_call(full_cmd)
                        except:
                            # Remove the incomplete trace before re-raising
                            # (intentionally broad: also covers Ctrl-C).
                            os.remove(tracefile)
                            raise
                    with open(tracefile, "r") as trace:
                        comm_mat = parse_trace(trace)
                        trace2csv(outfile, count, comm_mat)
                    # Remove tracefile if it is too large (> 10 GB).
                    if os.path.getsize(tracefile) > 1e10:
                        os.remove(tracefile)
                    count += 1


if __name__ == "__main__":
    # Top level parser.
    parser = argparse.ArgumentParser(description="Run benchmarks, collect data")
    parser.add_argument("--blist", default="bench_list.txt",
                        help="path to benchmark list")
    subparsers = parser.add_subparsers(help="available sub-command")

    # Parser for time.
    parser_time = subparsers.add_parser("time", help="time each benchmark")
    parser_time.set_defaults(func=time_bench)

    # Parser for operf.
    parser_perf = subparsers.add_parser("perf", help="profile using operf")
    parser_perf.add_argument("--klist", default="kernel_list.txt",
                             help="path to kernel list")
    parser_perf.add_argument("--kexclude", action="store_true",
                             help="exclude kernels in klist")
    parser_perf.add_argument("--test", action="store_true",
                             help="Test benchmarks, do not profile.")
    parser_perf.set_defaults(func=perf_bench)

    # Parser for pin.
    parser_pin = subparsers.add_parser("pin", help="run Pin, generate memory reference trace")
    parser_pin.add_argument("--klist", default="kernel_list.txt",
                            help="path to kernel list file")
    parser_pin.add_argument("--outdir", default="pin_out",
                            help="path to output directory")
    parser_pin.set_defaults(func=pin_bench)

    # Parser for test.
    parser_test = subparsers.add_parser("test", help="test validity of benchmark configurations")
    parser_test.add_argument("--tool", default="perf",
                             choices=["time", "perf", "pin"],
                             help="for which tool")
    parser_test.add_argument("--output", default="test.log",
                             help="path to test results file")
    parser_test.set_defaults(func=test_bench)

    # Parse command-line args.
    args = parser.parse_args()

    # BUG FIX: only the "perf" and "pin" subparsers define --klist, so the
    # "time" and "test" subcommands used to crash here with AttributeError.
    # The kernel list is only consumed by process_rpt (perf path anyway).
    if hasattr(args, "klist"):
        with open(args.klist, "r") as klist_file:
            kernel_list = klist_file.readlines()
        kernel_list = map(lambda x: x.rstrip(), kernel_list)

    args.func(args)
mit
matips/iosr-2015
sahara/tests/unit/utils/test_cluster_progress_ops.py
5
6358
# Copyright (c) 2014 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import uuid

import mock

from sahara import conductor
from sahara import context
from sahara.tests.unit import base
from sahara.tests.unit.conductor import test_api
from sahara.utils import cluster_progress_ops as cpo


class FakeInstance(object):
    def __init__(self):
        # Random identifiers — the tests only need uniqueness.
        self.id = uuid.uuid4()
        self.name = uuid.uuid4()
        self.cluster_id = uuid.uuid4()


class ClusterProgressOpsTest(base.SaharaWithDbTestCase):
    def setUp(self):
        super(ClusterProgressOpsTest, self).setUp()
        self.api = conductor.API

    def _make_sample(self):
        # Create one sample cluster in the test DB and return it with its ctx.
        ctx = context.ctx()
        cluster = self.api.cluster_create(ctx, test_api.SAMPLE_CLUSTER)
        return ctx, cluster

    def test_update_provisioning_steps(self):
        ctx, cluster = self._make_sample()

        step_id1 = self.api.cluster_provision_step_add(ctx, cluster.id, {
            "step_name": "some_name1",
            "total": 2,
        })
        self.api.cluster_event_add(ctx, step_id1, {
            "event_info": "INFO",
            "successful": True
        })
        self.api.cluster_provision_progress_update(ctx, cluster.id)

        # Only one of the two expected events happened, so the step
        # must not be marked successful (or failed) yet.
        result_cluster = self.api.cluster_get(ctx, cluster.id)
        result_step = result_cluster.provision_progress[0]
        self.assertIsNone(result_step.successful)

        # After the second successful event the step resolves to True.
        self.api.cluster_event_add(ctx, step_id1, {
            "event_info": "INFO",
            "successful": True
        })
        self.api.cluster_provision_progress_update(ctx, cluster.id)

        result_cluster = self.api.cluster_get(ctx, cluster.id)
        result_step = result_cluster.provision_progress[0]
        self.assertEqual(True, result_step.successful)

        # A step containing a failed event resolves to False.
        step_id2 = self.api.cluster_provision_step_add(ctx, cluster.id, {
            "step_name": "some_name1",
            "total": 2,
        })
        self.api.cluster_event_add(ctx, step_id2, {
            "event_info": "INFO",
            "successful": False,
        })
        self.api.cluster_provision_progress_update(ctx, cluster.id)

        result_cluster = self.api.cluster_get(ctx, cluster.id)
        for step in result_cluster.provision_progress:
            if step.id == step_id2:
                self.assertEqual(False, step.successful)

        # Adding a provision step after a failed one must still work and
        # become the current step.
        step_id3 = cpo.add_provisioning_step(cluster.id, "some_name", 2)
        self.assertEqual(
            step_id3, cpo.get_current_provisioning_step(cluster.id))

    def test_get_cluster_events(self):
        ctx, cluster = self._make_sample()

        step_id1 = self.api.cluster_provision_step_add(ctx, cluster.id, {
            'step_name': "some_name1",
            'total': 3,
        })
        step_id2 = self.api.cluster_provision_step_add(ctx, cluster.id, {
            'step_name': "some_name",
            'total': 2,
        })
        self.api.cluster_event_add(ctx, step_id1, {
            "event_info": "INFO",
            'successful': True,
        })
        self.api.cluster_event_add(ctx, step_id2, {
            "event_info": "INFO",
            'successful': True,
        })

        # Each step received exactly one event.
        cluster = self.api.cluster_get(context.ctx(), cluster.id, True)
        for step in cluster.provision_progress:
            self.assertEqual(1, len(step.events))

    def _make_checks(self, instance_info, sleep=True):
        ctx = context.ctx()
        if sleep:
            context.sleep(2)
        current_instance_info = ctx.current_instance_info
        self.assertEqual(instance_info, current_instance_info)

    def test_instance_context_manager(self):
        fake_instances = [FakeInstance() for _ in range(50)]

        # InstanceContextManager must work sequentially...
        for instance in fake_instances:
            info = context.InstanceInfo(
                None, instance.id, instance.name, None)
            with context.InstanceInfoManager(info):
                self._make_checks(info, sleep=False)

        # ...and in parallel, each thread seeing its own instance info.
        with context.ThreadGroup() as tg:
            for instance in fake_instances:
                info = context.InstanceInfo(
                    None, instance.id, instance.name, None)
                with context.InstanceInfoManager(info):
                    tg.spawn("make_checks", self._make_checks, info)

    @cpo.event_wrapper(True)
    def _do_nothing(self):
        pass

    @mock.patch('sahara.utils.cluster_progress_ops._find_in_args')
    @mock.patch('sahara.utils.general.check_cluster_exists')
    def test_event_wrapper(self, p_check_cluster_exists, p_find):
        # With event logging disabled the wrapper must be a no-op.
        self.override_config("disable_event_log", True)
        self._do_nothing()
        self.assertEqual(0, p_find.call_count)

        # With it enabled, the wrapper inspects the args and the cluster.
        self.override_config("disable_event_log", False)
        p_find.return_value = FakeInstance()
        p_check_cluster_exists.return_value = False
        self._do_nothing()
        self.assertEqual(1, p_find.call_count)
        self.assertEqual(1, p_check_cluster_exists.call_count)

    def test_cluster_get_with_events(self):
        ctx, cluster = self._make_sample()

        step_id = cpo.add_provisioning_step(cluster.id, "Some name", 3)
        self.api.cluster_event_add(ctx, step_id, {
            'event_info': "INFO",
            'successful': True})

        cluster = self.api.cluster_get(ctx, cluster.id, True)
        steps = cluster.provision_progress
        step = steps[0]
        self.assertEqual("Some name", step.step_name)
        self.assertEqual(3, step.total)
        self.assertEqual("INFO", step.events[0].event_info)
apache-2.0